@keymanapp/kmc-model 17.0.85-alpha
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.nyc_output/coverage-10524-1681239236645-0.json +1 -0
- package/Makefile +38 -0
- package/build/cjs-src/lexical-model-compiler.cjs +152688 -0
- package/build/src/build-trie.d.ts +40 -0
- package/build/src/build-trie.d.ts.map +1 -0
- package/build/src/build-trie.js +362 -0
- package/build/src/build-trie.js.map +1 -0
- package/build/src/join-word-breaker-decorator.d.ts +10 -0
- package/build/src/join-word-breaker-decorator.d.ts.map +1 -0
- package/build/src/join-word-breaker-decorator.js +121 -0
- package/build/src/join-word-breaker-decorator.js.map +1 -0
- package/build/src/lexical-model-compiler.d.ts +19 -0
- package/build/src/lexical-model-compiler.d.ts.map +1 -0
- package/build/src/lexical-model-compiler.js +155 -0
- package/build/src/lexical-model-compiler.js.map +1 -0
- package/build/src/lexical-model.d.ts +135 -0
- package/build/src/lexical-model.d.ts.map +1 -0
- package/build/src/lexical-model.js +6 -0
- package/build/src/lexical-model.js.map +1 -0
- package/build/src/main.d.ts +15 -0
- package/build/src/main.d.ts.map +1 -0
- package/build/src/main.js +46 -0
- package/build/src/main.js.map +1 -0
- package/build/src/model-compiler-errors.d.ts +77 -0
- package/build/src/model-compiler-errors.d.ts.map +1 -0
- package/build/src/model-compiler-errors.js +156 -0
- package/build/src/model-compiler-errors.js.map +1 -0
- package/build/src/model-defaults.d.ts +56 -0
- package/build/src/model-defaults.d.ts.map +1 -0
- package/build/src/model-defaults.js +106 -0
- package/build/src/model-defaults.js.map +1 -0
- package/build/src/model-definitions.d.ts +71 -0
- package/build/src/model-definitions.d.ts.map +1 -0
- package/build/src/model-definitions.js +189 -0
- package/build/src/model-definitions.js.map +1 -0
- package/build/src/script-overrides-decorator.d.ts +4 -0
- package/build/src/script-overrides-decorator.d.ts.map +1 -0
- package/build/src/script-overrides-decorator.js +63 -0
- package/build/src/script-overrides-decorator.js.map +1 -0
- package/build/test/helpers/index.d.ts +69 -0
- package/build/test/helpers/index.d.ts.map +1 -0
- package/build/test/helpers/index.js +160 -0
- package/build/test/helpers/index.js.map +1 -0
- package/build/test/test-compile-model-with-pseudoclosure.d.ts +2 -0
- package/build/test/test-compile-model-with-pseudoclosure.d.ts.map +1 -0
- package/build/test/test-compile-model-with-pseudoclosure.js +200 -0
- package/build/test/test-compile-model-with-pseudoclosure.js.map +1 -0
- package/build/test/test-compile-model.d.ts +2 -0
- package/build/test/test-compile-model.d.ts.map +1 -0
- package/build/test/test-compile-model.js +30 -0
- package/build/test/test-compile-model.js.map +1 -0
- package/build/test/test-compile-trie.d.ts +2 -0
- package/build/test/test-compile-trie.d.ts.map +1 -0
- package/build/test/test-compile-trie.js +125 -0
- package/build/test/test-compile-trie.js.map +1 -0
- package/build/test/test-default-apply-case.d.ts +2 -0
- package/build/test/test-default-apply-case.d.ts.map +1 -0
- package/build/test/test-default-apply-case.js +105 -0
- package/build/test/test-default-apply-case.js.map +1 -0
- package/build/test/test-default-search-term-to-key.d.ts +2 -0
- package/build/test/test-default-search-term-to-key.d.ts.map +1 -0
- package/build/test/test-default-search-term-to-key.js +148 -0
- package/build/test/test-default-search-term-to-key.js.map +1 -0
- package/build/test/test-error-logger.d.ts +2 -0
- package/build/test/test-error-logger.d.ts.map +1 -0
- package/build/test/test-error-logger.js +26 -0
- package/build/test/test-error-logger.js.map +1 -0
- package/build/test/test-join-word-breaker.d.ts +2 -0
- package/build/test/test-join-word-breaker.d.ts.map +1 -0
- package/build/test/test-join-word-breaker.js +84 -0
- package/build/test/test-join-word-breaker.js.map +1 -0
- package/build/test/test-model-definitions.d.ts +2 -0
- package/build/test/test-model-definitions.d.ts.map +1 -0
- package/build/test/test-model-definitions.js +165 -0
- package/build/test/test-model-definitions.js.map +1 -0
- package/build/test/test-override-script-defaults.d.ts +2 -0
- package/build/test/test-override-script-defaults.d.ts.map +1 -0
- package/build/test/test-override-script-defaults.js +28 -0
- package/build/test/test-override-script-defaults.js.map +1 -0
- package/build/test/test-parse-wordlist.d.ts +2 -0
- package/build/test/test-parse-wordlist.d.ts.map +1 -0
- package/build/test/test-parse-wordlist.js +110 -0
- package/build/test/test-parse-wordlist.js.map +1 -0
- package/build/test/test-punctuation.d.ts +2 -0
- package/build/test/test-punctuation.d.ts.map +1 -0
- package/build/test/test-punctuation.js +31 -0
- package/build/test/test-punctuation.js.map +1 -0
- package/build/test/tsconfig.tsbuildinfo +1 -0
- package/build/test/wordbreakers/data.d.ts +35 -0
- package/build/test/wordbreakers/data.d.ts.map +1 -0
- package/build/test/wordbreakers/data.js +1778 -0
- package/build/test/wordbreakers/data.js.map +1 -0
- package/build/test/wordbreakers/default-wordbreaker-esm.d.ts +10 -0
- package/build/test/wordbreakers/default-wordbreaker-esm.d.ts.map +1 -0
- package/build/test/wordbreakers/default-wordbreaker-esm.js +354 -0
- package/build/test/wordbreakers/default-wordbreaker-esm.js.map +1 -0
- package/build/tsconfig.tsbuildinfo +1 -0
- package/build.sh +73 -0
- package/coverage/lcov-report/base.css +224 -0
- package/coverage/lcov-report/block-navigation.js +87 -0
- package/coverage/lcov-report/favicon.png +0 -0
- package/coverage/lcov-report/index.html +161 -0
- package/coverage/lcov-report/prettify.css +1 -0
- package/coverage/lcov-report/prettify.js +2 -0
- package/coverage/lcov-report/sort-arrow-sprite.png +0 -0
- package/coverage/lcov-report/sorter.js +196 -0
- package/coverage/lcov-report/src/build-trie.ts.html +1618 -0
- package/coverage/lcov-report/src/index.html +221 -0
- package/coverage/lcov-report/src/join-word-breaker-decorator.ts.html +487 -0
- package/coverage/lcov-report/src/lexical-model-compiler.ts.html +622 -0
- package/coverage/lcov-report/src/main.ts.html +271 -0
- package/coverage/lcov-report/src/model-compiler-errors.ts.html +691 -0
- package/coverage/lcov-report/src/model-defaults.ts.html +415 -0
- package/coverage/lcov-report/src/model-definitions.ts.html +748 -0
- package/coverage/lcov-report/src/script-overrides-decorator.ts.html +310 -0
- package/coverage/lcov-report/test/helpers/index.html +116 -0
- package/coverage/lcov-report/test/helpers/index.ts.html +646 -0
- package/coverage/lcov-report/test/index.html +266 -0
- package/coverage/lcov-report/test/test-compile-model-with-pseudoclosure.ts.html +802 -0
- package/coverage/lcov-report/test/test-compile-model.ts.html +187 -0
- package/coverage/lcov-report/test/test-compile-trie.ts.html +541 -0
- package/coverage/lcov-report/test/test-default-apply-case.ts.html +466 -0
- package/coverage/lcov-report/test/test-default-search-term-to-key.ts.html +628 -0
- package/coverage/lcov-report/test/test-error-logger.ts.html +196 -0
- package/coverage/lcov-report/test/test-join-word-breaker.ts.html +376 -0
- package/coverage/lcov-report/test/test-model-definitions.ts.html +676 -0
- package/coverage/lcov-report/test/test-override-script-defaults.ts.html +184 -0
- package/coverage/lcov-report/test/test-parse-wordlist.ts.html +466 -0
- package/coverage/lcov-report/test/test-punctuation.ts.html +190 -0
- package/coverage/lcov-report/test/wordbreakers/data.ts.html +5413 -0
- package/coverage/lcov-report/test/wordbreakers/default-wordbreaker-esm.ts.html +1234 -0
- package/coverage/lcov-report/test/wordbreakers/index.html +131 -0
- package/coverage/lcov.info +5969 -0
- package/package.json +61 -0
- package/src/build-trie.ts +511 -0
- package/src/join-word-breaker-decorator.ts +134 -0
- package/src/lexical-model-compiler.ts +179 -0
- package/src/lexical-model.ts +150 -0
- package/src/main.ts +62 -0
- package/src/model-compiler-errors.ts +203 -0
- package/src/model-defaults.ts +111 -0
- package/src/model-definitions.ts +222 -0
- package/src/script-overrides-decorator.ts +75 -0
- package/test/README.md +15 -0
- package/test/fixtures/example.qaa.joinwordbreaker/example.qaa.joinwordbreaker.model.ts +10 -0
- package/test/fixtures/example.qaa.joinwordbreaker/wordlist.tsv +3 -0
- package/test/fixtures/example.qaa.scriptusesspaces/example.qaa.scriptusesspaces.model.ts +10 -0
- package/test/fixtures/example.qaa.scriptusesspaces/wordlist.tsv +8 -0
- package/test/fixtures/example.qaa.sencoten/example.qaa.sencoten.model.kmp.json +45 -0
- package/test/fixtures/example.qaa.sencoten/example.qaa.sencoten.model.kps +35 -0
- package/test/fixtures/example.qaa.sencoten/example.qaa.sencoten.model.ts +6 -0
- package/test/fixtures/example.qaa.sencoten/wordlist.tsv +10 -0
- package/test/fixtures/example.qaa.smp/example.qaa.smp.model.ts +6 -0
- package/test/fixtures/example.qaa.smp/wordlist.tsv +5 -0
- package/test/fixtures/example.qaa.trivial/example.qaa.trivial.model.ts +5 -0
- package/test/fixtures/example.qaa.trivial/wordlist.tsv +3 -0
- package/test/fixtures/example.qaa.utf16be/example.qaa.utf16be.model.ts +5 -0
- package/test/fixtures/example.qaa.utf16be/wordlist.txt +0 -0
- package/test/fixtures/example.qaa.utf16le/example.qaa.utf16le.model.ts +5 -0
- package/test/fixtures/example.qaa.utf16le/wordlist.txt +0 -0
- package/test/fixtures/example.qaa.wordbreaker/example.qaa.wordbreaker.model.ts +9 -0
- package/test/fixtures/example.qaa.wordbreaker/wordlist.tsv +3 -0
- package/test/helpers/index.ts +187 -0
- package/test/test-compile-model-with-pseudoclosure.ts +239 -0
- package/test/test-compile-model.ts +34 -0
- package/test/test-compile-trie.ts +152 -0
- package/test/test-default-apply-case.ts +128 -0
- package/test/test-default-search-term-to-key.ts +181 -0
- package/test/test-error-logger.ts +38 -0
- package/test/test-join-word-breaker.ts +97 -0
- package/test/test-model-definitions.ts +198 -0
- package/test/test-override-script-defaults.ts +33 -0
- package/test/test-parse-wordlist.ts +127 -0
- package/test/test-punctuation.ts +35 -0
- package/test/tsconfig.json +22 -0
- package/test/wordbreakers/README.md +3 -0
- package/test/wordbreakers/data.ts +1776 -0
- package/test/wordbreakers/default-wordbreaker-esm.ts +383 -0
- package/tools/create-override-script-regexp.ts +145 -0
- package/tsconfig.json +17 -0

--- /dev/null
+++ package/build/src/build-trie.d.ts
@@ -0,0 +1,40 @@
+/**
+ * A word list is (conceptually) an array of pairs: the concrete word form itself + a
+ * non-negative count.
+ *
+ * Since each word should only appear once within the list, we represent it with
+ * an associative array pattern keyed by the wordform.
+ */
+export type WordList = {
+    [wordform: string]: number;
+};
+/**
+ * Returns a data structure that can be loaded by the TrieModel.
+ *
+ * It implements a **weighted** trie, whose indices (paths down the trie) are
+ * generated by a search key, and not concrete wordforms themselves.
+ *
+ * @param sourceFiles an array of source files that will be read to generate the trie.
+ */
+export declare function createTrieDataStructure(filenames: string[], searchTermToKey?: (wf: string) => string): string;
+/**
+ * Parses a word list from a file, merging duplicate entries.
+ *
+ * The word list may be encoded in:
+ *
+ * - UTF-8, with or without BOM [exported by most software]
+ * - UTF-16, little endian, with BOM [exported by Microsoft Excel]
+ *
+ * @param wordlist word list to merge entries into (may have existing entries)
+ * @param filename filename of the word list
+ */
+export declare function parseWordListFromFilename(wordlist: WordList, filename: string): void;
+/**
+ * Parses a word list from a string. The string should have multiple lines
+ * with LF or CRLF line terminators.
+ *
+ * @param wordlist word list to merge entries into (may have existing entries)
+ * @param filename filename of the word list
+ */
+export declare function parseWordListFromContents(wordlist: WordList, contents: string): void;
+//# sourceMappingURL=build-trie.d.ts.map
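
A minimal sketch of how these declarations fit together, assuming a caller placed alongside build-trie.js inside the package; the wordlist text, file name, and key function below are invented for illustration and are not part of the package:

```ts
import { WordList, parseWordListFromContents, createTrieDataStructure } from "./build-trie.js";

// Merge a small in-memory word list (wordform<TAB>count per line).
const words: WordList = {};
parseWordListFromContents(words, "hello\t5\nworld\t3\n");
// words is now { hello: 5, world: 3 }

// Serialize a weighted trie from wordlist files on disk. The key function is
// required at runtime; lower-casing here is only a stand-in for the model's
// real searchTermToKey.
const trieJson: string = createTrieDataStructure(
  ["wordlist.tsv"],            // hypothetical wordlist file read from disk
  (wf) => wf.toLowerCase()
);
```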

--- /dev/null
+++ package/build/src/build-trie.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"build-trie.d.ts","sourceRoot":"","sources":["../../src/build-trie.ts"],"names":[],"mappings":"AAMA;;;;;;GAMG;AACH,MAAM,MAAM,QAAQ,GAAG;IAAC,CAAC,QAAQ,EAAE,MAAM,GAAG,MAAM,CAAA;CAAC,CAAC;AAEpD;;;;;;;GAOG;AACH,wBAAgB,uBAAuB,CAAC,SAAS,EAAE,MAAM,EAAE,EAAE,eAAe,CAAC,EAAE,CAAC,EAAE,EAAE,MAAM,KAAK,MAAM,GAAG,MAAM,CAU7G;AAED;;;;;;;;;;GAUG;AACH,wBAAgB,yBAAyB,CAAC,QAAQ,EAAE,QAAQ,EAAE,QAAQ,EAAE,MAAM,GAAG,IAAI,CAEpF;AAED;;;;;;GAMG;AACH,wBAAgB,yBAAyB,CAAC,QAAQ,EAAE,QAAQ,EAAE,QAAQ,EAAE,MAAM,GAAG,IAAI,CAEpF"}

--- /dev/null
+++ package/build/src/build-trie.js
@@ -0,0 +1,362 @@
+import { readFileSync } from "fs";
+import { log, KeymanCompilerError } from "./model-compiler-errors.js";
+// Supports LF or CRLF line terminators.
+const NEWLINE_SEPARATOR = /\u000d?\u000a/;
+/**
+ * Returns a data structure that can be loaded by the TrieModel.
+ *
+ * It implements a **weighted** trie, whose indices (paths down the trie) are
+ * generated by a search key, and not concrete wordforms themselves.
+ *
+ * @param sourceFiles an array of source files that will be read to generate the trie.
+ */
+export function createTrieDataStructure(filenames, searchTermToKey) {
+    if (typeof searchTermToKey !== "function") {
+        throw new TypeError("searchTermToKey must be explicitly specified");
+    }
+    // Make one big word list out of all of the filenames provided.
+    let wordlist = {};
+    filenames.forEach(filename => parseWordListFromFilename(wordlist, filename));
+    let trie = Trie.buildTrie(wordlist, searchTermToKey);
+    return JSON.stringify(trie);
+}
+/**
+ * Parses a word list from a file, merging duplicate entries.
+ *
+ * The word list may be encoded in:
+ *
+ * - UTF-8, with or without BOM [exported by most software]
+ * - UTF-16, little endian, with BOM [exported by Microsoft Excel]
+ *
+ * @param wordlist word list to merge entries into (may have existing entries)
+ * @param filename filename of the word list
+ */
+export function parseWordListFromFilename(wordlist, filename) {
+    _parseWordList(wordlist, new WordListFromFilename(filename));
+}
+/**
+ * Parses a word list from a string. The string should have multiple lines
+ * with LF or CRLF line terminators.
+ *
+ * @param wordlist word list to merge entries into (may have existing entries)
+ * @param filename filename of the word list
+ */
+export function parseWordListFromContents(wordlist, contents) {
+    _parseWordList(wordlist, new WordListFromMemory(contents));
+}
+/**
+ * Reads a tab-separated values file into a word list. This function converts all
+ * entries into NFC and merges duplicate entries across wordlists. Duplication is
+ * on the basis of character-for-character equality after normalisation to NFC.
+ *
+ * Format specification:
+ *
+ * - the file is a UTF-8 encoded text file.
+ * - new lines are either LF or CRLF.
+ * - the file MAY start with the UTF-8 byte-order mark (BOM); that is, if the
+ * first three bytes of the file are EF BB BF, these will be interepreted as
+ * the BOM and will be ignored.
+ * - the file either consists of a comment or an entry.
+ * - comment lines MUST start with the '#' character on the very first column.
+ * - entries are one to three columns, separated by the (horizontal) tab
+ * character.
+ * - column 1 (REQUIRED): the wordform: can have any character except tab, CR,
+ * LF. Surrounding whitespace characters are trimmed.
+ * - column 2 (optional): the count: a non-negative integer specifying how many
+ * times this entry has appeared in the corpus. Blank means 'indeterminate';
+ * commas are permissible in the digits.
+ * - column 3 (optional): comment: an informative comment, ignored by the tool.
+ *
+ * @param wordlist word list to merge entries into (may have existing entries)
+ * @param contents contents of the file to import
+ */
+function _parseWordList(wordlist, source) {
+    const TAB = "\t";
+    let wordsSeenInThisFile = new Set();
+    for (let [lineno, line] of source.lines()) {
+        // Remove the byte-order mark (BOM) from the beginning of the string.
+        // Because `contents` can be the concatenation of several files, we have to remove
+        // the BOM from every possible start of file -- i.e., beginning of every line.
+        line = line.replace(/^\uFEFF/, '').trim();
+        if (line.startsWith('#') || line === "") {
+            continue; // skip comments and empty lines
+        }
+        // The third column is the comment. Always ignored!
+        let [wordform, countText] = line.split(TAB);
+        // Clean the word form.
+        let original = wordform;
+        wordform = wordform.normalize('NFC');
+        if (original !== wordform) {
+            // Mixed normalization forms are yucky! Warn about it.
+            log(KeymanCompilerError.CWARN_MixedNormalizationForms, `“${wordform}” is not in Unicode NFC. Automatically converting to NFC.`, { filename: source.name, lineno });
+        }
+        wordform = wordform.trim();
+        countText = (countText || '').trim().replace(/,/g, '');
+        let count = parseInt(countText, 10);
+        // When parsing a decimal integer fails (e.g., blank or something else):
+        if (!isFinite(count) || count < 0) {
+            // TODO: is this the right thing to do?
+            // Treat it like a hapax legonmenom -- it exist, but only once.
+            count = 1;
+        }
+        if (wordsSeenInThisFile.has(wordform)) {
+            // The same word seen across multiple files is fine,
+            // but a word seen multiple times in one file is a problem!
+            log(KeymanCompilerError.CWARN_DuplicateWordInSameFile, `duplicate word “${wordform}” found in same file; summing counts`, { filename: source.name, lineno });
+        }
+        wordsSeenInThisFile.add(wordform);
+        wordlist[wordform] = (isNaN(wordlist[wordform]) ? 0 : wordlist[wordform] || 0) + count;
+    }
+}
+class WordListFromMemory {
+    name = '<memory>';
+    _contents;
+    constructor(contents) {
+        this._contents = contents;
+    }
+    *lines() {
+        yield* enumerateLines(this._contents.split(NEWLINE_SEPARATOR));
+    }
+}
+class WordListFromFilename {
+    name;
+    constructor(filename) {
+        this.name = filename;
+    }
+    *lines() {
+        let contents = readFileSync(this.name, detectEncoding(this.name));
+        yield* enumerateLines(contents.split(NEWLINE_SEPARATOR));
+    }
+}
+/**
+ * Yields pairs of [lineno, line], given an Array of lines.
+ */
+function* enumerateLines(lines) {
+    let i = 1;
+    for (let line of lines) {
+        yield [i, line];
+        i++;
+    }
+}
+var Trie;
+(function (Trie_1) {
+    /**
+     * A sentinel value for when an internal node has contents and requires an
+     * "internal" leaf. That is, this internal node has content. Instead of placing
+     * entries as children in an internal node, a "fake" leaf is created, and its
+     * key is this special internal value.
+     *
+     * The value is a valid Unicode BMP code point, but it is a "non-character".
+     * Unicode will never assign semantics to these characters, as they are
+     * intended to be used internally as sentinel values.
+     */
+    const INTERNAL_VALUE = '\uFDD0';
+    /**
+     * Builds a trie from a word list.
+     *
+     * @param wordlist The wordlist with non-negative weights.
+     * @param keyFunction Function that converts word forms into indexed search keys
+     * @returns A JSON-serialiable object that can be given to the TrieModel constructor.
+     */
+    function buildTrie(wordlist, keyFunction) {
+        let root = new Trie(keyFunction).buildFromWordList(wordlist).root;
+        return {
+            totalWeight: sumWeights(root),
+            root: root
+        };
+    }
+    Trie_1.buildTrie = buildTrie;
+    /**
+     * Wrapper class for the trie and its nodes and wordform to search
+     */
+    class Trie {
+        root = createRootNode();
+        toKey;
+        constructor(wordform2key) {
+            this.toKey = wordform2key;
+        }
+        /**
+         * Populates the trie with the contents of an entire wordlist.
+         * @param words a list of word and count pairs.
+         */
+        buildFromWordList(words) {
+            for (let [wordform, weight] of Object.entries(words)) {
+                let key = this.toKey(wordform);
+                addUnsorted(this.root, { key, weight, content: wordform }, 0);
+            }
+            sortTrie(this.root);
+            return this;
+        }
+    }
+    // "Constructors"
+    function createRootNode() {
+        return {
+            type: 'leaf',
+            weight: 0,
+            entries: []
+        };
+    }
+    // Implement Trie creation.
+    /**
+     * Adds an entry to the trie.
+     *
+     * Note that the trie will likely be unsorted after the add occurs. Before
+     * performing a lookup on the trie, use call sortTrie() on the root note!
+     *
+     * @param node Which node should the entry be added to?
+     * @param entry the wordform/weight/key to add to the trie
+     * @param index the index in the key and also the trie depth. Should be set to
+     * zero when adding onto the root node of the trie.
+     */
+    function addUnsorted(node, entry, index = 0) {
+        // Each node stores the MAXIMUM weight out of all of its decesdents, to
+        // enable a greedy search through the trie.
+        node.weight = Math.max(node.weight, entry.weight);
+        // When should a leaf become an interior node?
+        // When it already has a value, but the key of the current value is longer
+        // than the prefix.
+        if (node.type === 'leaf' && index < entry.key.length && node.entries.length >= 1) {
+            convertLeafToInternalNode(node, index);
+        }
+        if (node.type === 'leaf') {
+            // The key matches this leaf node, so add yet another entry.
+            addItemToLeaf(node, entry);
+        }
+        else {
+            // Push the node down to a lower node.
+            addItemToInternalNode(node, entry, index);
+        }
+        node.unsorted = true;
+    }
+    /**
+     * Adds an item to the internal node at a given depth.
+     * @param item
+     * @param index
+     */
+    function addItemToInternalNode(node, item, index) {
+        let char = item.key[index];
+        if (!node.children[char]) {
+            node.children[char] = createRootNode();
+            node.values.push(char);
+        }
+        addUnsorted(node.children[char], item, index + 1);
+    }
+    function addItemToLeaf(leaf, item) {
+        leaf.entries.push(item);
+    }
+    /**
+     * Mutates the given Leaf to turn it into an InternalNode.
+     *
+     * NOTE: the node passed in will be DESTRUCTIVELY CHANGED into a different
+     * type when passed into this function!
+     *
+     * @param depth depth of the trie at this level.
+     */
+    function convertLeafToInternalNode(leaf, depth) {
+        let entries = leaf.entries;
+        // Alias the current node, as the desired type.
+        let internal = leaf;
+        internal.type = 'internal';
+        delete leaf.entries;
+        internal.values = [];
+        internal.children = {};
+        // Convert the old values array into the format for interior nodes.
+        for (let item of entries) {
+            let char;
+            if (depth < item.key.length) {
+                char = item.key[depth];
+            }
+            else {
+                char = INTERNAL_VALUE;
+            }
+            if (!internal.children[char]) {
+                internal.children[char] = createRootNode();
+                internal.values.push(char);
+            }
+            addUnsorted(internal.children[char], item, depth + 1);
+        }
+        internal.unsorted = true;
+    }
+    /**
+     * Recursively sort the trie, in descending order of weight.
+     * @param node any node in the trie
+     */
+    function sortTrie(node) {
+        if (node.type === 'leaf') {
+            if (!node.unsorted) {
+                return;
+            }
+            node.entries.sort(function (a, b) { return b.weight - a.weight; });
+        }
+        else {
+            // We MUST recurse and sort children before returning.
+            for (let char of node.values) {
+                sortTrie(node.children[char]);
+            }
+            if (!node.unsorted) {
+                return;
+            }
+            node.values.sort((a, b) => {
+                return node.children[b].weight - node.children[a].weight;
+            });
+        }
+        delete node.unsorted;
+    }
+    /**
+     * O(n) recursive traversal to sum the total weight of all leaves in the
+     * trie, starting at the provided node.
+     *
+     * @param node The node to start summing weights.
+     */
+    function sumWeights(node) {
+        let val;
+        if (node.type === 'leaf') {
+            val = node.entries
+                .map(entry => entry.weight)
+                //.map(entry => isNaN(entry.weight) ? 1 : entry.weight)
+                .reduce((acc, count) => acc + count, 0);
+        }
+        else {
+            val = Object.keys(node.children)
+                .map((key) => sumWeights(node.children[key]))
+                .reduce((acc, count) => acc + count, 0);
+        }
+        if (isNaN(val)) {
+            console.error("Unexpected NaN has appeared!");
+        }
+        return val;
+    }
+})(Trie || (Trie = {}));
+/**
+ * Detects the encoding of a text file.
+ *
+ * Supported encodings are:
+ *
+ * - UTF-8, with or without BOM
+ * - UTF-16, little endian, with BOM
+ *
+ * UTF-16 in big endian is explicitly NOT supported! The reason is two-fold:
+ * 1) Node does not support it without resorting to an external library (or
+ * swapping every byte in the file!); and 2) I'm not sure anything actually
+ * outputs in this format anyway!
+ *
+ * @param filename filename of the file to detect encoding
+ */
+function detectEncoding(filename) {
+    let buffer = readFileSync(filename);
+    // Note: BOM is U+FEFF
+    // In little endian, this is 0xFF 0xFE
+    if (buffer[0] == 0xFF && buffer[1] == 0xFE) {
+        return 'utf16le';
+    }
+    else if (buffer[0] == 0xFE && buffer[1] == 0xFF) {
+        // Big Endian, is NOT supported because Node does not support it (???)
+        // See: https://stackoverflow.com/a/14551669/6626414
+        throw new Error('UTF-16BE is unsupported');
+    }
+    else {
+        // Assume its in UTF-8, with or without a BOM.
+        return 'utf8';
+    }
+}
+//# sourceMappingURL=build-trie.js.map
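
The TSV format specified in the `_parseWordList` comment above is easiest to see on a tiny input; the file contents and counts below are invented for illustration:

```ts
import { WordList, parseWordListFromContents } from "./build-trie.js";

// Hypothetical word list following the spec: '#' comments, then
// wordform<TAB>count<TAB>comment, with the count and comment optional.
const tsv = [
  "# sample word list",
  "tree\t12\tfrom corpus A",
  "tree\t3\tduplicate in the same file: counts are summed (with a warning)",
  "house",       // blank count is treated as a count of 1
  "café\t4",     // wordforms are normalized to NFC before merging
].join("\n");

const words: WordList = {};
parseWordListFromContents(words, tsv);
// words === { tree: 15, house: 1, "café": 4 }
```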

--- /dev/null
+++ package/build/src/build-trie.js.map
@@ -0,0 +1 @@
{"version":3,"file":"build-trie.js","sourceRoot":"","sources":["../../src/build-trie.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,IAAI,CAAC;AAClC,OAAO,EAAE,GAAG,EAAE,mBAAmB,EAAE,MAAM,4BAA4B,CAAC;AAEtE,wCAAwC;AACxC,MAAM,iBAAiB,GAAG,eAAe,CAAC;AAW1C;;;;;;;GAOG;AACH,MAAM,UAAU,uBAAuB,CAAC,SAAmB,EAAE,eAAwC;IACnG,IAAI,OAAO,eAAe,KAAK,UAAU,EAAE;QACzC,MAAM,IAAI,SAAS,CAAC,8CAA8C,CAAC,CAAA;KACpE;IACD,+DAA+D;IAC/D,IAAI,QAAQ,GAAa,EAAE,CAAC;IAC5B,SAAS,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAE,CAAC,yBAAyB,CAAC,QAAQ,EAAE,QAAQ,CAAC,CAAC,CAAC;IAE7E,IAAI,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,QAAQ,EAAE,eAAuC,CAAC,CAAC;IAC7E,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC;AAC9B,CAAC;AAED;;;;;;;;;;GAUG;AACH,MAAM,UAAU,yBAAyB,CAAC,QAAkB,EAAE,QAAgB;IAC5E,cAAc,CAAC,QAAQ,EAAE,IAAI,oBAAoB,CAAC,QAAQ,CAAC,CAAC,CAAC;AAC/D,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,yBAAyB,CAAC,QAAkB,EAAE,QAAgB;IAC5E,cAAc,CAAC,QAAQ,EAAE,IAAI,kBAAkB,CAAC,QAAQ,CAAC,CAAC,CAAC;AAC7D,CAAC;AAED;;;;;;;;;;;;;;;;;;;;;;;;;GAyBG;AACH,SAAS,cAAc,CAAC,QAAkB,EAAE,MAAuB;IACjE,MAAM,GAAG,GAAG,IAAI,CAAC;IAEjB,IAAI,mBAAmB,GAAG,IAAI,GAAG,EAAU,CAAC;IAE5C,KAAK,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,IAAI,MAAM,CAAC,KAAK,EAAE,EAAE;QACzC,qEAAqE;QACrE,kFAAkF;QAClF,8EAA8E;QAC9E,IAAI,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,EAAE,CAAC,CAAC,IAAI,EAAE,CAAC;QAE1C,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,IAAI,IAAI,KAAK,EAAE,EAAE;YACvC,SAAS,CAAC,gCAAgC;SAC3C;QAED,mDAAmD;QACnD,IAAI,CAAC,QAAQ,EAAE,SAAS,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;QAE5C,uBAAuB;QACvB,IAAI,QAAQ,GAAG,QAAQ,CAAC;QAExB,QAAQ,GAAG,QAAQ,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC;QACrC,IAAI,QAAQ,KAAK,QAAQ,EAAE;YACzB,sDAAsD;YACtD,GAAG,CACD,mBAAmB,CAAC,6BAA6B,EACjD,IAAI,QAAQ,2DAA2D,EACvE,EAAC,QAAQ,EAAE,MAAM,CAAC,IAAI,EAAE,MAAM,EAAC,CAChC,CAAA;SACF;QAED,QAAQ,GAAG,QAAQ,CAAC,IAAI,EAAE,CAAA;QAE1B,SAAS,GAAG,CAAC,SAAS,IAAI,EAAE,CAAC,CAAC,IAAI,EAAE,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;QACvD,IAAI,KAAK,GAAG,QAAQ,CAAC,SAAS,EAAE,EAAE,CAAC,CAAC;QAEpC,wEAAwE;QACxE,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,IAAI,KAAK,GAAG,CAAC,EAAE;YACjC,uCAAuC;YACvC,+DAA+D;YAC/D,KAAK,GAAG,CAAC,CAAC;SACX;QAED,IAAI,mBAAmB,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE;YACrC,oDAAoD;YACpD,2DAA2D;YAC3D,GAAG,CACD,mBAAmB,CAAC,6BAA6B,EACjD,mBAAmB,QAAQ,sCAAsC,EACjE,EAAC,QAAQ,EAAE,MAAM,CAAC,IAAI,EAAE,MAAM,EAAC,CAChC,CAAA;SACF;QACD,mBAAmB,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC;QAElC,QAAQ,CAAC,QAAQ,CAAC,GAAG,CAAC,KAAK,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,GAAG,KAAK,CAAC;KACxF;AACH,CAAC;AASD,MAAM,kBAAkB;IACb,IAAI,GAAG,UAAU,CAAC;IACV,SAAS,CAAS;IAEnC,YAAY,QAAgB;QAC1B,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC;IAC5B,CAAC;IAED,CAAC,KAAK;QACJ,KAAM,CAAC,CAAA,cAAc,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,iBAAiB,CAAC,CAAC,CAAC;IACjE,CAAC;CACF;AAED,MAAM,oBAAoB;IACf,IAAI,CAAS;IACtB,YAAY,QAAgB;QAC1B,IAAI,CAAC,IAAI,GAAG,QAAQ,CAAC;IACvB,CAAC;IAED,CAAC,KAAK;QACJ,IAAI,QAAQ,GAAG,YAAY,CAAC,IAAI,CAAC,IAAI,EAAE,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;QAClE,KAAM,CAAC,CAAA,cAAc,CAAC,QAAQ,CAAC,KAAK,CAAC,iBAAiB,CAAC,CAAC,CAAC;IAC3D,CAAC;CACF;AAED;;GAEG;AACH,QAAQ,CAAC,CAAC,cAAc,CAAC,KAAe;IACpC,IAAI,CAAC,GAAG,CAAC,CAAC;IACV,KAAK,IAAI,IAAI,IAAI,KAAK,EAAE;QACtB,MAAM,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC;QAChB,CAAC,EAAE,CAAC;KACL;AACL,CAAC;AAED,IAAU,IAAI,CAmSb;AAnSD,WAAU,MAAI;IAsFZ;;;;;;;;;OASG;IACH,MAAM,cAAc,GAAG,QAAQ,CAAC;IAEhC;;;;;;OAMG;IACH,SAAgB,SAAS,CAAC,QAAkB,EAAE,WAA4B;QACxE,IAAI,IAAI,GAAG,IAAI,IAAI,CAAC,WAAW,CAAC,CAAC,iBAAiB,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC;QAClE,OAAO;YACL,WAAW,EAAE,UAAU,CAAC,IAAI,CAAC;YAC7B,IAAI,EAAE,IAAI;SACX,CAAA;IACH,CAAC;IANe,gBAAS,YAMxB,CAAA;IAED;;OAEG;IACH,MAAM,IAAI;QACC,IAAI,GAAG,cAAc,EAAE,CAAC;QACjC,KA
AK,CAAkB;QACvB,YAAY,YAA6B;YACvC,IAAI,CAAC,KAAK,GAAG,YAAY,CAAC;QAC5B,CAAC;QAED;;;WAGG;QACH,iBAAiB,CAAC,KAAe;YAC/B,KAAK,IAAI,CAAC,QAAQ,EAAE,MAAM,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;gBACpD,IAAI,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;gBAC/B,WAAW,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,QAAQ,EAAE,EAAE,CAAC,CAAC,CAAC;aAC/D;YACD,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;YACpB,OAAO,IAAI,CAAC;QACd,CAAC;KACF;IAED,iBAAiB;IACjB,SAAS,cAAc;QACrB,OAAO;YACL,IAAI,EAAE,MAAM;YACZ,MAAM,EAAE,CAAC;YACT,OAAO,EAAE,EAAE;SACZ,CAAC;IACJ,CAAC;IAED,2BAA2B;IAE3B;;;;;;;;;;OAUG;IACH,SAAS,WAAW,CAAC,IAAU,EAAE,KAAY,EAAE,QAAgB,CAAC;QAC9D,uEAAuE;QACvE,2CAA2C;QAC3C,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;QAElD,8CAA8C;QAC9C,0EAA0E;QAC1E,mBAAmB;QACnB,IAAI,IAAI,CAAC,IAAI,KAAK,MAAM,IAAI,KAAK,GAAG,KAAK,CAAC,GAAG,CAAC,MAAM,IAAI,IAAI,CAAC,OAAO,CAAC,MAAM,IAAI,CAAC,EAAE;YAChF,yBAAyB,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC;SACxC;QAED,IAAI,IAAI,CAAC,IAAI,KAAK,MAAM,EAAE;YACxB,4DAA4D;YAC5D,aAAa,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC;SAC5B;aAAM;YACL,sCAAsC;YACtC,qBAAqB,CAAC,IAAI,EAAE,KAAK,EAAE,KAAK,CAAC,CAAC;SAC3C;QAED,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC;IACvB,CAAC;IAED;;;;OAIG;IACH,SAAS,qBAAqB,CAAC,IAAkB,EAAE,IAAW,EAAE,KAAa;QAC3E,IAAI,IAAI,GAAG,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QAC3B,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE;YACxB,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,GAAG,cAAc,EAAE,CAAC;YACvC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;SACxB;QACD,WAAW,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,KAAK,GAAG,CAAC,CAAC,CAAC;IACpD,CAAC;IAED,SAAS,aAAa,CAAC,IAAU,EAAE,IAAW;QAC5C,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAC1B,CAAC;IAED;;;;;;;OAOG;IACH,SAAS,yBAAyB,CAAC,IAAU,EAAE,KAAa;QAC1D,IAAI,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC;QAE3B,+CAA+C;QAC/C,IAAI,QAAQ,GAAc,IAAqB,CAAC;QAChD,QAAQ,CAAC,IAAI,GAAG,UAAU,CAAC;QAE3B,OAAO,IAAI,CAAC,OAAO,CAAC;QACpB,QAAQ,CAAC,MAAM,GAAG,EAAE,CAAC;QACrB,QAAQ,CAAC,QAAQ,GAAG,EAAE,CAAC;QAEvB,mEAAmE;QACnE,KAAK,IAAI,IAAI,IAAI,OAAO,EAAE;YACxB,IAAI,IAAY,CAAC;YACjB,IAAI,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;gBAC3B,IAAI,GAAG,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;aACxB;iBAAM;gBACL,IAAI,GAAG,cAAc,CAAC;aACvB;YAED,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE;gBAC5B,QAAQ,CAAC,QAAQ,CAAC,IAAI,CAAC,GAAG,cAAc,EAAE,CAAC;gBAC3C,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;aAC5B;YACD,WAAW,CAAC,QAAQ,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,KAAK,GAAG,CAAC,CAAC,CAAC;SACvD;QAED,QAAQ,CAAC,QAAQ,GAAG,IAAI,CAAC;IAC3B,CAAC;IAED;;;OAGG;IACH,SAAS,QAAQ,CAAC,IAAU;QAC1B,IAAI,IAAI,CAAC,IAAI,KAAK,MAAM,EAAE;YACxB,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE;gBAClB,OAAO;aACR;YAED,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,EAAE,CAAC,IAAI,OAAO,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;SACpE;aAAM;YACL,sDAAsD;YACtD,KAAK,IAAI,IAAI,IAAI,IAAI,CAAC,MAAM,EAAE;gBAC5B,QAAQ,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;aAC/B;YAED,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE;gBAClB,OAAO;aACR;YAED,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE;gBACxB,OAAO,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC;YAC3D,CAAC,CAAC,CAAC;SACJ;QAED,OAAO,IAAI,CAAC,QAAQ,CAAC;IACvB,CAAC;IAED;;;;;OAKG;IACH,SAAS,UAAU,CAAC,IAAU;QAC5B,IAAI,GAAW,CAAC;QAChB,IAAI,IAAI,CAAC,IAAI,KAAK,MAAM,EAAE;YACxB,GAAG,GAAG,IAAI,CAAC,OAAO;iBACf,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC;gBAC3B,uDAAuD;iBACtD,MAAM,CAAC,CAAC,GAAG,EAAE,KAAK,EAAE,EAAE,CAAC,GAAG,GAAG,KAAK,EAAE,CAAC,CAAC,CAAC;SAC3C;aAAM;YACL,GAAG,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC;iBAC7B,GAAG,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG
,CAAC,CAAC,CAAC;iBAC5C,MAAM,CAAC,CAAC,GAAG,EAAE,KAAK,EAAE,EAAE,CAAC,GAAG,GAAG,KAAK,EAAE,CAAC,CAAC,CAAC;SAC3C;QAED,IAAG,KAAK,CAAC,GAAG,CAAC,EAAE;YACb,OAAO,CAAC,KAAK,CAAC,8BAA8B,CAAC,CAAC;SAC/C;QACD,OAAO,GAAG,CAAC;IACb,CAAC;AACH,CAAC,EAnSS,IAAI,KAAJ,IAAI,QAmSb;AAED;;;;;;;;;;;;;;GAcG;AACH,SAAS,cAAc,CAAC,QAAgB;IACtC,IAAI,MAAM,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;IACpC,sBAAsB;IACtB,sCAAsC;IACtC,IAAI,MAAM,CAAC,CAAC,CAAC,IAAI,IAAI,IAAI,MAAM,CAAC,CAAC,CAAC,IAAI,IAAI,EAAE;QAC1C,OAAO,SAAS,CAAC;KAClB;SAAM,IAAI,MAAM,CAAC,CAAC,CAAC,IAAI,IAAI,IAAI,MAAM,CAAC,CAAC,CAAC,IAAI,IAAI,EAAE;QACjD,sEAAsE;QACtE,oDAAoD;QACpD,MAAM,IAAI,KAAK,CAAC,yBAAyB,CAAC,CAAA;KAC3C;SAAM;QACL,8CAA8C;QAC9C,OAAO,MAAM,CAAC;KACf;AACH,CAAC"}

--- /dev/null
+++ package/build/src/join-word-breaker-decorator.d.ts
@@ -0,0 +1,10 @@
+/// <reference types="@keymanapp/models-types" />
+/**
+ * Returns a word breaker that joins spans of an existing word breaker.
+ * Spans are joined if they are connected by a delimiter.
+ *
+ * @param breaker The word breaker whose results will be decorated.
+ * @param joiners What delimiters should be used to join spans.
+ */
+export declare function decorateWithJoin(breaker: WordBreakingFunction, joiners: string[]): WordBreakingFunction;
+//# sourceMappingURL=join-word-breaker-decorator.d.ts.map

--- /dev/null
+++ package/build/src/join-word-breaker-decorator.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"join-word-breaker-decorator.d.ts","sourceRoot":"","sources":["../../src/join-word-breaker-decorator.ts"],"names":[],"mappings":";AAEA;;;;;;GAMG;AACH,wBAAgB,gBAAgB,CAAC,OAAO,EAAE,oBAAoB,EAAE,OAAO,EAAE,MAAM,EAAE,GAAG,oBAAoB,CA4HvG"}

--- /dev/null
+++ package/build/src/join-word-breaker-decorator.js
@@ -0,0 +1,121 @@
+/// <reference types="@keymanapp/models-types" />
+/**
+ * Returns a word breaker that joins spans of an existing word breaker.
+ * Spans are joined if they are connected by a delimiter.
+ *
+ * @param breaker The word breaker whose results will be decorated.
+ * @param joiners What delimiters should be used to join spans.
+ */
+export function decorateWithJoin(breaker, joiners) {
+    // Make a copy so that if the original array is accidentally mutated, it
+    // won't affect the joiner.
+    const delimiters = joiners.concat();
+    return function (input) {
+        let originalSpans = breaker(input);
+        // Implements a finite-state transducer (FST) where:
+        // - Transductions are pushed onto a stack
+        // - There are three states:
+        // - empty stack (initial state)
+        // - unjoined
+        // - joined
+        // - all three states are accepting states
+        // - there is NO backtracking on the input
+        // (hence the for-loop over the input tape)
+        // - each state is a JavaScript callback (function)
+        let state = emptyStack;
+        let stack = [];
+        for (let span of originalSpans) {
+            state = state(span);
+        }
+        return stack;
+        /******************* States *******************/
+        function emptyStack(span) {
+            stack.push(span);
+            if (isJoiner(span)) {
+                return joined;
+            }
+            else {
+                return unjoined;
+            }
+        }
+        function unjoined(span) {
+            // NB: stack has at least one span in it
+            if (isJoiner(span)) {
+                if (spansAreBackToBack(lastFrom(stack), span)) {
+                    concatLastSpanInStackWith(span);
+                }
+                else {
+                    // Spans are non-contiguous, so don't join them!
+                    stack.push(span);
+                }
+                return joined;
+            }
+            else {
+                // Span cannot be joined
+                stack.push(span);
+                return unjoined;
+            }
+        }
+        function joined(span) {
+            // NB: stack has at least one span in it
+            if (!spansAreBackToBack(lastFrom(stack), span)) {
+                // Spans are non-contiguous and cannot be joined:
+                stack.push(span);
+                return unjoined;
+            }
+            // Spans are contiguous
+            concatLastSpanInStackWith(span);
+            if (isJoiner(span)) {
+                return joined;
+            }
+            else {
+                return unjoined;
+            }
+        }
+        /****************** Helpers ******************/
+        function concatLastSpanInStackWith(span) {
+            let lastIndex = stack.length - 1;
+            let top = stack[lastIndex];
+            let joinedSpan = concatenateSpans(top, span);
+            stack[lastIndex] = joinedSpan;
+        }
+    };
+    function isJoiner(span) {
+        return includes(delimiters, span.text);
+    }
+    /**
+     * Returns true when the spans are contiguous.
+     * Order matters when calling this function!
+     */
+    function spansAreBackToBack(former, latter) {
+        return former.end === latter.start;
+    }
+    function concatenateSpans(former, latter) {
+        if (latter.start !== former.end) {
+            throw new Error(`Cannot concatenate non-contiguous spans: ${JSON.stringify(former)}/${JSON.stringify(latter)}`);
+        }
+        return {
+            start: former.start,
+            end: latter.end,
+            length: former.length + latter.length,
+            text: former.text + latter.text
+        };
+    }
+    /**
+     * When Array.prototype.include() doesn't exist:
+     */
+    function includes(haystack, needle) {
+        for (let item of haystack) {
+            if (item === needle)
+                return true;
+        }
+        return false;
+    }
+    /**
+     * Get the last element from the array.
+     */
+    function lastFrom(array) {
+        return array[array.length - 1];
+    }
+}
+//# sourceMappingURL=join-word-breaker-decorator.js.map
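
A sketch of the decorator declared and implemented above in use; the base word breaker and its spans are invented for the example, and the span shape (start, end, length, text) comes from the @keymanapp/models-types ambient declarations:

```ts
import { decorateWithJoin } from "./join-word-breaker-decorator.js";

// Hypothetical base breaker that splits "ka-boom" into three contiguous spans.
const baseBreaker = (text: string) => [
  { start: 0, end: 2, length: 2, text: "ka" },
  { start: 2, end: 3, length: 1, text: "-" },
  { start: 3, end: 7, length: 4, text: "boom" },
];

// Treat "-" as a joiner: contiguous spans around it are glued back together.
const joiningBreaker = decorateWithJoin(baseBreaker, ["-"]);
const spans = joiningBreaker("ka-boom");
// spans === [{ start: 0, end: 7, length: 7, text: "ka-boom" }]
```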

--- /dev/null
+++ package/build/src/join-word-breaker-decorator.js.map
@@ -0,0 +1 @@
{"version":3,"file":"join-word-breaker-decorator.js","sourceRoot":"","sources":["../../src/join-word-breaker-decorator.ts"],"names":[],"mappings":"AAAA,iDAAiD;AAEjD;;;;;;GAMG;AACH,MAAM,UAAU,gBAAgB,CAAC,OAA6B,EAAE,OAAiB;IAC/E,wEAAwE;IACxE,2BAA2B;IAC3B,MAAM,UAAU,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC;IAEpC,OAAO,UAAU,KAAa;QAC5B,IAAI,aAAa,GAAG,OAAO,CAAC,KAAK,CAAC,CAAC;QAEnC,oDAAoD;QACpD,2CAA2C;QAC3C,6BAA6B;QAC7B,mCAAmC;QACnC,gBAAgB;QAChB,cAAc;QACd,0CAA0C;QAC1C,0CAA0C;QAC1C,6CAA6C;QAC7C,mDAAmD;QACnD,IAAI,KAAK,GAAG,UAAU,CAAC;QACvB,IAAI,KAAK,GAAW,EAAE,CAAC;QACvB,KAAK,IAAI,IAAI,IAAI,aAAa,EAAE;YAC9B,KAAK,GAAG,KAAK,CAAC,IAAI,CAAC,CAAC;SACrB;QAED,OAAO,KAAK,CAAC;QAEb,gDAAgD;QAChD,SAAS,UAAU,CAAC,IAAU;YAC5B,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;YAEjB,IAAI,QAAQ,CAAC,IAAI,CAAC,EAAE;gBAClB,OAAO,MAAM,CAAC;aACf;iBAAM;gBACL,OAAO,QAAQ,CAAA;aAChB;QACH,CAAC;QAED,SAAS,QAAQ,CAAC,IAAU;YAC1B,wCAAwC;YACxC,IAAI,QAAQ,CAAC,IAAI,CAAC,EAAE;gBAClB,IAAI,kBAAkB,CAAC,QAAQ,CAAC,KAAK,CAAC,EAAE,IAAI,CAAC,EAAE;oBAC7C,yBAAyB,CAAC,IAAI,CAAC,CAAC;iBACjC;qBAAM;oBACL,gDAAgD;oBAChD,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;iBAClB;gBACD,OAAO,MAAM,CAAC;aAEf;iBAAM;gBACL,wBAAwB;gBACxB,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;gBACjB,OAAO,QAAQ,CAAC;aACjB;QACH,CAAC;QAED,SAAS,MAAM,CAAC,IAAU;YACxB,wCAAwC;YACxC,IAAI,CAAC,kBAAkB,CAAC,QAAQ,CAAC,KAAK,CAAC,EAAE,IAAI,CAAC,EAAE;gBAC9C,iDAAiD;gBACjD,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;gBACjB,OAAO,QAAQ,CAAC;aACjB;YAED,uBAAuB;YACvB,yBAAyB,CAAC,IAAI,CAAC,CAAC;YAChC,IAAI,QAAQ,CAAC,IAAI,CAAC,EAAE;gBAClB,OAAO,MAAM,CAAC;aACf;iBAAM;gBACL,OAAO,QAAQ,CAAC;aACjB;QACH,CAAC;QAED,+CAA+C;QAC/C,SAAS,yBAAyB,CAAC,IAAU;YAC3C,IAAI,SAAS,GAAG,KAAK,CAAC,MAAM,GAAG,CAAC,CAAC;YAEjC,IAAI,GAAG,GAAG,KAAK,CAAC,SAAS,CAAC,CAAC;YAC3B,IAAI,UAAU,GAAG,gBAAgB,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;YAC7C,KAAK,CAAC,SAAS,CAAC,GAAG,UAAU,CAAC;QAChC,CAAC;IACH,CAAC,CAAA;IAED,SAAS,QAAQ,CAAC,IAAU;QAC1B,OAAO,QAAQ,CAAC,UAAU,EAAE,IAAI,CAAC,IAAI,CAAC,CAAC;IACzC,CAAC;IAED;;;OAGG;IACH,SAAS,kBAAkB,CAAC,MAAY,EAAE,MAAY;QACpD,OAAO,MAAM,CAAC,GAAG,KAAK,MAAM,CAAC,KAAK,CAAC;IACrC,CAAC;IAED,SAAS,gBAAgB,CAAC,MAAY,EAAE,MAAY;QAClD,IAAI,MAAM,CAAC,KAAK,KAAK,MAAM,CAAC,GAAG,EAAE;YAC/B,MAAM,IAAI,KAAK,CAAC,4CAA4C,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC;SACjH;QAED,OAAO;YACL,KAAK,EAAE,MAAM,CAAC,KAAK;YACnB,GAAG,EAAE,MAAM,CAAC,GAAG;YACf,MAAM,EAAE,MAAM,CAAC,MAAM,GAAG,MAAM,CAAC,MAAM;YACrC,IAAI,EAAE,MAAM,CAAC,IAAI,GAAG,MAAM,CAAC,IAAI;SAChC,CAAC;IACJ,CAAC;IAED;;OAEG;IACH,SAAS,QAAQ,CAAI,QAAa,EAAE,MAAS;QAC3C,KAAK,IAAI,IAAI,IAAI,QAAQ,EAAE;YACzB,IAAI,IAAI,KAAK,MAAM;gBACjB,OAAO,IAAI,CAAC;SACf;QACD,OAAO,KAAK,CAAC;IACf,CAAC;IAED;;OAEG;IACH,SAAS,QAAQ,CAAI,KAAU;QAC7B,OAAO,KAAK,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;IACjC,CAAC;AACH,CAAC"}

--- /dev/null
+++ package/build/src/lexical-model-compiler.d.ts
@@ -0,0 +1,19 @@
+/// <reference path="lexical-model.d.ts" />
+import { LexicalModelSource } from "./lexical-model.js";
+export default class LexicalModelCompiler {
+    /**
+     * Returns the generated code for the model that will ultimately be loaded by
+     * the LMLayer worker. This code contains all model parameters, and specifies
+     * word breakers and auxilary functions that may be required.
+     *
+     * @param model_id The model ID. TODO: not sure if this is actually required!
+     * @param modelSource A specification of the model to compile
+     * @param sourcePath Where to find auxilary sources files
+     */
+    generateLexicalModelCode(model_id: string, modelSource: LexicalModelSource, sourcePath: string): string;
+    transpileSources(sources: Array<string>): Array<string>;
+    logError(s: string): void;
+}
+export declare class ModelSourceError extends Error {
+}
+//# sourceMappingURL=lexical-model-compiler.d.ts.map
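
Going only by the declaration above, a caller might look roughly like this; the model source literal is a minimal guess at a LexicalModelSource (its full shape lives in lexical-model.d.ts) and the paths reuse fixture names from the file list:

```ts
import LexicalModelCompiler from "./lexical-model-compiler.js";

// Minimal stand-in for a LexicalModelSource; treat these fields as assumptions.
const modelSource = {
  format: "trie-1.0",
  sources: ["wordlist.tsv"],
} as any;

const compiler = new LexicalModelCompiler();
const generated: string = compiler.generateLexicalModelCode(
  "example.qaa.sencoten",                 // model_id
  modelSource,                            // specification of the model to compile
  "test/fixtures/example.qaa.sencoten"    // sourcePath: where auxiliary files live
);
// `generated` holds the code that the LMLayer worker will ultimately load.
```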

--- /dev/null
+++ package/build/src/lexical-model-compiler.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"lexical-model-compiler.d.ts","sourceRoot":"","sources":["../../src/lexical-model-compiler.ts"],"names":[],"mappings":";AAaA,OAAO,EAAE,kBAAkB,EAA0C,MAAM,oBAAoB,CAAC;AAEhG,MAAM,CAAC,OAAO,OAAO,oBAAoB;IAEvC;;;;;;;;OAQG;IACH,wBAAwB,CAAC,QAAQ,EAAE,MAAM,EAAE,WAAW,EAAE,kBAAkB,EAAE,UAAU,EAAE,MAAM;IAiE9F,gBAAgB,CAAC,OAAO,EAAE,KAAK,CAAC,MAAM,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC;IAUvD,QAAQ,CAAC,CAAC,EAAE,MAAM;CAGnB;AAED,qBAAa,gBAAiB,SAAQ,KAAK;CAC1C"}