lindera-wasm-nodejs-ko-dic 1.2.1 → 1.3.0

This diff shows the published contents of the two package versions as they appear in their public registry. It is provided for informational purposes only.
package/README.md CHANGED
@@ -12,36 +12,36 @@ WebAssembly of Lindera
 
 ### Web
 
-- <https://www.npmjs.com/package/lindera-wasm-web-cjk>
+- <https://www.npmjs.com/package/lindera-wasm-web-cjk>
 
   Lindera WASM with CJK dictionaries (IPADIC, ko-dic, CC-CEDICT) for Web
 
-- <https://www.npmjs.com/package/lindera-wasm-web-ipadic>
+- <https://www.npmjs.com/package/lindera-wasm-web-ipadic>
 
   Lindera WASM with Japanese dictionary (IPADIC) for Web
 
-- <https://www.npmjs.com/package/lindera-wasm-web-unidic>
+- <https://www.npmjs.com/package/lindera-wasm-web-unidic>
 
   Lindera WASM with Japanese dictionary (UniDic) for Web
 
-- <https://www.npmjs.com/package/lindera-wasm-web-ko-dic>
+- <https://www.npmjs.com/package/lindera-wasm-web-ko-dic>
 
   Lindera WASM with Korean dictionary (ko-dic) for Web
 
-- <https://www.npmjs.com/package/lindera-wasm-web-cc-cedict>
+- <https://www.npmjs.com/package/lindera-wasm-web-cc-cedict>
 
   Lindera WASM with Chinese dictionary (CC-CEDICT) for Web
 
 ### Node.js
 
-- <https://www.npmjs.com/package/lindera-wasm-nodejs-cjk>
+- <https://www.npmjs.com/package/lindera-wasm-nodejs-cjk>
 
   Lindera WASM with CJK dictionaries (IPADIC, ko-dic, CC-CEDICT) for Node.js
 
-- <https://www.npmjs.com/package/lindera-wasm-nodejs-ipadic>
+- <https://www.npmjs.com/package/lindera-wasm-nodejs-ipadic>
 
   Lindera WASM with Japanese dictionary (IPADIC) for Node.js
 
-- <https://www.npmjs.com/package/lindera-wasm-nodejs-unidic>
+- <https://www.npmjs.com/package/lindera-wasm-nodejs-unidic>
 
   Lindera WASM with Japanese dictionary (UniDic) for Node.js
 
-- <https://www.npmjs.com/package/lindera-wasm-nodejs-ko-dic>
+- <https://www.npmjs.com/package/lindera-wasm-nodejs-ko-dic>
 
   Lindera WASM with Korean dictionary (ko-dic) for Node.js
 
-- <https://www.npmjs.com/package/lindera-wasm-nodejs-cc-cedict>
+- <https://www.npmjs.com/package/lindera-wasm-nodejs-cc-cedict>
 
   Lindera WASM with Chinese dictionary (CC-CEDICT) for Node.js
 
 ## Usage
@@ -96,7 +96,6 @@ export default defineConfig({
 and set the `content_security_policy` to contains `wasm-unsafe-eval` in manifest.json:
 
 ```json
-// manifest.json
 "content_security_policy": {
   "extension_pages": "script-src 'self' 'wasm-unsafe-eval';"
 }
package/lindera_wasm.d.ts CHANGED
@@ -1,42 +1,6 @@
 /* tslint:disable */
 /* eslint-disable */
-/**
- * Gets the version of the lindera-wasm library.
- *
- * # Returns
- *
- * The version string of the library (e.g., "1.0.0").
- *
- * # Examples
- *
- * ```javascript
- * import { getVersion } from 'lindera-wasm';
- * console.log(getVersion()); // "1.0.0"
- * ```
- */
-export function getVersion(): string;
-/**
- * A tokenizer for morphological analysis.
- *
- * The `Tokenizer` performs text tokenization based on the configuration
- * provided by [`TokenizerBuilder`].
- *
- * # Examples
- *
- * ```javascript
- * const builder = new TokenizerBuilder();
- * builder.setDictionary("embedded://ipadic");
- * builder.setMode("normal");
- *
- * const tokenizer = builder.build();
- * const tokens = tokenizer.tokenize("関西国際空港");
- * console.log(tokens);
- * // Output: [
- * //   { surface: "関西国際空港", ... },
- * //   ...
- * // ]
- * ```
- */
+
 export class Tokenizer {
   private constructor();
   free(): void;
@@ -74,184 +38,181 @@ export class Tokenizer {
    */
   tokenize(input_text: string): any;
 }
-/**
- * Builder for creating a [`Tokenizer`] instance.
- *
- * `TokenizerBuilder` provides a fluent API for configuring and building a tokenizer
- * with various options such as dictionary selection, tokenization mode, character filters,
- * and token filters.
- *
- * # Examples
- *
- * ```javascript
- * const builder = new TokenizerBuilder();
- * builder.setDictionary("embedded://ipadic");
- * builder.setMode("normal");
- * builder.setKeepWhitespace(false);
- * builder.appendCharacterFilter("unicode_normalize", { "kind": "nfkc" });
- * builder.appendTokenFilter("lowercase");
- *
- * const tokenizer = builder.build();
- * ```
- */
+
 export class TokenizerBuilder {
   free(): void;
   [Symbol.dispose](): void;
   /**
-   * Creates a new `TokenizerBuilder` instance.
-   *
-   * # Returns
-   *
-   * A new `TokenizerBuilder` instance.
+   * Sets the dictionary to use for tokenization.
    *
-   * # Errors
+   * # Parameters
    *
-   * Returns an error if the builder cannot be initialized.
+   * - `uri`: The dictionary URI. Valid embedded dictionaries are:
+   *   - `"embedded://ipadic"`: Japanese IPADIC dictionary
+   *   - `"embedded://unidic"`: Japanese UniDic dictionary
+   *   - `"embedded://ko-dic"`: Korean ko-dic dictionary
+   *   - `"embedded://cc-cedict"`: Chinese CC-CEDICT dictionary
    *
    * # Examples
    *
    * ```javascript
-   * const builder = new TokenizerBuilder();
+   * builder.setDictionary("embedded://ipadic");
    * ```
    */
-  constructor();
+  setDictionary(uri: string): void;
   /**
-   * Builds and returns a configured [`Tokenizer`] instance.
+   * Appends a token filter to the tokenization pipeline.
    *
-   * This method consumes the builder and creates the final tokenizer with all
-   * configured settings.
+   * Token filters transform or filter the tokens after tokenization.
    *
-   * # Returns
+   * # Parameters
    *
-   * A configured `Tokenizer` instance.
+   * - `name`: The name of the token filter (e.g., `"lowercase"`, `"japanese_number"`).
+   * - `args`: A JavaScript object containing filter-specific arguments.
    *
    * # Errors
    *
-   * Returns an error if the tokenizer cannot be built with the current configuration.
+   * Returns an error if the arguments cannot be parsed.
    *
    * # Examples
    *
    * ```javascript
-   * const builder = new TokenizerBuilder();
-   * builder.setDictionary("embedded://ipadic");
-   * const tokenizer = builder.build();
+   * builder.appendTokenFilter("lowercase");
+   * builder.appendTokenFilter("japanese_number", { "tags": ["名詞,数"] });
    * ```
    */
-  build(): Tokenizer;
+  appendTokenFilter(name: string, args: any): void;
   /**
-   * Sets the tokenization mode.
+   * Sets whether to keep whitespace tokens in the output.
    *
    * # Parameters
    *
-   * - `mode`: The tokenization mode. Valid values are:
-   *   - `"normal"`: Standard tokenization
-   *   - `"decompose"`: Decomposes compound words into their components
-   *
-   * # Errors
-   *
-   * Returns an error if the mode string is invalid.
+   * - `keep`: If `true`, whitespace tokens are preserved; if `false`, they are removed.
    *
    * # Examples
    *
    * ```javascript
-   * builder.setMode("normal");
+   * builder.setKeepWhitespace(false); // Remove whitespace tokens
    * // or
-   * builder.setMode("decompose");
+   * builder.setKeepWhitespace(true); // Keep whitespace tokens
    * ```
    */
-  setMode(mode: string): void;
+  setKeepWhitespace(keep: boolean): void;
   /**
-   * Sets the dictionary to use for tokenization.
+   * Sets a user-defined dictionary.
+   *
+   * User dictionaries allow you to add custom words and their properties
+   * to supplement the main dictionary.
    *
    * # Parameters
    *
-   * - `uri`: The dictionary URI. Valid embedded dictionaries are:
-   *   - `"embedded://ipadic"`: Japanese IPADIC dictionary
-   *   - `"embedded://unidic"`: Japanese UniDic dictionary
-   *   - `"embedded://ko-dic"`: Korean ko-dic dictionary
-   *   - `"embedded://cc-cedict"`: Chinese CC-CEDICT dictionary
+   * - `uri`: The URI to the user dictionary file.
    *
    * # Examples
    *
    * ```javascript
-   * builder.setDictionary("embedded://ipadic");
+   * builder.setUserDictionary("path/to/user_dict.csv");
    * ```
    */
-  setDictionary(uri: string): void;
+  setUserDictionary(uri: string): void;
   /**
-   * Sets a user-defined dictionary.
+   * Appends a character filter to the tokenization pipeline.
    *
-   * User dictionaries allow you to add custom words and their properties
-   * to supplement the main dictionary.
+   * Character filters transform the input text before tokenization.
    *
    * # Parameters
    *
-   * - `uri`: The URI to the user dictionary file.
+   * - `name`: The name of the character filter (e.g., `"unicode_normalize"`).
+   * - `args`: A JavaScript object containing filter-specific arguments.
+   *
+   * # Errors
+   *
+   * Returns an error if the arguments cannot be parsed.
    *
    * # Examples
    *
    * ```javascript
-   * builder.setUserDictionary("path/to/user_dict.csv");
+   * builder.appendCharacterFilter("unicode_normalize", { "kind": "nfkc" });
    * ```
    */
-  setUserDictionary(uri: string): void;
+  appendCharacterFilter(name: string, args: any): void;
   /**
-   * Sets whether to keep whitespace tokens in the output.
+   * Creates a new `TokenizerBuilder` instance.
    *
-   * # Parameters
+   * # Returns
    *
-   * - `keep`: If `true`, whitespace tokens are preserved; if `false`, they are removed.
+   * A new `TokenizerBuilder` instance.
+   *
+   * # Errors
+   *
+   * Returns an error if the builder cannot be initialized.
    *
    * # Examples
    *
    * ```javascript
-   * builder.setKeepWhitespace(false); // Remove whitespace tokens
-   * // or
-   * builder.setKeepWhitespace(true); // Keep whitespace tokens
+   * const builder = new TokenizerBuilder();
    * ```
    */
-  setKeepWhitespace(keep: boolean): void;
+  constructor();
   /**
-   * Appends a character filter to the tokenization pipeline.
+   * Builds and returns a configured [`Tokenizer`] instance.
    *
-   * Character filters transform the input text before tokenization.
+   * This method consumes the builder and creates the final tokenizer with all
+   * configured settings.
    *
-   * # Parameters
+   * # Returns
    *
-   * - `name`: The name of the character filter (e.g., `"unicode_normalize"`).
-   * - `args`: A JavaScript object containing filter-specific arguments.
+   * A configured `Tokenizer` instance.
    *
    * # Errors
    *
-   * Returns an error if the arguments cannot be parsed.
+   * Returns an error if the tokenizer cannot be built with the current configuration.
    *
    * # Examples
    *
    * ```javascript
-   * builder.appendCharacterFilter("unicode_normalize", { "kind": "nfkc" });
+   * const builder = new TokenizerBuilder();
+   * builder.setDictionary("embedded://ipadic");
+   * const tokenizer = builder.build();
    * ```
    */
-  appendCharacterFilter(name: string, args: any): void;
+  build(): Tokenizer;
   /**
-   * Appends a token filter to the tokenization pipeline.
-   *
-   * Token filters transform or filter the tokens after tokenization.
+   * Sets the tokenization mode.
    *
    * # Parameters
    *
-   * - `name`: The name of the token filter (e.g., `"lowercase"`, `"japanese_number"`).
-   * - `args`: A JavaScript object containing filter-specific arguments.
+   * - `mode`: The tokenization mode. Valid values are:
+   *   - `"normal"`: Standard tokenization
+   *   - `"decompose"`: Decomposes compound words into their components
    *
    * # Errors
    *
-   * Returns an error if the arguments cannot be parsed.
+   * Returns an error if the mode string is invalid.
    *
    * # Examples
    *
    * ```javascript
-   * builder.appendTokenFilter("lowercase");
-   * builder.appendTokenFilter("japanese_number", { "tags": ["名詞,数"] });
+   * builder.setMode("normal");
+   * // or
+   * builder.setMode("decompose");
    * ```
    */
-  appendTokenFilter(name: string, args: any): void;
+  setMode(mode: string): void;
 }
+
+/**
+ * Gets the version of the lindera-wasm library.
+ *
+ * # Returns
+ *
+ * The version string of the library (e.g., "1.0.0").
+ *
+ * # Examples
+ *
+ * ```javascript
+ * import { getVersion } from 'lindera-wasm';
+ * console.log(getVersion()); // "1.0.0"
+ * ```
+ */
+export function getVersion(): string;
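The declaration file is only reordered in 1.3.0 (the `getVersion` function and its documentation now sit after the classes rather than at the top); the API surface itself is unchanged. For orientation, here is a minimal Node.js usage sketch against that documented API. It assumes the module is loaded by its published package name, and the Korean sample sentence is illustrative rather than taken from the package:

```javascript
// Minimal sketch of the documented API (assumed entry point: the published package name).
const { getVersion, TokenizerBuilder } = require('lindera-wasm-nodejs-ko-dic');

console.log(getVersion()); // prints the library version string

// Configure a tokenizer with the embedded Korean dictionary shipped by this package.
const builder = new TokenizerBuilder();
builder.setDictionary("embedded://ko-dic");
builder.setMode("normal"); // or "decompose" to split compound words
builder.appendCharacterFilter("unicode_normalize", { "kind": "nfkc" });

// build() consumes the builder and returns a Tokenizer.
const tokenizer = builder.build();
const tokens = tokenizer.tokenize("안녕하세요. 한국어 형태소 분석입니다."); // illustrative input
console.log(tokens); // array of token objects, e.g. [{ surface: "...", ... }, ...]
```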
package/lindera_wasm.js CHANGED
@@ -2,115 +2,12 @@
 let imports = {};
 imports['__wbindgen_placeholder__'] = module.exports;
 
-let cachedUint8ArrayMemory0 = null;
-
-function getUint8ArrayMemory0() {
-    if (cachedUint8ArrayMemory0 === null || cachedUint8ArrayMemory0.byteLength === 0) {
-        cachedUint8ArrayMemory0 = new Uint8Array(wasm.memory.buffer);
-    }
-    return cachedUint8ArrayMemory0;
-}
-
-let cachedTextDecoder = new TextDecoder('utf-8', { ignoreBOM: true, fatal: true });
-
-cachedTextDecoder.decode();
-
-function decodeText(ptr, len) {
-    return cachedTextDecoder.decode(getUint8ArrayMemory0().subarray(ptr, ptr + len));
-}
-
-function getStringFromWasm0(ptr, len) {
-    ptr = ptr >>> 0;
-    return decodeText(ptr, len);
-}
-
-let WASM_VECTOR_LEN = 0;
-
-const cachedTextEncoder = new TextEncoder();
-
-if (!('encodeInto' in cachedTextEncoder)) {
-    cachedTextEncoder.encodeInto = function (arg, view) {
-        const buf = cachedTextEncoder.encode(arg);
-        view.set(buf);
-        return {
-            read: arg.length,
-            written: buf.length
-        };
-    }
-}
-
-function passStringToWasm0(arg, malloc, realloc) {
-
-    if (realloc === undefined) {
-        const buf = cachedTextEncoder.encode(arg);
-        const ptr = malloc(buf.length, 1) >>> 0;
-        getUint8ArrayMemory0().subarray(ptr, ptr + buf.length).set(buf);
-        WASM_VECTOR_LEN = buf.length;
-        return ptr;
-    }
-
-    let len = arg.length;
-    let ptr = malloc(len, 1) >>> 0;
-
-    const mem = getUint8ArrayMemory0();
-
-    let offset = 0;
-
-    for (; offset < len; offset++) {
-        const code = arg.charCodeAt(offset);
-        if (code > 0x7F) break;
-        mem[ptr + offset] = code;
-    }
-
-    if (offset !== len) {
-        if (offset !== 0) {
-            arg = arg.slice(offset);
-        }
-        ptr = realloc(ptr, len, len = offset + arg.length * 3, 1) >>> 0;
-        const view = getUint8ArrayMemory0().subarray(ptr + offset, ptr + len);
-        const ret = cachedTextEncoder.encodeInto(arg, view);
-
-        offset += ret.written;
-        ptr = realloc(ptr, len, offset, 1) >>> 0;
-    }
-
-    WASM_VECTOR_LEN = offset;
-    return ptr;
-}
-
-let cachedDataViewMemory0 = null;
-
-function getDataViewMemory0() {
-    if (cachedDataViewMemory0 === null || cachedDataViewMemory0.buffer.detached === true || (cachedDataViewMemory0.buffer.detached === undefined && cachedDataViewMemory0.buffer !== wasm.memory.buffer)) {
-        cachedDataViewMemory0 = new DataView(wasm.memory.buffer);
-    }
-    return cachedDataViewMemory0;
-}
-
 function addToExternrefTable0(obj) {
     const idx = wasm.__externref_table_alloc();
-    wasm.__wbindgen_export_4.set(idx, obj);
+    wasm.__wbindgen_externrefs.set(idx, obj);
     return idx;
 }
 
-function handleError(f, args) {
-    try {
-        return f.apply(this, args);
-    } catch (e) {
-        const idx = addToExternrefTable0(e);
-        wasm.__wbindgen_exn_store(idx);
-    }
-}
-
-function getArrayU8FromWasm0(ptr, len) {
-    ptr = ptr >>> 0;
-    return getUint8ArrayMemory0().subarray(ptr / 1, ptr / 1 + len);
-}
-
-function isLikeNone(x) {
-    return x === undefined || x === null;
-}
-
 function debugString(val) {
     // primitive types
     const type = typeof val;
@@ -175,43 +72,118 @@ function debugString(val) {
     // TODO we could test for more things here, like `Set`s and `Map`s.
     return className;
 }
-/**
- * Gets the version of the lindera-wasm library.
- *
- * # Returns
- *
- * The version string of the library (e.g., "1.0.0").
- *
- * # Examples
- *
- * ```javascript
- * import { getVersion } from 'lindera-wasm';
- * console.log(getVersion()); // "1.0.0"
- * ```
- * @returns {string}
- */
-exports.getVersion = function() {
-    let deferred1_0;
-    let deferred1_1;
+
+function getArrayU8FromWasm0(ptr, len) {
+    ptr = ptr >>> 0;
+    return getUint8ArrayMemory0().subarray(ptr / 1, ptr / 1 + len);
+}
+
+let cachedDataViewMemory0 = null;
+function getDataViewMemory0() {
+    if (cachedDataViewMemory0 === null || cachedDataViewMemory0.buffer.detached === true || (cachedDataViewMemory0.buffer.detached === undefined && cachedDataViewMemory0.buffer !== wasm.memory.buffer)) {
+        cachedDataViewMemory0 = new DataView(wasm.memory.buffer);
+    }
+    return cachedDataViewMemory0;
+}
+
+function getStringFromWasm0(ptr, len) {
+    ptr = ptr >>> 0;
+    return decodeText(ptr, len);
+}
+
+let cachedUint8ArrayMemory0 = null;
+function getUint8ArrayMemory0() {
+    if (cachedUint8ArrayMemory0 === null || cachedUint8ArrayMemory0.byteLength === 0) {
+        cachedUint8ArrayMemory0 = new Uint8Array(wasm.memory.buffer);
+    }
+    return cachedUint8ArrayMemory0;
+}
+
+function handleError(f, args) {
     try {
-        const ret = wasm.getVersion();
-        deferred1_0 = ret[0];
-        deferred1_1 = ret[1];
-        return getStringFromWasm0(ret[0], ret[1]);
-    } finally {
-        wasm.__wbindgen_free(deferred1_0, deferred1_1, 1);
+        return f.apply(this, args);
+    } catch (e) {
+        const idx = addToExternrefTable0(e);
+        wasm.__wbindgen_exn_store(idx);
     }
-};
+}
+
+function isLikeNone(x) {
+    return x === undefined || x === null;
+}
+
+function passStringToWasm0(arg, malloc, realloc) {
+    if (realloc === undefined) {
+        const buf = cachedTextEncoder.encode(arg);
+        const ptr = malloc(buf.length, 1) >>> 0;
+        getUint8ArrayMemory0().subarray(ptr, ptr + buf.length).set(buf);
+        WASM_VECTOR_LEN = buf.length;
+        return ptr;
+    }
+
+    let len = arg.length;
+    let ptr = malloc(len, 1) >>> 0;
+
+    const mem = getUint8ArrayMemory0();
+
+    let offset = 0;
+
+    for (; offset < len; offset++) {
+        const code = arg.charCodeAt(offset);
+        if (code > 0x7F) break;
+        mem[ptr + offset] = code;
+    }
+    if (offset !== len) {
+        if (offset !== 0) {
+            arg = arg.slice(offset);
+        }
+        ptr = realloc(ptr, len, len = offset + arg.length * 3, 1) >>> 0;
+        const view = getUint8ArrayMemory0().subarray(ptr + offset, ptr + len);
+        const ret = cachedTextEncoder.encodeInto(arg, view);
+
+        offset += ret.written;
+        ptr = realloc(ptr, len, offset, 1) >>> 0;
+    }
+
+    WASM_VECTOR_LEN = offset;
+    return ptr;
+}
 
 function takeFromExternrefTable0(idx) {
-    const value = wasm.__wbindgen_export_4.get(idx);
+    const value = wasm.__wbindgen_externrefs.get(idx);
     wasm.__externref_table_dealloc(idx);
     return value;
 }
 
+let cachedTextDecoder = new TextDecoder('utf-8', { ignoreBOM: true, fatal: true });
+cachedTextDecoder.decode();
+function decodeText(ptr, len) {
+    return cachedTextDecoder.decode(getUint8ArrayMemory0().subarray(ptr, ptr + len));
+}
+
+const cachedTextEncoder = new TextEncoder();
+
+if (!('encodeInto' in cachedTextEncoder)) {
+    cachedTextEncoder.encodeInto = function (arg, view) {
+        const buf = cachedTextEncoder.encode(arg);
+        view.set(buf);
+        return {
+            read: arg.length,
+            written: buf.length
+        };
+    }
+}
+
+let WASM_VECTOR_LEN = 0;
+
 const TokenizerFinalization = (typeof FinalizationRegistry === 'undefined')
     ? { register: () => {}, unregister: () => {} }
     : new FinalizationRegistry(ptr => wasm.__wbg_tokenizer_free(ptr >>> 0, 1));
+
+const TokenizerBuilderFinalization = (typeof FinalizationRegistry === 'undefined')
+    ? { register: () => {}, unregister: () => {} }
+    : new FinalizationRegistry(ptr => wasm.__wbg_tokenizerbuilder_free(ptr >>> 0, 1));
+
 /**
  * A tokenizer for morphological analysis.
  *
@@ -235,7 +207,6 @@ const TokenizerFinalization = (typeof FinalizationRegistry === 'undefined')
  * ```
  */
 class Tokenizer {
-
     static __wrap(ptr) {
         ptr = ptr >>> 0;
         const obj = Object.create(Tokenizer.prototype);
@@ -243,14 +214,12 @@ class Tokenizer {
         TokenizerFinalization.register(obj, obj.__wbg_ptr, obj);
         return obj;
     }
-
     __destroy_into_raw() {
         const ptr = this.__wbg_ptr;
         this.__wbg_ptr = 0;
         TokenizerFinalization.unregister(this);
         return ptr;
     }
-
     free() {
         const ptr = this.__destroy_into_raw();
         wasm.__wbg_tokenizer_free(ptr, 0);
@@ -299,12 +268,8 @@ class Tokenizer {
     }
 }
 if (Symbol.dispose) Tokenizer.prototype[Symbol.dispose] = Tokenizer.prototype.free;
-
 exports.Tokenizer = Tokenizer;
 
-const TokenizerBuilderFinalization = (typeof FinalizationRegistry === 'undefined')
-    ? { register: () => {}, unregister: () => {} }
-    : new FinalizationRegistry(ptr => wasm.__wbg_tokenizerbuilder_free(ptr >>> 0, 1));
 /**
  * Builder for creating a [`Tokenizer`] instance.
  *
@@ -326,127 +291,91 @@ const TokenizerBuilderFinalization = (typeof FinalizationRegistry === 'undefined
  * ```
  */
 class TokenizerBuilder {
-
     __destroy_into_raw() {
         const ptr = this.__wbg_ptr;
         this.__wbg_ptr = 0;
         TokenizerBuilderFinalization.unregister(this);
         return ptr;
     }
-
     free() {
         const ptr = this.__destroy_into_raw();
         wasm.__wbg_tokenizerbuilder_free(ptr, 0);
     }
     /**
-     * Creates a new `TokenizerBuilder` instance.
-     *
-     * # Returns
-     *
-     * A new `TokenizerBuilder` instance.
-     *
-     * # Errors
-     *
-     * Returns an error if the builder cannot be initialized.
-     *
-     * # Examples
-     *
-     * ```javascript
-     * const builder = new TokenizerBuilder();
-     * ```
-     */
-    constructor() {
-        const ret = wasm.tokenizerbuilder_new();
-        if (ret[2]) {
-            throw takeFromExternrefTable0(ret[1]);
-        }
-        this.__wbg_ptr = ret[0] >>> 0;
-        TokenizerBuilderFinalization.register(this, this.__wbg_ptr, this);
-        return this;
-    }
-    /**
-     * Builds and returns a configured [`Tokenizer`] instance.
-     *
-     * This method consumes the builder and creates the final tokenizer with all
-     * configured settings.
-     *
-     * # Returns
-     *
-     * A configured `Tokenizer` instance.
+     * Sets the dictionary to use for tokenization.
      *
-     * # Errors
+     * # Parameters
      *
-     * Returns an error if the tokenizer cannot be built with the current configuration.
+     * - `uri`: The dictionary URI. Valid embedded dictionaries are:
+     *   - `"embedded://ipadic"`: Japanese IPADIC dictionary
+     *   - `"embedded://unidic"`: Japanese UniDic dictionary
+     *   - `"embedded://ko-dic"`: Korean ko-dic dictionary
+     *   - `"embedded://cc-cedict"`: Chinese CC-CEDICT dictionary
      *
      * # Examples
      *
      * ```javascript
-     * const builder = new TokenizerBuilder();
      * builder.setDictionary("embedded://ipadic");
-     * const tokenizer = builder.build();
      * ```
-     * @returns {Tokenizer}
+     * @param {string} uri
      */
-    build() {
-        const ptr = this.__destroy_into_raw();
-        const ret = wasm.tokenizerbuilder_build(ptr);
-        if (ret[2]) {
-            throw takeFromExternrefTable0(ret[1]);
+    setDictionary(uri) {
+        const ptr0 = passStringToWasm0(uri, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);
+        const len0 = WASM_VECTOR_LEN;
+        const ret = wasm.tokenizerbuilder_setDictionary(this.__wbg_ptr, ptr0, len0);
+        if (ret[1]) {
+            throw takeFromExternrefTable0(ret[0]);
         }
-        return Tokenizer.__wrap(ret[0]);
     }
     /**
-     * Sets the tokenization mode.
+     * Appends a token filter to the tokenization pipeline.
+     *
+     * Token filters transform or filter the tokens after tokenization.
      *
      * # Parameters
      *
-     * - `mode`: The tokenization mode. Valid values are:
-     *   - `"normal"`: Standard tokenization
-     *   - `"decompose"`: Decomposes compound words into their components
+     * - `name`: The name of the token filter (e.g., `"lowercase"`, `"japanese_number"`).
+     * - `args`: A JavaScript object containing filter-specific arguments.
      *
      * # Errors
      *
-     * Returns an error if the mode string is invalid.
+     * Returns an error if the arguments cannot be parsed.
      *
      * # Examples
     *
     * ```javascript
-     * builder.setMode("normal");
-     * // or
-     * builder.setMode("decompose");
+     * builder.appendTokenFilter("lowercase");
+     * builder.appendTokenFilter("japanese_number", { "tags": ["名詞,数"] });
     * ```
-     * @param {string} mode
+     * @param {string} name
+     * @param {any} args
     */
-    setMode(mode) {
-        const ptr0 = passStringToWasm0(mode, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);
+    appendTokenFilter(name, args) {
+        const ptr0 = passStringToWasm0(name, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);
         const len0 = WASM_VECTOR_LEN;
-        const ret = wasm.tokenizerbuilder_setMode(this.__wbg_ptr, ptr0, len0);
+        const ret = wasm.tokenizerbuilder_appendTokenFilter(this.__wbg_ptr, ptr0, len0, args);
         if (ret[1]) {
             throw takeFromExternrefTable0(ret[0]);
         }
     }
     /**
-     * Sets the dictionary to use for tokenization.
+     * Sets whether to keep whitespace tokens in the output.
     *
     * # Parameters
     *
-     * - `uri`: The dictionary URI. Valid embedded dictionaries are:
-     *   - `"embedded://ipadic"`: Japanese IPADIC dictionary
-     *   - `"embedded://unidic"`: Japanese UniDic dictionary
-     *   - `"embedded://ko-dic"`: Korean ko-dic dictionary
-     *   - `"embedded://cc-cedict"`: Chinese CC-CEDICT dictionary
+     * - `keep`: If `true`, whitespace tokens are preserved; if `false`, they are removed.
     *
     * # Examples
     *
     * ```javascript
-     * builder.setDictionary("embedded://ipadic");
-     * ```
-     * @param {string} uri
-     */
-    setDictionary(uri) {
-        const ptr0 = passStringToWasm0(uri, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);
-        const len0 = WASM_VECTOR_LEN;
-        const ret = wasm.tokenizerbuilder_setDictionary(this.__wbg_ptr, ptr0, len0);
+     * builder.setKeepWhitespace(false); // Remove whitespace tokens
+     * // or
+     * builder.setKeepWhitespace(true); // Keep whitespace tokens
+     * ```
+     * @param {boolean} keep
+     */
+    setKeepWhitespace(keep) {
+        const ret = wasm.tokenizerbuilder_setKeepWhitespace(this.__wbg_ptr, keep);
         if (ret[1]) {
             throw takeFromExternrefTable0(ret[0]);
         }
@@ -476,28 +405,6 @@ class TokenizerBuilder {
            throw takeFromExternrefTable0(ret[0]);
        }
    }
-    /**
-     * Sets whether to keep whitespace tokens in the output.
-     *
-     * # Parameters
-     *
-     * - `keep`: If `true`, whitespace tokens are preserved; if `false`, they are removed.
-     *
-     * # Examples
-     *
-     * ```javascript
-     * builder.setKeepWhitespace(false); // Remove whitespace tokens
-     * // or
-     * builder.setKeepWhitespace(true); // Keep whitespace tokens
-     * ```
-     * @param {boolean} keep
-     */
-    setKeepWhitespace(keep) {
-        const ret = wasm.tokenizerbuilder_setKeepWhitespace(this.__wbg_ptr, keep);
-        if (ret[1]) {
-            throw takeFromExternrefTable0(ret[0]);
-        }
-    }
     /**
      * Appends a character filter to the tokenization pipeline.
      *
@@ -529,42 +436,126 @@ class TokenizerBuilder {
        }
    }
    /**
-     * Appends a token filter to the tokenization pipeline.
+     * Creates a new `TokenizerBuilder` instance.
     *
-     * Token filters transform or filter the tokens after tokenization.
+     * # Returns
+     *
+     * A new `TokenizerBuilder` instance.
+     *
+     * # Errors
+     *
+     * Returns an error if the builder cannot be initialized.
+     *
+     * # Examples
+     *
+     * ```javascript
+     * const builder = new TokenizerBuilder();
+     * ```
+     */
+    constructor() {
+        const ret = wasm.tokenizerbuilder_new();
+        if (ret[2]) {
+            throw takeFromExternrefTable0(ret[1]);
+        }
+        this.__wbg_ptr = ret[0] >>> 0;
+        TokenizerBuilderFinalization.register(this, this.__wbg_ptr, this);
+        return this;
+    }
+    /**
+     * Builds and returns a configured [`Tokenizer`] instance.
+     *
+     * This method consumes the builder and creates the final tokenizer with all
+     * configured settings.
+     *
+     * # Returns
+     *
+     * A configured `Tokenizer` instance.
+     *
+     * # Errors
+     *
+     * Returns an error if the tokenizer cannot be built with the current configuration.
+     *
+     * # Examples
+     *
+     * ```javascript
+     * const builder = new TokenizerBuilder();
+     * builder.setDictionary("embedded://ipadic");
+     * const tokenizer = builder.build();
+     * ```
+     * @returns {Tokenizer}
+     */
+    build() {
+        const ptr = this.__destroy_into_raw();
+        const ret = wasm.tokenizerbuilder_build(ptr);
+        if (ret[2]) {
+            throw takeFromExternrefTable0(ret[1]);
+        }
+        return Tokenizer.__wrap(ret[0]);
+    }
+    /**
+     * Sets the tokenization mode.
     *
     * # Parameters
     *
-     * - `name`: The name of the token filter (e.g., `"lowercase"`, `"japanese_number"`).
-     * - `args`: A JavaScript object containing filter-specific arguments.
+     * - `mode`: The tokenization mode. Valid values are:
+     *   - `"normal"`: Standard tokenization
+     *   - `"decompose"`: Decomposes compound words into their components
     *
     * # Errors
     *
-     * Returns an error if the arguments cannot be parsed.
+     * Returns an error if the mode string is invalid.
     *
     * # Examples
     *
     * ```javascript
-     * builder.appendTokenFilter("lowercase");
-     * builder.appendTokenFilter("japanese_number", { "tags": ["名詞,数"] });
+     * builder.setMode("normal");
+     * // or
+     * builder.setMode("decompose");
     * ```
-     * @param {string} name
-     * @param {any} args
+     * @param {string} mode
     */
-    appendTokenFilter(name, args) {
-        const ptr0 = passStringToWasm0(name, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);
+    setMode(mode) {
+        const ptr0 = passStringToWasm0(mode, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);
        const len0 = WASM_VECTOR_LEN;
-        const ret = wasm.tokenizerbuilder_appendTokenFilter(this.__wbg_ptr, ptr0, len0, args);
+        const ret = wasm.tokenizerbuilder_setMode(this.__wbg_ptr, ptr0, len0);
        if (ret[1]) {
            throw takeFromExternrefTable0(ret[0]);
        }
    }
 }
 if (Symbol.dispose) TokenizerBuilder.prototype[Symbol.dispose] = TokenizerBuilder.prototype.free;
-
 exports.TokenizerBuilder = TokenizerBuilder;
 
-exports.__wbg_Error_e17e777aac105295 = function(arg0, arg1) {
+/**
+ * Gets the version of the lindera-wasm library.
+ *
+ * # Returns
+ *
+ * The version string of the library (e.g., "1.0.0").
+ *
+ * # Examples
+ *
+ * ```javascript
+ * import { getVersion } from 'lindera-wasm';
+ * console.log(getVersion()); // "1.0.0"
+ * ```
+ * @returns {string}
+ */
+function getVersion() {
+    let deferred1_0;
+    let deferred1_1;
+    try {
+        const ret = wasm.getVersion();
+        deferred1_0 = ret[0];
+        deferred1_1 = ret[1];
+        return getStringFromWasm0(ret[0], ret[1]);
+    } finally {
+        wasm.__wbindgen_free(deferred1_0, deferred1_1, 1);
+    }
+}
+exports.getVersion = getVersion;
+
+exports.__wbg_Error_52673b7de5a0ca89 = function(arg0, arg1) {
     const ret = Error(getStringFromWasm0(arg0, arg1));
     return ret;
 };
@@ -577,32 +568,104 @@ exports.__wbg_String_8f0eb39a4a4c2f66 = function(arg0, arg1) {
     getDataViewMemory0().setInt32(arg0 + 4 * 0, ptr1, true);
 };
 
-exports.__wbg_call_13410aac570ffff7 = function() { return handleError(function (arg0, arg1) {
+exports.__wbg___wbindgen_bigint_get_as_i64_6e32f5e6aff02e1d = function(arg0, arg1) {
+    const v = arg1;
+    const ret = typeof(v) === 'bigint' ? v : undefined;
+    getDataViewMemory0().setBigInt64(arg0 + 8 * 1, isLikeNone(ret) ? BigInt(0) : ret, true);
+    getDataViewMemory0().setInt32(arg0 + 4 * 0, !isLikeNone(ret), true);
+};
+
+exports.__wbg___wbindgen_boolean_get_dea25b33882b895b = function(arg0) {
+    const v = arg0;
+    const ret = typeof(v) === 'boolean' ? v : undefined;
+    return isLikeNone(ret) ? 0xFFFFFF : ret ? 1 : 0;
+};
+
+exports.__wbg___wbindgen_debug_string_adfb662ae34724b6 = function(arg0, arg1) {
+    const ret = debugString(arg1);
+    const ptr1 = passStringToWasm0(ret, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);
+    const len1 = WASM_VECTOR_LEN;
+    getDataViewMemory0().setInt32(arg0 + 4 * 1, len1, true);
+    getDataViewMemory0().setInt32(arg0 + 4 * 0, ptr1, true);
+};
+
+exports.__wbg___wbindgen_in_0d3e1e8f0c669317 = function(arg0, arg1) {
+    const ret = arg0 in arg1;
+    return ret;
+};
+
+exports.__wbg___wbindgen_is_bigint_0e1a2e3f55cfae27 = function(arg0) {
+    const ret = typeof(arg0) === 'bigint';
+    return ret;
+};
+
+exports.__wbg___wbindgen_is_function_8d400b8b1af978cd = function(arg0) {
+    const ret = typeof(arg0) === 'function';
+    return ret;
+};
+
+exports.__wbg___wbindgen_is_object_ce774f3490692386 = function(arg0) {
+    const val = arg0;
+    const ret = typeof(val) === 'object' && val !== null;
+    return ret;
+};
+
+exports.__wbg___wbindgen_jsval_eq_b6101cc9cef1fe36 = function(arg0, arg1) {
+    const ret = arg0 === arg1;
+    return ret;
+};
+
+exports.__wbg___wbindgen_jsval_loose_eq_766057600fdd1b0d = function(arg0, arg1) {
+    const ret = arg0 == arg1;
+    return ret;
+};
+
+exports.__wbg___wbindgen_number_get_9619185a74197f95 = function(arg0, arg1) {
+    const obj = arg1;
+    const ret = typeof(obj) === 'number' ? obj : undefined;
+    getDataViewMemory0().setFloat64(arg0 + 8 * 1, isLikeNone(ret) ? 0 : ret, true);
+    getDataViewMemory0().setInt32(arg0 + 4 * 0, !isLikeNone(ret), true);
+};
+
+exports.__wbg___wbindgen_string_get_a2a31e16edf96e42 = function(arg0, arg1) {
+    const obj = arg1;
+    const ret = typeof(obj) === 'string' ? obj : undefined;
+    var ptr1 = isLikeNone(ret) ? 0 : passStringToWasm0(ret, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);
+    var len1 = WASM_VECTOR_LEN;
+    getDataViewMemory0().setInt32(arg0 + 4 * 1, len1, true);
+    getDataViewMemory0().setInt32(arg0 + 4 * 0, ptr1, true);
+};
+
+exports.__wbg___wbindgen_throw_dd24417ed36fc46e = function(arg0, arg1) {
+    throw new Error(getStringFromWasm0(arg0, arg1));
+};
+
+exports.__wbg_call_abb4ff46ce38be40 = function() { return handleError(function (arg0, arg1) {
     const ret = arg0.call(arg1);
     return ret;
 }, arguments) };
 
-exports.__wbg_done_75ed0ee6dd243d9d = function(arg0) {
+exports.__wbg_done_62ea16af4ce34b24 = function(arg0) {
     const ret = arg0.done;
     return ret;
 };
 
-exports.__wbg_entries_2be2f15bd5554996 = function(arg0) {
+exports.__wbg_entries_83c79938054e065f = function(arg0) {
     const ret = Object.entries(arg0);
     return ret;
 };
 
-exports.__wbg_get_0da715ceaecea5c8 = function(arg0, arg1) {
+exports.__wbg_get_6b7bd52aca3f9671 = function(arg0, arg1) {
     const ret = arg0[arg1 >>> 0];
     return ret;
 };
 
-exports.__wbg_get_458e874b43b18b25 = function() { return handleError(function (arg0, arg1) {
+exports.__wbg_get_af9dab7e9603ea93 = function() { return handleError(function (arg0, arg1) {
     const ret = Reflect.get(arg0, arg1);
     return ret;
 }, arguments) };
 
-exports.__wbg_instanceof_ArrayBuffer_67f3012529f6a2dd = function(arg0) {
+exports.__wbg_instanceof_ArrayBuffer_f3320d2419cd0355 = function(arg0) {
     let result;
     try {
         result = arg0 instanceof ArrayBuffer;
@@ -613,7 +676,7 @@ exports.__wbg_instanceof_ArrayBuffer_67f3012529f6a2dd = function(arg0) {
     return ret;
 };
 
-exports.__wbg_instanceof_Map_ebb01a5b6b5ffd0b = function(arg0) {
+exports.__wbg_instanceof_Map_084be8da74364158 = function(arg0) {
     let result;
     try {
         result = arg0 instanceof Map;
@@ -624,7 +687,7 @@ exports.__wbg_instanceof_Map_ebb01a5b6b5ffd0b = function(arg0) {
     return ret;
 };
 
-exports.__wbg_instanceof_Uint8Array_9a8378d955933db7 = function(arg0) {
+exports.__wbg_instanceof_Uint8Array_da54ccc9d3e09434 = function(arg0) {
     let result;
     try {
         result = arg0 instanceof Uint8Array;
@@ -635,147 +698,75 @@ exports.__wbg_instanceof_Uint8Array_9a8378d955933db7 = function(arg0) {
     return ret;
 };
 
-exports.__wbg_isArray_030cce220591fb41 = function(arg0) {
+exports.__wbg_isArray_51fd9e6422c0a395 = function(arg0) {
     const ret = Array.isArray(arg0);
     return ret;
 };
 
-exports.__wbg_isSafeInteger_1c0d1af5542e102a = function(arg0) {
+exports.__wbg_isSafeInteger_ae7d3f054d55fa16 = function(arg0) {
     const ret = Number.isSafeInteger(arg0);
     return ret;
 };
 
-exports.__wbg_iterator_f370b34483c71a1c = function() {
+exports.__wbg_iterator_27b7c8b35ab3e86b = function() {
     const ret = Symbol.iterator;
     return ret;
 };
 
-exports.__wbg_length_186546c51cd61acd = function(arg0) {
+exports.__wbg_length_22ac23eaec9d8053 = function(arg0) {
     const ret = arg0.length;
     return ret;
 };
 
-exports.__wbg_length_6bb7e81f9d7713e4 = function(arg0) {
+exports.__wbg_length_d45040a40c570362 = function(arg0) {
     const ret = arg0.length;
     return ret;
 };
 
-exports.__wbg_new_19c25a3f2fa63a02 = function() {
+exports.__wbg_new_1ba21ce319a06297 = function() {
     const ret = new Object();
     return ret;
 };
 
-exports.__wbg_new_1f3a344cf3123716 = function() {
+exports.__wbg_new_25f239778d6112b9 = function() {
     const ret = new Array();
     return ret;
 };
 
-exports.__wbg_new_638ebfaedbf32a5e = function(arg0) {
+exports.__wbg_new_6421f6084cc5bc5a = function(arg0) {
     const ret = new Uint8Array(arg0);
     return ret;
 };
 
-exports.__wbg_next_5b3530e612fde77d = function(arg0) {
+exports.__wbg_next_138a17bbf04e926c = function(arg0) {
     const ret = arg0.next;
     return ret;
 };
 
-exports.__wbg_next_692e82279131b03c = function() { return handleError(function (arg0) {
+exports.__wbg_next_3cfe5c0fe2a4cc53 = function() { return handleError(function (arg0) {
     const ret = arg0.next();
     return ret;
 }, arguments) };
 
-exports.__wbg_prototypesetcall_3d4a26c1ed734349 = function(arg0, arg1, arg2) {
+exports.__wbg_prototypesetcall_dfe9b766cdc1f1fd = function(arg0, arg1, arg2) {
     Uint8Array.prototype.set.call(getArrayU8FromWasm0(arg0, arg1), arg2);
 };
 
-exports.__wbg_push_330b2eb93e4e1212 = function(arg0, arg1) {
+exports.__wbg_push_7d9be8f38fc13975 = function(arg0, arg1) {
     const ret = arg0.push(arg1);
     return ret;
 };
 
-exports.__wbg_set_453345bcda80b89a = function() { return handleError(function (arg0, arg1, arg2) {
+exports.__wbg_set_781438a03c0c3c81 = function() { return handleError(function (arg0, arg1, arg2) {
     const ret = Reflect.set(arg0, arg1, arg2);
     return ret;
 }, arguments) };
 
-exports.__wbg_value_dd9372230531eade = function(arg0) {
+exports.__wbg_value_57b7b035e117f7ee = function(arg0) {
     const ret = arg0.value;
     return ret;
 };
 
-exports.__wbg_wbindgenbigintgetasi64_ac743ece6ab9bba1 = function(arg0, arg1) {
-    const v = arg1;
-    const ret = typeof(v) === 'bigint' ? v : undefined;
-    getDataViewMemory0().setBigInt64(arg0 + 8 * 1, isLikeNone(ret) ? BigInt(0) : ret, true);
-    getDataViewMemory0().setInt32(arg0 + 4 * 0, !isLikeNone(ret), true);
-};
-
-exports.__wbg_wbindgenbooleanget_3fe6f642c7d97746 = function(arg0) {
-    const v = arg0;
-    const ret = typeof(v) === 'boolean' ? v : undefined;
-    return isLikeNone(ret) ? 0xFFFFFF : ret ? 1 : 0;
-};
-
-exports.__wbg_wbindgendebugstring_99ef257a3ddda34d = function(arg0, arg1) {
-    const ret = debugString(arg1);
-    const ptr1 = passStringToWasm0(ret, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);
-    const len1 = WASM_VECTOR_LEN;
-    getDataViewMemory0().setInt32(arg0 + 4 * 1, len1, true);
-    getDataViewMemory0().setInt32(arg0 + 4 * 0, ptr1, true);
-};
-
-exports.__wbg_wbindgenin_d7a1ee10933d2d55 = function(arg0, arg1) {
-    const ret = arg0 in arg1;
-    return ret;
-};
-
-exports.__wbg_wbindgenisbigint_ecb90cc08a5a9154 = function(arg0) {
-    const ret = typeof(arg0) === 'bigint';
-    return ret;
-};
-
-exports.__wbg_wbindgenisfunction_8cee7dce3725ae74 = function(arg0) {
-    const ret = typeof(arg0) === 'function';
-    return ret;
-};
-
-exports.__wbg_wbindgenisobject_307a53c6bd97fbf8 = function(arg0) {
-    const val = arg0;
-    const ret = typeof(val) === 'object' && val !== null;
-    return ret;
-};
-
-exports.__wbg_wbindgenjsvaleq_e6f2ad59ccae1b58 = function(arg0, arg1) {
-    const ret = arg0 === arg1;
-    return ret;
-};
-
-exports.__wbg_wbindgenjsvallooseeq_9bec8c9be826bed1 = function(arg0, arg1) {
-    const ret = arg0 == arg1;
-    return ret;
-};
-
-exports.__wbg_wbindgennumberget_f74b4c7525ac05cb = function(arg0, arg1) {
-    const obj = arg1;
-    const ret = typeof(obj) === 'number' ? obj : undefined;
-    getDataViewMemory0().setFloat64(arg0 + 8 * 1, isLikeNone(ret) ? 0 : ret, true);
-    getDataViewMemory0().setInt32(arg0 + 4 * 0, !isLikeNone(ret), true);
-};
-
-exports.__wbg_wbindgenstringget_0f16a6ddddef376f = function(arg0, arg1) {
-    const obj = arg1;
-    const ret = typeof(obj) === 'string' ? obj : undefined;
-    var ptr1 = isLikeNone(ret) ? 0 : passStringToWasm0(ret, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);
-    var len1 = WASM_VECTOR_LEN;
-    getDataViewMemory0().setInt32(arg0 + 4 * 1, len1, true);
-    getDataViewMemory0().setInt32(arg0 + 4 * 0, ptr1, true);
-};
-
-exports.__wbg_wbindgenthrow_451ec1a8469d7eb6 = function(arg0, arg1) {
-    throw new Error(getStringFromWasm0(arg0, arg1));
-};
-
 exports.__wbindgen_cast_2241b6af4c4b2941 = function(arg0, arg1) {
     // Cast intrinsic for `Ref(String) -> Externref`.
     const ret = getStringFromWasm0(arg0, arg1);
@@ -801,14 +792,13 @@ exports.__wbindgen_cast_d6cd19b81560fd6e = function(arg0) {
 };
 
 exports.__wbindgen_init_externref_table = function() {
-    const table = wasm.__wbindgen_export_4;
+    const table = wasm.__wbindgen_externrefs;
     const offset = table.grow(4);
     table.set(0, undefined);
     table.set(offset + 0, undefined);
     table.set(offset + 1, null);
     table.set(offset + 2, true);
     table.set(offset + 3, false);
-    ;
 };
 
 const wasmPath = `${__dirname}/lindera_wasm_bg.wasm`;
@@ -817,4 +807,3 @@ const wasmModule = new WebAssembly.Module(wasmBytes);
 const wasm = exports.__wasm = new WebAssembly.Instance(wasmModule, imports).exports;
 
 wasm.__wbindgen_start();
-
Binary file changed (diff not shown)
package/package.json CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "lindera-wasm-nodejs-ko-dic",
   "description": "Lindera WASM with Korean dictionary (ko-dic) (nodejs target)",
-  "version": "1.2.1",
+  "version": "1.3.0",
   "license": "MIT",
   "repository": {
     "type": "git",