llonebot-dist 6.6.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dist/default_config.json +68 -0
- dist/llonebot.js +48348 -0
- dist/llonebot.js.map +1 -0
- dist/node_modules/@borewit/text-codec/LICENSE.txt +9 -0
- dist/node_modules/@borewit/text-codec/README.md +76 -0
- dist/node_modules/@borewit/text-codec/lib/index.d.ts +8 -0
- dist/node_modules/@borewit/text-codec/lib/index.js +161 -0
- dist/node_modules/@borewit/text-codec/package.json +68 -0
- dist/node_modules/@minatojs/sql.js/LICENSE +44 -0
- dist/node_modules/@minatojs/sql.js/README.md +357 -0
- dist/node_modules/@minatojs/sql.js/dist/sql-wasm.d.ts +316 -0
- dist/node_modules/@minatojs/sql.js/dist/sql-wasm.js +225 -0
- dist/node_modules/@minatojs/sql.js/dist/sql-wasm.wasm +0 -0
- dist/node_modules/@minatojs/sql.js/package.json +58 -0
- dist/node_modules/@tokenizer/inflate/LICENSE +15 -0
- dist/node_modules/@tokenizer/inflate/README.md +114 -0
- dist/node_modules/@tokenizer/inflate/lib/GzipHandler.d.ts +6 -0
- dist/node_modules/@tokenizer/inflate/lib/GzipHandler.js +19 -0
- dist/node_modules/@tokenizer/inflate/lib/ZipHandler.d.ts +26 -0
- dist/node_modules/@tokenizer/inflate/lib/ZipHandler.js +233 -0
- dist/node_modules/@tokenizer/inflate/lib/ZipToken.d.ts +94 -0
- dist/node_modules/@tokenizer/inflate/lib/ZipToken.js +117 -0
- dist/node_modules/@tokenizer/inflate/lib/index.d.ts +3 -0
- dist/node_modules/@tokenizer/inflate/lib/index.js +2 -0
- dist/node_modules/@tokenizer/inflate/package.json +76 -0
- dist/node_modules/@tokenizer/token/README.md +19 -0
- dist/node_modules/@tokenizer/token/index.d.ts +30 -0
- dist/node_modules/@tokenizer/token/package.json +33 -0
- dist/node_modules/debug/LICENSE +20 -0
- dist/node_modules/debug/README.md +481 -0
- dist/node_modules/debug/package.json +64 -0
- dist/node_modules/debug/src/browser.js +272 -0
- dist/node_modules/debug/src/common.js +292 -0
- dist/node_modules/debug/src/index.js +10 -0
- dist/node_modules/debug/src/node.js +263 -0
- dist/node_modules/file-type/core.d.ts +253 -0
- dist/node_modules/file-type/core.js +1899 -0
- dist/node_modules/file-type/index.d.ts +98 -0
- dist/node_modules/file-type/index.js +86 -0
- dist/node_modules/file-type/license +9 -0
- dist/node_modules/file-type/package.json +288 -0
- dist/node_modules/file-type/readme.md +674 -0
- dist/node_modules/file-type/supported.js +356 -0
- dist/node_modules/file-type/util.js +60 -0
- dist/node_modules/ieee754/LICENSE +11 -0
- dist/node_modules/ieee754/README.md +51 -0
- dist/node_modules/ieee754/index.d.ts +10 -0
- dist/node_modules/ieee754/index.js +85 -0
- dist/node_modules/ieee754/package.json +52 -0
- dist/node_modules/ms/index.js +162 -0
- dist/node_modules/ms/license.md +21 -0
- dist/node_modules/ms/package.json +38 -0
- dist/node_modules/ms/readme.md +59 -0
- dist/node_modules/silk-wasm/LICENSE +21 -0
- dist/node_modules/silk-wasm/README.md +85 -0
- dist/node_modules/silk-wasm/lib/index.cjs +16 -0
- dist/node_modules/silk-wasm/lib/index.d.ts +70 -0
- dist/node_modules/silk-wasm/lib/index.mjs +16 -0
- dist/node_modules/silk-wasm/lib/silk.wasm +0 -0
- dist/node_modules/silk-wasm/lib/utils.d.ts +4 -0
- dist/node_modules/silk-wasm/package.json +39 -0
- dist/node_modules/strtok3/LICENSE.txt +21 -0
- dist/node_modules/strtok3/README.md +399 -0
- dist/node_modules/strtok3/lib/AbstractTokenizer.d.ts +76 -0
- dist/node_modules/strtok3/lib/AbstractTokenizer.js +108 -0
- dist/node_modules/strtok3/lib/BlobTokenizer.d.ts +29 -0
- dist/node_modules/strtok3/lib/BlobTokenizer.js +53 -0
- dist/node_modules/strtok3/lib/BufferTokenizer.d.ts +29 -0
- dist/node_modules/strtok3/lib/BufferTokenizer.js +52 -0
- dist/node_modules/strtok3/lib/FileTokenizer.d.ts +37 -0
- dist/node_modules/strtok3/lib/FileTokenizer.js +61 -0
- dist/node_modules/strtok3/lib/ReadStreamTokenizer.d.ts +31 -0
- dist/node_modules/strtok3/lib/ReadStreamTokenizer.js +102 -0
- dist/node_modules/strtok3/lib/core.d.ts +40 -0
- dist/node_modules/strtok3/lib/core.js +62 -0
- dist/node_modules/strtok3/lib/index.d.ts +16 -0
- dist/node_modules/strtok3/lib/index.js +22 -0
- dist/node_modules/strtok3/lib/stream/AbstractStreamReader.d.ts +54 -0
- dist/node_modules/strtok3/lib/stream/AbstractStreamReader.js +71 -0
- dist/node_modules/strtok3/lib/stream/Deferred.d.ts +6 -0
- dist/node_modules/strtok3/lib/stream/Deferred.js +10 -0
- dist/node_modules/strtok3/lib/stream/Errors.d.ts +10 -0
- dist/node_modules/strtok3/lib/stream/Errors.js +16 -0
- dist/node_modules/strtok3/lib/stream/StreamReader.d.ts +29 -0
- dist/node_modules/strtok3/lib/stream/StreamReader.js +83 -0
- dist/node_modules/strtok3/lib/stream/WebStreamByobReader.d.ts +14 -0
- dist/node_modules/strtok3/lib/stream/WebStreamByobReader.js +27 -0
- dist/node_modules/strtok3/lib/stream/WebStreamDefaultReader.d.ts +19 -0
- dist/node_modules/strtok3/lib/stream/WebStreamDefaultReader.js +62 -0
- dist/node_modules/strtok3/lib/stream/WebStreamReader.d.ts +14 -0
- dist/node_modules/strtok3/lib/stream/WebStreamReader.js +13 -0
- dist/node_modules/strtok3/lib/stream/WebStreamReaderFactory.d.ts +5 -0
- dist/node_modules/strtok3/lib/stream/WebStreamReaderFactory.js +19 -0
- dist/node_modules/strtok3/lib/stream/index.d.ts +6 -0
- dist/node_modules/strtok3/lib/stream/index.js +5 -0
- dist/node_modules/strtok3/lib/types.d.ts +139 -0
- dist/node_modules/strtok3/lib/types.js +1 -0
- dist/node_modules/strtok3/package.json +94 -0
- dist/node_modules/token-types/LICENSE.txt +9 -0
- dist/node_modules/token-types/README.md +120 -0
- dist/node_modules/token-types/lib/index.d.ts +135 -0
- dist/node_modules/token-types/lib/index.js +401 -0
- dist/node_modules/token-types/package.json +81 -0
- dist/node_modules/uint8array-extras/index.d.ts +312 -0
- dist/node_modules/uint8array-extras/index.js +321 -0
- dist/node_modules/uint8array-extras/license +9 -0
- dist/node_modules/uint8array-extras/package.json +54 -0
- dist/node_modules/uint8array-extras/readme.md +301 -0
- dist/node_modules/ws/LICENSE +20 -0
- dist/node_modules/ws/README.md +548 -0
- dist/node_modules/ws/browser.js +8 -0
- dist/node_modules/ws/index.js +13 -0
- dist/node_modules/ws/lib/buffer-util.js +131 -0
- dist/node_modules/ws/lib/constants.js +18 -0
- dist/node_modules/ws/lib/event-target.js +292 -0
- dist/node_modules/ws/lib/extension.js +203 -0
- dist/node_modules/ws/lib/limiter.js +55 -0
- dist/node_modules/ws/lib/permessage-deflate.js +528 -0
- dist/node_modules/ws/lib/receiver.js +706 -0
- dist/node_modules/ws/lib/sender.js +602 -0
- dist/node_modules/ws/lib/stream.js +161 -0
- dist/node_modules/ws/lib/subprotocol.js +62 -0
- dist/node_modules/ws/lib/validation.js +152 -0
- dist/node_modules/ws/lib/websocket-server.js +550 -0
- dist/node_modules/ws/lib/websocket.js +1388 -0
- dist/node_modules/ws/package.json +69 -0
- dist/node_modules/ws/wrapper.mjs +8 -0
- dist/package.json +1 -0
- dist/webui/assets/index-B9vGhdCO.js +256 -0
- dist/webui/assets/index-DaqFU7JR.css +1 -0
- dist/webui/index.html +13 -0
- dist/webui/logo.jpg +0 -0
- dist/使用说明.txt +11 -0
- dist/更新日志.txt +399 -0

dist/node_modules/strtok3/lib/BlobTokenizer.js
@@ -0,0 +1,53 @@
+import { EndOfStreamError } from './stream/index.js';
+import { AbstractTokenizer } from './AbstractTokenizer.js';
+export class BlobTokenizer extends AbstractTokenizer {
+    /**
+     * Construct BufferTokenizer
+     * @param blob - Uint8Array to tokenize
+     * @param options Tokenizer options
+     */
+    constructor(blob, options) {
+        super(options);
+        this.blob = blob;
+        this.fileInfo = { ...options?.fileInfo ?? {}, ...{ size: blob.size, mimeType: blob.type } };
+    }
+    /**
+     * Read buffer from tokenizer
+     * @param uint8Array - Uint8Array to tokenize
+     * @param options - Read behaviour options
+     * @returns {Promise<number>}
+     */
+    async readBuffer(uint8Array, options) {
+        if (options?.position) {
+            this.position = options.position;
+        }
+        const bytesRead = await this.peekBuffer(uint8Array, options);
+        this.position += bytesRead;
+        return bytesRead;
+    }
+    /**
+     * Peek (read ahead) buffer from tokenizer
+     * @param buffer
+     * @param options - Read behaviour options
+     * @returns {Promise<number>}
+     */
+    async peekBuffer(buffer, options) {
+        const normOptions = this.normalizeOptions(buffer, options);
+        const bytes2read = Math.min(this.blob.size - normOptions.position, normOptions.length);
+        if ((!normOptions.mayBeLess) && bytes2read < normOptions.length) {
+            throw new EndOfStreamError();
+        }
+        const arrayBuffer = await this.blob.slice(normOptions.position, normOptions.position + bytes2read).arrayBuffer();
+        buffer.set(new Uint8Array(arrayBuffer));
+        return bytes2read;
+    }
+    close() {
+        return super.close();
+    }
+    supportsRandomAccess() {
+        return true;
+    }
+    setPosition(position) {
+        this.position = position;
+    }
+}
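
For context, a minimal usage sketch of the vendored strtok3 fromBlob factory together with the readBuffer signature shown above; the sample bytes and MIME type are illustrative, not taken from this package:

import { fromBlob } from 'strtok3';

// Blob is a Node.js global since v18; the payload here is made up for illustration.
const blob = new Blob([new Uint8Array([0x4c, 0x4c, 0x4f, 0x42])], { type: 'application/octet-stream' });
const tokenizer = fromBlob(blob);

console.log(tokenizer.fileInfo.size, tokenizer.fileInfo.mimeType); // 4 'application/octet-stream'
const header = new Uint8Array(2);
await tokenizer.readBuffer(header);                                   // reads bytes 0-1, position moves to 2
await tokenizer.readBuffer(header, { position: 3, mayBeLess: true }); // random access within the Blob
await tokenizer.close();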

dist/node_modules/strtok3/lib/BufferTokenizer.d.ts
@@ -0,0 +1,29 @@
+import type { ITokenizerOptions, IReadChunkOptions, IRandomAccessFileInfo, IRandomAccessTokenizer } from './types.js';
+import { AbstractTokenizer } from './AbstractTokenizer.js';
+export declare class BufferTokenizer extends AbstractTokenizer implements IRandomAccessTokenizer {
+    private uint8Array;
+    fileInfo: IRandomAccessFileInfo;
+    /**
+     * Construct BufferTokenizer
+     * @param uint8Array - Uint8Array to tokenize
+     * @param options Tokenizer options
+     */
+    constructor(uint8Array: Uint8Array, options?: ITokenizerOptions);
+    /**
+     * Read buffer from tokenizer
+     * @param uint8Array - Uint8Array to tokenize
+     * @param options - Read behaviour options
+     * @returns {Promise<number>}
+     */
+    readBuffer(uint8Array: Uint8Array, options?: IReadChunkOptions): Promise<number>;
+    /**
+     * Peek (read ahead) buffer from tokenizer
+     * @param uint8Array
+     * @param options - Read behaviour options
+     * @returns {Promise<number>}
+     */
+    peekBuffer(uint8Array: Uint8Array, options?: IReadChunkOptions): Promise<number>;
+    close(): Promise<void>;
+    supportsRandomAccess(): boolean;
+    setPosition(position: number): void;
+}

dist/node_modules/strtok3/lib/BufferTokenizer.js
@@ -0,0 +1,52 @@
+import { EndOfStreamError } from './stream/index.js';
+import { AbstractTokenizer } from './AbstractTokenizer.js';
+export class BufferTokenizer extends AbstractTokenizer {
+    /**
+     * Construct BufferTokenizer
+     * @param uint8Array - Uint8Array to tokenize
+     * @param options Tokenizer options
+     */
+    constructor(uint8Array, options) {
+        super(options);
+        this.uint8Array = uint8Array;
+        this.fileInfo = { ...options?.fileInfo ?? {}, ...{ size: uint8Array.length } };
+    }
+    /**
+     * Read buffer from tokenizer
+     * @param uint8Array - Uint8Array to tokenize
+     * @param options - Read behaviour options
+     * @returns {Promise<number>}
+     */
+    async readBuffer(uint8Array, options) {
+        if (options?.position) {
+            this.position = options.position;
+        }
+        const bytesRead = await this.peekBuffer(uint8Array, options);
+        this.position += bytesRead;
+        return bytesRead;
+    }
+    /**
+     * Peek (read ahead) buffer from tokenizer
+     * @param uint8Array
+     * @param options - Read behaviour options
+     * @returns {Promise<number>}
+     */
+    async peekBuffer(uint8Array, options) {
+        const normOptions = this.normalizeOptions(uint8Array, options);
+        const bytes2read = Math.min(this.uint8Array.length - normOptions.position, normOptions.length);
+        if ((!normOptions.mayBeLess) && bytes2read < normOptions.length) {
+            throw new EndOfStreamError();
+        }
+        uint8Array.set(this.uint8Array.subarray(normOptions.position, normOptions.position + bytes2read));
+        return bytes2read;
+    }
+    close() {
+        return super.close();
+    }
+    supportsRandomAccess() {
+        return true;
+    }
+    setPosition(position) {
+        this.position = position;
+    }
+}
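
A comparable sketch for the in-memory tokenizer; fromBuffer is defined in core.js further down in this diff, and the byte values are illustrative:

import { fromBuffer } from 'strtok3';

const tokenizer = fromBuffer(new Uint8Array([0x01, 0x02, 0x03, 0x04, 0x05]));
const out = new Uint8Array(2);
await tokenizer.peekBuffer(out);                  // fills [0x01, 0x02], position stays at 0
await tokenizer.readBuffer(out, { position: 3 }); // fills [0x04, 0x05], random access is supported
console.log(tokenizer.supportsRandomAccess());    // true
await tokenizer.close();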

dist/node_modules/strtok3/lib/FileTokenizer.d.ts
@@ -0,0 +1,37 @@
+import { AbstractTokenizer } from './AbstractTokenizer.js';
+import type { IRandomAccessTokenizer, IRandomAccessFileInfo, IReadChunkOptions, ITokenizerOptions } from './types.js';
+import { type FileHandle } from 'node:fs/promises';
+interface IFileTokenizerOptions extends ITokenizerOptions {
+    /**
+     * Pass additional file information to the tokenizer
+     */
+    fileInfo: IRandomAccessFileInfo;
+}
+export declare class FileTokenizer extends AbstractTokenizer implements IRandomAccessTokenizer {
+    private fileHandle;
+    fileInfo: IRandomAccessFileInfo;
+    /**
+     * Create tokenizer from provided file path
+     * @param sourceFilePath File path
+     */
+    static fromFile(sourceFilePath: string): Promise<FileTokenizer>;
+    protected constructor(fileHandle: FileHandle, options: IFileTokenizerOptions);
+    /**
+     * Read buffer from file
+     * @param uint8Array - Uint8Array to write result to
+     * @param options - Read behaviour options
+     * @returns Promise number of bytes read
+     */
+    readBuffer(uint8Array: Uint8Array, options?: IReadChunkOptions): Promise<number>;
+    /**
+     * Peek buffer from file
+     * @param uint8Array - Uint8Array (or Buffer) to write data to
+     * @param options - Read behaviour options
+     * @returns Promise number of bytes read
+     */
+    peekBuffer(uint8Array: Uint8Array, options?: IReadChunkOptions): Promise<number>;
+    close(): Promise<void>;
+    setPosition(position: number): void;
+    supportsRandomAccess(): boolean;
+}
+export {};

dist/node_modules/strtok3/lib/FileTokenizer.js
@@ -0,0 +1,61 @@
+import { AbstractTokenizer } from './AbstractTokenizer.js';
+import { EndOfStreamError } from './stream/index.js';
+import { open as fsOpen } from 'node:fs/promises';
+export class FileTokenizer extends AbstractTokenizer {
+    /**
+     * Create tokenizer from provided file path
+     * @param sourceFilePath File path
+     */
+    static async fromFile(sourceFilePath) {
+        const fileHandle = await fsOpen(sourceFilePath, 'r');
+        const stat = await fileHandle.stat();
+        return new FileTokenizer(fileHandle, { fileInfo: { path: sourceFilePath, size: stat.size } });
+    }
+    constructor(fileHandle, options) {
+        super(options);
+        this.fileHandle = fileHandle;
+        this.fileInfo = options.fileInfo;
+    }
+    /**
+     * Read buffer from file
+     * @param uint8Array - Uint8Array to write result to
+     * @param options - Read behaviour options
+     * @returns Promise number of bytes read
+     */
+    async readBuffer(uint8Array, options) {
+        const normOptions = this.normalizeOptions(uint8Array, options);
+        this.position = normOptions.position;
+        if (normOptions.length === 0)
+            return 0;
+        const res = await this.fileHandle.read(uint8Array, 0, normOptions.length, normOptions.position);
+        this.position += res.bytesRead;
+        if (res.bytesRead < normOptions.length && (!options || !options.mayBeLess)) {
+            throw new EndOfStreamError();
+        }
+        return res.bytesRead;
+    }
+    /**
+     * Peek buffer from file
+     * @param uint8Array - Uint8Array (or Buffer) to write data to
+     * @param options - Read behaviour options
+     * @returns Promise number of bytes read
+     */
+    async peekBuffer(uint8Array, options) {
+        const normOptions = this.normalizeOptions(uint8Array, options);
+        const res = await this.fileHandle.read(uint8Array, 0, normOptions.length, normOptions.position);
+        if ((!normOptions.mayBeLess) && res.bytesRead < normOptions.length) {
+            throw new EndOfStreamError();
+        }
+        return res.bytesRead;
+    }
+    async close() {
+        await this.fileHandle.close();
+        return super.close();
+    }
+    setPosition(position) {
+        this.position = position;
+    }
+    supportsRandomAccess() {
+        return true;
+    }
+}
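
fromFile (re-exported from index.js later in this diff) wraps the FileHandle logic shown above; a hedged sketch, with an illustrative file path:

import { fromFile } from 'strtok3';

const tokenizer = await fromFile('./dist/default_config.json'); // any readable file path works
try {
  console.log(tokenizer.fileInfo.path, tokenizer.fileInfo.size);
  const firstBytes = new Uint8Array(4);
  await tokenizer.readBuffer(firstBytes, { mayBeLess: true });
} finally {
  await tokenizer.close(); // also closes the underlying FileHandle
}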

dist/node_modules/strtok3/lib/ReadStreamTokenizer.d.ts
@@ -0,0 +1,31 @@
+import { AbstractTokenizer } from './AbstractTokenizer.js';
+import { type IStreamReader } from './stream/index.js';
+import type { IFileInfo, IReadChunkOptions, ITokenizerOptions } from './types.js';
+export declare class ReadStreamTokenizer extends AbstractTokenizer {
+    private streamReader;
+    fileInfo: IFileInfo;
+    /**
+     * Constructor
+     * @param streamReader stream-reader to read from
+     * @param options Tokenizer options
+     */
+    constructor(streamReader: IStreamReader, options?: ITokenizerOptions);
+    /**
+     * Read buffer from tokenizer
+     * @param uint8Array - Target Uint8Array to fill with data read from the tokenizer-stream
+     * @param options - Read behaviour options
+     * @returns Promise with number of bytes read
+     */
+    readBuffer(uint8Array: Uint8Array, options?: IReadChunkOptions): Promise<number>;
+    /**
+     * Peek (read ahead) buffer from tokenizer
+     * @param uint8Array - Uint8Array (or Buffer) to write data to
+     * @param options - Read behaviour options
+     * @returns Promise with number of bytes peeked
+     */
+    peekBuffer(uint8Array: Uint8Array, options?: IReadChunkOptions): Promise<number>;
+    ignore(length: number): Promise<number>;
+    abort(): Promise<void>;
+    close(): Promise<void>;
+    supportsRandomAccess(): boolean;
+}

dist/node_modules/strtok3/lib/ReadStreamTokenizer.js
@@ -0,0 +1,102 @@
+import { AbstractTokenizer } from './AbstractTokenizer.js';
+import { EndOfStreamError } from './stream/index.js';
+const maxBufferSize = 256000;
+export class ReadStreamTokenizer extends AbstractTokenizer {
+    /**
+     * Constructor
+     * @param streamReader stream-reader to read from
+     * @param options Tokenizer options
+     */
+    constructor(streamReader, options) {
+        super(options);
+        this.streamReader = streamReader;
+        this.fileInfo = options?.fileInfo ?? {};
+    }
+    /**
+     * Read buffer from tokenizer
+     * @param uint8Array - Target Uint8Array to fill with data read from the tokenizer-stream
+     * @param options - Read behaviour options
+     * @returns Promise with number of bytes read
+     */
+    async readBuffer(uint8Array, options) {
+        const normOptions = this.normalizeOptions(uint8Array, options);
+        const skipBytes = normOptions.position - this.position;
+        if (skipBytes > 0) {
+            await this.ignore(skipBytes);
+            return this.readBuffer(uint8Array, options);
+        }
+        if (skipBytes < 0) {
+            throw new Error('`options.position` must be equal or greater than `tokenizer.position`');
+        }
+        if (normOptions.length === 0) {
+            return 0;
+        }
+        const bytesRead = await this.streamReader.read(uint8Array.subarray(0, normOptions.length), normOptions.mayBeLess);
+        this.position += bytesRead;
+        if ((!options || !options.mayBeLess) && bytesRead < normOptions.length) {
+            throw new EndOfStreamError();
+        }
+        return bytesRead;
+    }
+    /**
+     * Peek (read ahead) buffer from tokenizer
+     * @param uint8Array - Uint8Array (or Buffer) to write data to
+     * @param options - Read behaviour options
+     * @returns Promise with number of bytes peeked
+     */
+    async peekBuffer(uint8Array, options) {
+        const normOptions = this.normalizeOptions(uint8Array, options);
+        let bytesRead = 0;
+        if (normOptions.position) {
+            const skipBytes = normOptions.position - this.position;
+            if (skipBytes > 0) {
+                const skipBuffer = new Uint8Array(normOptions.length + skipBytes);
+                bytesRead = await this.peekBuffer(skipBuffer, { mayBeLess: normOptions.mayBeLess });
+                uint8Array.set(skipBuffer.subarray(skipBytes));
+                return bytesRead - skipBytes;
+            }
+            if (skipBytes < 0) {
+                throw new Error('Cannot peek from a negative offset in a stream');
+            }
+        }
+        if (normOptions.length > 0) {
+            try {
+                bytesRead = await this.streamReader.peek(uint8Array.subarray(0, normOptions.length), normOptions.mayBeLess);
+            }
+            catch (err) {
+                if (options?.mayBeLess && err instanceof EndOfStreamError) {
+                    return 0;
+                }
+                throw err;
+            }
+            if ((!normOptions.mayBeLess) && bytesRead < normOptions.length) {
+                throw new EndOfStreamError();
+            }
+        }
+        return bytesRead;
+    }
+    async ignore(length) {
+        // debug(`ignore ${this.position}...${this.position + length - 1}`);
+        const bufSize = Math.min(maxBufferSize, length);
+        const buf = new Uint8Array(bufSize);
+        let totBytesRead = 0;
+        while (totBytesRead < length) {
+            const remaining = length - totBytesRead;
+            const bytesRead = await this.readBuffer(buf, { length: Math.min(bufSize, remaining) });
+            if (bytesRead < 0) {
+                return bytesRead;
+            }
+            totBytesRead += bytesRead;
+        }
+        return totBytesRead;
+    }
+    abort() {
+        return this.streamReader.abort();
+    }
+    async close() {
+        return this.streamReader.close();
+    }
+    supportsRandomAccess() {
+        return false;
+    }
+}
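
Stream-backed tokenizers are forward-only, as the skipBytes < 0 checks above show; a sketch using a Node Readable (the string payload is made up):

import { Readable } from 'node:stream';
import { fromStream } from 'strtok3';

const tokenizer = await fromStream(Readable.from([Buffer.from('streamed-data')]));
await tokenizer.ignore(9);                      // skip "streamed-" without buffering it all
const tail = new Uint8Array(4);
await tokenizer.readBuffer(tail);               // "data"
console.log(tokenizer.supportsRandomAccess());  // false
await tokenizer.close();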

dist/node_modules/strtok3/lib/core.d.ts
@@ -0,0 +1,40 @@
+import type { Readable } from 'node:stream';
+import { type AnyWebByteStream } from './stream/index.js';
+import { ReadStreamTokenizer } from './ReadStreamTokenizer.js';
+import { BufferTokenizer } from './BufferTokenizer.js';
+import type { ITokenizerOptions } from './types.js';
+import { BlobTokenizer } from './BlobTokenizer.js';
+export { EndOfStreamError, AbortError, type AnyWebByteStream } from './stream/index.js';
+export type { ITokenizer, IRandomAccessTokenizer, IFileInfo, IRandomAccessFileInfo, ITokenizerOptions, IReadChunkOptions, OnClose } from './types.js';
+export type { IToken, IGetToken } from '@tokenizer/token';
+export { AbstractTokenizer } from './AbstractTokenizer.js';
+/**
+ * Construct ReadStreamTokenizer from given Stream.
+ * Will set fileSize, if provided given Stream has set the .path property/
+ * @param stream - Read from Node.js Stream.Readable
+ * @param options - Tokenizer options
+ * @returns ReadStreamTokenizer
+ */
+export declare function fromStream(stream: Readable, options?: ITokenizerOptions): ReadStreamTokenizer;
+/**
+ * Construct ReadStreamTokenizer from given ReadableStream (WebStream API).
+ * Will set fileSize, if provided given Stream has set the .path property/
+ * @param webStream - Read from Node.js Stream.Readable (must be a byte stream)
+ * @param options - Tokenizer options
+ * @returns ReadStreamTokenizer
+ */
+export declare function fromWebStream(webStream: AnyWebByteStream, options?: ITokenizerOptions): ReadStreamTokenizer;
+/**
+ * Construct ReadStreamTokenizer from given Buffer.
+ * @param uint8Array - Uint8Array to tokenize
+ * @param options - Tokenizer options
+ * @returns BufferTokenizer
+ */
+export declare function fromBuffer(uint8Array: Uint8Array, options?: ITokenizerOptions): BufferTokenizer;
+/**
+ * Construct ReadStreamTokenizer from given Blob.
+ * @param blob - Uint8Array to tokenize
+ * @param options - Tokenizer options
+ * @returns BufferTokenizer
+ */
+export declare function fromBlob(blob: Blob, options?: ITokenizerOptions): BlobTokenizer;

dist/node_modules/strtok3/lib/core.js
@@ -0,0 +1,62 @@
+import { StreamReader, makeWebStreamReader } from './stream/index.js';
+import { ReadStreamTokenizer } from './ReadStreamTokenizer.js';
+import { BufferTokenizer } from './BufferTokenizer.js';
+import { BlobTokenizer } from './BlobTokenizer.js';
+export { EndOfStreamError, AbortError } from './stream/index.js';
+export { AbstractTokenizer } from './AbstractTokenizer.js';
+/**
+ * Construct ReadStreamTokenizer from given Stream.
+ * Will set fileSize, if provided given Stream has set the .path property/
+ * @param stream - Read from Node.js Stream.Readable
+ * @param options - Tokenizer options
+ * @returns ReadStreamTokenizer
+ */
+export function fromStream(stream, options) {
+    const streamReader = new StreamReader(stream);
+    const _options = options ?? {};
+    const chainedClose = _options.onClose;
+    _options.onClose = async () => {
+        await streamReader.close();
+        if (chainedClose) {
+            return chainedClose();
+        }
+    };
+    return new ReadStreamTokenizer(streamReader, _options);
+}
+/**
+ * Construct ReadStreamTokenizer from given ReadableStream (WebStream API).
+ * Will set fileSize, if provided given Stream has set the .path property/
+ * @param webStream - Read from Node.js Stream.Readable (must be a byte stream)
+ * @param options - Tokenizer options
+ * @returns ReadStreamTokenizer
+ */
+export function fromWebStream(webStream, options) {
+    const webStreamReader = makeWebStreamReader(webStream);
+    const _options = options ?? {};
+    const chainedClose = _options.onClose;
+    _options.onClose = async () => {
+        await webStreamReader.close();
+        if (chainedClose) {
+            return chainedClose();
+        }
+    };
+    return new ReadStreamTokenizer(webStreamReader, _options);
+}
+/**
+ * Construct ReadStreamTokenizer from given Buffer.
+ * @param uint8Array - Uint8Array to tokenize
+ * @param options - Tokenizer options
+ * @returns BufferTokenizer
+ */
+export function fromBuffer(uint8Array, options) {
+    return new BufferTokenizer(uint8Array, options);
+}
+/**
+ * Construct ReadStreamTokenizer from given Blob.
+ * @param blob - Uint8Array to tokenize
+ * @param options - Tokenizer options
+ * @returns BufferTokenizer
+ */
+export function fromBlob(blob, options) {
+    return new BlobTokenizer(blob, options);
+}
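
The factories above also accept an onClose hook, which core.js chains behind its own stream cleanup. A sketch against a fetch() body; the URL is illustrative, and it assumes AbstractTokenizer.close() invokes the onClose callback, which is not shown in this diff:

import { fromWebStream } from 'strtok3';

const response = await fetch('https://example.com/sample.bin'); // illustrative URL
const tokenizer = fromWebStream(response.body, {
  onClose: () => console.log('web stream reader released'),     // assumed to run when close() is called
});
const magic = new Uint8Array(4);
await tokenizer.peekBuffer(magic, { mayBeLess: true });
await tokenizer.close(); // closes the reader first, then runs the chained onClose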

dist/node_modules/strtok3/lib/index.d.ts
@@ -0,0 +1,16 @@
+import type { Readable } from 'node:stream';
+import type { ReadStreamTokenizer } from './ReadStreamTokenizer.js';
+import { type ITokenizerOptions } from './core.js';
+import { FileTokenizer } from "./FileTokenizer.js";
+export { FileTokenizer } from './FileTokenizer.js';
+export * from './core.js';
+export type { IToken, IGetToken } from '@tokenizer/token';
+/**
+ * Construct ReadStreamTokenizer from given Stream.
+ * Will set fileSize, if provided given Stream has set the .path property.
+ * @param stream - Node.js Stream.Readable
+ * @param options - Pass additional file information to the tokenizer
+ * @returns Tokenizer
+ */
+export declare function fromStream(stream: Readable, options?: ITokenizerOptions): Promise<ReadStreamTokenizer>;
+export declare const fromFile: typeof FileTokenizer.fromFile;

dist/node_modules/strtok3/lib/index.js
@@ -0,0 +1,22 @@
+import { stat as fsStat } from 'node:fs/promises';
+import { fromStream as coreFromStream } from './core.js';
+import { FileTokenizer } from "./FileTokenizer.js";
+export { FileTokenizer } from './FileTokenizer.js';
+export * from './core.js';
+/**
+ * Construct ReadStreamTokenizer from given Stream.
+ * Will set fileSize, if provided given Stream has set the .path property.
+ * @param stream - Node.js Stream.Readable
+ * @param options - Pass additional file information to the tokenizer
+ * @returns Tokenizer
+ */
+export async function fromStream(stream, options) {
+    const rst = coreFromStream(stream, options);
+    if (stream.path) {
+        const stat = await fsStat(stream.path);
+        rst.fileInfo.path = stream.path;
+        rst.fileInfo.size = stat.size;
+    }
+    return rst;
+}
+export const fromFile = FileTokenizer.fromFile;
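
The Node-specific fromStream wrapper above stats stream.path when present, so a tokenizer created from fs.createReadStream knows the file size up front; a brief sketch (file name illustrative):

import { createReadStream } from 'node:fs';
import { fromStream } from 'strtok3';

const tokenizer = await fromStream(createReadStream('dist/package.json'));
console.log(tokenizer.fileInfo.path, tokenizer.fileInfo.size); // filled in via the fsStat call above
await tokenizer.close();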

dist/node_modules/strtok3/lib/stream/AbstractStreamReader.d.ts
@@ -0,0 +1,54 @@
+export interface IStreamReader {
+    /**
+     * Peak ahead (peek) from stream. Subsequent read or peeks will return the same data.
+     * @param uint8Array - Uint8Array (or Buffer) to store data read from stream in
+     * @param mayBeLess - Allow the read to complete, without the buffer being fully filled (length may be smaller)
+     * @returns Number of bytes peeked. If `maybeLess` is set, this shall be the `uint8Array.length`.
+     */
+    peek(uint8Array: Uint8Array, mayBeLess?: boolean): Promise<number>;
+    /**
+     * Read from stream the stream.
+     * @param uint8Array - Uint8Array (or Buffer) to store data read from stream in
+     * @param mayBeLess - Allow the read to complete, without the buffer being fully filled (length may be smaller)
+     * @returns Number of actually bytes read. If `maybeLess` is set, this shall be the `uint8Array.length`.
+     */
+    read(uint8Array: Uint8Array, mayBeLess?: boolean): Promise<number>;
+    close(): Promise<void>;
+    /**
+     * Abort any active asynchronous operation are active, abort those before they may have completed.
+     */
+    abort(): Promise<void>;
+}
+export declare abstract class AbstractStreamReader implements IStreamReader {
+    protected endOfStream: boolean;
+    protected interrupted: boolean;
+    /**
+     * Store peeked data
+     * @type {Array}
+     */
+    protected peekQueue: Uint8Array[];
+    peek(uint8Array: Uint8Array, mayBeLess?: boolean): Promise<number>;
+    read(buffer: Uint8Array, mayBeLess?: boolean): Promise<number>;
+    /**
+     * Read chunk from stream
+     * @param buffer - Target Uint8Array (or Buffer) to store data read from stream in
+     * @returns Number of bytes read
+     */
+    protected readFromPeekBuffer(buffer: Uint8Array): number;
+    readRemainderFromStream(buffer: Uint8Array, mayBeLess: boolean): Promise<number>;
+    /**
+     * Read from stream
+     * @param buffer - Target Uint8Array (or Buffer) to store data read from stream in
+     * @param mayBeLess - If true, may fill the buffer partially
+     * @protected Bytes read
+     */
+    protected abstract readFromStream(buffer: Uint8Array, mayBeLess: boolean): Promise<number>;
+    /**
+     * abort synchronous operations
+     */
+    abstract close(): Promise<void>;
+    /**
+     * Abort any active asynchronous operation are active, abort those before they may have completed.
+     */
+    abstract abort(): Promise<void>;
+}

dist/node_modules/strtok3/lib/stream/AbstractStreamReader.js
@@ -0,0 +1,71 @@
+import { EndOfStreamError, AbortError } from "./Errors.js";
+export class AbstractStreamReader {
+    constructor() {
+        this.endOfStream = false;
+        this.interrupted = false;
+        /**
+         * Store peeked data
+         * @type {Array}
+         */
+        this.peekQueue = [];
+    }
+    async peek(uint8Array, mayBeLess = false) {
+        const bytesRead = await this.read(uint8Array, mayBeLess);
+        this.peekQueue.push(uint8Array.subarray(0, bytesRead)); // Put read data back to peek buffer
+        return bytesRead;
+    }
+    async read(buffer, mayBeLess = false) {
+        if (buffer.length === 0) {
+            return 0;
+        }
+        let bytesRead = this.readFromPeekBuffer(buffer);
+        if (!this.endOfStream) {
+            bytesRead += await this.readRemainderFromStream(buffer.subarray(bytesRead), mayBeLess);
+        }
+        if (bytesRead === 0 && !mayBeLess) {
+            throw new EndOfStreamError();
+        }
+        return bytesRead;
+    }
+    /**
+     * Read chunk from stream
+     * @param buffer - Target Uint8Array (or Buffer) to store data read from stream in
+     * @returns Number of bytes read
+     */
+    readFromPeekBuffer(buffer) {
+        let remaining = buffer.length;
+        let bytesRead = 0;
+        // consume peeked data first
+        while (this.peekQueue.length > 0 && remaining > 0) {
+            const peekData = this.peekQueue.pop(); // Front of queue
+            if (!peekData)
+                throw new Error('peekData should be defined');
+            const lenCopy = Math.min(peekData.length, remaining);
+            buffer.set(peekData.subarray(0, lenCopy), bytesRead);
+            bytesRead += lenCopy;
+            remaining -= lenCopy;
+            if (lenCopy < peekData.length) {
+                // remainder back to queue
+                this.peekQueue.push(peekData.subarray(lenCopy));
+            }
+        }
+        return bytesRead;
+    }
+    async readRemainderFromStream(buffer, mayBeLess) {
+        let bytesRead = 0;
+        // Continue reading from stream if required
+        while (bytesRead < buffer.length && !this.endOfStream) {
+            if (this.interrupted) {
+                throw new AbortError();
+            }
+            const chunkLen = await this.readFromStream(buffer.subarray(bytesRead), mayBeLess);
+            if (chunkLen === 0)
+                break;
+            bytesRead += chunkLen;
+        }
+        if (!mayBeLess && bytesRead < buffer.length) {
+            throw new EndOfStreamError();
+        }
+        return bytesRead;
+    }
+}
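
The peekQueue above is what lets a stream be inspected without consuming it: peeked bytes are pushed back and served to the next read. A small sketch (payload made up):

import { Readable } from 'node:stream';
import { fromStream } from 'strtok3';

const tokenizer = await fromStream(Readable.from([Buffer.from('abcdef')]));
const peeked = new Uint8Array(3);
await tokenizer.peekBuffer(peeked);  // 'abc' - pushed back onto the internal peekQueue
const read = new Uint8Array(3);
await tokenizer.readBuffer(read);    // 'abc' again - served from the peekQueue before the stream
await tokenizer.close();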

dist/node_modules/strtok3/lib/stream/Errors.d.ts
@@ -0,0 +1,10 @@
+export declare const defaultMessages = "End-Of-Stream";
+/**
+ * Thrown on read operation of the end of file or stream has been reached
+ */
+export declare class EndOfStreamError extends Error {
+    constructor();
+}
+export declare class AbortError extends Error {
+    constructor(message?: string);
+}
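
EndOfStreamError (declared above) is how every tokenizer in this diff signals that a read without mayBeLess ran past the available data; a closing sketch:

import { fromBuffer, EndOfStreamError } from 'strtok3';

const tokenizer = fromBuffer(new Uint8Array([0x01, 0x02]));
try {
  await tokenizer.readBuffer(new Uint8Array(8)); // asks for more than the 2 bytes available
} catch (error) {
  if (error instanceof EndOfStreamError) {
    console.log('end of data reached');
  } else {
    throw error;
  }
}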