@builderbot/provider-sherpa 1.3.15-alpha.149 → 1.3.15
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +199 -220
- package/dist/index.mjs +199 -220
- package/package.json +3 -3
package/dist/index.cjs
CHANGED

@@ -26450,7 +26450,7 @@ FsPromise.readFile = readFile;
 
 var core$3 = {};
 
-var ReadStreamTokenizer = {};
+var ReadStreamTokenizer$1 = {};
 
 var AbstractTokenizer$1 = {};
 
@@ -26638,7 +26638,7 @@ Deferred$1.Deferred = Deferred;
 
 Object.defineProperty(AbstractTokenizer$1, "__esModule", { value: true });
 AbstractTokenizer$1.AbstractTokenizer = void 0;
-const peek_readable_1$
+const peek_readable_1$3 = lib$8;
 /**
 * Core tokenizer
 */
@@ -26661,7 +26661,7 @@ class AbstractTokenizer {
 const uint8Array = Buffer.alloc(token.len);
 const len = await this.readBuffer(uint8Array, { position });
 if (len < token.len)
-throw new peek_readable_1$
+throw new peek_readable_1$3.EndOfStreamError();
 return token.get(uint8Array, 0);
 }
 /**
@@ -26674,7 +26674,7 @@ class AbstractTokenizer {
 const uint8Array = Buffer.alloc(token.len);
 const len = await this.peekBuffer(uint8Array, { position });
 if (len < token.len)
-throw new peek_readable_1$
+throw new peek_readable_1$3.EndOfStreamError();
 return token.get(uint8Array, 0);
 }
 /**
@@ -26685,7 +26685,7 @@ class AbstractTokenizer {
 async readNumber(token) {
 const len = await this.readBuffer(this.numBuffer, { length: token.len });
 if (len < token.len)
-throw new peek_readable_1$
+throw new peek_readable_1$3.EndOfStreamError();
 return token.get(this.numBuffer, 0);
 }
 /**
@@ -26696,7 +26696,7 @@ class AbstractTokenizer {
 async peekNumber(token) {
 const len = await this.peekBuffer(this.numBuffer, { length: token.len });
 if (len < token.len)
-throw new peek_readable_1$
+throw new peek_readable_1$3.EndOfStreamError();
 return token.get(this.numBuffer, 0);
 }
 /**
@@ -26740,212 +26740,191 @@ class AbstractTokenizer {
 }
 AbstractTokenizer$1.AbstractTokenizer = AbstractTokenizer;
 
-}
-return totBytesRead;
-}
-};
-ReadStreamTokenizer.ReadStreamTokenizer = ReadStreamTokenizer$1;
-return ReadStreamTokenizer;
-}
-
-var BufferTokenizer = {};
-
-var hasRequiredBufferTokenizer;
+Object.defineProperty(ReadStreamTokenizer$1, "__esModule", { value: true });
+ReadStreamTokenizer$1.ReadStreamTokenizer = void 0;
+const AbstractTokenizer_1$2 = AbstractTokenizer$1;
+const peek_readable_1$2 = lib$8;
+const maxBufferSize = 256000;
+class ReadStreamTokenizer extends AbstractTokenizer_1$2.AbstractTokenizer {
+constructor(stream, fileInfo) {
+super(fileInfo);
+this.streamReader = new peek_readable_1$2.StreamReader(stream);
+}
+/**
+* Get file information, an HTTP-client may implement this doing a HEAD request
+* @return Promise with file information
+*/
+async getFileInfo() {
+return this.fileInfo;
+}
+/**
+* Read buffer from tokenizer
+* @param uint8Array - Target Uint8Array to fill with data read from the tokenizer-stream
+* @param options - Read behaviour options
+* @returns Promise with number of bytes read
+*/
+async readBuffer(uint8Array, options) {
+const normOptions = this.normalizeOptions(uint8Array, options);
+const skipBytes = normOptions.position - this.position;
+if (skipBytes > 0) {
+await this.ignore(skipBytes);
+return this.readBuffer(uint8Array, options);
+}
+else if (skipBytes < 0) {
+throw new Error('`options.position` must be equal or greater than `tokenizer.position`');
+}
+if (normOptions.length === 0) {
+return 0;
+}
+const bytesRead = await this.streamReader.read(uint8Array, normOptions.offset, normOptions.length);
+this.position += bytesRead;
+if ((!options || !options.mayBeLess) && bytesRead < normOptions.length) {
+throw new peek_readable_1$2.EndOfStreamError();
+}
+return bytesRead;
+}
+/**
+* Peek (read ahead) buffer from tokenizer
+* @param uint8Array - Uint8Array (or Buffer) to write data to
+* @param options - Read behaviour options
+* @returns Promise with number of bytes peeked
+*/
+async peekBuffer(uint8Array, options) {
+const normOptions = this.normalizeOptions(uint8Array, options);
+let bytesRead = 0;
+if (normOptions.position) {
+const skipBytes = normOptions.position - this.position;
+if (skipBytes > 0) {
+const skipBuffer = new Uint8Array(normOptions.length + skipBytes);
+bytesRead = await this.peekBuffer(skipBuffer, { mayBeLess: normOptions.mayBeLess });
+uint8Array.set(skipBuffer.subarray(skipBytes), normOptions.offset);
+return bytesRead - skipBytes;
+}
+else if (skipBytes < 0) {
+throw new Error('Cannot peek from a negative offset in a stream');
+}
+}
+if (normOptions.length > 0) {
+try {
+bytesRead = await this.streamReader.peek(uint8Array, normOptions.offset, normOptions.length);
+}
+catch (err) {
+if (options && options.mayBeLess && err instanceof peek_readable_1$2.EndOfStreamError) {
+return 0;
+}
+throw err;
+}
+if ((!normOptions.mayBeLess) && bytesRead < normOptions.length) {
+throw new peek_readable_1$2.EndOfStreamError();
+}
+}
+return bytesRead;
+}
+async ignore(length) {
+// debug(`ignore ${this.position}...${this.position + length - 1}`);
+const bufSize = Math.min(maxBufferSize, length);
+const buf = new Uint8Array(bufSize);
+let totBytesRead = 0;
+while (totBytesRead < length) {
+const remaining = length - totBytesRead;
+const bytesRead = await this.readBuffer(buf, { length: Math.min(bufSize, remaining) });
+if (bytesRead < 0) {
+return bytesRead;
+}
+totBytesRead += bytesRead;
+}
+return totBytesRead;
+}
+}
+ReadStreamTokenizer$1.ReadStreamTokenizer = ReadStreamTokenizer;
 
-
-if (hasRequiredBufferTokenizer) return BufferTokenizer;
-hasRequiredBufferTokenizer = 1;
-Object.defineProperty(BufferTokenizer, "__esModule", { value: true });
-BufferTokenizer.BufferTokenizer = void 0;
-const peek_readable_1 = lib$8;
-const AbstractTokenizer_1 = AbstractTokenizer$1;
-let BufferTokenizer$1 = class BufferTokenizer extends AbstractTokenizer_1.AbstractTokenizer {
-/**
-* Construct BufferTokenizer
-* @param uint8Array - Uint8Array to tokenize
-* @param fileInfo - Pass additional file information to the tokenizer
-*/
-constructor(uint8Array, fileInfo) {
-super(fileInfo);
-this.uint8Array = uint8Array;
-this.fileInfo.size = this.fileInfo.size ? this.fileInfo.size : uint8Array.length;
-}
-/**
-* Read buffer from tokenizer
-* @param uint8Array - Uint8Array to tokenize
-* @param options - Read behaviour options
-* @returns {Promise<number>}
-*/
-async readBuffer(uint8Array, options) {
-if (options && options.position) {
-if (options.position < this.position) {
-throw new Error('`options.position` must be equal or greater than `tokenizer.position`');
-}
-this.position = options.position;
-}
-const bytesRead = await this.peekBuffer(uint8Array, options);
-this.position += bytesRead;
-return bytesRead;
-}
-/**
-* Peek (read ahead) buffer from tokenizer
-* @param uint8Array
-* @param options - Read behaviour options
-* @returns {Promise<number>}
-*/
-async peekBuffer(uint8Array, options) {
-const normOptions = this.normalizeOptions(uint8Array, options);
-const bytes2read = Math.min(this.uint8Array.length - normOptions.position, normOptions.length);
-if ((!normOptions.mayBeLess) && bytes2read < normOptions.length) {
-throw new peek_readable_1.EndOfStreamError();
-}
-else {
-uint8Array.set(this.uint8Array.subarray(normOptions.position, normOptions.position + bytes2read), normOptions.offset);
-return bytes2read;
-}
-}
-async close() {
-// empty
-}
-};
-BufferTokenizer.BufferTokenizer = BufferTokenizer$1;
-return BufferTokenizer;
-}
+var BufferTokenizer$1 = {};
 
-
+Object.defineProperty(BufferTokenizer$1, "__esModule", { value: true });
+BufferTokenizer$1.BufferTokenizer = void 0;
+const peek_readable_1$1 = lib$8;
+const AbstractTokenizer_1$1 = AbstractTokenizer$1;
+class BufferTokenizer extends AbstractTokenizer_1$1.AbstractTokenizer {
+/**
+* Construct BufferTokenizer
+* @param uint8Array - Uint8Array to tokenize
+* @param fileInfo - Pass additional file information to the tokenizer
+*/
+constructor(uint8Array, fileInfo) {
+super(fileInfo);
+this.uint8Array = uint8Array;
+this.fileInfo.size = this.fileInfo.size ? this.fileInfo.size : uint8Array.length;
+}
+/**
+* Read buffer from tokenizer
+* @param uint8Array - Uint8Array to tokenize
+* @param options - Read behaviour options
+* @returns {Promise<number>}
+*/
+async readBuffer(uint8Array, options) {
+if (options && options.position) {
+if (options.position < this.position) {
+throw new Error('`options.position` must be equal or greater than `tokenizer.position`');
+}
+this.position = options.position;
+}
+const bytesRead = await this.peekBuffer(uint8Array, options);
+this.position += bytesRead;
+return bytesRead;
+}
+/**
+* Peek (read ahead) buffer from tokenizer
+* @param uint8Array
+* @param options - Read behaviour options
+* @returns {Promise<number>}
+*/
+async peekBuffer(uint8Array, options) {
+const normOptions = this.normalizeOptions(uint8Array, options);
+const bytes2read = Math.min(this.uint8Array.length - normOptions.position, normOptions.length);
+if ((!normOptions.mayBeLess) && bytes2read < normOptions.length) {
+throw new peek_readable_1$1.EndOfStreamError();
+}
+else {
+uint8Array.set(this.uint8Array.subarray(normOptions.position, normOptions.position + bytes2read), normOptions.offset);
+return bytes2read;
+}
+}
+async close() {
+// empty
+}
+}
+BufferTokenizer$1.BufferTokenizer = BufferTokenizer;
 
-function
-}
-exports$1.fromBuffer = fromBuffer;
-} (core$3));
-return core$3;
-}
+(function (exports$1) {
+Object.defineProperty(exports$1, "__esModule", { value: true });
+exports$1.fromBuffer = exports$1.fromStream = exports$1.EndOfStreamError = void 0;
+const ReadStreamTokenizer_1 = ReadStreamTokenizer$1;
+const BufferTokenizer_1 = BufferTokenizer$1;
+var peek_readable_1 = lib$8;
+Object.defineProperty(exports$1, "EndOfStreamError", { enumerable: true, get: function () { return peek_readable_1.EndOfStreamError; } });
+/**
+* Construct ReadStreamTokenizer from given Stream.
+* Will set fileSize, if provided given Stream has set the .path property/
+* @param stream - Read from Node.js Stream.Readable
+* @param fileInfo - Pass the file information, like size and MIME-type of the corresponding stream.
+* @returns ReadStreamTokenizer
+*/
+function fromStream(stream, fileInfo) {
+fileInfo = fileInfo ? fileInfo : {};
+return new ReadStreamTokenizer_1.ReadStreamTokenizer(stream, fileInfo);
+}
+exports$1.fromStream = fromStream;
+/**
+* Construct ReadStreamTokenizer from given Buffer.
+* @param uint8Array - Uint8Array to tokenize
+* @param fileInfo - Pass additional file information to the tokenizer
+* @returns BufferTokenizer
+*/
+function fromBuffer(uint8Array, fileInfo) {
+return new BufferTokenizer_1.BufferTokenizer(uint8Array, fileInfo);
+}
+exports$1.fromBuffer = fromBuffer;
+} (core$3));
 
 var FileTokenizer$1 = {};
 
@@ -27008,10 +26987,10 @@ FileTokenizer$1.fromFile = fromFile$1;
 Object.defineProperty(exports$1, "__esModule", { value: true });
 exports$1.fromStream = exports$1.fromBuffer = exports$1.EndOfStreamError = exports$1.fromFile = void 0;
 const fs = FsPromise;
-const core =
+const core = core$3;
 var FileTokenizer_1 = FileTokenizer$1;
 Object.defineProperty(exports$1, "fromFile", { enumerable: true, get: function () { return FileTokenizer_1.fromFile; } });
-var core_1 =
+var core_1 = core$3;
 Object.defineProperty(exports$1, "EndOfStreamError", { enumerable: true, get: function () { return core_1.EndOfStreamError; } });
 Object.defineProperty(exports$1, "fromBuffer", { enumerable: true, get: function () { return core_1.fromBuffer; } });
 /**
@@ -27901,7 +27880,7 @@ var supported$1 = {
 };
 
 const Token = lib$7;
-const strtok3$1 =
+const strtok3$1 = core$3;
 const {
 stringToBytes,
 tarHeaderChecksumMatches,
@@ -280816,7 +280795,7 @@ function requireAPEv2Parser () {
 Object.defineProperty(APEv2Parser, "__esModule", { value: true });
 APEv2Parser.APEv2Parser = void 0;
 const debug_1 = requireSrc$1();
-const strtok3 =
+const strtok3 = core$3;
 const token_types_1 = lib$7;
 const util = requireUtil();
 const BasicParser_1 = requireBasicParser();
@@ -281741,7 +281720,7 @@ function requireAiffParser () {
 AiffParser.AIFFParser = void 0;
 const Token = lib$7;
 const debug_1 = requireSrc$1();
-const strtok3 =
+const strtok3 = core$3;
 const ID3v2Parser_1 = requireID3v2Parser();
 const FourCC_1 = requireFourCC();
 const BasicParser_1 = requireBasicParser();
@@ -282654,7 +282633,7 @@ function requireAbstractID3Parser () {
 hasRequiredAbstractID3Parser = 1;
 Object.defineProperty(AbstractID3Parser, "__esModule", { value: true });
 AbstractID3Parser.AbstractID3Parser = void 0;
-const core_1 =
+const core_1 = core$3;
 const debug_1 = requireSrc$1();
 const ID3v2Token_1 = requireID3v2Token();
 const ID3v2Parser_1 = requireID3v2Parser();
@@ -284306,7 +284285,7 @@ function requireMpegParser () {
 Object.defineProperty(MpegParser, "__esModule", { value: true });
 MpegParser.MpegParser = void 0;
 const Token = lib$7;
-const core_1 =
+const core_1 = core$3;
 const debug_1 = requireSrc$1();
 const common = requireUtil();
 const AbstractID3Parser_1 = requireAbstractID3Parser();
@@ -285470,7 +285449,7 @@ function requireOggParser () {
 Object.defineProperty(OggParser, "__esModule", { value: true });
 OggParser.OggParser = OggParser.SegmentTable = void 0;
 const Token = lib$7;
-const core_1 =
+const core_1 = core$3;
 const debug_1 = requireSrc$1();
 const util = requireUtil();
 const FourCC_1 = requireFourCC();
@@ -285755,7 +285734,7 @@ function requireWaveParser () {
 hasRequiredWaveParser = 1;
 Object.defineProperty(WaveParser, "__esModule", { value: true });
 WaveParser.WaveParser = void 0;
-const strtok3 =
+const strtok3 = core$3;
 const Token = lib$7;
 const debug_1 = requireSrc$1();
 const riff = requireRiffChunk();
@@ -286281,7 +286260,7 @@ function requireDsdiffParser () {
 DsdiffParser.DsdiffParser = void 0;
 const Token = lib$7;
 const debug_1 = requireSrc$1();
-const strtok3 =
+const strtok3 = core$3;
 const FourCC_1 = requireFourCC();
 const BasicParser_1 = requireBasicParser();
 const ID3v2Parser_1 = requireID3v2Parser();
@@ -287284,7 +287263,7 @@ function requireCore () {
 hasRequiredCore = 1;
 Object.defineProperty(core, "__esModule", { value: true });
 core.scanAppendingHeaders = core.selectCover = core.ratingToStars = core.orderTags = core.parseFromTokenizer = core.parseBuffer = core.parseStream = void 0;
-const strtok3 =
+const strtok3 = core$3;
 const ParserFactory_1 = requireParserFactory();
 const RandomUint8ArrayReader_1 = requireRandomUint8ArrayReader();
 const APEv2Parser_1 = requireAPEv2Parser();
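
Taken together, the index.cjs hunks above drop the lazily initialised require-style wrappers (note the removed `hasRequiredBufferTokenizer` guard and `return core$3;`) and define the strtok3-style tokenizer core eagerly as `ReadStreamTokenizer$1`, `BufferTokenizer$1` and `core$3`, with every parser module binding `const strtok3 = core$3;` directly. The sketch below is a hypothetical illustration, not part of the package, of how that core is consumed; it relies only on the `fromBuffer`, `peekBuffer`, `readBuffer` and `EndOfStreamError` shapes visible in the added lines, `core` stands in for the bundle-internal `core$3` object, and `readFourCC` is an invented helper name.

// Hypothetical consumer of the inlined tokenizer core shown in the hunks above.
// `core` is assumed to expose fromBuffer() and EndOfStreamError exactly as added
// in this diff; normalizeOptions defaults (offset 0, current position) are assumed.
async function readFourCC(core, payload) {
    // fromBuffer(uint8Array, fileInfo) returns a BufferTokenizer over the payload.
    const tokenizer = core.fromBuffer(payload, { size: payload.length });

    // peekBuffer() fills the target array without advancing tokenizer.position.
    const peeked = new Uint8Array(4);
    await tokenizer.peekBuffer(peeked, { length: 4, mayBeLess: true });

    // readBuffer() advances the position; without mayBeLess a short read rejects
    // with EndOfStreamError, mirroring the added peekBuffer body.
    const fourcc = new Uint8Array(4);
    try {
        await tokenizer.readBuffer(fourcc, { length: 4 });
    } catch (err) {
        if (err instanceof core.EndOfStreamError) return null; // payload shorter than 4 bytes
        throw err;
    }
    return Buffer.from(fourcc).toString('latin1'); // e.g. 'RIFF' for a WAVE payload
}

For example, `readFourCC(core$3, Uint8Array.of(0x52, 0x49, 0x46, 0x46))` would resolve to 'RIFF', while a two-byte payload resolves to null.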
package/dist/index.mjs
CHANGED

@@ -26448,7 +26448,7 @@ FsPromise.readFile = readFile;
 
 var core$3 = {};
 
-var ReadStreamTokenizer = {};
+var ReadStreamTokenizer$1 = {};
 
 var AbstractTokenizer$1 = {};
 
@@ -26636,7 +26636,7 @@ Deferred$1.Deferred = Deferred;
 
 Object.defineProperty(AbstractTokenizer$1, "__esModule", { value: true });
 AbstractTokenizer$1.AbstractTokenizer = void 0;
-const peek_readable_1$
+const peek_readable_1$3 = lib$8;
 /**
 * Core tokenizer
 */
@@ -26659,7 +26659,7 @@ class AbstractTokenizer {
 const uint8Array = Buffer.alloc(token.len);
 const len = await this.readBuffer(uint8Array, { position });
 if (len < token.len)
-throw new peek_readable_1$
+throw new peek_readable_1$3.EndOfStreamError();
 return token.get(uint8Array, 0);
 }
 /**
@@ -26672,7 +26672,7 @@ class AbstractTokenizer {
 const uint8Array = Buffer.alloc(token.len);
 const len = await this.peekBuffer(uint8Array, { position });
 if (len < token.len)
-throw new peek_readable_1$
+throw new peek_readable_1$3.EndOfStreamError();
 return token.get(uint8Array, 0);
 }
 /**
@@ -26683,7 +26683,7 @@ class AbstractTokenizer {
 async readNumber(token) {
 const len = await this.readBuffer(this.numBuffer, { length: token.len });
 if (len < token.len)
-throw new peek_readable_1$
+throw new peek_readable_1$3.EndOfStreamError();
 return token.get(this.numBuffer, 0);
 }
 /**
@@ -26694,7 +26694,7 @@ class AbstractTokenizer {
 async peekNumber(token) {
 const len = await this.peekBuffer(this.numBuffer, { length: token.len });
 if (len < token.len)
-throw new peek_readable_1$
+throw new peek_readable_1$3.EndOfStreamError();
 return token.get(this.numBuffer, 0);
 }
 /**
@@ -26738,212 +26738,191 @@ class AbstractTokenizer {
 }
 AbstractTokenizer$1.AbstractTokenizer = AbstractTokenizer;
 
-}
-return totBytesRead;
-}
-};
-ReadStreamTokenizer.ReadStreamTokenizer = ReadStreamTokenizer$1;
-return ReadStreamTokenizer;
-}
-
-var BufferTokenizer = {};
-
-var hasRequiredBufferTokenizer;
+Object.defineProperty(ReadStreamTokenizer$1, "__esModule", { value: true });
+ReadStreamTokenizer$1.ReadStreamTokenizer = void 0;
+const AbstractTokenizer_1$2 = AbstractTokenizer$1;
+const peek_readable_1$2 = lib$8;
+const maxBufferSize = 256000;
+class ReadStreamTokenizer extends AbstractTokenizer_1$2.AbstractTokenizer {
+constructor(stream, fileInfo) {
+super(fileInfo);
+this.streamReader = new peek_readable_1$2.StreamReader(stream);
+}
+/**
+* Get file information, an HTTP-client may implement this doing a HEAD request
+* @return Promise with file information
+*/
+async getFileInfo() {
+return this.fileInfo;
+}
+/**
+* Read buffer from tokenizer
+* @param uint8Array - Target Uint8Array to fill with data read from the tokenizer-stream
+* @param options - Read behaviour options
+* @returns Promise with number of bytes read
+*/
+async readBuffer(uint8Array, options) {
+const normOptions = this.normalizeOptions(uint8Array, options);
+const skipBytes = normOptions.position - this.position;
+if (skipBytes > 0) {
+await this.ignore(skipBytes);
+return this.readBuffer(uint8Array, options);
+}
+else if (skipBytes < 0) {
+throw new Error('`options.position` must be equal or greater than `tokenizer.position`');
+}
+if (normOptions.length === 0) {
+return 0;
+}
+const bytesRead = await this.streamReader.read(uint8Array, normOptions.offset, normOptions.length);
+this.position += bytesRead;
+if ((!options || !options.mayBeLess) && bytesRead < normOptions.length) {
+throw new peek_readable_1$2.EndOfStreamError();
+}
+return bytesRead;
+}
+/**
+* Peek (read ahead) buffer from tokenizer
+* @param uint8Array - Uint8Array (or Buffer) to write data to
+* @param options - Read behaviour options
+* @returns Promise with number of bytes peeked
+*/
+async peekBuffer(uint8Array, options) {
+const normOptions = this.normalizeOptions(uint8Array, options);
+let bytesRead = 0;
+if (normOptions.position) {
+const skipBytes = normOptions.position - this.position;
+if (skipBytes > 0) {
+const skipBuffer = new Uint8Array(normOptions.length + skipBytes);
+bytesRead = await this.peekBuffer(skipBuffer, { mayBeLess: normOptions.mayBeLess });
+uint8Array.set(skipBuffer.subarray(skipBytes), normOptions.offset);
+return bytesRead - skipBytes;
+}
+else if (skipBytes < 0) {
+throw new Error('Cannot peek from a negative offset in a stream');
+}
+}
+if (normOptions.length > 0) {
+try {
+bytesRead = await this.streamReader.peek(uint8Array, normOptions.offset, normOptions.length);
+}
+catch (err) {
+if (options && options.mayBeLess && err instanceof peek_readable_1$2.EndOfStreamError) {
+return 0;
+}
+throw err;
+}
+if ((!normOptions.mayBeLess) && bytesRead < normOptions.length) {
+throw new peek_readable_1$2.EndOfStreamError();
+}
+}
+return bytesRead;
+}
+async ignore(length) {
+// debug(`ignore ${this.position}...${this.position + length - 1}`);
+const bufSize = Math.min(maxBufferSize, length);
+const buf = new Uint8Array(bufSize);
+let totBytesRead = 0;
+while (totBytesRead < length) {
+const remaining = length - totBytesRead;
+const bytesRead = await this.readBuffer(buf, { length: Math.min(bufSize, remaining) });
+if (bytesRead < 0) {
+return bytesRead;
+}
+totBytesRead += bytesRead;
+}
+return totBytesRead;
+}
+}
+ReadStreamTokenizer$1.ReadStreamTokenizer = ReadStreamTokenizer;
 
-
-if (hasRequiredBufferTokenizer) return BufferTokenizer;
-hasRequiredBufferTokenizer = 1;
-Object.defineProperty(BufferTokenizer, "__esModule", { value: true });
-BufferTokenizer.BufferTokenizer = void 0;
-const peek_readable_1 = lib$8;
-const AbstractTokenizer_1 = AbstractTokenizer$1;
-let BufferTokenizer$1 = class BufferTokenizer extends AbstractTokenizer_1.AbstractTokenizer {
-/**
-* Construct BufferTokenizer
-* @param uint8Array - Uint8Array to tokenize
-* @param fileInfo - Pass additional file information to the tokenizer
-*/
-constructor(uint8Array, fileInfo) {
-super(fileInfo);
-this.uint8Array = uint8Array;
-this.fileInfo.size = this.fileInfo.size ? this.fileInfo.size : uint8Array.length;
-}
-/**
-* Read buffer from tokenizer
-* @param uint8Array - Uint8Array to tokenize
-* @param options - Read behaviour options
-* @returns {Promise<number>}
-*/
-async readBuffer(uint8Array, options) {
-if (options && options.position) {
-if (options.position < this.position) {
-throw new Error('`options.position` must be equal or greater than `tokenizer.position`');
-}
-this.position = options.position;
-}
-const bytesRead = await this.peekBuffer(uint8Array, options);
-this.position += bytesRead;
-return bytesRead;
-}
-/**
-* Peek (read ahead) buffer from tokenizer
-* @param uint8Array
-* @param options - Read behaviour options
-* @returns {Promise<number>}
-*/
-async peekBuffer(uint8Array, options) {
-const normOptions = this.normalizeOptions(uint8Array, options);
-const bytes2read = Math.min(this.uint8Array.length - normOptions.position, normOptions.length);
-if ((!normOptions.mayBeLess) && bytes2read < normOptions.length) {
-throw new peek_readable_1.EndOfStreamError();
-}
-else {
-uint8Array.set(this.uint8Array.subarray(normOptions.position, normOptions.position + bytes2read), normOptions.offset);
-return bytes2read;
-}
-}
-async close() {
-// empty
-}
-};
-BufferTokenizer.BufferTokenizer = BufferTokenizer$1;
-return BufferTokenizer;
-}
+var BufferTokenizer$1 = {};
 
-
+Object.defineProperty(BufferTokenizer$1, "__esModule", { value: true });
+BufferTokenizer$1.BufferTokenizer = void 0;
+const peek_readable_1$1 = lib$8;
+const AbstractTokenizer_1$1 = AbstractTokenizer$1;
+class BufferTokenizer extends AbstractTokenizer_1$1.AbstractTokenizer {
+/**
+* Construct BufferTokenizer
+* @param uint8Array - Uint8Array to tokenize
+* @param fileInfo - Pass additional file information to the tokenizer
+*/
+constructor(uint8Array, fileInfo) {
+super(fileInfo);
+this.uint8Array = uint8Array;
+this.fileInfo.size = this.fileInfo.size ? this.fileInfo.size : uint8Array.length;
+}
+/**
+* Read buffer from tokenizer
+* @param uint8Array - Uint8Array to tokenize
+* @param options - Read behaviour options
+* @returns {Promise<number>}
+*/
+async readBuffer(uint8Array, options) {
+if (options && options.position) {
+if (options.position < this.position) {
+throw new Error('`options.position` must be equal or greater than `tokenizer.position`');
+}
+this.position = options.position;
+}
+const bytesRead = await this.peekBuffer(uint8Array, options);
+this.position += bytesRead;
+return bytesRead;
+}
+/**
+* Peek (read ahead) buffer from tokenizer
+* @param uint8Array
+* @param options - Read behaviour options
+* @returns {Promise<number>}
+*/
+async peekBuffer(uint8Array, options) {
+const normOptions = this.normalizeOptions(uint8Array, options);
+const bytes2read = Math.min(this.uint8Array.length - normOptions.position, normOptions.length);
+if ((!normOptions.mayBeLess) && bytes2read < normOptions.length) {
+throw new peek_readable_1$1.EndOfStreamError();
+}
+else {
+uint8Array.set(this.uint8Array.subarray(normOptions.position, normOptions.position + bytes2read), normOptions.offset);
+return bytes2read;
+}
+}
+async close() {
+// empty
+}
+}
+BufferTokenizer$1.BufferTokenizer = BufferTokenizer;
 
-function
-}
-exports$1.fromBuffer = fromBuffer;
-} (core$3));
-return core$3;
-}
+(function (exports$1) {
+Object.defineProperty(exports$1, "__esModule", { value: true });
+exports$1.fromBuffer = exports$1.fromStream = exports$1.EndOfStreamError = void 0;
+const ReadStreamTokenizer_1 = ReadStreamTokenizer$1;
+const BufferTokenizer_1 = BufferTokenizer$1;
+var peek_readable_1 = lib$8;
+Object.defineProperty(exports$1, "EndOfStreamError", { enumerable: true, get: function () { return peek_readable_1.EndOfStreamError; } });
+/**
+* Construct ReadStreamTokenizer from given Stream.
+* Will set fileSize, if provided given Stream has set the .path property/
+* @param stream - Read from Node.js Stream.Readable
+* @param fileInfo - Pass the file information, like size and MIME-type of the corresponding stream.
+* @returns ReadStreamTokenizer
+*/
+function fromStream(stream, fileInfo) {
+fileInfo = fileInfo ? fileInfo : {};
+return new ReadStreamTokenizer_1.ReadStreamTokenizer(stream, fileInfo);
+}
+exports$1.fromStream = fromStream;
+/**
+* Construct ReadStreamTokenizer from given Buffer.
+* @param uint8Array - Uint8Array to tokenize
+* @param fileInfo - Pass additional file information to the tokenizer
+* @returns BufferTokenizer
+*/
+function fromBuffer(uint8Array, fileInfo) {
+return new BufferTokenizer_1.BufferTokenizer(uint8Array, fileInfo);
+}
+exports$1.fromBuffer = fromBuffer;
+} (core$3));
 
 var FileTokenizer$1 = {};
 
@@ -27006,10 +26985,10 @@ FileTokenizer$1.fromFile = fromFile$1;
 Object.defineProperty(exports$1, "__esModule", { value: true });
 exports$1.fromStream = exports$1.fromBuffer = exports$1.EndOfStreamError = exports$1.fromFile = void 0;
 const fs = FsPromise;
-const core =
+const core = core$3;
 var FileTokenizer_1 = FileTokenizer$1;
 Object.defineProperty(exports$1, "fromFile", { enumerable: true, get: function () { return FileTokenizer_1.fromFile; } });
-var core_1 =
+var core_1 = core$3;
 Object.defineProperty(exports$1, "EndOfStreamError", { enumerable: true, get: function () { return core_1.EndOfStreamError; } });
 Object.defineProperty(exports$1, "fromBuffer", { enumerable: true, get: function () { return core_1.fromBuffer; } });
 /**
@@ -27899,7 +27878,7 @@ var supported$1 = {
 };
 
 const Token = lib$7;
-const strtok3$1 =
+const strtok3$1 = core$3;
 const {
 stringToBytes,
 tarHeaderChecksumMatches,
@@ -280814,7 +280793,7 @@ function requireAPEv2Parser () {
 Object.defineProperty(APEv2Parser, "__esModule", { value: true });
 APEv2Parser.APEv2Parser = void 0;
 const debug_1 = requireSrc$1();
-const strtok3 =
+const strtok3 = core$3;
 const token_types_1 = lib$7;
 const util = requireUtil();
 const BasicParser_1 = requireBasicParser();
@@ -281739,7 +281718,7 @@ function requireAiffParser () {
 AiffParser.AIFFParser = void 0;
 const Token = lib$7;
 const debug_1 = requireSrc$1();
-const strtok3 =
+const strtok3 = core$3;
 const ID3v2Parser_1 = requireID3v2Parser();
 const FourCC_1 = requireFourCC();
 const BasicParser_1 = requireBasicParser();
@@ -282652,7 +282631,7 @@ function requireAbstractID3Parser () {
 hasRequiredAbstractID3Parser = 1;
 Object.defineProperty(AbstractID3Parser, "__esModule", { value: true });
 AbstractID3Parser.AbstractID3Parser = void 0;
-const core_1 =
+const core_1 = core$3;
 const debug_1 = requireSrc$1();
 const ID3v2Token_1 = requireID3v2Token();
 const ID3v2Parser_1 = requireID3v2Parser();
@@ -284304,7 +284283,7 @@ function requireMpegParser () {
 Object.defineProperty(MpegParser, "__esModule", { value: true });
 MpegParser.MpegParser = void 0;
 const Token = lib$7;
-const core_1 =
+const core_1 = core$3;
 const debug_1 = requireSrc$1();
 const common = requireUtil();
 const AbstractID3Parser_1 = requireAbstractID3Parser();
@@ -285468,7 +285447,7 @@ function requireOggParser () {
 Object.defineProperty(OggParser, "__esModule", { value: true });
 OggParser.OggParser = OggParser.SegmentTable = void 0;
 const Token = lib$7;
-const core_1 =
+const core_1 = core$3;
 const debug_1 = requireSrc$1();
 const util = requireUtil();
 const FourCC_1 = requireFourCC();
@@ -285753,7 +285732,7 @@ function requireWaveParser () {
 hasRequiredWaveParser = 1;
 Object.defineProperty(WaveParser, "__esModule", { value: true });
 WaveParser.WaveParser = void 0;
-const strtok3 =
+const strtok3 = core$3;
 const Token = lib$7;
 const debug_1 = requireSrc$1();
 const riff = requireRiffChunk();
@@ -286279,7 +286258,7 @@ function requireDsdiffParser () {
 DsdiffParser.DsdiffParser = void 0;
 const Token = lib$7;
 const debug_1 = requireSrc$1();
-const strtok3 =
+const strtok3 = core$3;
 const FourCC_1 = requireFourCC();
 const BasicParser_1 = requireBasicParser();
 const ID3v2Parser_1 = requireID3v2Parser();
@@ -287282,7 +287261,7 @@ function requireCore () {
 hasRequiredCore = 1;
 Object.defineProperty(core, "__esModule", { value: true });
 core.scanAppendingHeaders = core.selectCover = core.ratingToStars = core.orderTags = core.parseFromTokenizer = core.parseBuffer = core.parseStream = void 0;
-const strtok3 =
+const strtok3 = core$3;
 const ParserFactory_1 = requireParserFactory();
 const RandomUint8ArrayReader_1 = requireRandomUint8ArrayReader();
 const APEv2Parser_1 = requireAPEv2Parser();
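
The ESM bundle receives exactly the same rewrite as index.cjs, only shifted two lines earlier. One detail worth isolating from the `ignore(length)` method added to both bundles is the chunked skip loop capped at `maxBufferSize = 256000`. The following standalone sketch reproduces just that arithmetic; `readChunk` is a hypothetical stand-in for `this.readBuffer` and is not a name from the package.

// Chunked skip logic from the added ignore(length) method, extracted for clarity.
// readChunk(n) stands in for this.readBuffer(buf, { length: n }) and must resolve
// to the number of bytes actually consumed (a negative value signals failure).
const maxBufferSize = 256000;

async function ignore(length, readChunk) {
    const bufSize = Math.min(maxBufferSize, length); // never allocate more than 256 kB
    let totBytesRead = 0;
    while (totBytesRead < length) {
        const remaining = length - totBytesRead;
        const bytesRead = await readChunk(Math.min(bufSize, remaining));
        if (bytesRead < 0) {
            return bytesRead; // propagate the error sentinel, as in the diff
        }
        totBytesRead += bytesRead;
    }
    return totBytesRead;
}

// Example: skipping 600000 bytes consumes chunks of 256000, 256000 and 88000 bytes.
ignore(600000, async (n) => n).then((total) => console.log(total)); // 600000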
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
 "name": "@builderbot/provider-sherpa",
-"version": "1.3.15-alpha.149",
+"version": "1.3.15",
 "description": "Provider Sherpa for BuilderBot - WhatsApp integration using Whaileys",
 "keywords": [],
 "author": "Leifer Mendez <leifer33@gmail.com>",
@@ -38,7 +38,7 @@
 },
 "homepage": "https://github.com/codigoencasa/bot-whatsapp#readme",
 "devDependencies": {
-"@builderbot/bot": "1.3.15
+"@builderbot/bot": "1.3.15",
 "@hapi/boom": "^10.0.1",
 "@jest/globals": "^29.7.0",
 "@rollup/plugin-commonjs": "^25.0.7",
@@ -80,5 +80,5 @@
 "typescript": "^5.9.3",
 "whaileys": "6.3.8"
 },
-"gitHead": "
+"gitHead": "57189dbd8bd8f19cd6354cbdc7a749b26f5907cc"
 }
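
The package.json change is release metadata only: the prerelease 1.3.15-alpha.149 is promoted to the stable 1.3.15, the @builderbot/bot devDependency is pinned to the matching 1.3.15, and gitHead now records commit 57189dbd8bd8f19cd6354cbdc7a749b26f5907cc. Because a prerelease sorts before its corresponding release under semver, dependency ranges that exclude prereleases only start resolving to this package at 1.3.15. A small illustration using the `semver` package (an assumption for illustration only; this diff does not itself add semver):

const semver = require('semver');

// A prerelease always sorts before the corresponding release.
console.log(semver.lt('1.3.15-alpha.149', '1.3.15')); // true

// Plain caret ranges skip prerelease versions by default, so "^1.3.0"
// resolves to this package only once the stable 1.3.15 is published.
console.log(semver.satisfies('1.3.15-alpha.149', '^1.3.0')); // false
console.log(semver.satisfies('1.3.15', '^1.3.0'));           // true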