sonic-ws 1.1.1-patch → 1.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +5 -3
- package/dist/version.d.ts +1 -1
- package/dist/version.js +1 -1
- package/dist/ws/client/core/ClientCore.d.ts +2 -1
- package/dist/ws/client/core/ClientCore.js +17 -15
- package/dist/ws/packets/PacketProcessors.d.ts +4 -10
- package/dist/ws/packets/PacketProcessors.js +198 -124
- package/dist/ws/packets/PacketType.d.ts +7 -1
- package/dist/ws/packets/PacketType.js +6 -0
- package/dist/ws/packets/Packets.d.ts +10 -6
- package/dist/ws/packets/Packets.js +39 -40
- package/dist/ws/server/SonicWSConnection.d.ts +1 -1
- package/dist/ws/server/SonicWSConnection.js +5 -5
- package/dist/ws/server/SonicWSServer.d.ts +4 -2
- package/dist/ws/server/SonicWSServer.js +20 -17
- package/dist/ws/util/packets/BatchHelper.d.ts +1 -1
- package/dist/ws/util/packets/BatchHelper.js +2 -2
- package/dist/ws/util/packets/CompressionUtil.d.ts +7 -0
- package/dist/ws/util/packets/CompressionUtil.js +202 -1
- package/dist/ws/util/packets/PacketHolder.d.ts +1 -1
- package/dist/ws/util/packets/PacketHolder.js +1 -1
- package/dist/ws/util/packets/PacketUtils.d.ts +10 -1
- package/dist/ws/util/packets/PacketUtils.js +26 -12
- package/package.json +2 -1
package/dist/ws/util/packets/CompressionUtil.js CHANGED
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.decompressBools = exports.compressBools = exports.varIntOverflowPow = exports.ONE_FOURTH = exports.ONE_EIGHT = exports.MAX_VARINT = exports.MAX_UVARINT = exports.MAX_VSECT_SIZE = exports.VARINT_OVERFLOW = exports.NEGATIVE_VARINT = exports.VARINT_CHAIN_FLAG = exports.UVARINT_OVERFLOW = exports.MAX_INT_D = exports.SHORT_OVERFLOW = exports.SHORT_CC_OVERFLOW = exports.NEGATIVE_SHORT = exports.MAX_SHORT = exports.BYTE_OVERFLOW = exports.NEGATIVE_BYTE = exports.MAX_BYTE = void 0;
+exports.decompressJSON = exports.compressJSON = exports.decompressBools = exports.compressBools = exports.varIntOverflowPow = exports.EMPTY_UINT8 = exports.ONE_FOURTH = exports.ONE_EIGHT = exports.MAX_VARINT = exports.MAX_UVARINT = exports.MAX_VSECT_SIZE = exports.VARINT_OVERFLOW = exports.NEGATIVE_VARINT = exports.VARINT_CHAIN_FLAG = exports.UVARINT_OVERFLOW = exports.MAX_INT_D = exports.SHORT_OVERFLOW = exports.SHORT_CC_OVERFLOW = exports.NEGATIVE_SHORT = exports.MAX_SHORT = exports.BYTE_OVERFLOW = exports.NEGATIVE_BYTE = exports.MAX_BYTE = void 0;
 exports.fromShort = fromShort;
 exports.toShort = toShort;
 exports.toByte = toByte;
@@ -36,6 +36,10 @@ exports.bytesToBits = bytesToBits;
 exports.bitsToBytes = bitsToBytes;
 exports.encodeHuffman = encodeHuffman;
 exports.decodeHuffman = decodeHuffman;
+exports.compressGzip = compressGzip;
+exports.decompressGzip = decompressGzip;
+exports.bytesToHex = bytesToHex;
+exports.hexToBytes = hexToBytes;
 const ArrayUtil_1 = require("../ArrayUtil");
 // this shit is so complex so i commented it...
 // the highest 8-bit
@@ -72,6 +76,7 @@ exports.MAX_VARINT = Math.floor(exports.MAX_UVARINT / 2);
 // constants
 exports.ONE_EIGHT = 1 / 8;
 exports.ONE_FOURTH = 1 / 4;
+exports.EMPTY_UINT8 = new Uint8Array([]);
 // precompute powers
 const VARINT_OVERFLOW_POWS = [];
 const varIntOverflowPow = (num) => VARINT_OVERFLOW_POWS[num] ??= exports.UVARINT_OVERFLOW ** num;
@@ -332,3 +337,199 @@ function decodeHuffman(bits) {
     return result;
 }
 ;
+const gzipError = "Your browser is too old to support compression. Please update!";
+async function compressGzip(data, ident = "") {
+    if (typeof CompressionStream === "undefined") {
+        if (typeof window !== "undefined")
+            window.alert(gzipError);
+        throw new Error(gzipError);
+    }
+    const stream = new Blob([data]).stream().pipeThrough(new CompressionStream("deflate-raw"));
+    const buffer = await new Response(stream).arrayBuffer();
+    if (data.length <= buffer.byteLength && ident != "") {
+        console.warn("WARN: Packet '" + ident + "' is small, and compressing it makes the size bigger!");
+    }
+    return new Uint8Array(buffer);
+}
+async function decompressGzip(data) {
+    if (typeof DecompressionStream === "undefined") {
+        if (typeof window !== "undefined")
+            window.alert(gzipError);
+        throw new Error(gzipError);
+    }
+    const stream = new Blob([data]).stream().pipeThrough(new DecompressionStream("deflate-raw"));
+    const buffer = await new Response(stream).arrayBuffer();
+    return new Uint8Array(buffer);
+}
+// BOOLEANS
+var JSONType;
+(function (JSONType) {
+    JSONType[JSONType["NULL"] = 0] = "NULL";
+    JSONType[JSONType["BOOL"] = 1] = "BOOL";
+    JSONType[JSONType["INT"] = 2] = "INT";
+    JSONType[JSONType["FLOAT"] = 3] = "FLOAT";
+    JSONType[JSONType["STRING"] = 4] = "STRING";
+    JSONType[JSONType["ARRAY"] = 5] = "ARRAY";
+    JSONType[JSONType["OBJECT"] = 6] = "OBJECT";
+})(JSONType || (JSONType = {}));
+const encodeString = (str) => {
+    const encoder = new TextEncoder();
+    const data = encoder.encode(str);
+    return [...convertVarInt(data.length), ...data];
+};
+const decodeString = (bytes, offset) => {
+    const [off, len] = readVarInt(bytes, offset);
+    const decoder = new TextDecoder();
+    return { value: decoder.decode(bytes.subarray(off, off + len)), length: off + len - offset };
+};
+// utility: pack 3-bit values into bytes
+const packTypeBits = (types) => {
+    let bits = '';
+    for (const t of types)
+        bits += t.toString(2).padStart(3, '0');
+    return bitsToBytes(bits);
+};
+// utility: unpack bytes into 3-bit type array
+const unpackTypeBits = (bytes, totalValues) => {
+    const bitStr = bytesToBits(bytes);
+    const types = [];
+    for (let i = 0; i < totalValues; i++) {
+        types.push(parseInt(bitStr.slice(i * 3, i * 3 + 3), 2));
+    }
+    return types;
+};
+// main compression
+const compressJSON = (value) => {
+    const bools = [];
+    const payload = [];
+    const typeList = [];
+    const encodeValue = (val) => {
+        if (val === null) {
+            typeList.push(JSONType.NULL);
+        }
+        else if (typeof val === 'boolean') {
+            typeList.push(JSONType.BOOL);
+            bools.push(val);
+        }
+        else if (Number.isInteger(val)) {
+            typeList.push(JSONType.INT);
+            payload.push(...convertVarInt(mapZigZag(val)));
+        }
+        else if (typeof val === 'number') {
+            typeList.push(JSONType.FLOAT);
+            payload.push(...convertFloat(val));
+        }
+        else if (typeof val === 'string') {
+            typeList.push(JSONType.STRING);
+            payload.push(...encodeString(val));
+        }
+        else if (Array.isArray(val)) {
+            typeList.push(JSONType.ARRAY);
+            payload.push(...convertVarInt(val.length));
+            for (const item of val)
+                encodeValue(item);
+        }
+        else if (typeof val === 'object') {
+            typeList.push(JSONType.OBJECT);
+            const keys = Object.keys(val);
+            payload.push(...convertVarInt(keys.length));
+            for (const key of keys) {
+                payload.push(...encodeString(key));
+                encodeValue(val[key]);
+            }
+        }
+        else {
+            throw new Error('Unsupported type');
+        }
+    };
+    encodeValue(value);
+    // boolean bitmap bytes
+    const boolBytes = bools.length
+        ? (0, ArrayUtil_1.splitArray)(bools, 8).map((slice) => (0, exports.compressBools)(slice))
+        : [];
+    // type map bytes (3-bit per value)
+    const typeBytes = packTypeBits(typeList);
+    // prepend lengths of boolBytes and typeBytes as varints
+    const header = [...convertVarInt(boolBytes.length), ...convertVarInt(typeBytes.length)];
+    return Uint8Array.from([...header, ...boolBytes.flat(), ...typeBytes, ...payload]);
+};
+exports.compressJSON = compressJSON;
+// decompression
+const decompressJSON = (bytes) => {
+    let offset = 0;
+    // read lengths
+    const [off1, boolByteLen] = readVarInt(bytes, offset);
+    offset = off1;
+    const [off2, typeByteLen] = readVarInt(bytes, offset);
+    offset = off2;
+    // boolean bitmap
+    const boolStream = [];
+    for (let i = 0; i < boolByteLen; i++) {
+        boolStream.push(...(0, exports.decompressBools)(bytes[offset++]));
+    }
+    let boolIndex = 0;
+    // type map
+    const typeBytes = bytes.subarray(offset, offset + typeByteLen);
+    offset += typeByteLen;
+    const typeList = unpackTypeBits(typeBytes, typeBytes.length * 8 / 3); // overestimate, will only use while decoding
+    let typeIndex = 0;
+    const decodeValue = (depth) => {
+        if (depth > 500)
+            throw new Error("JSON array too deep.");
+        const type = typeList[typeIndex++];
+        switch (type) {
+            case JSONType.NULL: return null;
+            case JSONType.BOOL: return boolStream[boolIndex++];
+            case JSONType.INT: {
+                const [off, n] = readVarInt(bytes, offset);
+                offset = off;
+                return demapZigZag(n);
+            }
+            case JSONType.FLOAT: {
+                const val = deconvertFloat(Array.from(bytes.subarray(offset, offset + 4)));
+                offset += 4;
+                return val;
+            }
+            case JSONType.STRING: {
+                const { value, length } = decodeString(bytes, offset);
+                offset += length;
+                return value;
+            }
+            case JSONType.ARRAY: {
+                const [off, len] = readVarInt(bytes, offset);
+                offset = off;
+                const arr = [];
+                for (let i = 0; i < len; i++)
+                    arr.push(decodeValue(depth + 1));
+                return arr;
+            }
+            case JSONType.OBJECT: {
+                const [off, numKeys] = readVarInt(bytes, offset);
+                offset = off;
+                const obj = {};
+                for (let i = 0; i < numKeys; i++) {
+                    const { value: key, length: keyLen } = decodeString(bytes, offset);
+                    offset += keyLen;
+                    obj[key] = decodeValue(depth + 1);
+                }
+                return obj;
+            }
+            default:
+                throw new Error(`Unknown type ${type}`);
+        }
+    };
+    return decodeValue(0);
+};
+exports.decompressJSON = decompressJSON;
+function bytesToHex(bytes) {
+    return Array.from(bytes, b => b.toString(16).padStart(2, '0')).join('');
+}
+;
+function hexToBytes(hex) {
+    const bytes = new Uint8Array(hex.length / 2);
+    for (let i = 0; i < hex.length; i += 2) {
+        bytes[i / 2] = parseInt(hex.substring(i, i + 2), 16);
+    }
+    return bytes;
+}
+;
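
For orientation, a minimal TypeScript round-trip sketch of the helpers added above. The deep import specifier and the assumption that these names are reachable from the published dist are illustrative only, not a documented entry point of sonic-ws.

    // Hypothetical deep import; adjust to however the package actually re-exports these helpers.
    import { compressGzip, decompressGzip, compressJSON, decompressJSON, bytesToHex } from "sonic-ws/dist/ws/util/packets/CompressionUtil";

    async function roundTrip(): Promise<void> {
        // compressJSON packs a 3-bit type tag per value, booleans into a bitmap, and integers as zig-zag varints.
        const packed = compressJSON({ id: 7, name: "orb", tags: ["a", "b"], alive: true, score: 1.5 });
        console.log(bytesToHex(packed));      // compact hex dump of the encoded payload
        console.log(decompressJSON(packed));  // structurally equal to the input object

        // compressGzip/decompressGzip are async and use CompressionStream("deflate-raw") despite the "gzip" naming;
        // passing an ident only enables the "compressing made it bigger" warning for small payloads.
        const deflated = await compressGzip(packed, "demo");
        const restored = await decompressGzip(deflated);
        console.log(restored.length === packed.length); // true for a lossless round trip
    }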
@@ -9,7 +9,7 @@ import { EnumPackage } from "../enums/EnumType";
  * @param values The values
  * @returns The indexed code, the data, and the packet schema
  */
-export declare function processPacket(packets: PacketHolder, tag: string, values: any[]): [code: number, data: Uint8Array, packet: Packet<any>]
+export declare function processPacket(packets: PacketHolder, tag: string, values: any[]): Promise<[code: number, data: Uint8Array, packet: Packet<any>]>;
 /**
  * Calls the listener for a packet with error callback
  * @param listened The listened data
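
Because processPacket now resolves its result asynchronously (gzip compression awaits CompressionStream), call sites have to await the tuple. A hedged sketch of the new shape, with holder, "move", x and y standing in for whatever the caller already has:

    // Hypothetical call site: the [code, data, packet] tuple is now behind a Promise.
    const [code, data, packet] = await processPacket(holder, "move", [x, y]);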
@@ -49,6 +49,8 @@ export type SharedPacketSettings = {
     validator?: ValidatorFunction;
     /** If this is true, other packets will be processed even if this one isn't finished; it'll still prevent it from calling twice before this finishes though. Defaults to false. */
     async?: boolean;
+    /** If this is true, the packet will be Gzip compressed. Defaults to false on all types but JSON. */
+    gzipCompression?: boolean;
 };
 /** Settings for single-typed packets */
 export type SinglePacketSettings = SharedPacketSettings & {
@@ -58,6 +60,8 @@ export type SinglePacketSettings = SharedPacketSettings & {
     dataMax?: number;
     /** The minimum amount of values that can be sent through this packet; defaults to the max */
     dataMin?: number;
+    /** If this is true, it will save the last received of this value, and if no data is sent, it'll re-use the previous value. This is not compatible with dataMin: 0. Defaults to false. */
+    rereference?: boolean;
 };
 /** Settings for multi-typed packets */
 export type MultiPacketSettings = SharedPacketSettings & {
@@ -79,6 +83,10 @@ export type EnumPacketSettings = SharedPacketSettings & {
     /** The minimum amount of values that can be sent through this packet; defaults to the max */
     dataMin?: number;
 };
+export type KeyEffectivePacketSettings = SharedPacketSettings & {
+    /** Amount of keys to consume in order to have differing values; defaults to 2. Must be 2+ */
+    count?: number;
+};
 /**
  * Creates a structure for a simple single-typed packet.
  * This packet can be sent and received with the specified tag, type, and data cap.
@@ -108,6 +116,7 @@ export declare function CreateObjPacket<T extends readonly ArguableType[], V ext
  * @returns The constructed packet structure data.
  */
 export declare function CreateEnumPacket(settings: EnumPacketSettings): Packet<PacketType.ENUMS>;
+export declare function CreateKeyEffective(settings: KeyEffectivePacketSettings): Packet<PacketType.KEY_EFFECTIVE>;
 /**
  * Flattens a 2-depth array for efficient wire transfer
  * Turns [[x,y,z],[x,y,z]...] to [[x,x...],[y,y...],[z,z...]]
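
A sketch of how the new settings surface through CreatePacket; the top-level import and the choice of PacketType.JSON are assumptions for illustration, matching the declarations above rather than documented usage.

    import { CreatePacket, PacketType } from "sonic-ws"; // assumed re-exports

    const statePacket = CreatePacket({
        tag: "state",
        type: PacketType.JSON,
        gzipCompression: true, // defaults to true for JSON packets and false for every other type
        rereference: true,     // re-use the last received value when nothing is sent; incompatible with dataMin: 0
    });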
@@ -20,6 +20,7 @@ exports.listenPacket = listenPacket;
 exports.CreatePacket = CreatePacket;
 exports.CreateObjPacket = CreateObjPacket;
 exports.CreateEnumPacket = CreateEnumPacket;
+exports.CreateKeyEffective = CreateKeyEffective;
 exports.FlattenData = FlattenData;
 exports.UnFlattenData = UnFlattenData;
 const Packets_1 = require("../../packets/Packets");
@@ -32,7 +33,7 @@ const EnumType_1 = require("../enums/EnumType");
  * @param values The values
  * @returns The indexed code, the data, and the packet schema
  */
-function processPacket(packets, tag, values) {
+async function processPacket(packets, tag, values) {
     const code = packets.getKey(tag);
     const packet = packets.getPacket(tag);
     if (packet.autoFlatten) {
@@ -45,9 +46,11 @@
         throw new Error(`Packet "${tag}" requires at least ${packet.minSize} values!`);
     }
     if (!packet.object) {
-
-
-
+        if (packet.type != PacketType_1.PacketType.JSON) {
+            const found = values.find(v => typeof v == 'object' && v != null);
+            if (found)
+                console.warn(`Passing an array will result in undefined behavior (${JSON.stringify(found)}). Spread the array with ...arr`);
+        }
     }
     else {
         // also map non arrays to arrays to keep some code cleaner
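
The new warning targets callers that hand a whole array to a non-JSON packet as a single value. The send call below is hypothetical (the exact client API is not shown in this diff); it only illustrates the two cases the warning distinguishes:

    // Hypothetical client-side calls; the signature send(tag, ...values) is assumed.
    client.send("positions", [x, y, z]);    // one array value on a non-JSON packet -> warns about undefined behavior
    client.send("positions", ...[x, y, z]); // spread, as the warning suggests: three separate values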
@@ -65,7 +68,7 @@ function processPacket(packets, tag, values) {
             }
         }
     }
-    return [code, values.length > 0 ? packet.processSend(values) : new Uint8Array([]), packet];
+    return [code, values.length > 0 ? await packet.processSend(values) : new Uint8Array([]), packet];
 }
 /**
  * Calls the listener for a packet with error callback
@@ -90,12 +93,13 @@ async function listenPacket(listened, listeners, errorCB) {
         }
     }
     catch (err) {
+        console.error(err);
         errorCB(err);
     }
 }
-/** Determines if a type is a
+/** Determines if a type is a invalid packet type */
 function isInvalidType(type) {
-    return !(typeof type == 'number' && type in PacketType_1.PacketType) && !(type instanceof EnumType_1.EnumPackage);
+    return (!(typeof type == 'number' && type in PacketType_1.PacketType) && !(type instanceof EnumType_1.EnumPackage)) || type == PacketType_1.PacketType.KEY_EFFECTIVE;
 }
 const MAX_DATA_MAX = 2048383;
 /** Clamps data max between 0 and MAX_DATA_MAX */
@@ -132,19 +136,21 @@ function clampDataMin(dataMin, dataMax) {
  * @throws {Error} If the `type` is invalid.
  */
 function CreatePacket(settings) {
-    let { tag, type = PacketType_1.PacketType.NONE, dataMax = 1, dataMin = 1, noDataRange = false, dontSpread = false, validator = null, dataBatching = 0, maxBatchSize = 10, rateLimit = 0, enabled = true, async = false } = settings;
+    let { tag, type = PacketType_1.PacketType.NONE, dataMax = 1, dataMin = 1, noDataRange = false, dontSpread = false, validator = null, dataBatching = 0, maxBatchSize = 10, rateLimit = 0, enabled = true, async = false, gzipCompression = type == PacketType_1.PacketType.JSON, rereference = false } = settings;
     if (!tag)
         throw new Error("Tag not selected!");
     if (noDataRange) {
-        dataMin = 0;
+        dataMin = rereference ? 1 : 0;
         dataMax = MAX_DATA_MAX;
     }
     else if (dataMin == undefined)
         dataMin = type == PacketType_1.PacketType.NONE ? 0 : dataMax;
+    if (rereference && dataMin == 0)
+        throw new Error("Rereference cannot be true if the dataMin is 0");
     if (isInvalidType(type)) {
         throw new Error(`Invalid packet type: ${type}`);
     }
-    const schema = Packets_1.PacketSchema.single(type, clampDataMax(dataMax), clampDataMin(dataMin, dataMax), dontSpread, dataBatching, maxBatchSize, rateLimit, async);
+    const schema = Packets_1.PacketSchema.single(type, clampDataMax(dataMax), clampDataMin(dataMin, dataMax), dontSpread, dataBatching, maxBatchSize, rateLimit, async, gzipCompression, rereference);
     return new Packets_1.Packet(tag, schema, validator, enabled, false);
 }
 /**
@@ -155,7 +161,7 @@ function CreatePacket(settings) {
  * @throws {Error} If any type in `types` is invalid.
  */
 function CreateObjPacket(settings) {
-    let { tag, types = [], dataMaxes, dataMins, noDataRange = false, dontSpread = false, autoFlatten = false, validator = null, dataBatching = 0, maxBatchSize = 10, rateLimit = 0, enabled = true, async = false } = settings;
+    let { tag, types = [], dataMaxes, dataMins, noDataRange = false, dontSpread = false, autoFlatten = false, validator = null, dataBatching = 0, maxBatchSize = 10, rateLimit = 0, enabled = true, async = false, gzipCompression = types && types.includes(PacketType_1.PacketType.JSON) } = settings;
     if (!tag)
         throw new Error("Tag not selected!");
     if (types.length == 0)
@@ -181,7 +187,7 @@ function CreateObjPacket(settings) {
     }
     const clampedDataMaxes = dataMaxes.map(clampDataMax);
     const clampedDataMins = dataMins.map((m, i) => types[i] == PacketType_1.PacketType.NONE ? 0 : clampDataMin(m, clampedDataMaxes[i]));
-    const schema = Packets_1.PacketSchema.object(types, clampedDataMaxes, clampedDataMins, dontSpread, autoFlatten, dataBatching, maxBatchSize, rateLimit, async);
+    const schema = Packets_1.PacketSchema.object(types, clampedDataMaxes, clampedDataMins, dontSpread, autoFlatten, dataBatching, maxBatchSize, rateLimit, async, gzipCompression);
     return new Packets_1.Packet(tag, schema, validator, enabled, false);
 }
 /**
@@ -207,6 +213,14 @@ function CreateEnumPacket(settings) {
         async,
     });
 }
+function CreateKeyEffective(settings) {
+    const { tag, count = 2, validator = null, async = false } = settings;
+    if (!tag)
+        throw new Error("Tag not selected!");
+    if (count < 2)
+        throw new Error("Must have at least 2 key consumptions on key effective packet!");
+    throw new Error("Currently W.I.P.");
+}
 /**
  * Flattens a 2-depth array for efficient wire transfer
  * Turns [[x,y,z],[x,y,z]...] to [[x,x...],[y,y...],[z,z...]]

package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "sonic-ws",
-  "version": "1.1.1-patch",
+  "version": "1.2.0",
   "description": "Ultra-lightweight, high-performance, and bandwidth efficient websocket library",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -31,6 +31,7 @@
     "@types/node": "^24.2.1",
     "@types/ws": "^8.18.1",
     "cpy-cli": "^5.0.0",
+    "esbuild-loader": "^4.4.2",
     "form-data": "^4.0.4",
     "rimraf": "^6.0.1",
     "ts-loader": "^9.5.2",