@colyseus/schema 3.0.0-alpha.40 → 3.0.0-alpha.41
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +19 -3
- package/build/cjs/index.js +1205 -1223
- package/build/cjs/index.js.map +1 -1
- package/build/esm/index.mjs +1205 -1224
- package/build/esm/index.mjs.map +1 -1
- package/build/umd/index.js +1205 -1223
- package/lib/annotations.d.ts +1 -1
- package/lib/annotations.js.map +1 -1
- package/lib/codegen/languages/csharp.js +1 -44
- package/lib/codegen/languages/csharp.js.map +1 -1
- package/lib/decoder/DecodeOperation.d.ts +3 -3
- package/lib/decoder/DecodeOperation.js +9 -9
- package/lib/decoder/DecodeOperation.js.map +1 -1
- package/lib/decoder/Decoder.js +5 -5
- package/lib/decoder/Decoder.js.map +1 -1
- package/lib/encoder/EncodeOperation.js +7 -7
- package/lib/encoder/EncodeOperation.js.map +1 -1
- package/lib/encoder/Encoder.js +4 -4
- package/lib/encoder/Encoder.js.map +1 -1
- package/lib/encoding/assert.d.ts +5 -5
- package/lib/encoding/assert.js +8 -0
- package/lib/encoding/assert.js.map +1 -1
- package/lib/encoding/decode.d.ts +35 -20
- package/lib/encoding/decode.js +43 -86
- package/lib/encoding/decode.js.map +1 -1
- package/lib/encoding/encode.d.ts +36 -17
- package/lib/encoding/encode.js +49 -39
- package/lib/encoding/encode.js.map +1 -1
- package/lib/encoding/spec.d.ts +2 -4
- package/lib/encoding/spec.js +0 -2
- package/lib/encoding/spec.js.map +1 -1
- package/lib/index.d.ts +4 -6
- package/lib/index.js +6 -5
- package/lib/index.js.map +1 -1
- package/lib/types/registry.d.ts +8 -1
- package/lib/types/registry.js +20 -2
- package/lib/types/registry.js.map +1 -1
- package/package.json +1 -1
- package/src/annotations.ts +3 -1
- package/src/codegen/languages/csharp.ts +1 -47
- package/src/decoder/DecodeOperation.ts +6 -6
- package/src/decoder/Decoder.ts +3 -3
- package/src/encoder/EncodeOperation.ts +1 -1
- package/src/encoder/Encoder.ts +1 -1
- package/src/encoding/assert.ts +13 -5
- package/src/encoding/decode.ts +62 -97
- package/src/encoding/encode.ts +64 -36
- package/src/encoding/spec.ts +2 -5
- package/src/index.ts +4 -6
- package/src/types/registry.ts +22 -3
package/src/codegen/languages/csharp.ts
CHANGED

@@ -59,16 +59,9 @@ function generateClass(klass: Class, namespace: string) {
     return `${getCommentHeader()}

 using Colyseus.Schema;
-using Action = System.Action;
 ${namespace ? `\nnamespace ${namespace} {` : ""}
 ${indent}public partial class ${klass.name} : ${klass.extends} {
 ${klass.properties.map((prop) => generateProperty(prop, indent)).join("\n\n")}
-
-${indent}\t/*
-${indent}\t * Support for individual property change callbacks below...
-${indent}\t */
-
-${generateAllFieldCallbacks(klass, indent)}
 ${indent}}
 ${namespace ? "}" : ""}
 `;
@@ -119,7 +112,7 @@ function generateProperty(prop: Property, indent: string = "") {
             typeArgs += `, "${prop.childType}"`;
         }

-        initializer = `
+        initializer = `null`;

     } else {
         langType = getType(prop);
@@ -147,45 +140,6 @@ ${namespace ? "}" : ""}
 `;
 }

-function generateAllFieldCallbacks(klass: Class, indent: string) {
-    //
-    // TODO: improve me. It would be great to generate less boilerplate in favor
-    // of a single implementation on C# Schema class itself.
-    //
-    const eventNames: string[] = [];
-    return `${klass.properties
-        .filter(prop => !prop.deprecated) // generate only for properties that haven't been deprecated.
-        .map(prop => {
-            const eventName = `__${prop.name}Change`;
-            eventNames.push(eventName);
-
-            const defaultNull = (prop.childType)
-                ? "null"
-                : `default(${getType(prop)})`;
-
-            return `\t${indent}protected event PropertyChangeHandler<${getType(prop)}> ${eventName};
-\t${indent}public Action On${capitalize(prop.name)}Change(PropertyChangeHandler<${getType(prop)}> __handler, bool __immediate = true) {
-\t${indent}\tif (__callbacks == null) { __callbacks = new SchemaCallbacks(); }
-\t${indent}\t__callbacks.AddPropertyCallback(nameof(this.${prop.name}));
-\t${indent}\t${eventName} += __handler;
-\t${indent}\tif (__immediate && this.${prop.name} != ${defaultNull}) { __handler(this.${prop.name}, ${defaultNull}); }
-\t${indent}\treturn () => {
-\t${indent}\t\t__callbacks.RemovePropertyCallback(nameof(${prop.name}));
-\t${indent}\t\t${eventName} -= __handler;
-\t${indent}\t};
-\t${indent}}`;
-        }).join("\n\n")}
-
-\t${indent}protected override void TriggerFieldChange(DataChange change) {
-\t${indent}\tswitch (change.Field) {
-${klass.properties.filter(prop => !prop.deprecated).map((prop, i) => {
-    return `\t${indent}\t\tcase nameof(${prop.name}): ${eventNames[i]}?.Invoke((${getType(prop)}) change.Value, (${getType(prop)}) change.PreviousValue); break;`;
-}).join("\n")}
-\t${indent}\t\tdefault: break;
-\t\t${indent}}
-\t${indent}}`;
-}
-
 function getChildType(prop: Property) {
     return typeMaps[prop.childType];
 }
package/src/decoder/DecodeOperation.ts
CHANGED

@@ -3,7 +3,7 @@ import { Metadata } from "../Metadata";
 import { Schema } from "../Schema";
 import type { Ref } from "../encoder/ChangeTree";
 import type { Decoder } from "./Decoder";
-import
+import { Iterator, decode } from "../encoding/decode";
 import { $childType, $deleteByIndex, $getByIndex } from "../types/symbols";

 import type { MapSchema } from "../types/custom/MapSchema";
@@ -28,7 +28,7 @@ export const DEFINITION_MISMATCH = -1;
 export type DecodeOperation<T extends Schema = any> = (
     decoder: Decoder<T>,
     bytes: Buffer,
-    it:
+    it: Iterator,
     ref: Ref,
     allChanges: DataChange[],
 ) => number | void;
@@ -40,7 +40,7 @@ export function decodeValue(
     index: number,
     type: any,
     bytes: Buffer,
-    it:
+    it: Iterator,
     allChanges: DataChange[],
 ) {
     const $root = decoder.root;
@@ -176,7 +176,7 @@ export function decodeValue(
 export const decodeSchemaOperation: DecodeOperation = function (
     decoder: Decoder<any>,
     bytes: Buffer,
-    it:
+    it: Iterator,
     ref: Ref,
     allChanges: DataChange[],
 ) {
@@ -225,7 +225,7 @@ export const decodeSchemaOperation: DecodeOperation = function (
 export const decodeKeyValueOperation: DecodeOperation = function (
     decoder: Decoder<any>,
     bytes: Buffer,
-    it:
+    it: Iterator,
     ref: Ref,
     allChanges: DataChange[]
 ) {
@@ -309,7 +309,7 @@ export const decodeKeyValueOperation: DecodeOperation = function (
 export const decodeArray: DecodeOperation = function (
     decoder: Decoder<any>,
     bytes: Buffer,
-    it:
+    it: Iterator,
     ref: ArraySchema,
     allChanges: DataChange[]
 ) {
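The only change across these hunks is the `it` parameter's type: the decode cursor is now the exported `Iterator` interface (`{ offset: number }`) rather than an inlined annotation. A minimal sketch of how such a cursor threads through the decoders, assuming the package-root re-exports added later in this diff (src/index.ts); the byte values are hand-written MessagePack-style data for illustration only:

```ts
import { decode, type Iterator } from "@colyseus/schema";

// an Iterator is just a mutable offset; every decode call advances it in place
const it: Iterator = { offset: 0 };

// 0x05 = positive fixint 5, 0xa3 'a' 'b' 'c' = 3-byte fixstr "abc"
const bytes = Buffer.from([0x05, 0xa3, 0x61, 0x62, 0x63]);

console.log(decode.number(bytes, it)); // 5      (it.offset is now 1)
console.log(decode.string(bytes, it)); // "abc"  (it.offset is now 5)
```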
package/src/decoder/Decoder.ts
CHANGED

@@ -2,7 +2,7 @@ import { TypeContext } from "../types/TypeContext";
 import { $changes, $childType, $decoder, $onDecodeEnd } from "../types/symbols";
 import { Schema } from "../Schema";

-import
+import { decode } from "../encoding/decode";
 import { OPERATION, SWITCH_TO_STRUCTURE, TYPE_ID } from '../encoding/spec';
 import type { Ref } from "../encoder/ChangeTree";
 import type { Iterator } from "../encoding/decode";
@@ -82,9 +82,9 @@ export class Decoder<T extends Schema = any> {
         // keep skipping next bytes until reaches a known structure
         // by local decoder.
         //
-        const nextIterator:
+        const nextIterator: Iterator = { offset: it.offset };
         while (it.offset < totalBytes) {
-            if (
+            if (bytes[it.offset] === SWITCH_TO_STRUCTURE) {
                 nextIterator.offset = it.offset + 1;
                 if ($root.refs.has(decode.number(bytes, nextIterator))) {
                     break;
package/src/encoder/EncodeOperation.ts
CHANGED

@@ -1,7 +1,7 @@
 import { OPERATION } from "../encoding/spec";
 import { $changes, $childType, $getByIndex } from "../types/symbols";

-import
+import { encode } from "../encoding/encode";

 import type { ChangeTree, Ref } from "./ChangeTree";
 import type { Encoder } from "./Encoder";
package/src/encoder/Encoder.ts
CHANGED

@@ -2,7 +2,7 @@ import type { Schema } from "../Schema";
 import { TypeContext } from "../types/TypeContext";
 import { $changes, $encoder, $filter, $onEncodeEnd } from "../types/symbols";

-import
+import { encode } from "../encoding/encode";
 import type { Iterator } from "../encoding/decode";

 import { OPERATION, SWITCH_TO_STRUCTURE, TYPE_ID } from '../encoding/spec';
package/src/encoding/assert.ts
CHANGED

@@ -1,8 +1,8 @@
-import { Schema } from "../Schema";
-import { CollectionSchema } from "../types/custom/CollectionSchema";
-import { MapSchema } from "../types/custom/MapSchema";
-import { SetSchema } from "../types/custom/SetSchema";
-import { ArraySchema } from "../types/custom/ArraySchema";
+import type { Schema } from "../Schema";
+import type { CollectionSchema } from "../types/custom/CollectionSchema";
+import type { MapSchema } from "../types/custom/MapSchema";
+import type { SetSchema } from "../types/custom/SetSchema";
+import type { ArraySchema } from "../types/custom/ArraySchema";
 import type { Ref } from "../encoder/ChangeTree";

 export class EncodeSchemaError extends Error {}
@@ -28,6 +28,10 @@ export function assertType(value: any, type: string, klass: Schema, field: strin
                 console.log(`trying to encode "NaN" in ${klass.constructor.name}#${field}`);
             }
             break;
+        case "bigint64":
+        case "biguint64":
+            typeofTarget = "bigint";
+            break;
         case "string":
             typeofTarget = "string";
             allowNull = true;
@@ -35,6 +39,10 @@ export function assertType(value: any, type: string, klass: Schema, field: strin
         case "boolean":
             // boolean is always encoded as true/false based on truthiness
             return;
+        default:
+            // skip assertion for custom types
+            // TODO: allow custom types to define their own assertions
+            return;
     }

     if (typeof (value) !== typeofTarget && (!allowNull || (allowNull && value !== null))) {
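The new `bigint64` / `biguint64` cases make `assertType` expect `typeof value === "bigint"`, and the new `default` branch skips the assertion entirely for custom type strings. A hedged sketch of what this checks at encode time; the `@type("bigint64")` field declaration is an assumption (the annotations.ts change is listed above but not shown in this diff):

```ts
import { Schema, type } from "@colyseus/schema";

class Wallet extends Schema {
    // assumption: "bigint64" is accepted as a primitive type string in this release
    // (encode.bigint64 / decode.bigint64 / assertType all handle it per the hunks above)
    @type("bigint64") balance: bigint = 0n;
}

const wallet = new Wallet();
wallet.balance = 9_007_199_254_740_993n; // beyond Number.MAX_SAFE_INTEGER

// assertType(wallet.balance, "bigint64", wallet, "balance") now passes,
// because typeof 9007199254740993n === "bigint"
```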
package/src/encoding/decode.ts
CHANGED

@@ -21,7 +21,6 @@
  * SOFTWARE
  */

-import { SWITCH_TO_STRUCTURE } from "./spec";
 import type { BufferLike } from "./encode";

 /**
@@ -31,7 +30,17 @@ import type { BufferLike } from "./encode";

 export interface Iterator { offset: number; }

-
+// force little endian to facilitate decoding on multiple implementations
+const _isLittleEndian = true; // new Uint16Array(new Uint8Array([1, 0]).buffer)[0] === 1;
+const _convoBuffer = new ArrayBuffer(8);
+
+const _int32 = new Int32Array(_convoBuffer);
+const _float32 = new Float32Array(_convoBuffer);
+const _float64 = new Float64Array(_convoBuffer);
+const _uint64 = new BigUint64Array(_convoBuffer);
+const _int64 = new BigInt64Array(_convoBuffer);
+
+function utf8Read(bytes: BufferLike, it: Iterator, length: number) {
     var string = '', chr = 0;
     for (var i = it.offset, end = it.offset + length; i < end; i++) {
         var byte = bytes[i];
@@ -76,72 +85,70 @@ export function utf8Read(bytes: BufferLike, it: Iterator, length: number) {
     return string;
 }

-
+function int8 (bytes: BufferLike, it: Iterator) {
     return uint8(bytes, it) << 24 >> 24;
 };

-
+function uint8 (bytes: BufferLike, it: Iterator) {
     return bytes[it.offset++];
 };

-
+function int16 (bytes: BufferLike, it: Iterator) {
     return uint16(bytes, it) << 16 >> 16;
 };

-
+function uint16 (bytes: BufferLike, it: Iterator) {
     return bytes[it.offset++] | bytes[it.offset++] << 8;
 };

-
+function int32 (bytes: BufferLike, it: Iterator) {
     return bytes[it.offset++] | bytes[it.offset++] << 8 | bytes[it.offset++] << 16 | bytes[it.offset++] << 24;
 };

-
+function uint32 (bytes: BufferLike, it: Iterator) {
     return int32(bytes, it) >>> 0;
 };

-
-
-
+function float32 (bytes: BufferLike, it: Iterator) {
+    _int32[0] = int32(bytes, it);
+    return _float32[0];
+};

-
-
-
+function float64 (bytes: BufferLike, it: Iterator) {
+    _int32[_isLittleEndian ? 0 : 1] = int32(bytes, it);
+    _int32[_isLittleEndian ? 1 : 0] = int32(bytes, it);
+    return _float64[0];
+};

-
+function int64(bytes: BufferLike, it: Iterator) {
     const low = uint32(bytes, it);
     const high = int32(bytes, it) * Math.pow(2, 32);
     return high + low;
 };

-
-
-
-
+function uint64(bytes: BufferLike, it: Iterator) {
+    const low = uint32(bytes, it);
+    const high = uint32(bytes, it) * Math.pow(2, 32);
+    return high + low;
 };

-
-const _isLittleEndian = true; // new Uint16Array(new Uint8Array([1, 0]).buffer)[0] === 1;
-const _int32 = new Int32Array(2);
-const _float32 = new Float32Array(_int32.buffer);
-const _float64 = new Float64Array(_int32.buffer);
-
-export function readFloat32 (bytes: BufferLike, it: Iterator) {
+function bigint64(bytes: BufferLike, it: Iterator) {
     _int32[0] = int32(bytes, it);
-
-
+    _int32[1] = int32(bytes, it);
+    return _int64[0];
+}

-
-    _int32[
-    _int32[
-    return
-}
+function biguint64(bytes: BufferLike, it: Iterator) {
+    _int32[0] = int32(bytes, it);
+    _int32[1] = int32(bytes, it);
+    return _uint64[0];
+}

-
+function boolean (bytes: BufferLike, it: Iterator) {
     return uint8(bytes, it) > 0;
 };

-
+function string (bytes: BufferLike, it: Iterator) {
     const prefix = bytes[it.offset++];
     let length: number;

@@ -162,21 +169,7 @@ export function string (bytes: BufferLike, it: Iterator) {
     return utf8Read(bytes, it, length);
 }

-
-    const prefix = bytes[it.offset];
-    return (
-        // fixstr
-        (prefix < 0xc0 && prefix > 0xa0) ||
-        // str 8
-        prefix === 0xd9 ||
-        // str 16
-        prefix === 0xda ||
-        // str 32
-        prefix === 0xdb
-    );
-}
-
-export function number (bytes: BufferLike, it: Iterator) {
+function number (bytes: BufferLike, it: Iterator) {
     const prefix = bytes[it.offset++];

     if (prefix < 0x80) {
@@ -185,11 +178,11 @@ export function number (bytes: BufferLike, it: Iterator) {

     } else if (prefix === 0xca) {
         // float 32
-        return
+        return float32(bytes, it);

     } else if (prefix === 0xcb) {
         // float 64
-        return
+        return float64(bytes, it);

     } else if (prefix === 0xcc) {
         // uint 8
@@ -229,49 +222,21 @@ export function number (bytes: BufferLike, it: Iterator) {
     }
 };

-export
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-}
-
-export function arrayCheck (bytes: BufferLike, it: Iterator) {
-    return bytes[it.offset] < 0xa0;
-
-    // const prefix = bytes[it.offset] ;
-
-    // if (prefix < 0xa0) {
-    //     return prefix;
-
-    // // array
-    // } else if (prefix === 0xdc) {
-    //     it.offset += 2;
-
-    // } else if (0xdd) {
-    //     it.offset += 4;
-    // }
-
-    // return prefix;
-}
-
-export function switchStructureCheck(bytes: BufferLike, it: Iterator) {
-    return (
-        // previous byte should be `SWITCH_TO_STRUCTURE`
-        bytes[it.offset - 1] === SWITCH_TO_STRUCTURE &&
-        // next byte should be a number
-        (bytes[it.offset] < 0x80 || (bytes[it.offset] >= 0xca && bytes[it.offset] <= 0xd3))
-    );
+export const decode = {
+    utf8Read,
+    int8,
+    uint8,
+    int16,
+    uint16,
+    int32,
+    uint32,
+    float32,
+    float64,
+    int64,
+    uint64,
+    bigint64,
+    biguint64,
+    boolean,
+    string,
+    number,
 }
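The per-function exports are gone; everything is now reached through the single `decode` object, mirroring the `encode` object added in encode.ts below. A small sketch of the new call style, round-tripping one of the new 64-bit bigint codecs (root imports assume the src/index.ts re-exports later in this diff):

```ts
import { encode, decode, type Iterator } from "@colyseus/schema";

const bytes = Buffer.alloc(8); // bigint64 occupies exactly 8 bytes

// write a signed 64-bit value...
const writeIt: Iterator = { offset: 0 };
encode.bigint64(bytes, -42n, writeIt);

// ...and read it back with the matching decoder
const readIt: Iterator = { offset: 0 };
console.log(decode.bigint64(bytes, readIt)); // -42n
```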
package/src/encoding/encode.ts
CHANGED

@@ -35,9 +35,17 @@ let textEncoder: TextEncoder;
 // @ts-ignore
 try { textEncoder = new TextEncoder(); } catch (e) { }

+// force little endian to facilitate decoding on multiple implementations
+const _isLittleEndian = true; // new Uint16Array(new Uint8Array([1, 0]).buffer)[0] === 1;
+const _convoBuffer = new ArrayBuffer(8);
+const _int32 = new Int32Array(_convoBuffer);
+const _float32 = new Float32Array(_convoBuffer);
+const _float64 = new Float64Array(_convoBuffer);
+const _int64 = new BigInt64Array(_convoBuffer);
+
 const hasBufferByteLength = (typeof Buffer !== 'undefined' && Buffer.byteLength);

-
+const utf8Length: (str: string, _?: any) => number = (hasBufferByteLength)
     ? Buffer.byteLength // node
     : function (str: string, _?: any) {
         var c = 0, length = 0;
@@ -60,7 +68,7 @@ export const utf8Length: (str: string, _?: any) => number = (hasBufferByteLength
         return length;
     }

-
+function utf8Write(view: BufferLike, str: string, it: Iterator) {
     var c = 0;
     for (var i = 0, l = str.length; i < l; i++) {
         c = str.charCodeAt(i);
@@ -90,32 +98,32 @@ export function utf8Write(view: BufferLike, str: string, it: Iterator) {
     }
 }

-
+function int8(bytes: BufferLike, value: number, it: Iterator) {
     bytes[it.offset++] = value & 255;
 };

-
+function uint8(bytes: BufferLike, value: number, it: Iterator) {
     bytes[it.offset++] = value & 255;
 };

-
+function int16(bytes: BufferLike, value: number, it: Iterator) {
     bytes[it.offset++] = value & 255;
     bytes[it.offset++] = (value >> 8) & 255;
 };

-
+function uint16(bytes: BufferLike, value: number, it: Iterator) {
     bytes[it.offset++] = value & 255;
     bytes[it.offset++] = (value >> 8) & 255;
 };

-
+function int32(bytes: BufferLike, value: number, it: Iterator) {
     bytes[it.offset++] = value & 255;
     bytes[it.offset++] = (value >> 8) & 255;
     bytes[it.offset++] = (value >> 16) & 255;
     bytes[it.offset++] = (value >> 24) & 255;
 };

-
+function uint32(bytes: BufferLike, value: number, it: Iterator) {
     const b4 = value >> 24;
     const b3 = value >> 16;
     const b2 = value >> 8;
@@ -126,50 +134,48 @@ export function uint32(bytes: BufferLike, value: number, it: Iterator) {
     bytes[it.offset++] = b4 & 255;
 };

-
+function int64(bytes: BufferLike, value: number, it: Iterator) {
     const high = Math.floor(value / Math.pow(2, 32));
     const low = value >>> 0;
     uint32(bytes, low, it);
     uint32(bytes, high, it);
 };

-
+function uint64(bytes: BufferLike, value: number, it: Iterator) {
     const high = (value / Math.pow(2, 32)) >> 0;
     const low = value >>> 0;
     uint32(bytes, low, it);
     uint32(bytes, high, it);
 };

-
-
+function bigint64(bytes: BufferLike, value: bigint, it: Iterator) {
+    _int64[0] = BigInt.asIntN(64, value);
+    int32(bytes, _int32[0], it);
+    int32(bytes, _int32[1], it);
 }

-
-
+function biguint64(bytes: BufferLike, value: bigint, it: Iterator) {
+    _int64[0] = BigInt.asIntN(64, value);
+    int32(bytes, _int32[0], it);
+    int32(bytes, _int32[1], it);
 }

-
-const _isLittleEndian = true; // new Uint16Array(new Uint8Array([1, 0]).buffer)[0] === 1;
-const _int32 = new Int32Array(2);
-const _float32 = new Float32Array(_int32.buffer);
-const _float64 = new Float64Array(_int32.buffer);
-
-export function writeFloat32(bytes: BufferLike, value: number, it: Iterator) {
+function float32(bytes: BufferLike, value: number, it: Iterator) {
     _float32[0] = value;
     int32(bytes, _int32[0], it);
-}
+}

-
+function float64(bytes: BufferLike, value: number, it: Iterator) {
     _float64[0] = value;
     int32(bytes, _int32[_isLittleEndian ? 0 : 1], it);
     int32(bytes, _int32[_isLittleEndian ? 1 : 0], it);
-}
+}

-
+function boolean(bytes: BufferLike, value: number, it: Iterator) {
     bytes[it.offset++] = value ? 1 : 0; // uint8
 };

-
+function string(bytes: BufferLike, value: string, it: Iterator) {
     // encode `null` strings as empty.
     if (!value) { value = ""; }

@@ -207,7 +213,7 @@ export function string(bytes: BufferLike, value: string, it: Iterator) {
     return size + length;
 }

-
+function number(bytes: BufferLike, value: number, it: Iterator) {
     if (isNaN(value)) {
         return number(bytes, 0, it);

@@ -215,17 +221,19 @@ export function number(bytes: BufferLike, value: number, it: Iterator) {
         return number(bytes, (value > 0) ? Number.MAX_SAFE_INTEGER : -Number.MAX_SAFE_INTEGER, it);

     } else if (value !== (value|0)) {
+        if (Math.abs(value) <= 3.4028235e+38) { // range check
+            _float32[0] = value;
+            if (Math.abs(Math.abs(_float32[0]) - Math.abs(value)) < 1e-4) { // precision check; adjust 1e-n (n = precision) to in-/decrease acceptable precision loss
+                // now we know value is in range for f32 and has acceptable precision for f32
+                bytes[it.offset++] = 0xca;
+                float32(bytes, value, it);
+                return 5;
+            }
+        }
+
         bytes[it.offset++] = 0xcb;
-
+        float64(bytes, value, it);
         return 9;
-
-        // TODO: encode float 32?
-        // is it possible to differentiate between float32 / float64 here?
-
-        // // float 32
-        // bytes.push(0xca);
-        // writeFloat32(bytes, value);
-        // return 5;
     }

     if (value >= 0) {
@@ -296,3 +304,23 @@ export function number(bytes: BufferLike, value: number, it: Iterator) {
         return 9;
     }
 }
+
+export const encode = {
+    int8,
+    uint8,
+    int16,
+    uint16,
+    int32,
+    uint32,
+    int64,
+    uint64,
+    bigint64,
+    biguint64,
+    float32,
+    float64,
+    boolean,
+    string,
+    number,
+    utf8Write,
+    utf8Length,
+}
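The behavioral change here is in `number()`: a non-integer value that fits float32's range and round-trips through `_float32` with less than `1e-4` of absolute error is now written as a 5-byte float 32 (prefix `0xca`) instead of always taking the 9-byte float 64 path (prefix `0xcb`). A quick sketch of how the two branches surface in the output; prefixes and sizes follow directly from the code above:

```ts
import { encode, type Iterator } from "@colyseus/schema";

function inspect(value: number) {
    const bytes = Buffer.alloc(16);
    const it: Iterator = { offset: 0 };
    encode.number(bytes, value, it);
    return { prefix: "0x" + bytes[0].toString(16), written: it.offset };
}

console.log(inspect(0.5));        // { prefix: "0xca", written: 5 } -- exact in float32
console.log(inspect(1234567.89)); // { prefix: "0xcb", written: 9 } -- float32 would lose ~0.015
```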
package/src/encoding/spec.ts
CHANGED

@@ -8,8 +8,8 @@ export enum OPERATION {
     ADD = 128, // (10000000) add new structure/primitive
     REPLACE = 0, // (00000001) replace structure/primitive
     DELETE = 64, // (01000000) delete field
-    DELETE_AND_MOVE = 96, // ()
-    MOVE_AND_ADD = 160, // ()
+    DELETE_AND_MOVE = 96, // () ArraySchema only
+    MOVE_AND_ADD = 160, // () ArraySchema only
     DELETE_AND_ADD = 192, // (11000000) DELETE field, followed by an ADD

     /**
@@ -20,11 +20,8 @@ export enum OPERATION {
     /**
      * ArraySchema operations
      */
-    PUSH = 11,
-    UNSHIFT = 12,
     REVERSE = 15,
     MOVE = 32,
     DELETE_BY_REFID = 33, // This operation is only used at ENCODING time. During DECODING, DELETE_BY_REFID is converted to DELETE
     ADD_BY_REFID = 129,
-
 }
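The compound members of `OPERATION` are bitwise combinations of the primitive ones, which is why `DELETE_AND_MOVE` and `MOVE_AND_ADD` only gain an "ArraySchema only" note rather than new values. A tiny check using the numbers declared above:

```ts
// values as declared in spec.ts
const ADD = 128, DELETE = 64, MOVE = 32;

console.log((ADD | DELETE) === 192); // true -> DELETE_AND_ADD
console.log((DELETE | MOVE) === 96); // true -> DELETE_AND_MOVE (ArraySchema only)
console.log((MOVE | ADD) === 160);   // true -> MOVE_AND_ADD (ArraySchema only)
```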
package/src/index.ts
CHANGED

@@ -14,8 +14,8 @@ export { CollectionSchema };
 import { SetSchema } from "./types/custom/SetSchema";
 export { SetSchema };

-import { registerType } from "./types/registry";
-export { registerType };
+import { registerType, defineCustomTypes } from "./types/registry";
+export { registerType, defineCustomTypes };

 registerType("map", { constructor: MapSchema });
 registerType("array", { constructor: ArraySchema });
@@ -27,10 +27,8 @@ export { dumpChanges } from "./utils";

 // Encoder / Decoder
 export { $track, $encoder, $decoder, $filter, $getByIndex, $deleteByIndex, $changes, $childType } from "./types/symbols";
-export
-
-import * as decode from "./encoding/decode";
-export { encode, decode };
+export { encode } from "./encoding/encode";
+export { decode, type Iterator } from "./encoding/decode";

 // Reflection
 export {