querysub 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.dependency-cruiser.js +304 -0
- package/.eslintrc.js +51 -0
- package/.github/copilot-instructions.md +1 -0
- package/.vscode/settings.json +25 -0
- package/bin/deploy.js +4 -0
- package/bin/function.js +4 -0
- package/bin/server.js +4 -0
- package/costsBenefits.txt +112 -0
- package/deploy.ts +3 -0
- package/inject.ts +1 -0
- package/package.json +60 -0
- package/prompts.txt +54 -0
- package/spec.txt +820 -0
- package/src/-a-archives/archiveCache.ts +913 -0
- package/src/-a-archives/archives.ts +148 -0
- package/src/-a-archives/archivesBackBlaze.ts +792 -0
- package/src/-a-archives/archivesDisk.ts +418 -0
- package/src/-a-archives/copyLocalToBackblaze.ts +24 -0
- package/src/-a-auth/certs.ts +517 -0
- package/src/-a-auth/der.ts +122 -0
- package/src/-a-auth/ed25519.ts +1015 -0
- package/src/-a-auth/node-forge-ed25519.d.ts +17 -0
- package/src/-b-authorities/dnsAuthority.ts +203 -0
- package/src/-b-authorities/emailAuthority.ts +57 -0
- package/src/-c-identity/IdentityController.ts +200 -0
- package/src/-d-trust/NetworkTrust2.ts +150 -0
- package/src/-e-certs/EdgeCertController.ts +288 -0
- package/src/-e-certs/certAuthority.ts +192 -0
- package/src/-f-node-discovery/NodeDiscovery.ts +543 -0
- package/src/-g-core-values/NodeCapabilities.ts +134 -0
- package/src/-g-core-values/oneTimeForward.ts +91 -0
- package/src/-h-path-value-serialize/PathValueSerializer.ts +769 -0
- package/src/-h-path-value-serialize/stringSerializer.ts +176 -0
- package/src/0-path-value-core/LoggingClient.tsx +24 -0
- package/src/0-path-value-core/NodePathAuthorities.ts +978 -0
- package/src/0-path-value-core/PathController.ts +1 -0
- package/src/0-path-value-core/PathValueCommitter.ts +565 -0
- package/src/0-path-value-core/PathValueController.ts +231 -0
- package/src/0-path-value-core/archiveLocks/ArchiveLocks.ts +154 -0
- package/src/0-path-value-core/archiveLocks/ArchiveLocks2.ts +820 -0
- package/src/0-path-value-core/archiveLocks/archiveSnapshots.ts +180 -0
- package/src/0-path-value-core/debugLogs.ts +90 -0
- package/src/0-path-value-core/pathValueArchives.ts +483 -0
- package/src/0-path-value-core/pathValueCore.ts +2217 -0
- package/src/1-path-client/RemoteWatcher.ts +558 -0
- package/src/1-path-client/pathValueClientWatcher.ts +702 -0
- package/src/2-proxy/PathValueProxyWatcher.ts +1857 -0
- package/src/2-proxy/archiveMoveHarness.ts +376 -0
- package/src/2-proxy/garbageCollection.ts +753 -0
- package/src/2-proxy/pathDatabaseProxyBase.ts +37 -0
- package/src/2-proxy/pathValueProxy.ts +139 -0
- package/src/2-proxy/schema2.ts +518 -0
- package/src/3-path-functions/PathFunctionHelpers.ts +129 -0
- package/src/3-path-functions/PathFunctionRunner.ts +619 -0
- package/src/3-path-functions/PathFunctionRunnerMain.ts +67 -0
- package/src/3-path-functions/deployBlock.ts +10 -0
- package/src/3-path-functions/deployCheck.ts +7 -0
- package/src/3-path-functions/deployMain.ts +160 -0
- package/src/3-path-functions/pathFunctionLoader.ts +282 -0
- package/src/3-path-functions/syncSchema.ts +475 -0
- package/src/3-path-functions/tests/functionsTest.ts +135 -0
- package/src/3-path-functions/tests/rejectTest.ts +77 -0
- package/src/4-dom/css.tsx +29 -0
- package/src/4-dom/cssTypes.d.ts +212 -0
- package/src/4-dom/qreact.tsx +2322 -0
- package/src/4-dom/qreactTest.tsx +417 -0
- package/src/4-querysub/Querysub.ts +877 -0
- package/src/4-querysub/QuerysubController.ts +620 -0
- package/src/4-querysub/copyEvent.ts +0 -0
- package/src/4-querysub/permissions.ts +289 -0
- package/src/4-querysub/permissionsShared.ts +1 -0
- package/src/4-querysub/querysubPrediction.ts +525 -0
- package/src/5-diagnostics/FullscreenModal.tsx +67 -0
- package/src/5-diagnostics/GenericFormat.tsx +165 -0
- package/src/5-diagnostics/Modal.tsx +79 -0
- package/src/5-diagnostics/Table.tsx +183 -0
- package/src/5-diagnostics/TimeGrouper.tsx +114 -0
- package/src/5-diagnostics/diskValueAudit.ts +216 -0
- package/src/5-diagnostics/memoryValueAudit.ts +442 -0
- package/src/5-diagnostics/nodeMetadata.ts +135 -0
- package/src/5-diagnostics/qreactDebug.tsx +309 -0
- package/src/5-diagnostics/shared.ts +26 -0
- package/src/5-diagnostics/synchronousLagTracking.ts +47 -0
- package/src/TestController.ts +35 -0
- package/src/allowclient.flag +0 -0
- package/src/bits.ts +86 -0
- package/src/buffers.ts +69 -0
- package/src/config.ts +53 -0
- package/src/config2.ts +48 -0
- package/src/diagnostics/ActionsHistory.ts +56 -0
- package/src/diagnostics/NodeViewer.tsx +503 -0
- package/src/diagnostics/SizeLimiter.ts +62 -0
- package/src/diagnostics/TimeDebug.tsx +18 -0
- package/src/diagnostics/benchmark.ts +139 -0
- package/src/diagnostics/errorLogs/ErrorLogController.ts +515 -0
- package/src/diagnostics/errorLogs/ErrorLogCore.ts +274 -0
- package/src/diagnostics/errorLogs/LogClassifiers.tsx +302 -0
- package/src/diagnostics/errorLogs/LogFilterUI.tsx +84 -0
- package/src/diagnostics/errorLogs/LogNotify.tsx +101 -0
- package/src/diagnostics/errorLogs/LogTimeSelector.tsx +724 -0
- package/src/diagnostics/errorLogs/LogViewer.tsx +757 -0
- package/src/diagnostics/errorLogs/hookErrors.ts +60 -0
- package/src/diagnostics/errorLogs/logFiltering.tsx +149 -0
- package/src/diagnostics/heapTag.ts +13 -0
- package/src/diagnostics/listenOnDebugger.ts +77 -0
- package/src/diagnostics/logs/DiskLoggerPage.tsx +572 -0
- package/src/diagnostics/logs/ObjectDisplay.tsx +165 -0
- package/src/diagnostics/logs/ansiFormat.ts +108 -0
- package/src/diagnostics/logs/diskLogGlobalContext.ts +38 -0
- package/src/diagnostics/logs/diskLogger.ts +305 -0
- package/src/diagnostics/logs/diskShimConsoleLogs.ts +32 -0
- package/src/diagnostics/logs/injectFileLocationToConsole.ts +50 -0
- package/src/diagnostics/logs/logGitHashes.ts +30 -0
- package/src/diagnostics/managementPages.tsx +289 -0
- package/src/diagnostics/periodic.ts +89 -0
- package/src/diagnostics/runSaturationTest.ts +416 -0
- package/src/diagnostics/satSchema.ts +64 -0
- package/src/diagnostics/trackResources.ts +82 -0
- package/src/diagnostics/watchdog.ts +55 -0
- package/src/errors.ts +132 -0
- package/src/forceProduction.ts +3 -0
- package/src/fs.ts +72 -0
- package/src/heapDumps.ts +666 -0
- package/src/https.ts +2 -0
- package/src/inject.ts +1 -0
- package/src/library-components/ATag.tsx +84 -0
- package/src/library-components/Button.tsx +344 -0
- package/src/library-components/ButtonSelector.tsx +64 -0
- package/src/library-components/DropdownCustom.tsx +151 -0
- package/src/library-components/DropdownSelector.tsx +32 -0
- package/src/library-components/Input.tsx +334 -0
- package/src/library-components/InputLabel.tsx +198 -0
- package/src/library-components/InputPicker.tsx +125 -0
- package/src/library-components/LazyComponent.tsx +62 -0
- package/src/library-components/MeasureHeightCSS.tsx +48 -0
- package/src/library-components/MeasuredDiv.tsx +47 -0
- package/src/library-components/ShowMore.tsx +51 -0
- package/src/library-components/SyncedController.ts +171 -0
- package/src/library-components/TimeRangeSelector.tsx +407 -0
- package/src/library-components/URLParam.ts +263 -0
- package/src/library-components/colors.tsx +14 -0
- package/src/library-components/drag.ts +114 -0
- package/src/library-components/icons.tsx +692 -0
- package/src/library-components/niceStringify.ts +50 -0
- package/src/library-components/renderToString.ts +52 -0
- package/src/misc/PromiseRace.ts +101 -0
- package/src/misc/color.ts +30 -0
- package/src/misc/getParentProcessId.cs +53 -0
- package/src/misc/getParentProcessId.ts +53 -0
- package/src/misc/hash.ts +83 -0
- package/src/misc/ipPong.js +13 -0
- package/src/misc/networking.ts +2 -0
- package/src/misc/random.ts +45 -0
- package/src/misc.ts +19 -0
- package/src/noserverhotreload.flag +0 -0
- package/src/path.ts +226 -0
- package/src/persistentLocalStore.ts +37 -0
- package/src/promise.ts +15 -0
- package/src/server.ts +73 -0
- package/src/src.d.ts +1 -0
- package/src/test/heapProcess.ts +36 -0
- package/src/test/mongoSatTest.tsx +55 -0
- package/src/test/satTest.ts +193 -0
- package/src/test/test.tsx +552 -0
- package/src/zip.ts +92 -0
- package/src/zipThreaded.ts +106 -0
- package/src/zipThreadedWorker.js +19 -0
- package/tsconfig.json +27 -0
- package/yarnSpec.txt +56 -0
|
@@ -0,0 +1,769 @@
|
|
|
1
|
+
import { measureBlock, measureFnc } from "socket-function/src/profiling/measure";
|
|
2
|
+
import { PathValue, ReadLock, Time } from "../0-path-value-core/pathValueCore";
|
|
3
|
+
|
|
4
|
+
import { lazy } from "socket-function/src/caching";
|
|
5
|
+
import debugbreak from "debugbreak";
|
|
6
|
+
import { compare, recursiveFreeze } from "socket-function/src/misc";
|
|
7
|
+
import { MaybePromise } from "socket-function/src/types";
|
|
8
|
+
import { StringSerialize } from "./stringSerializer";
|
|
9
|
+
import { Zip } from "../zip";
|
|
10
|
+
import { asBuffer, asInt32, asUint32 } from "../buffers";
|
|
11
|
+
import { setFlag } from "socket-function/require/compileFlags";
|
|
12
|
+
|
|
13
|
+
import cbor from "cbor-x";
|
|
14
|
+
import { atomicObjectWrite, atomicObjectWriteNoFreeze, doAtomicWrites } from "../2-proxy/PathValueProxyWatcher";
|
|
15
|
+
import { formatNumber, formatPercent, formatTime } from "socket-function/src/formatting/format";
|
|
16
|
+
setFlag(require, "cbor-x", "allowclient", true);
|
|
17
|
+
|
|
18
|
+
import * as pako from "pako";
|
|
19
|
+
import { delay } from "socket-function/src/batching";
|
|
20
|
+
setFlag(require, "pako", "allowclient", true);
|
|
21
|
+
|
|
22
|
+
// Initial allocation size for each output buffer; grows adaptively as total output grows.
const DEFAULT_BUFFER_SIZE = 1024 * 16;
// Hard cap on any single allocated output buffer (64MB).
const MAX_BUFFER_SIZE = 1024 * 1024 * 64;
// Size above which a buffer would be appended in place instead of copied
// (currently only referenced by the commented-out optimization in writeBuffer).
const INPLACE_BUFFER_THRESHOLD = 1024 * 1024 * 16;

// Often times large data is compressed at an application level. If we compress it again,
// it means readers will have to store the compressed buffer AND uncompressed buffer in memory,
// which can result in a lot of extra memory pressure. Not to mention wasted time uncompressing it.
const MIN_COMPRESS_FACTOR = 0.75;

// Lazily constructed shared cbor encoder (structuredClone allows encoding values
// with shared/circular references).
const cborEncoder = lazy(() => new cbor.Encoder({ structuredClone: true }));

// When true, flag bit 32 is set on each serialized PathValue and its lock count
// is written as a float64 (see pathValuesWrite/pathValuesRead).
const SERIALIZE_LOCK_COUNT = true;
|
|
34
|
+
|
|
35
|
+
/*
|
|
36
|
+
Memory Layout
|
|
37
|
+
DataSettings
|
|
38
|
+
main data (possibly compressed)
|
|
39
|
+
PathValueStructure[]
|
|
40
|
+
ReadLock[][]
|
|
41
|
+
values[]
|
|
42
|
+
string dictionary
|
|
43
|
+
*/
|
|
44
|
+
|
|
45
|
+
// TODO: Add a endian version in settings, so we can switch to LE
|
|
46
|
+
// - Maybe we should benchmark LE vs BE first, but... I assume LE
|
|
47
|
+
// will be faster, as that is the native format for most CPUs.
|
|
48
|
+
|
|
49
|
+
// NOTE: We could heavily optimize how we store Times
|
|
50
|
+
// - Most versions will be 0, and all will be small, so we can pack version and time
|
|
51
|
+
// - Most creatorIds are the same, so we could group by creatorId
|
|
52
|
+
// - endTime will be highly reused, so we can probably share them in some way
|
|
53
|
+
// (although, if Time is already just 64 bits, I guess we don't even need this optimization).
|
|
54
|
+
|
|
55
|
+
// Minimal streaming write interface used by the serializer.
// Numbers are universally encoded as float64; strings are dictionary-encoded
// (see serialize); buffers are length-prefixed.
interface Writer {
    // Writes an 8 byte float (the universal number encoding in this format).
    writeFloat64(value: number): void;
    // Writes a single raw byte (used for flag/boolean fields).
    writeByte(value: number): void;
    // Writes a string (the implementation dictionary-encodes it as an index).
    writeString(value: string): void;
    // Writes a raw buffer (the implementation length-prefixes it).
    writeBuffer(value: Buffer): void;
}
// Mirror of Writer for reading. Calls must occur in exactly the same order as
// the corresponding writes, or the stream will be misinterpreted.
interface Reader {
    readFloat64(): number;
    readByte(): number;
    readString(): string;
    readBuffer(): Buffer;
}
|
|
67
|
+
|
|
68
|
+
// Header describing the serialized payload. Encoded as JSON.stringify, length prefixed,
// and stored as the first output buffer (see serialize/deserialize).
interface DataSettings {
    // Set when the payload buffers (everything after this header) are gzip compressed.
    compression?: "gzip";
    // Number of PathValues in the payload.
    valueCount: number;
    // Set when read locks were not serialized (serialize's noLocks config).
    noLocks?: boolean;
    // Set when PathValue.source fields were omitted from the payload.
    stripSource?: boolean;
    version: (
        undefined
        // Split strings into multiple buffers
        | 1
    );
}
|
|
80
|
+
|
|
81
|
+
// Deserialized per-PathValue metadata — everything except the value itself and
// the lock lists, which live in separate sections of the serialized stream.
interface PathValueStructure {
    path: string;
    time: Time;

    event: boolean;
    isTransparent: boolean;
    valid: boolean;
    canGCValue: boolean;
    // Absent when serialized with stripSource, or when the PathValue had no source.
    source?: string;
    // Count only; the actual ReadLock lists are deserialized separately (readLocksRead).
    lockCount: number;
    updateCount: number;
    // IMPORTANT! All of these values must be explicitly copied to PathValue
    // (we don't spread it), so if you add a new value here, make sure to
    // update places that use these values (search for canGCValue in this file).
}
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
class PathValueSerializer {
|
|
100
|
+
@measureFnc
// Serializes per-PathValue metadata: path, time, a packed flag byte, then the
// optional source / lockCount / updateCount fields the flags advertise.
// Must stay in exact sync with pathValuesRead.
private pathValuesWrite(writer: Writer, pathValues: PathValue[], settings: DataSettings) {
    for (const value of pathValues) {
        writer.writeString(value.path);
        this.timeWrite(writer, value.time);

        const includeSource = value.source && !settings.stripSource;

        // Pack the booleans into a single flag byte.
        let flagByte = 64; // bit 64: updateCount is always serialized
        if (value.event) flagByte |= 1;
        if (value.isTransparent) flagByte |= 2;
        if (value.valid) flagByte |= 4;
        if (value.canGCValue) flagByte |= 8;
        if (includeSource) flagByte |= 16;
        if (SERIALIZE_LOCK_COUNT) flagByte |= 32;
        writer.writeByte(flagByte);
        // NOTE: If we ever need flag1, set the last bit of flag0 (| 128),
        // and then write another flag byte.

        if (includeSource) {
            writer.writeString(value.source!);
        }
        if (SERIALIZE_LOCK_COUNT) {
            // Prefer the live lock list length; fall back to a previously
            // deserialized lockCount, then 0.
            writer.writeFloat64(value.locks.length || value.lockCount || 0);
        }
        writer.writeFloat64(value.updateCount || 0);
    }
}
|
|
131
|
+
|
|
132
|
+
@measureFnc
// Reads `count` PathValueStructures in the exact order pathValuesWrite emits them.
private pathValuesRead(reader: Reader, count: number): PathValueStructure[] {
    const result: PathValueStructure[] = [];
    for (let index = 0; index < count; index++) {
        const path = reader.readString();
        const time = this.timeRead(reader);

        // Flag byte chain: the high bit (128) signals another flag byte follows.
        const flagBytes: number[] = [];
        let current: number;
        do {
            current = reader.readByte();
            flagBytes.push(current);
        } while (current & 128);
        const flag0 = flagBytes[0] || 0;

        // Optional fields are present only when their flag bit is set,
        // in this fixed order: source, lockCount, updateCount.
        let source: string | undefined;
        if ((flag0 & 16) !== 0) {
            source = reader.readString();
        }
        const lockCount = (flag0 & 32) !== 0 ? reader.readFloat64() : 0;
        const updateCount = (flag0 & 64) !== 0 ? reader.readFloat64() : 0;

        result.push({
            path,
            time,

            event: (flag0 & 1) !== 0,
            isTransparent: (flag0 & 2) !== 0,
            valid: (flag0 & 4) !== 0,
            canGCValue: (flag0 & 8) !== 0,
            source,
            lockCount,
            updateCount,
        });
    }
    return result;
}
|
|
184
|
+
|
|
185
|
+
// Serializes a Time as three float64s (time, version, creatorId).
// Must stay in exact sync with timeRead.
private timeWrite(writer: Writer, time: Time) {
    const { time: timestamp, version, creatorId } = time;
    writer.writeFloat64(timestamp);
    writer.writeFloat64(version);
    writer.writeFloat64(creatorId);
}
|
|
190
|
+
// Reads a Time written by timeWrite (three float64s, in order).
private timeRead(reader: Reader): Time {
    // NOTE: We COULD pack time + version, as version will always be small, and time will always
    // use < 42 bits (until the year 2109).
    const time = reader.readFloat64();
    const version = reader.readFloat64();
    const creatorId = reader.readFloat64();
    return { time, version, creatorId };
}
|
|
199
|
+
|
|
200
|
+
|
|
201
|
+
@measureFnc
// Serializes per-PathValue lock lists, deduplicated by array identity:
// the unique lists first, then one int32 index per input entry.
private readLocksWrite(writer: Writer, readLocks: ReadLock[][]) {
    // Empty lists map to the sentinel index -1. They still occupy a slot in
    // the Map (and therefore in the serialized unique list), so the numeric
    // indexes assigned to non-empty lists stay aligned with insertion order.
    const indexByList = new Map<ReadLock[], number>();
    for (const list of readLocks) {
        if (indexByList.has(list)) continue;
        indexByList.set(list, list.length === 0 ? -1 : indexByList.size);
    }

    const uniqueLists = Array.from(indexByList.keys());
    writer.writeFloat64(uniqueLists.length);
    for (const list of uniqueLists) {
        writer.writeFloat64(list.length);
        for (const lock of list) {
            writer.writeString(lock.path);
            this.timeWrite(writer, lock.startTime);
            this.timeWrite(writer, lock.endTime);
            writer.writeByte(lock.readIsTranparent ? 1 : 0);
        }
    }

    // One int32 per input entry, packed into a single buffer.
    const entryIndexes = readLocks.map(list => indexByList.get(list)!);
    writer.writeBuffer(asBuffer(new Int32Array(entryIndexes)));
}
|
|
228
|
+
@measureFnc
// Reads the deduplicated lock lists written by readLocksWrite, then expands
// the per-entry int32 indexes back into one ReadLock[] per PathValue.
private readLocksRead(reader: Reader): ReadLock[][] {
    const uniqueCount = reader.readFloat64();
    const uniqueLists: ReadLock[][] = [];
    for (let i = 0; i < uniqueCount; i++) {
        const listLength = reader.readFloat64();
        const list: ReadLock[] = [];
        for (let j = 0; j < listLength; j++) {
            const path = reader.readString();
            const startTime = this.timeRead(reader);
            const endTime = this.timeRead(reader);
            const readIsTranparent = reader.readByte() === 1;
            list.push({ path, startTime, endTime, readIsTranparent });
        }
        uniqueLists.push(list);
    }

    // -1 is the sentinel for an empty list (shared instance for all entries).
    const indexBuffer = reader.readBuffer();
    const empty: ReadLock[] = [];
    const result: ReadLock[][] = [];
    for (let offset = 0; offset < indexBuffer.length; offset += 4) {
        const index = indexBuffer.readInt32LE(offset);
        if (index === -1) {
            result.push(empty);
            continue;
        }
        if (index < 0 || index >= uniqueLists.length) {
            debugbreak(2);
            debugger;
            throw new Error(`Invalid read lock index ${index}, expected 0 <= index < ${uniqueLists.length}`);
        }
        result.push(uniqueLists[index]);
    }
    return result;
}
|
|
263
|
+
|
|
264
|
+
|
|
265
|
+
// Serializes every PathValue's value (in group order) as:
//   1) a single "types" buffer holding one tag byte per value, then
//   2) per-value payloads, only for tags 5 (float64) and 6 (length-prefixed buffer).
// Tags: 1=undefined, 2=null, 3=false, 4=true, 5=number, 6=cbor/lazy encoded buffer.
// Async so it can yield to the paint loop between groups (see the delay calls).
// Must stay in sync with valuesRead.
private async valuesWrite(writer: Writer, valuesGroups: PathValue[][]) {
    let totalLength = valuesGroups.reduce((total, x) => total + x.length, 0);
    let types = Buffer.alloc(totalLength);
    let encodedValues: (Buffer | number)[] = [];

    // Shared index into `types` across all groups.
    let i = 0;
    for (let values of valuesGroups) {
        measureBlock(() => {
            // NOTE: Writing one value at a time is about 1.6X slower. BUT, it allows us to very efficiently decode
            // values, which... probably saves time overall.
            // - ALSO, wire time is probably the slowest part anyways
            for (let k = 0; k < values.length; k++) {
                let pathValue = values[k];
                let value = pathValue.value;
                if (pathValue.isValueLazy) {
                    // If it's already encoded that's great, we can just reuse that buffer in our new encoding
                    types[i] = 6;
                    // Call getBuffer, in case we lost the lazyValue somehow. This is fine even
                    // if it is a number (although a number should NEVER have isValueLazy set),
                    // because numbers can be encoded as Buffers (it is just inefficient).
                    encodedValues.push(this.getBuffer(pathValue));
                } else if (value === undefined) {
                    types[i] = 1;
                } else if (value === null) {
                    types[i] = 2;
                } else if (value === false) {
                    types[i] = 3;
                } else if (value === true) {
                    types[i] = 4;
                } else if (typeof value === "number") {
                    types[i] = 5;
                    encodedValues.push(value);
                } else {
                    types[i] = 6;
                    encodedValues.push(cborEncoder().encode(value));
                }
                i++;
            }
        }, "valuesWriteLoop");
        // Yield between groups to avoid locking up the main thread.
        await delay("paintLoop");
    }

    measureBlock(function valuesWriteTypesBuffer() {
        writer.writeBuffer(types);
    });
    // Break encodedValues into groups of VALUE_GROUP_SIZE, so we can check the
    // time and delay if we are taking too long, but not check it EVERY loop,
    // as that would be too slow.
    let valueGroups: (Buffer | number)[][] = [];
    const VALUE_GROUP_SIZE = 1000;
    for (let i = 0; i < encodedValues.length; i += VALUE_GROUP_SIZE) {
        valueGroups.push(encodedValues.slice(i, i + VALUE_GROUP_SIZE));
    }
    let prevTime = Date.now();
    for (let encodedValues of valueGroups) {
        measureBlock(function valuesWriteTypes() {
            for (let value of encodedValues) {
                if (typeof value === "number") {
                    writer.writeFloat64(value);
                } else {
                    writer.writeBuffer(value);
                }
            }
        });
        let now = Date.now();
        if (now - prevTime > 10) {
            // NOTE(review): prevTime is reset to the pre-delay timestamp, so time
            // spent inside delay() counts against the next 10ms budget — presumably
            // intentional best-effort throttling; confirm whether groups after a
            // delay should get a fresh budget (prevTime = Date.now()).
            await delay("paintLoop");
            prevTime = now;
        }
    }
}
|
|
336
|
+
@measureFnc
// Reads `count` values written by valuesWrite: first the tag-byte buffer, then
// the payloads for tags 5 (float64) and 6 (encoded buffer). When `lazy` is set,
// encoded buffers are wrapped via addLazyValue instead of being cbor-decoded.
private valuesRead(reader: Reader, count: number, lazy: boolean): {
    values: unknown[];
    valuesAreLazy: boolean[];
} {
    const values: unknown[] = [];
    const valuesAreLazy: boolean[] = [];

    const typeTags = reader.readBuffer();
    for (let index = 0; index < count; index++) {
        let isLazy = false;
        switch (typeTags[index]) {
            case 1: values.push(undefined); break;
            case 2: values.push(null); break;
            case 3: values.push(false); break;
            case 4: values.push(true); break;
            case 5: values.push(reader.readFloat64()); break;
            default: {
                const encoded = reader.readBuffer();
                if (lazy) {
                    values.push(this.addLazyValue(encoded));
                    isLazy = true;
                } else {
                    // Freeze the read values, as we really never want them modified.
                    // (We don't freeze PathValue as applications don't directly access them, and the freeze overhead isn't free.
                    // But applications DO access values, and if they change them it breaks so many things, so it is worth the
                    // cost to freeze them.)
                    // NOTE: This makes the code about 10% slower, so it really isn't that slow (although in our test we had
                    // a lot of other overhead, so... maybe it will be a lot slower in certain cases?)
                    values.push(recursiveFreeze(cbor.decode(encoded)));
                }
                break;
            }
        }
        valuesAreLazy.push(isLazy);
    }

    return { values, valuesAreLazy };
}
|
|
380
|
+
|
|
381
|
+
@measureFnc
|
|
382
|
+
public async serialize(values: PathValue[], config?: {
|
|
383
|
+
noLocks?: boolean;
|
|
384
|
+
// TIMING: Compression is about 2X slower (both compression and decompression), but reduces the size by about 2X
|
|
385
|
+
compress?: boolean;
|
|
386
|
+
singleBuffer?: boolean;
|
|
387
|
+
stripSource?: boolean;
|
|
388
|
+
}): Promise<Buffer[]> {
|
|
389
|
+
const version = 1;
|
|
390
|
+
let settings: DataSettings = {
|
|
391
|
+
valueCount: values.length,
|
|
392
|
+
noLocks: config?.noLocks,
|
|
393
|
+
compression: config?.compress ? "gzip" : undefined,
|
|
394
|
+
stripSource: config?.stripSource,
|
|
395
|
+
version,
|
|
396
|
+
};
|
|
397
|
+
|
|
398
|
+
let totalSize = 0;
|
|
399
|
+
let currentBufferDefaultSize = DEFAULT_BUFFER_SIZE;
|
|
400
|
+
let outputBuffers: Buffer[] = [];
|
|
401
|
+
let currentBuffer = Buffer.alloc(currentBufferDefaultSize);
|
|
402
|
+
let currentBufferPos = 0;
|
|
403
|
+
function ensureBytes(count: number) {
|
|
404
|
+
if (currentBufferPos + count < currentBuffer.length) return;
|
|
405
|
+
if (currentBufferPos !== currentBuffer.length) {
|
|
406
|
+
currentBuffer = currentBuffer.slice(0, currentBufferPos);
|
|
407
|
+
}
|
|
408
|
+
outputBuffers.push(currentBuffer);
|
|
409
|
+
totalSize += currentBufferPos;
|
|
410
|
+
currentBufferDefaultSize = Math.min(MAX_BUFFER_SIZE, Math.max(currentBufferDefaultSize, totalSize * 0.25));
|
|
411
|
+
currentBuffer = Buffer.alloc(Math.max(currentBufferDefaultSize, count));
|
|
412
|
+
currentBufferPos = 0;
|
|
413
|
+
}
|
|
414
|
+
|
|
415
|
+
let stringDictionary = new Map<string, number>();
|
|
416
|
+
let writer: Writer = {
|
|
417
|
+
writeFloat64(value) {
|
|
418
|
+
ensureBytes(8);
|
|
419
|
+
if (version >= 1) {
|
|
420
|
+
currentBuffer.writeDoubleLE(value, currentBufferPos);
|
|
421
|
+
} else {
|
|
422
|
+
currentBuffer.writeDoubleBE(value, currentBufferPos);
|
|
423
|
+
}
|
|
424
|
+
currentBufferPos += 8;
|
|
425
|
+
},
|
|
426
|
+
writeByte(value) {
|
|
427
|
+
ensureBytes(1);
|
|
428
|
+
currentBuffer[currentBufferPos] = value;
|
|
429
|
+
currentBufferPos++;
|
|
430
|
+
},
|
|
431
|
+
writeString(value) {
|
|
432
|
+
let index = stringDictionary.get(value);
|
|
433
|
+
if (index === undefined) {
|
|
434
|
+
index = stringDictionary.size;
|
|
435
|
+
stringDictionary.set(value, index);
|
|
436
|
+
}
|
|
437
|
+
writer.writeFloat64(index);
|
|
438
|
+
},
|
|
439
|
+
writeBuffer(value) {
|
|
440
|
+
// NOTE: I think the inplace optimization is actually slower? We needed to add a length prefix
|
|
441
|
+
// buffer anyways, and currentBufferDefaultSize would drift upwards, resulting
|
|
442
|
+
// in allocating 1GB, slicing the first 8 bytes, adding the large in place buffer,
|
|
443
|
+
// allocating 1GB, etc.
|
|
444
|
+
// Special case, very large buffers. Don't copy them, just add the whole buffer
|
|
445
|
+
// if (value.length > INPLACE_BUFFER_THRESHOLD || value.length > currentBuffer.length) {
|
|
446
|
+
// writer.writeFloat64(value.length);
|
|
447
|
+
// if (currentBufferPos > 0) {
|
|
448
|
+
// outputBuffers.push(currentBuffer.slice(0, currentBufferPos));
|
|
449
|
+
// currentBuffer = Buffer.alloc(currentBufferDefaultSize);
|
|
450
|
+
// currentBufferPos = 0;
|
|
451
|
+
// }
|
|
452
|
+
// outputBuffers.push(value);
|
|
453
|
+
// return;
|
|
454
|
+
// }
|
|
455
|
+
writer.writeFloat64(value.length);
|
|
456
|
+
ensureBytes(value.length);
|
|
457
|
+
value.copy(currentBuffer, currentBufferPos);
|
|
458
|
+
currentBufferPos += value.length;
|
|
459
|
+
},
|
|
460
|
+
};
|
|
461
|
+
|
|
462
|
+
outputBuffers.push(Buffer.from(JSON.stringify(settings)));
|
|
463
|
+
|
|
464
|
+
// Break values up into groups of 10K, so we don't lock up the main thread
|
|
465
|
+
let valueGroups: PathValue[][] = [];
|
|
466
|
+
const VALUE_GROUP_SIZE = 10_000;
|
|
467
|
+
for (let i = 0; i < values.length; i += VALUE_GROUP_SIZE) {
|
|
468
|
+
valueGroups.push(values.slice(i, i + VALUE_GROUP_SIZE));
|
|
469
|
+
}
|
|
470
|
+
|
|
471
|
+
for (let values of valueGroups) {
|
|
472
|
+
this.pathValuesWrite(writer, values, settings);
|
|
473
|
+
await delay("afterPaint");
|
|
474
|
+
}
|
|
475
|
+
if (!settings.noLocks) {
|
|
476
|
+
// TODO: Support breaking locks up
|
|
477
|
+
this.readLocksWrite(writer, values.map(x => x.locks));
|
|
478
|
+
}
|
|
479
|
+
await this.valuesWrite(writer, valueGroups);
|
|
480
|
+
|
|
481
|
+
if (currentBufferPos > 0) {
|
|
482
|
+
outputBuffers.push(currentBuffer.slice(0, currentBufferPos));
|
|
483
|
+
}
|
|
484
|
+
|
|
485
|
+
// NOTE: The dictionary is ordered, so we don't need to sort by the values or anything
|
|
486
|
+
let strings = Array.from(stringDictionary.keys());
|
|
487
|
+
// Assume each character takes up 2 bytes. It will usually take just 1, but... smaller is fine here.
|
|
488
|
+
const MAX_STRING_LENGTH = Math.ceil(MAX_BUFFER_SIZE / 2);
|
|
489
|
+
let stringParts: string[][] = [];
|
|
490
|
+
let curCount = 0;
|
|
491
|
+
let curStrings: string[] = [];
|
|
492
|
+
stringParts.push(curStrings);
|
|
493
|
+
for (let str of strings) {
|
|
494
|
+
if (curCount + str.length > MAX_STRING_LENGTH) {
|
|
495
|
+
curStrings = [];
|
|
496
|
+
stringParts.push(curStrings);
|
|
497
|
+
curCount = 0;
|
|
498
|
+
}
|
|
499
|
+
curStrings.push(str);
|
|
500
|
+
curCount += str.length;
|
|
501
|
+
}
|
|
502
|
+
|
|
503
|
+
let stringBuffers: Buffer[] = [];
|
|
504
|
+
stringBuffers.push(asBuffer(new Float64Array([stringParts.length])));
|
|
505
|
+
for (let strings of stringParts) {
|
|
506
|
+
stringBuffers.push(StringSerialize.serializeStrings(strings));
|
|
507
|
+
await delay("paintLoop");
|
|
508
|
+
}
|
|
509
|
+
// Add reversed, so they can be read in backwards order
|
|
510
|
+
stringBuffers.reverse();
|
|
511
|
+
outputBuffers.push(...stringBuffers);
|
|
512
|
+
|
|
513
|
+
if (settings.compression === "gzip") {
|
|
514
|
+
// NOTE: Due to how the LZ77 window works merging buffers probably won't reduce the size by that much.
|
|
515
|
+
let compressedBuffers = await Promise.all(outputBuffers.slice(1).map(x => Zip.gzip(x, 1)));
|
|
516
|
+
let compressedOutputBuffers = [outputBuffers[0], ...compressedBuffers];
|
|
517
|
+
|
|
518
|
+
// If the compress factor is less than a threshold, use the uncompressed buffers
|
|
519
|
+
let uncompressedSize = outputBuffers.reduce((total, x) => total + x.length, 0);
|
|
520
|
+
let compressedSize = compressedOutputBuffers.reduce((total, x) => total + x.length, 0);
|
|
521
|
+
if (compressedSize / uncompressedSize < MIN_COMPRESS_FACTOR) {
|
|
522
|
+
outputBuffers = compressedOutputBuffers;
|
|
523
|
+
} else {
|
|
524
|
+
settings.compression = undefined;
|
|
525
|
+
outputBuffers[0] = Buffer.from(JSON.stringify(settings));
|
|
526
|
+
}
|
|
527
|
+
}
|
|
528
|
+
|
|
529
|
+
if (config?.singleBuffer) {
|
|
530
|
+
let sizes = Buffer.from(new Float64Array([outputBuffers.length, ...outputBuffers.map(x => x.length)]).buffer);
|
|
531
|
+
outputBuffers.unshift(sizes);
|
|
532
|
+
outputBuffers = [Buffer.concat(outputBuffers)];
|
|
533
|
+
}
|
|
534
|
+
|
|
535
|
+
return outputBuffers;
|
|
536
|
+
}
|
|
537
|
+
/**
 * Deserializes buffers produced by serialize() back into PathValue[].
 *
 * Buffer layout (after optional single-buffer unpacking):
 *  - buffer 0: JSON-encoded DataSettings header (version, compression, valueCount, noLocks);
 *  - trailing buffer(s): the string table (version 0: exactly one buffer;
 *    version >= 1: a Float64 count buffer followed by that many string buffers,
 *    all popped from the END of the list);
 *  - everything in between: path/lock/value sections read sequentially via Reader.
 *
 * Fixes in this revision:
 *  - removed a leftover debug `console.log(bufferIndex)` in readString's error path;
 *  - removed a redundant `ensureBytes(8)` in readString (readFloat64 already ensures 8 bytes);
 *  - `noLocks` now gives each PathValue its OWN empty locks array instead of one
 *    shared `[]` instance aliased across every entry (Array.fill reuses the same object).
 */
@measureFnc
public async deserialize(buffers: Buffer[], config?: {
    // NOTE: The default is to lazily deserialize, requiring calling getPathValue to get the value.
    fullDeserialize?: boolean;
    // Input is one concatenated buffer prefixed with a Float64 [count, ...lengths] header (see serialize).
    singleBuffer?: boolean;
    // All strings become empty. This can make deserialization over 2X faster.
    skipStrings?: boolean;
    // All values become undefined. If also skipping strings OR if you have large values,
    //  this can make deserialization significantly faster.
    skipValues?: boolean;
}): Promise<PathValue[]> {
    // Defensive copy: we consume the list via shift()/pop() and reassign it below.
    buffers = buffers.slice();

    if (config?.singleBuffer) {
        const buffer = buffers[0];
        // Header written by serialize: Float64Array [bufferCount, len0, len1, ...].
        let bufferCount = buffer.readDoubleLE(0);
        let bufferLengths: number[] = [];
        for (let i = 1; i <= bufferCount; i++) {
            let length = buffer.readDoubleLE(i * 8);
            if (length < 0 || length > buffer.byteLength) {
                throw new Error(`Corrupted data file. Invalid buffer length ${length}, expected 0 <= length <= ${buffer.byteLength}`);
            }
            bufferLengths.push(length);
        }

        let newBuffers: Buffer[] = [];
        // Payload starts right after the (1 + bufferCount) Float64 header doubles.
        let offset = 8 + bufferCount * 8;
        for (let length of bufferLengths) {
            let end = offset + length;
            if (end > buffer.byteLength) {
                throw new Error(`Corrupted data file. Invalid buffer length ${length}, expected 0 <= length <= ${buffer.byteLength}`);
            }
            // slice() creates a view sharing memory with the parent buffer (no copy).
            newBuffers.push(buffer.slice(offset, offset + length));
            offset += length;
        }
        buffers = newBuffers;
    }

    let settings = JSON.parse(buffers.shift()!.toString("utf8")) as DataSettings;
    let version = settings.version || 0;
    if (settings.compression === "gzip") {
        // serialize() leaves the settings buffer uncompressed; only the rest is gzipped.
        buffers = await Zip.gunzipBatch(buffers);
    }

    // Pops from the END of the list; string tables are stored last.
    function getLastBuffer(): Buffer {
        let last = buffers.pop();
        if (!last) {
            throw new Error(`Expected another buffer, but there are no more left.`);
        }
        return last;
    }

    let strings: string[] | undefined;
    let stringBuffers: Buffer[] = [];
    // Last buffers are the strings
    if (version === 0) {
        stringBuffers.push(getLastBuffer());
    } else if (version >= 1) {
        // Version >= 1 stores a Float64 count of string buffers, then the buffers themselves.
        let countBuffer = getLastBuffer();
        let count = countBuffer.readDoubleLE(0);
        for (let i = 0; i < count; i++) {
            stringBuffers.push(getLastBuffer());
        }
    }
    if (!config?.skipStrings) {
        let stringArrays: string[][] = [];
        for (let stringBuffer of stringBuffers) {
            let obj = StringSerialize.deserializeStringsLazy(stringBuffer);
            while (true) {
                let nextStrings = obj.getNextStrings();
                if (!nextStrings) {
                    break;
                }
                stringArrays.push(nextStrings);
                // Yield between batches so long decodes don't starve the paint loop.
                if (stringArrays.length > 1) {
                    await delay("paintLoop");
                }
            }
            await delay("paintLoop");
        }
        strings = stringArrays.flat();
    }

    // Sequential cursor over the remaining buffers. A single read must never
    // straddle two buffers; ensureBytes only advances to the next buffer when
    // the current one has been consumed exactly.
    let bufferIndex = 0;
    let bufferPos = 0;
    function ensureBytes(count: number) {
        if (bufferPos + count <= buffers[bufferIndex].length) return;
        if (bufferPos !== buffers[bufferIndex].length) {
            throw new Error(`Reading from two buffers at once. This is unexpected.`);
        }
        bufferIndex++;
        bufferPos = 0;
    }
    let reader: Reader = {
        readFloat64() {
            ensureBytes(8);
            // Version 0 wrote big-endian doubles; version >= 1 writes little-endian.
            let result = version >= 1 ? buffers[bufferIndex].readDoubleLE(bufferPos) : buffers[bufferIndex].readDoubleBE(bufferPos);
            bufferPos += 8;
            return result;
        },
        readByte() {
            ensureBytes(1);
            let result = buffers[bufferIndex][bufferPos];
            bufferPos++;
            return result;
        },
        readString() {
            // Strings are stored as Float64 indexes into the shared string table.
            let index = reader.readFloat64();
            if (!strings) {
                // skipStrings mode: the table was never decoded.
                return "";
            }
            if (index < 0 || index >= strings.length) {
                throw new Error(`Invalid string index ${index}, expected 0 <= index < ${strings.length}`);
            }
            return strings[index];
        },
        readBuffer() {
            let length = reader.readFloat64();
            ensureBytes(length);
            let result = buffers[bufferIndex].slice(bufferPos, bufferPos + length);
            bufferPos += length;
            return result;
        },
    };

    let partialValues = this.pathValuesRead(reader, settings.valueCount);
    // Each entry gets its own empty array (Array.fill([]) would alias ONE array across all).
    let readLocks = settings.noLocks ? Array.from({ length: settings.valueCount }, () => []) : this.readLocksRead(reader);

    let values: unknown[] = [];
    let valuesAreLazy: boolean[] = [];
    if (!config?.skipValues) {
        let valuesReadObj = this.valuesRead(reader, settings.valueCount, !config?.fullDeserialize);
        values = valuesReadObj.values;
        valuesAreLazy = valuesReadObj.valuesAreLazy;
    }

    let pathValues: PathValue[] = [];
    for (let i = 0; i < settings.valueCount; i++) {
        let partialValue = partialValues[i];
        pathValues.push({
            path: partialValue.path,
            time: partialValue.time,

            locks: readLocks[i],
            value: values[i],
            isValueLazy: valuesAreLazy[i] || false,

            canGCValue: partialValue.canGCValue,
            event: partialValue.event,
            isTransparent: partialValue.isTransparent,
            valid: partialValue.valid,
            source: partialValue.source,
            lockCount: partialValue.lockCount,
            updateCount: partialValue.updateCount,
        });
    }

    return pathValues;
}
|
|
698
|
+
|
|
699
|
+
// Maps lazy marker objects to their still-encoded buffers. Using a WeakMap
// keyed on the marker means the buffer becomes collectible the moment the
// marker object itself is unreachable.
lazyValues = new WeakMap<{}, Buffer>();
// Registers a not-yet-decoded buffer and returns the marker object that stands
// in for the value until getPathValue is called to decode it.
// NOTE: Using an object makes garbage collection MUCH easier, basically
//  eliminating leaks (and it should be fairly fast — we create a Buffer
//  anyways, which is probably a lot slower than creating an object).
addLazyValue(buf: Buffer) {
    const marker = { call_PathValueSerializer_getPathValue_toGetThisValue: true };
    this.lazyValues.set(marker, buf);
    return marker;
}
|
|
708
|
+
|
|
709
|
+
/**
 * Resolves a PathValue's value, decoding (cbor) it if it is still lazy.
 * By default the decoded value is cached back onto pathValue (mutating both
 * `isValueLazy` and `value`); pass "noMutate" to decode without touching it.
 * Returns undefined when pathValue is undefined; non-lazy values pass through.
 */
public getPathValue(pathValue: PathValue | undefined, noMutate?: "noMutate"): unknown {
    if (!pathValue) return undefined;
    if (pathValue.isValueLazy) {
        // NOTE: If this throws, it likely means you used atomicObjectWrite.
        // Use atomicObjectWriteNoFreeze or doAtomicWrites instead.
        if (!noMutate) {
            try {
                // Best-effort cache flag update; assignment may throw if pathValue is frozen.
                pathValue.isValueLazy = false;
            } catch { }
        }
        // The lazy `value` is a marker object keyed into lazyValues (see addLazyValue).
        let buffer = this.lazyValues.get(pathValue.value as {});
        if (!buffer) {
            // No registered buffer for this marker: log and fall back to the raw value.
            console.error(`Expected lazy value to have a buffer, but it didn't. Lazy ref has a typeof ${typeof pathValue.value} (${String(pathValue.value)})`);
            return pathValue.value;
        }
        // Decode and freeze so the cached value cannot be mutated by callers.
        let newValue = recursiveFreeze(cbor.decode(buffer));
        if (!noMutate) {
            try {
                // Best-effort write-back of the decoded value (may throw on frozen objects).
                pathValue.value = newValue;
            } catch { }
        }
        return newValue;
    }
    return pathValue.value;
}
|
|
734
|
+
/**
 * Like getPathValue, but lazy values are returned as their raw encoded Buffer
 * instead of being decoded. Non-lazy values (and undefined) pass through as-is.
 */
public getPathValueOrBuffer(pathValue: PathValue | undefined): unknown | Buffer {
    if (!pathValue) return undefined;
    if (!pathValue.isValueLazy) {
        return pathValue.value;
    }
    const lazyBuffer = this.lazyValues.get(pathValue.value as {});
    if (lazyBuffer) {
        return lazyBuffer;
    }
    // NOTE: Did you pass a raw PathValue and then try to use PathValueSerializer with it?
    //  - Instead you should pass a buffer serialized with pathValueSerializer.serialize and
    //      deserialized with pathValueSerializer.deserialize.
    console.error(`Expected lazy value to have a buffer, but it didn't. Lazy ref has a typeof ${typeof pathValue.value} (${String(pathValue.value)})`);
    return pathValue.value;
}
|
|
749
|
+
|
|
750
|
+
// Returns the value's serialized bytes: reuses the stored lazy buffer when one
// exists, otherwise CBOR-encodes the already-materialized value.
private getBuffer(pathValue: PathValue): Buffer {
    if (pathValue.isValueLazy) {
        const cached = this.lazyValues.get(pathValue.value as {});
        if (cached) return cached;
    }
    return cborEncoder().encode(pathValue.value);
}
|
|
755
|
+
/**
 * Orders two PathValues by their values. When both are present, compares their
 * serialized bytes (so lazy values need not be decoded); when either is
 * missing, falls back to comparing the materialized values.
 */
public compareValuePaths(a: PathValue | undefined, b: PathValue | undefined) {
    if (a && b) {
        return this.getBuffer(a).compare(this.getBuffer(b));
    }
    return compare(this.getPathValue(a), this.getPathValue(b));
}
|
|
765
|
+
}
|
|
766
|
+
|
|
767
|
+
|
|
768
|
+
// Process-wide singleton serializer instance.
export const pathValueSerializer = new PathValueSerializer();
// Also attached to the global object — NOTE(review): presumably for debugging
// from a console/REPL; confirm before relying on it.
(global as any).pathValueSerializer = pathValueSerializer;
|