querysub 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.dependency-cruiser.js +304 -0
- package/.eslintrc.js +51 -0
- package/.github/copilot-instructions.md +1 -0
- package/.vscode/settings.json +25 -0
- package/bin/deploy.js +4 -0
- package/bin/function.js +4 -0
- package/bin/server.js +4 -0
- package/costsBenefits.txt +112 -0
- package/deploy.ts +3 -0
- package/inject.ts +1 -0
- package/package.json +60 -0
- package/prompts.txt +54 -0
- package/spec.txt +820 -0
- package/src/-a-archives/archiveCache.ts +913 -0
- package/src/-a-archives/archives.ts +148 -0
- package/src/-a-archives/archivesBackBlaze.ts +792 -0
- package/src/-a-archives/archivesDisk.ts +418 -0
- package/src/-a-archives/copyLocalToBackblaze.ts +24 -0
- package/src/-a-auth/certs.ts +517 -0
- package/src/-a-auth/der.ts +122 -0
- package/src/-a-auth/ed25519.ts +1015 -0
- package/src/-a-auth/node-forge-ed25519.d.ts +17 -0
- package/src/-b-authorities/dnsAuthority.ts +203 -0
- package/src/-b-authorities/emailAuthority.ts +57 -0
- package/src/-c-identity/IdentityController.ts +200 -0
- package/src/-d-trust/NetworkTrust2.ts +150 -0
- package/src/-e-certs/EdgeCertController.ts +288 -0
- package/src/-e-certs/certAuthority.ts +192 -0
- package/src/-f-node-discovery/NodeDiscovery.ts +543 -0
- package/src/-g-core-values/NodeCapabilities.ts +134 -0
- package/src/-g-core-values/oneTimeForward.ts +91 -0
- package/src/-h-path-value-serialize/PathValueSerializer.ts +769 -0
- package/src/-h-path-value-serialize/stringSerializer.ts +176 -0
- package/src/0-path-value-core/LoggingClient.tsx +24 -0
- package/src/0-path-value-core/NodePathAuthorities.ts +978 -0
- package/src/0-path-value-core/PathController.ts +1 -0
- package/src/0-path-value-core/PathValueCommitter.ts +565 -0
- package/src/0-path-value-core/PathValueController.ts +231 -0
- package/src/0-path-value-core/archiveLocks/ArchiveLocks.ts +154 -0
- package/src/0-path-value-core/archiveLocks/ArchiveLocks2.ts +820 -0
- package/src/0-path-value-core/archiveLocks/archiveSnapshots.ts +180 -0
- package/src/0-path-value-core/debugLogs.ts +90 -0
- package/src/0-path-value-core/pathValueArchives.ts +483 -0
- package/src/0-path-value-core/pathValueCore.ts +2217 -0
- package/src/1-path-client/RemoteWatcher.ts +558 -0
- package/src/1-path-client/pathValueClientWatcher.ts +702 -0
- package/src/2-proxy/PathValueProxyWatcher.ts +1857 -0
- package/src/2-proxy/archiveMoveHarness.ts +376 -0
- package/src/2-proxy/garbageCollection.ts +753 -0
- package/src/2-proxy/pathDatabaseProxyBase.ts +37 -0
- package/src/2-proxy/pathValueProxy.ts +139 -0
- package/src/2-proxy/schema2.ts +518 -0
- package/src/3-path-functions/PathFunctionHelpers.ts +129 -0
- package/src/3-path-functions/PathFunctionRunner.ts +619 -0
- package/src/3-path-functions/PathFunctionRunnerMain.ts +67 -0
- package/src/3-path-functions/deployBlock.ts +10 -0
- package/src/3-path-functions/deployCheck.ts +7 -0
- package/src/3-path-functions/deployMain.ts +160 -0
- package/src/3-path-functions/pathFunctionLoader.ts +282 -0
- package/src/3-path-functions/syncSchema.ts +475 -0
- package/src/3-path-functions/tests/functionsTest.ts +135 -0
- package/src/3-path-functions/tests/rejectTest.ts +77 -0
- package/src/4-dom/css.tsx +29 -0
- package/src/4-dom/cssTypes.d.ts +212 -0
- package/src/4-dom/qreact.tsx +2322 -0
- package/src/4-dom/qreactTest.tsx +417 -0
- package/src/4-querysub/Querysub.ts +877 -0
- package/src/4-querysub/QuerysubController.ts +620 -0
- package/src/4-querysub/copyEvent.ts +0 -0
- package/src/4-querysub/permissions.ts +289 -0
- package/src/4-querysub/permissionsShared.ts +1 -0
- package/src/4-querysub/querysubPrediction.ts +525 -0
- package/src/5-diagnostics/FullscreenModal.tsx +67 -0
- package/src/5-diagnostics/GenericFormat.tsx +165 -0
- package/src/5-diagnostics/Modal.tsx +79 -0
- package/src/5-diagnostics/Table.tsx +183 -0
- package/src/5-diagnostics/TimeGrouper.tsx +114 -0
- package/src/5-diagnostics/diskValueAudit.ts +216 -0
- package/src/5-diagnostics/memoryValueAudit.ts +442 -0
- package/src/5-diagnostics/nodeMetadata.ts +135 -0
- package/src/5-diagnostics/qreactDebug.tsx +309 -0
- package/src/5-diagnostics/shared.ts +26 -0
- package/src/5-diagnostics/synchronousLagTracking.ts +47 -0
- package/src/TestController.ts +35 -0
- package/src/allowclient.flag +0 -0
- package/src/bits.ts +86 -0
- package/src/buffers.ts +69 -0
- package/src/config.ts +53 -0
- package/src/config2.ts +48 -0
- package/src/diagnostics/ActionsHistory.ts +56 -0
- package/src/diagnostics/NodeViewer.tsx +503 -0
- package/src/diagnostics/SizeLimiter.ts +62 -0
- package/src/diagnostics/TimeDebug.tsx +18 -0
- package/src/diagnostics/benchmark.ts +139 -0
- package/src/diagnostics/errorLogs/ErrorLogController.ts +515 -0
- package/src/diagnostics/errorLogs/ErrorLogCore.ts +274 -0
- package/src/diagnostics/errorLogs/LogClassifiers.tsx +302 -0
- package/src/diagnostics/errorLogs/LogFilterUI.tsx +84 -0
- package/src/diagnostics/errorLogs/LogNotify.tsx +101 -0
- package/src/diagnostics/errorLogs/LogTimeSelector.tsx +724 -0
- package/src/diagnostics/errorLogs/LogViewer.tsx +757 -0
- package/src/diagnostics/errorLogs/hookErrors.ts +60 -0
- package/src/diagnostics/errorLogs/logFiltering.tsx +149 -0
- package/src/diagnostics/heapTag.ts +13 -0
- package/src/diagnostics/listenOnDebugger.ts +77 -0
- package/src/diagnostics/logs/DiskLoggerPage.tsx +572 -0
- package/src/diagnostics/logs/ObjectDisplay.tsx +165 -0
- package/src/diagnostics/logs/ansiFormat.ts +108 -0
- package/src/diagnostics/logs/diskLogGlobalContext.ts +38 -0
- package/src/diagnostics/logs/diskLogger.ts +305 -0
- package/src/diagnostics/logs/diskShimConsoleLogs.ts +32 -0
- package/src/diagnostics/logs/injectFileLocationToConsole.ts +50 -0
- package/src/diagnostics/logs/logGitHashes.ts +30 -0
- package/src/diagnostics/managementPages.tsx +289 -0
- package/src/diagnostics/periodic.ts +89 -0
- package/src/diagnostics/runSaturationTest.ts +416 -0
- package/src/diagnostics/satSchema.ts +64 -0
- package/src/diagnostics/trackResources.ts +82 -0
- package/src/diagnostics/watchdog.ts +55 -0
- package/src/errors.ts +132 -0
- package/src/forceProduction.ts +3 -0
- package/src/fs.ts +72 -0
- package/src/heapDumps.ts +666 -0
- package/src/https.ts +2 -0
- package/src/inject.ts +1 -0
- package/src/library-components/ATag.tsx +84 -0
- package/src/library-components/Button.tsx +344 -0
- package/src/library-components/ButtonSelector.tsx +64 -0
- package/src/library-components/DropdownCustom.tsx +151 -0
- package/src/library-components/DropdownSelector.tsx +32 -0
- package/src/library-components/Input.tsx +334 -0
- package/src/library-components/InputLabel.tsx +198 -0
- package/src/library-components/InputPicker.tsx +125 -0
- package/src/library-components/LazyComponent.tsx +62 -0
- package/src/library-components/MeasureHeightCSS.tsx +48 -0
- package/src/library-components/MeasuredDiv.tsx +47 -0
- package/src/library-components/ShowMore.tsx +51 -0
- package/src/library-components/SyncedController.ts +171 -0
- package/src/library-components/TimeRangeSelector.tsx +407 -0
- package/src/library-components/URLParam.ts +263 -0
- package/src/library-components/colors.tsx +14 -0
- package/src/library-components/drag.ts +114 -0
- package/src/library-components/icons.tsx +692 -0
- package/src/library-components/niceStringify.ts +50 -0
- package/src/library-components/renderToString.ts +52 -0
- package/src/misc/PromiseRace.ts +101 -0
- package/src/misc/color.ts +30 -0
- package/src/misc/getParentProcessId.cs +53 -0
- package/src/misc/getParentProcessId.ts +53 -0
- package/src/misc/hash.ts +83 -0
- package/src/misc/ipPong.js +13 -0
- package/src/misc/networking.ts +2 -0
- package/src/misc/random.ts +45 -0
- package/src/misc.ts +19 -0
- package/src/noserverhotreload.flag +0 -0
- package/src/path.ts +226 -0
- package/src/persistentLocalStore.ts +37 -0
- package/src/promise.ts +15 -0
- package/src/server.ts +73 -0
- package/src/src.d.ts +1 -0
- package/src/test/heapProcess.ts +36 -0
- package/src/test/mongoSatTest.tsx +55 -0
- package/src/test/satTest.ts +193 -0
- package/src/test/test.tsx +552 -0
- package/src/zip.ts +92 -0
- package/src/zipThreaded.ts +106 -0
- package/src/zipThreadedWorker.js +19 -0
- package/tsconfig.json +27 -0
- package/yarnSpec.txt +56 -0
@@ -0,0 +1,376 @@
+
+
+// yarn typenode src\framework-beta\archiveMoveBase\archiveMoveHarness.ts --nonetwork
+
+import { formatNumber, formatTime } from "socket-function/src/formatting/format";
+import { blue, green, red } from "socket-function/src/formatting/logColors";
+import { measureBlock, measureWrap } from "socket-function/src/profiling/measure";
+import { AuthorityPath, pathValueAuthority2 } from "../0-path-value-core/NodePathAuthorities";
+import { FileInfo, ArchiveTransaction } from "../0-path-value-core/archiveLocks/ArchiveLocks";
+import { DecodedValuePath, pathValueArchives, PathValueArchives } from "../0-path-value-core/pathValueArchives";
+import { PathValue, VALUE_GC_THRESHOLD, FILE_VALUE_COUNT_LIMIT, FILE_SIZE_LIMIT, compareTime } from "../0-path-value-core/pathValueCore";
+import { logNodeStats } from "../5-diagnostics/nodeMetadata";
+import { getSingleSizeEstimate } from "../5-diagnostics/shared";
+import { getOurAuthorities } from "../config2";
+import debugbreak from "debugbreak";
+
+// NOTE: We probably COULD load values with skipStrings+skipValues for some GCers, as some
+//  GCing is as simple as, "if this value is undefined, delete it". But... we GC
+//  so seldom that it wouldn't make a difference (going from 1 minute of GC time
+//  per day to 20 seconds doesn't really matter).
+
+let lastFileWrites = new Map<string, string[]>();
+
+export async function runArchiveMover(config: {
+    // Defaults to getOurAuthorities.
+    // NOTE: Runs once per authority, as we want to keep data as sharded as possible.
+    authorities?: AuthorityPath[];
+
+    // If set, we provide all values. This means there is a high chance out-of-order values will
+    //  be provided. Otherwise we provide a consistent view of the data at Date.now() - ARCHIVE_FLUSH_LIMIT
+    //  (which is quite old, so data won't appear for around an hour).
+    readLiveData?: boolean;
+
+    // Runs without locking the existence of files, so duplicates can appear. Results in fewer rejections,
+    //  but... then you might also have duplicate (or even redundant but slightly different) files.
+    allowDuplicates?: boolean;
+
+    maxFileValueCount?: number;
+    maxFileByteCount?: number;
+
+    /** We abort the move if the new file count > input file count * newFileCountThreshold.
+     *  - runArchiveMover CANNOT add new data, and is only designed to remove unneeded data.
+     *      Due to how caching works, if we aren't removing enough files or enough bytes
+     *      (for example, if we only reduce the size by 10%), then the move will likely result in
+     *      MORE data being read, and so isn't worth it.
+     *  (LOWER means we merge less often.)
+     */
+    newFileCountThreshold?: number;
+    /** We abort the move if the new bytes > input bytes * newFileSizeThreshold.
+     *  (LOWER means we merge less often.)
+     */
+    newFileSizeThreshold?: number;
+
+    /** Always commit changes, even if the thresholds are not met. */
+    force?: boolean;
+
+    outputType: DecodedValuePath["sourceType"];
+    runMover: (
+        config: {
+            values: PathValue[];
+            /** We provide all values on the authority that are <= snapshotTime that will ever exist
+             *  (it's an atomic snapshot).
+             *  - Of course, if readLiveData is set this is Number.POSITIVE_INFINITY, as we
+             *      try to provide the latest data... making this no longer an atomic snapshot.
+             */
+            authority: AuthorityPath;
+            snapshotTime: number;
+            logKey: string;
+        }
+    ) => Promise<{
+        newValues: "abort" | {
+            // A file is created per key (with the key somewhere in the file name, although perhaps escaped or hashed).
+            //  We might split one file further, if the size exceeds certain limits. These limits are mostly
+            //  for diagnostics, to allow us to view snapshots by time.
+            [key: string]: PathValue[];
+        };
+    }>;
+}) {
+    let authorities = config.authorities ?? getOurAuthorities();
+    let minAge = config.readLiveData ? Number.NEGATIVE_INFINITY : VALUE_GC_THRESHOLD;
+    let minThreshold = Date.now() - minAge;
+    // NOTE: If the value count is too high, parsing in the ArchiveViewer might crash the browser.
+    const maxFileValueCount = config.maxFileValueCount ?? FILE_VALUE_COUNT_LIMIT;
+
+    const maxFileByteCount = config.maxFileByteCount ?? FILE_SIZE_LIMIT;
+    // NOTE: The file count threshold is quite high, to prevent the genesis writes (even though they
+    //  are merged already) from causing us to re-merge all the files unnecessarily.
+    const newFileCountThreshold = config.newFileCountThreshold ?? (1 / 10);
+    const newFileSizeThreshold = config.newFileSizeThreshold ?? 0.6;
+    function meetsThreshold(config: {
+        inputFiles: number;
+        inputBytes: number;
+        outputFiles: number;
+        outputBytes: number;
+    }) {
+        let { inputFiles, inputBytes, outputFiles, outputBytes } = config;
+        let outputThreshold = Math.ceil(inputFiles * newFileCountThreshold);
+        let outputSizeThreshold = Math.ceil(inputBytes * newFileSizeThreshold);
+        let meets = (outputFiles < outputThreshold || outputBytes < outputSizeThreshold);
+        if (!meets) {
+            console.log(`Skipping move due to insufficient reduction. We need either files ${outputFiles} < ${outputThreshold} OR bytes ${formatNumber(outputBytes)}B < ${formatNumber(outputSizeThreshold)}B, and neither holds.`);
+        }
+        return meets;
+    }
+
+    // NOTE: We COULD read from all authorities, handling them all at once. This MIGHT make
+    //  GCing more efficient. But... it also might not. We'll see.
+    //  - And, sharding shouldn't be random, it should be path based, just like GCing. Although,
+    //      if we have references, the best we can do is add parallel authorities to nodes,
+    //      so... we might have to handle all authorities at once, instead of looping like this...
+    for (let authority of authorities) {
+        let locker = await pathValueArchives.getArchiveLocker();
+        let matchedDirs = await pathValueArchives.getAuthorityDirs(authority);
+        let authorityDir = pathValueAuthority2.getArchiveDirectory(authority);
+
+        let startTime = Date.now();
+        let readCache = new Map<string, Buffer>();
+        let lastAborted = false;
+        let lastResult: "accepted" | "rejected" | "aborted" = "accepted";
+
+        let inputBytes = 0;
+        let outputBytes = 0;
+        let inputValueCount = 0;
+        let outputValueCount = 0;
+        let inputFiles = 0;
+        let outputFiles = 0;
+        let abortedDueToInsufficientReduction = false;
+
+        let reread = true;
+        while (reread) {
+            reread = false;
+            lastAborted = false;
+            lastResult = await locker.atomicSwapFiles({
+                allowDuplicates: config.allowDuplicates,
+            }, async (valueFiles, readFiles) => {
+                valueFiles = valueFiles.filter(x => matchedDirs.some(y => x.file.startsWith(y)));
+
+                valueFiles = valueFiles.filter(x => {
+                    let obj = pathValueArchives.decodeDataPath(x.file);
+                    if (!obj.minTime) return true;
+                    return !(obj.minTime > minThreshold);
+                });
+
+                let lastWriteHash = JSON.stringify(matchedDirs);
+                // NOTE: ACTUALLY, this doesn't work, as the oldFiles might contain values which have GC timers,
+                //  or which... are large, and just became deleted.
+                // let lastFiles = new Set(lastFileWrites.get(lastWriteHash) || []);
+                // let newFiles = valueFiles.filter(x => !lastFiles.has(x.file));
+                // // BEST case, ALL oldFiles are collapsed. And if that doesn't even cause a move,
+                // //   then just skip the check now
+                // if (!config.force && meetsThreshold({
+                //     inputFiles: valueFiles.length,
+                //     inputBytes: valueFiles.map(x => x.size).reduce((a, b) => a + b, 0),
+                //     outputFiles: newFiles.length,
+                //     outputBytes: newFiles.map(x => x.size).reduce((a, b) => a + b, 0),
+                // })) {
+                //     return [];
+                // }
+
+                // Read files, and if any are missing, rerun until we can read all files.
+                await measureBlock(async function locker_readFiles() {
+                    let remainingFiles = valueFiles.filter(x => !readCache.has(x.file));
+                    let buffers = await readFiles(remainingFiles);
+                    for (let i = 0; i < remainingFiles.length; i++) {
+                        let buffer = buffers[i];
+                        if (!buffer) {
+                            console.log(`File missing, re-reading`, remainingFiles[i].file);
+                            reread = true;
+                            break;
+                        }
+                        readCache.set(remainingFiles[i].file, buffer);
+                    }
+                });
+                if (reread) {
+                    console.log(`Files changed, re-reading`);
+                    return [];
+                }
+                function isValueIncluded(value: PathValue) {
+                    if (value.time.time > minThreshold) return false;
+                    if (!pathValueAuthority2.isInAuthority(authority, value.path)) return false;
+                    return true;
+                }
+
+                let mutatedFiles: FileInfo[] = [];
+                // authorityFolder => values
+                let floatingValues = new Map<string, PathValue[][]>();
+                let inputValues: PathValue[][] = [];
+                inputBytes = 0;
+                inputFiles = 0;
+                inputValueCount = 0;
+
+                await measureBlock(async function locker_parseFiles() {
+                    for (let file of valueFiles) {
+                        let buffer = readCache.get(file.file);
+                        if (!buffer) {
+                            throw new Error(`Buffer missing for ${file.file}`);
+                        }
+                        inputBytes += buffer.byteLength;
+                        inputFiles++;
+                        let valuesObj = await PathValueArchives.loadValuesFromBuffer({
+                            path: file.file,
+                            data: buffer,
+                        });
+                        let values = valuesObj.values;
+                        inputValueCount += values.length;
+                        let includedValues: PathValue[] = [];
+                        let excludedValues: PathValue[] = [];
+                        for (let value of values) {
+                            if (isValueIncluded(value)) {
+                                includedValues.push(value);
+                            } else {
+                                excludedValues.push(value);
+                            }
+                        }
+                        if (includedValues.length === 0) {
+                            continue;
+                        }
+                        if (excludedValues.length > 0) {
+                            let fileDir = file.file.split("/").slice(0, -1).join("/") + "/";
+                            let list = floatingValues.get(fileDir);
+                            if (!list) {
+                                floatingValues.set(fileDir, list = []);
+                            }
+                            list.push(excludedValues);
+                        }
+                        inputValues.push(includedValues);
+                        mutatedFiles.push(file);
+                    }
+                });
+
+                let inputValuesFlat = inputValues.flat();
+                let result = await measureBlock(async function locker_runApp() {
+                    return config.runMover({
+                        values: inputValuesFlat,
+                        authority,
+                        snapshotTime: minThreshold,
+                        logKey: authorityDir,
+                    });
+                }, `locker_app_${config.runMover.name}`);
+                if (result.newValues === "abort") {
+                    lastAborted = true;
+                    return [];
+                }
+
+                let transaction: ArchiveTransaction = {
+                    createFiles: [],
+                    deleteFiles: mutatedFiles,
+                };
+                const addValues = measureWrap(async function locker_addValues(dir: string, values: PathValue[], key: string) {
+                    let split = splitValuesBelowLimit({
+                        values,
+                        maxFileValueCount,
+                        maxFileByteCount,
+                    });
+                    outputFiles += split.length;
+                    outputValueCount += values.length;
+                    for (let splitValue of split) {
+                        let dataObj = await pathValueArchives.encodeValuePaths(splitValue.values, {
+                            pathOverrides: {
+                                sourceType: config.outputType,
+                                misc: key,
+                            },
+                        });
+                        if (!dataObj) continue;
+                        outputBytes += dataObj.data.byteLength;
+                        let fileName = pathValueArchives.encodeDataPath({
+                            ...pathValueArchives.getDefaultValuePathInput(),
+                            valueCount: splitValue.values.length,
+                            byteCount: dataObj.data.byteLength,
+                            minTime: splitValue.minTime,
+                            maxTime: splitValue.maxTime,
+                            sourceType: config.outputType,
+                            misc: key,
+                        });
+                        transaction.createFiles.push({
+                            data: dataObj.data,
+                            file: dir + fileName,
+                        });
+                    }
+                });
+
+                for (let [authorityDir, values] of floatingValues) {
+                    await addValues(authorityDir, values.flat(), "floating");
+                }
+
+                for (let key in result.newValues) {
+                    let values = result.newValues[key];
+                    await addValues(authorityDir, values, key);
+                }
+
+                if (!config.force && !meetsThreshold({ inputFiles, inputBytes, outputFiles, outputBytes })) {
+                    abortedDueToInsufficientReduction = true;
+                    return [];
+                }
+
+                lastFileWrites.set(lastWriteHash, transaction.createFiles.map(x => x.file));
+
+                return [transaction];
+            });
+            if (lastAborted) {
+                lastResult = "aborted";
+            }
+        }
+        let time = Date.now() - startTime;
+        let resultFormat = (
+            lastResult === "accepted" && green("accepted")
+            || lastResult === "aborted" && green("noop")
+            || red("rejected")
+        );
+        console.log(`ArchiveMover ${config.runMover.name} on ${authorityDir} in ${formatTime(time)}. Result = ${resultFormat}.`);
+        if (lastResult === "accepted" && !abortedDueToInsufficientReduction) {
+            console.log(
+                `    { files: ${formatNumber(inputFiles)}, values: ${formatNumber(inputValueCount)}, bytes: ${formatNumber(inputBytes)} }`
+                + "\n=>\n"
+                + `    { files: ${formatNumber(outputFiles)}, values: ${formatNumber(outputValueCount)}, bytes: ${formatNumber(outputBytes)} }`
+            );
+            logNodeStats("archives|Input Files", formatNumber)(inputFiles);
+            logNodeStats("archives|Input Values", formatNumber)(inputValueCount);
+            logNodeStats("archives|Input Bytes", formatNumber)(inputBytes);
+            logNodeStats("archives|Output Files", formatNumber)(outputFiles);
+            logNodeStats("archives|Output Values", formatNumber)(outputValueCount);
+            logNodeStats("archives|Output Bytes", formatNumber)(outputBytes);
+        }
+        console.log(" ");
+    }
+}
+
+const splitValuesBelowLimit = measureWrap(function splitValuesBelowLimit(config: {
+    values: PathValue[];
+    maxFileValueCount: number;
+    maxFileByteCount: number;
+}): {
+    values: PathValue[];
+    minTime: number;
+    maxTime: number;
+}[] {
+    let { values, maxFileValueCount, maxFileByteCount } = config;
+    values = values.slice();
+    values.sort((a, b) => compareTime(a.time, b.time));
+
+    let output: {
+        values: PathValue[];
+        minTime: number;
+        maxTime: number;
+    }[] = [];
+
+    let pending: {
+        values: PathValue[];
+        byteCount: number;
+    } = {
+        values: [],
+        byteCount: 0,
+    };
+    function addPending() {
+        if (pending.values.length === 0) return;
+        let vals = pending.values;
+        output.push({
+            values: vals,
+            minTime: vals[0].time.time,
+            maxTime: vals[vals.length - 1].time.time,
+        });
+        pending.values = [];
+        pending.byteCount = 0;
+    }
+    for (let value of values) {
+        let byteCount = getSingleSizeEstimate(value);
+        if (pending.values.length >= maxFileValueCount || pending.byteCount + byteCount >= maxFileByteCount) {
+            addPending();
+        }
+        pending.values.push(value);
+        pending.byteCount += byteCount;
+    }
+    addPending();

+    return output;
+});
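
For orientation, here is a minimal sketch of how a caller might drive runArchiveMover as a garbage-collection pass, inferred only from the signature in the diff above. The "gc" output key, the isGarbage predicate (and the PathValue field it inspects), and the import paths (which assume the package ships its src directory, as the file list suggests) are all hypothetical placeholders, not part of the package; real sourceType values come from DecodedValuePath["sourceType"], which this diff does not show.

import { runArchiveMover } from "querysub/src/2-proxy/archiveMoveHarness";
import { PathValue } from "querysub/src/0-path-value-core/pathValueCore";

// Hypothetical predicate: the header NOTE suggests some GCers simply drop
// values that have become undefined. The exact PathValue field is an assumption.
function isGarbage(value: PathValue): boolean {
    return (value as any).value === undefined;
}

async function runGCPass() {
    await runArchiveMover({
        // Hypothetical sourceType; real values are defined by DecodedValuePath["sourceType"].
        outputType: "gc" as any,
        runMover: async ({ values, authority, snapshotTime, logKey }) => {
            console.log(`GC pass over ${values.length} values for ${logKey}`);
            let kept = values.filter(x => !isGarbage(x));
            // Nothing to remove => abort, and the harness skips the file swap entirely.
            if (kept.length === values.length) {
                return { newValues: "abort" as const };
            }
            // One output key => one file per key (split further only past
            // maxFileValueCount / maxFileByteCount, via splitValuesBelowLimit).
            return { newValues: { gc: kept } };
        },
    });
}

runGCPass().catch(console.error);

// With the default thresholds (newFileCountThreshold = 1/10, newFileSizeThreshold = 0.6),
// a pass over 100 input files totalling 1 GB only commits if it produces fewer than
// ceil(100 * 0.1) = 10 files OR less than 0.6 GB of output; otherwise the move is
// skipped as an insufficient reduction (unless force is set).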