querysub 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (169)
  1. package/.dependency-cruiser.js +304 -0
  2. package/.eslintrc.js +51 -0
  3. package/.github/copilot-instructions.md +1 -0
  4. package/.vscode/settings.json +25 -0
  5. package/bin/deploy.js +4 -0
  6. package/bin/function.js +4 -0
  7. package/bin/server.js +4 -0
  8. package/costsBenefits.txt +112 -0
  9. package/deploy.ts +3 -0
  10. package/inject.ts +1 -0
  11. package/package.json +60 -0
  12. package/prompts.txt +54 -0
  13. package/spec.txt +820 -0
  14. package/src/-a-archives/archiveCache.ts +913 -0
  15. package/src/-a-archives/archives.ts +148 -0
  16. package/src/-a-archives/archivesBackBlaze.ts +792 -0
  17. package/src/-a-archives/archivesDisk.ts +418 -0
  18. package/src/-a-archives/copyLocalToBackblaze.ts +24 -0
  19. package/src/-a-auth/certs.ts +517 -0
  20. package/src/-a-auth/der.ts +122 -0
  21. package/src/-a-auth/ed25519.ts +1015 -0
  22. package/src/-a-auth/node-forge-ed25519.d.ts +17 -0
  23. package/src/-b-authorities/dnsAuthority.ts +203 -0
  24. package/src/-b-authorities/emailAuthority.ts +57 -0
  25. package/src/-c-identity/IdentityController.ts +200 -0
  26. package/src/-d-trust/NetworkTrust2.ts +150 -0
  27. package/src/-e-certs/EdgeCertController.ts +288 -0
  28. package/src/-e-certs/certAuthority.ts +192 -0
  29. package/src/-f-node-discovery/NodeDiscovery.ts +543 -0
  30. package/src/-g-core-values/NodeCapabilities.ts +134 -0
  31. package/src/-g-core-values/oneTimeForward.ts +91 -0
  32. package/src/-h-path-value-serialize/PathValueSerializer.ts +769 -0
  33. package/src/-h-path-value-serialize/stringSerializer.ts +176 -0
  34. package/src/0-path-value-core/LoggingClient.tsx +24 -0
  35. package/src/0-path-value-core/NodePathAuthorities.ts +978 -0
  36. package/src/0-path-value-core/PathController.ts +1 -0
  37. package/src/0-path-value-core/PathValueCommitter.ts +565 -0
  38. package/src/0-path-value-core/PathValueController.ts +231 -0
  39. package/src/0-path-value-core/archiveLocks/ArchiveLocks.ts +154 -0
  40. package/src/0-path-value-core/archiveLocks/ArchiveLocks2.ts +820 -0
  41. package/src/0-path-value-core/archiveLocks/archiveSnapshots.ts +180 -0
  42. package/src/0-path-value-core/debugLogs.ts +90 -0
  43. package/src/0-path-value-core/pathValueArchives.ts +483 -0
  44. package/src/0-path-value-core/pathValueCore.ts +2217 -0
  45. package/src/1-path-client/RemoteWatcher.ts +558 -0
  46. package/src/1-path-client/pathValueClientWatcher.ts +702 -0
  47. package/src/2-proxy/PathValueProxyWatcher.ts +1857 -0
  48. package/src/2-proxy/archiveMoveHarness.ts +376 -0
  49. package/src/2-proxy/garbageCollection.ts +753 -0
  50. package/src/2-proxy/pathDatabaseProxyBase.ts +37 -0
  51. package/src/2-proxy/pathValueProxy.ts +139 -0
  52. package/src/2-proxy/schema2.ts +518 -0
  53. package/src/3-path-functions/PathFunctionHelpers.ts +129 -0
  54. package/src/3-path-functions/PathFunctionRunner.ts +619 -0
  55. package/src/3-path-functions/PathFunctionRunnerMain.ts +67 -0
  56. package/src/3-path-functions/deployBlock.ts +10 -0
  57. package/src/3-path-functions/deployCheck.ts +7 -0
  58. package/src/3-path-functions/deployMain.ts +160 -0
  59. package/src/3-path-functions/pathFunctionLoader.ts +282 -0
  60. package/src/3-path-functions/syncSchema.ts +475 -0
  61. package/src/3-path-functions/tests/functionsTest.ts +135 -0
  62. package/src/3-path-functions/tests/rejectTest.ts +77 -0
  63. package/src/4-dom/css.tsx +29 -0
  64. package/src/4-dom/cssTypes.d.ts +212 -0
  65. package/src/4-dom/qreact.tsx +2322 -0
  66. package/src/4-dom/qreactTest.tsx +417 -0
  67. package/src/4-querysub/Querysub.ts +877 -0
  68. package/src/4-querysub/QuerysubController.ts +620 -0
  69. package/src/4-querysub/copyEvent.ts +0 -0
  70. package/src/4-querysub/permissions.ts +289 -0
  71. package/src/4-querysub/permissionsShared.ts +1 -0
  72. package/src/4-querysub/querysubPrediction.ts +525 -0
  73. package/src/5-diagnostics/FullscreenModal.tsx +67 -0
  74. package/src/5-diagnostics/GenericFormat.tsx +165 -0
  75. package/src/5-diagnostics/Modal.tsx +79 -0
  76. package/src/5-diagnostics/Table.tsx +183 -0
  77. package/src/5-diagnostics/TimeGrouper.tsx +114 -0
  78. package/src/5-diagnostics/diskValueAudit.ts +216 -0
  79. package/src/5-diagnostics/memoryValueAudit.ts +442 -0
  80. package/src/5-diagnostics/nodeMetadata.ts +135 -0
  81. package/src/5-diagnostics/qreactDebug.tsx +309 -0
  82. package/src/5-diagnostics/shared.ts +26 -0
  83. package/src/5-diagnostics/synchronousLagTracking.ts +47 -0
  84. package/src/TestController.ts +35 -0
  85. package/src/allowclient.flag +0 -0
  86. package/src/bits.ts +86 -0
  87. package/src/buffers.ts +69 -0
  88. package/src/config.ts +53 -0
  89. package/src/config2.ts +48 -0
  90. package/src/diagnostics/ActionsHistory.ts +56 -0
  91. package/src/diagnostics/NodeViewer.tsx +503 -0
  92. package/src/diagnostics/SizeLimiter.ts +62 -0
  93. package/src/diagnostics/TimeDebug.tsx +18 -0
  94. package/src/diagnostics/benchmark.ts +139 -0
  95. package/src/diagnostics/errorLogs/ErrorLogController.ts +515 -0
  96. package/src/diagnostics/errorLogs/ErrorLogCore.ts +274 -0
  97. package/src/diagnostics/errorLogs/LogClassifiers.tsx +302 -0
  98. package/src/diagnostics/errorLogs/LogFilterUI.tsx +84 -0
  99. package/src/diagnostics/errorLogs/LogNotify.tsx +101 -0
  100. package/src/diagnostics/errorLogs/LogTimeSelector.tsx +724 -0
  101. package/src/diagnostics/errorLogs/LogViewer.tsx +757 -0
  102. package/src/diagnostics/errorLogs/hookErrors.ts +60 -0
  103. package/src/diagnostics/errorLogs/logFiltering.tsx +149 -0
  104. package/src/diagnostics/heapTag.ts +13 -0
  105. package/src/diagnostics/listenOnDebugger.ts +77 -0
  106. package/src/diagnostics/logs/DiskLoggerPage.tsx +572 -0
  107. package/src/diagnostics/logs/ObjectDisplay.tsx +165 -0
  108. package/src/diagnostics/logs/ansiFormat.ts +108 -0
  109. package/src/diagnostics/logs/diskLogGlobalContext.ts +38 -0
  110. package/src/diagnostics/logs/diskLogger.ts +305 -0
  111. package/src/diagnostics/logs/diskShimConsoleLogs.ts +32 -0
  112. package/src/diagnostics/logs/injectFileLocationToConsole.ts +50 -0
  113. package/src/diagnostics/logs/logGitHashes.ts +30 -0
  114. package/src/diagnostics/managementPages.tsx +289 -0
  115. package/src/diagnostics/periodic.ts +89 -0
  116. package/src/diagnostics/runSaturationTest.ts +416 -0
  117. package/src/diagnostics/satSchema.ts +64 -0
  118. package/src/diagnostics/trackResources.ts +82 -0
  119. package/src/diagnostics/watchdog.ts +55 -0
  120. package/src/errors.ts +132 -0
  121. package/src/forceProduction.ts +3 -0
  122. package/src/fs.ts +72 -0
  123. package/src/heapDumps.ts +666 -0
  124. package/src/https.ts +2 -0
  125. package/src/inject.ts +1 -0
  126. package/src/library-components/ATag.tsx +84 -0
  127. package/src/library-components/Button.tsx +344 -0
  128. package/src/library-components/ButtonSelector.tsx +64 -0
  129. package/src/library-components/DropdownCustom.tsx +151 -0
  130. package/src/library-components/DropdownSelector.tsx +32 -0
  131. package/src/library-components/Input.tsx +334 -0
  132. package/src/library-components/InputLabel.tsx +198 -0
  133. package/src/library-components/InputPicker.tsx +125 -0
  134. package/src/library-components/LazyComponent.tsx +62 -0
  135. package/src/library-components/MeasureHeightCSS.tsx +48 -0
  136. package/src/library-components/MeasuredDiv.tsx +47 -0
  137. package/src/library-components/ShowMore.tsx +51 -0
  138. package/src/library-components/SyncedController.ts +171 -0
  139. package/src/library-components/TimeRangeSelector.tsx +407 -0
  140. package/src/library-components/URLParam.ts +263 -0
  141. package/src/library-components/colors.tsx +14 -0
  142. package/src/library-components/drag.ts +114 -0
  143. package/src/library-components/icons.tsx +692 -0
  144. package/src/library-components/niceStringify.ts +50 -0
  145. package/src/library-components/renderToString.ts +52 -0
  146. package/src/misc/PromiseRace.ts +101 -0
  147. package/src/misc/color.ts +30 -0
  148. package/src/misc/getParentProcessId.cs +53 -0
  149. package/src/misc/getParentProcessId.ts +53 -0
  150. package/src/misc/hash.ts +83 -0
  151. package/src/misc/ipPong.js +13 -0
  152. package/src/misc/networking.ts +2 -0
  153. package/src/misc/random.ts +45 -0
  154. package/src/misc.ts +19 -0
  155. package/src/noserverhotreload.flag +0 -0
  156. package/src/path.ts +226 -0
  157. package/src/persistentLocalStore.ts +37 -0
  158. package/src/promise.ts +15 -0
  159. package/src/server.ts +73 -0
  160. package/src/src.d.ts +1 -0
  161. package/src/test/heapProcess.ts +36 -0
  162. package/src/test/mongoSatTest.tsx +55 -0
  163. package/src/test/satTest.ts +193 -0
  164. package/src/test/test.tsx +552 -0
  165. package/src/zip.ts +92 -0
  166. package/src/zipThreaded.ts +106 -0
  167. package/src/zipThreadedWorker.js +19 -0
  168. package/tsconfig.json +27 -0
  169. package/yarnSpec.txt +56 -0
@@ -0,0 +1,820 @@
1
+ import { list, sort, timeInDay, timeInHour, timeInMinute } from "socket-function/src/misc";
2
+ import { MaybePromise } from "socket-function/src/types";
3
+ import { decodeNodeId } from "../../-a-auth/certs";
4
+ import { getOwnNodeId, getOwnNodeIdAssert } from "../../-f-node-discovery/NodeDiscovery";
5
+ import { pathValueArchives } from "../pathValueArchives";
6
+ import { ArchiveLocker, ArchiveTransaction } from "./ArchiveLocks";
7
+ import { Archives } from "../../-a-archives/archives";
8
+ import debugbreak from "debugbreak";
9
+ import { formatNumber, formatTime } from "socket-function/src/formatting/format";
10
+ import { blue, green, magenta, red } from "socket-function/src/formatting/logColors";
11
+ import { logNodeStateStats, logNodeStats } from "../../5-diagnostics/nodeMetadata";
12
+ import { devDebugbreak } from "../../config";
13
+ import { logErrors } from "../../errors";
14
+ import { saveSnapshot } from "./archiveSnapshots";
15
+ import { getNodeId } from "socket-function/src/nodeCache";
16
+ import { diskLog } from "../../diagnostics/logs/diskLogger";
17
+
18
+ /** Clean up old files after a while */
19
+ const DEAD_CREATE_THRESHOLD = timeInHour * 12;
20
+ const ARCHIVE_PROPAGATION_TIME = 5000;
21
+ const LOG = false;
22
+ // NOTE: There isn't a reason for a transaction not to apply. If applying throws, we throw,
23
+ // and can keep throwing forever, but if it doesn't throw, it SHOULD be applied. We retry
24
+ // a few times, just in case the storage system was having some issues. After enough
25
+ // tries, though... we HAVE to just skip it, otherwise we will be stuck forever.
26
+ const MAX_APPLY_TRIES = 10;
27
+ const CONCURRENT_READ_COUNT = 32;
28
+ const CONCURRENT_WRITE_COUNT = 16;
29
+
30
+ export function createArchiveLocker2(config: {
31
+ archiveValues: Archives;
32
+ /** IMPORTANT! This must be the same regardless of which archiveValues is used, otherwise we won't
33
+ * lock moves between directories.
34
+ */
35
+ archiveLocks: Archives;
36
+ archiveRecycleBin: Archives;
37
+ }): ArchiveLocker {
38
+ let { archiveValues, archiveLocks, archiveRecycleBin } = config;
39
+ function getArchives(key: string) {
40
+ return (key.endsWith(".data") || key.endsWith(".locked")) ? archiveValues : archiveLocks;
41
+ }
42
+ let debugKey = archiveLocks.getDebugName();
43
+ let storage: StorageType = {
44
+ debugKey,
45
+ propagationTime: ARCHIVE_PROPAGATION_TIME,
46
+
47
+ async getKeys() {
48
+ let filesRawValues = await archiveValues.findInfo("", { type: "files" });
49
+ let filesRawLocks = await archiveLocks.findInfo("", { type: "files" });
50
+ let files: FileInfo[] = [...filesRawValues, ...filesRawLocks].map(x => ({
51
+ file: x.path,
52
+ createTime: x.createTime,
53
+ size: x.size,
54
+ }));
55
+ return files;
56
+ },
57
+
58
+ async setValue(key, value) {
59
+ await getArchives(key).set(key, value);
60
+ logNodeStats(`archives|Created TΔ`, formatNumber)(1);
61
+ },
62
+ async getValue(key) {
63
+ return getArchives(key).get(key);
64
+ },
65
+ async deleteKey(key) {
66
+ let archives = await getArchives(key);
67
+ if (
68
+ !await archives.getInfo(key)
69
+ && !await archives.get(key)
70
+ ) {
71
+ return;
72
+ }
73
+ // ACTUALLY delete confirms, as they don't contain that much information, and just clutter up the recycle bin.
74
+ // - DO archive transactions, as it might be useful to view the old transaction history.
75
+ if (archives === archiveLocks && key.endsWith(".confirm")) {
76
+ try {
77
+ await archives.del(key);
78
+ } catch { }
79
+ return;
80
+ }
81
+ try {
82
+ await archives.move({
83
+ path: key,
84
+ targetPath: key,
85
+ target: archiveRecycleBin,
86
+ });
87
+ logNodeStats(`archives|Deleted TΔ`, formatNumber)(1);
88
+ } catch {
89
+ // It was probably just moved by another process
90
+ }
91
+ },
92
+ };
93
+ async function unsafeSetFiles(files: string[]): Promise<void> {
94
+ let valuePaths = new Set((await archiveValues.findInfo("")).map(x => x.path));
95
+ let correctConfirms = new Set<string>();
96
+ let correctFiles = new Set<string>();
97
+
98
+ async function deleteAllOtherFiles() {
99
+ // Delete all old confirms / transactions / etc
100
+ let allConfirms = (await archiveLocks.findInfo("")).map(x => x.path);
101
+ await Promise.all(allConfirms.map(async confirm => {
102
+ if (correctConfirms.has(confirm)) return;
103
+ await storage.deleteKey(confirm);
104
+ }));
105
+
106
+ valuePaths = new Set((await archiveValues.findInfo("")).map(x => x.path));
107
+ // Delete all files we didn't just set (move to recycle bin)
108
+ await Promise.all(Array.from(valuePaths).map(async file => {
109
+ if (correctFiles.has(file)) return;
110
+ await archiveValues.move({
111
+ path: file,
112
+ targetPath: file,
113
+ target: archiveRecycleBin,
114
+ });
115
+ }));
116
+ }
117
+
118
+ console.log(magenta(`Deleting all other files`));
119
+ // Deleting early removes any existing transactions, which might otherwise be reapplied on top of our non-transactional changes
120
+ await deleteAllOtherFiles();
121
+
122
+ // Undelete, by retrieving all files from the recycle bin
123
+ let promises: Promise<void>[] = [];
124
+ for (let file of files) {
125
+ if (valuePaths.has(file)) {
126
+ correctFiles.add(file);
127
+ }
128
+ let isInRecycleBin = !!(await archiveRecycleBin.getInfo(file));
129
+ if (isInRecycleBin) {
130
+ console.log(`Restoring ${file}`);
131
+ promises.push(archiveRecycleBin.move({
132
+ path: file,
133
+ targetPath: file,
134
+ target: archiveValues,
135
+ }));
136
+ correctFiles.add(file);
137
+ }
138
+ }
139
+ await Promise.all(promises);
140
+
141
+ console.log(magenta(`Creating confirms`));
142
+ // Confirm all files
143
+ await Promise.all(Array.from(correctFiles).map(async file => {
144
+ let confirm = await locker.createConfirm(file);
145
+ correctConfirms.add(confirm);
146
+ }));
147
+
148
+ console.log(magenta(`Deleting all other files again`));
149
+ // Delete again, in case anything else was created while we were restoring. Not great, but... should
150
+ // reduce the race conditions considerably.
151
+ await deleteAllOtherFiles();
152
+ }
153
+ async function unsafeGetFileLocation(file: string): Promise<"live" | "zombie" | "recycled" | "missing"> {
154
+ if (await archiveValues.getInfo(file)) {
155
+ if (!file.endsWith(".locked")) {
156
+ return "live";
157
+ }
158
+ let confirmKey = await locker.getConfirmKey(file);
159
+ if (await archiveLocks.getInfo(confirmKey)) {
160
+ return "live";
161
+ } else {
162
+ return "zombie";
163
+ }
164
+ }
165
+ if (await archiveRecycleBin.getInfo(file)) {
166
+ return "recycled";
167
+ }
168
+ return "missing";
169
+ }
170
+
171
+ let locker = new TransactionLocker(storage);
172
+
173
+ return {
174
+ getAllValidFiles() {
175
+ return locker.getFiles();
176
+ },
177
+ unsafeSetFiles,
178
+ unsafeGetFileLocation,
179
+ async atomicSwapFiles(
180
+ config,
181
+ code: (
182
+ validFiles: FileInfo[],
183
+ readFiles: (files: FileInfo[]) => Promise<(Buffer | undefined)[]>,
184
+ ) => Promise<ArchiveTransaction[]>
185
+ ): Promise<"accepted" | "rejected"> {
186
+ let files = await locker.getFiles();
187
+ let readFiles = async (files: FileInfo[]) => {
188
+ let pendingFiles = files.slice();
189
+ let readResults = new Map<string, Buffer | undefined>();
190
+ async function runReadThread() {
191
+ while (pendingFiles.length > 0) {
192
+ let file = pendingFiles.pop()!;
193
+ let value = await storage.getValue(file.file);
194
+ readResults.set(file.file, value);
195
+ }
196
+ }
197
+ await Promise.all(list(CONCURRENT_READ_COUNT).map(runReadThread));
198
+ return files.map(a => {
199
+ return readResults.get(a.file);
200
+ });
201
+ };
202
+ let transactionRaw = await code(files, readFiles);
203
+ if (transactionRaw.length === 0) return "accepted";
204
+ let newTransaction: Transaction = {
205
+ ops: [],
206
+ };
207
+ if (!config.allowDuplicates) {
208
+ newTransaction.lockedFilesMustEqual = files.map(a => a.file).filter(a => a.endsWith(".locked"));
209
+ }
210
+ for (let base of transactionRaw) {
211
+ for (let obj of base.createFiles) {
212
+ newTransaction.ops.push({ type: "create", key: obj.file, value: obj.data });
213
+ }
214
+ for (let obj of base.deleteFiles) {
215
+ newTransaction.ops.push({ type: "delete", key: obj.file });
216
+ }
217
+ }
218
+ let status = await locker.addTransaction(newTransaction);
219
+ if (status === "accepted") {
220
+ let newFiles = new Set<string>();
221
+ for (let file of files) {
222
+ newFiles.add(file.file);
223
+ }
224
+ for (let op of newTransaction.ops) {
225
+ const type = op.type;
226
+ if (type === "create") {
227
+ newFiles.add(op.key);
228
+ } else if (type === "delete") {
229
+ newFiles.delete(op.key);
230
+ } else {
231
+ let unhandled: never = type;
232
+ }
233
+ }
234
+ logErrors(saveSnapshot({ files: Array.from(newFiles) }));
235
+ }
236
+ return status;
237
+ }
238
+ };
239
+ }
240
+
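For orientation, here is a hypothetical caller of createArchiveLocker2. This is only a sketch: the three Archives instances are assumed to come from elsewhere, and the ArchiveTransaction fields (createFiles/deleteFiles) are inferred from how atomicSwapFiles consumes them above; the real type lives in ./ArchiveLocks.

// Sketch only: a hypothetical compaction pass that merges all confirmed files into one.
async function compactArchives(values: Archives, locks: Archives, recycleBin: Archives) {
    const locker = createArchiveLocker2({
        archiveValues: values,
        archiveLocks: locks,
        archiveRecycleBin: recycleBin,
    });
    const status = await locker.atomicSwapFiles({ allowDuplicates: false }, async (validFiles, readFiles) => {
        // Read every confirmed file, then propose one transaction that replaces them all.
        const buffers = await readFiles(validFiles);
        const merged = Buffer.concat(buffers.filter((b): b is Buffer => !!b));
        return [{
            // New file name includes a timestamp so names are never reused (see the guarantees below).
            createFiles: [{ file: `merged_${Date.now()}.data`, data: merged }],
            deleteFiles: validFiles.map(f => ({ file: f.file })),
        }];
    });
    // "rejected" means another writer changed the locked set first; callers can simply retry.
    return status;
}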
241
+ export type FileInfo = {
242
+ file: string;
243
+ createTime: number;
244
+ size: number;
245
+ };
246
+ type StorageType = {
247
+ debugKey: string;
248
+ propagationTime: number;
249
+ getKeys(): Promise<FileInfo[]>;
250
+ setValue(key: string, value: Buffer): Promise<void>;
251
+ getValue(key: string): Promise<Buffer | undefined>;
252
+ deleteKey(key: string): Promise<void>;
253
+ /** The amount of time we have to wait until it can be assumed that all writes
254
+ * at least this old are readable.
255
+ * - On the local disk it could be 0, but we use a higher value so development is closer to production
256
+ * - On Backblaze, writes might be routed to different servers, so a value might be written
257
+ * and not be immediately readable. It might also be readable in a different order than
258
+ * we wrote it (random writes might take longer). HOWEVER, if we wait propagationTime
259
+ * all writes should now be readable.
260
+ */
261
+ };
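The StorageType contract above is small enough that an in-memory fake makes it concrete. The sketch below is illustrative only (it is not part of this file); propagationTime can be 0 here because an in-memory map is immediately readable, matching the note above about local disk.

// Hypothetical in-memory StorageType, useful for exercising TransactionLocker in tests.
function createMemoryStorage(): StorageType {
    const data = new Map<string, { value: Buffer; createTime: number }>();
    return {
        debugKey: "memory",
        propagationTime: 0, // in memory every write is immediately readable
        async getKeys() {
            return Array.from(data, ([file, v]) => ({ file, createTime: v.createTime, size: v.value.length }));
        },
        async setValue(key, value) { data.set(key, { value, createTime: Date.now() }); },
        async getValue(key) { return data.get(key)?.value; },
        async deleteKey(key) { data.delete(key); },
    };
}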
262
+ type Transaction = {
263
+ ops: ({
264
+ type: "create";
265
+ key: string;
266
+ value: Buffer | undefined;
267
+ } | {
268
+ type: "delete";
269
+ key: string;
270
+ })[];
271
+ // Forces the locked files to strictly equal this set. This still allows lock-free writes
272
+ // (such as from genesis writers) to be added, while letting writers do things
273
+ // such as ensuring a file with a certain flag exists, without having to worry
274
+ // about two files being created due to a race condition.
275
+ lockedFilesMustEqual?: string[];
276
+ };
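To make lockedFilesMustEqual concrete, here is an illustrative Transaction (hypothetical paths) as a caller might hand it to addTransaction. Note that addTransaction itself appends ".locked" to created keys, and the transaction is rejected if the confirmed ".locked" set no longer equals lockedFilesMustEqual.

// Illustrative only: replace an old flag file with a new one, guarded against racing writers.
const exampleTransaction: Transaction = {
    ops: [
        { type: "create", key: "flags/singleSourceOfTruth_abc123.data", value: Buffer.from("new flag") },
        { type: "delete", key: "flags/singleSourceOfTruth_old999.data.locked" },
    ],
    // The locked set this writer observed; if another writer created a competing flag
    // file in the meantime, the set differs and this transaction is rejected.
    lockedFilesMustEqual: ["flags/singleSourceOfTruth_old999.data.locked"],
};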
277
+ /**
278
+ * Guarantees
279
+ * - The output state will never include partial Transactions
280
+ * - Transactions will apply as if they exclusively locked all the files OR,
281
+ * they won't be applied at all.
282
+ * - EXCEPT, a single identical transaction might have deletes applied multiple times,
283
+ * and might be delayed (if the original writer crashes or hangs). It is recommended
284
+ * file names are never reused (annoying, but feasible if you add some random data to
285
+ * file names), which will prevent this from ever causing issues.
286
+ * - IMPORTANT! We only make the file names atomic. If you go to read a file, and it isn't there,
287
+ * you should get the file names again and try again (caching already-read files in memory, otherwise
288
+ * you might never catch up to the new writes).
289
+ * - Changes created file names so they end with ".locked"
290
+ * - This is a bit annoying, but it should be easier for callers to ignore this.
291
+ * - Any files created outside of TransactionLocker (that don't end with ".locked", ".confirm", or ".transaction")
292
+ * will automatically be confirmed.
293
+ * - We assume all files are immutable
294
+ */
295
+ class TransactionLocker {
296
+ public constructor(
297
+ private storage: StorageType,
298
+ // For example, this could be used to ensure that only 1 file with "singleSourceOfTruth"
299
+ // can exist, unless the new "singleSourceOfTruth" is > a day newer than the old file.
300
+ // - Completely overrides the valid check, so if you still want to verify that deletes
301
+ // exist, you will have to manually check them.
302
+ private isTransactionValidOverride?: (transaction: Transaction, dataFiles: FileInfo[], rawDataFiles: FileInfo[]) => boolean
303
+ ) { }
304
+
305
+ // #region Base File Operations
306
+ public getConfirmKey(key: string): string {
307
+ let { dir, name } = parsePath(key);
308
+ return `${dir}confirm_${name}.confirm`;
309
+ }
310
+ public async createConfirm(key: string) {
311
+ let path = this.getConfirmKey(key);
312
+ if (LOG) {
313
+ console.log(`Creating confirmation for ${key}`);
314
+ }
315
+ await this.storage.setValue(path, Buffer.from(""));
316
+ return path;
317
+ }
318
+ private async deleteDataFile(key: string, reason: string): Promise<void> {
319
+ //await this.storage.setValue(key + ".reason", Buffer.from(reason));
320
+ // Delete file, and confirmation as well
321
+ await this.storage.deleteKey(key);
322
+ await this.storage.deleteKey(this.getConfirmKey(key));
323
+ }
324
+ // Writes the createFiles, so they can be confirmed in the future
325
+ private async prepareTransaction(transaction: Transaction): Promise<void> {
326
+ let opsRemaining = transaction.ops.slice();
327
+ const runThread = async () => {
328
+ while (true) {
329
+ let op = opsRemaining.pop();
330
+ if (!op) return;
331
+ if (op.type === "create") {
332
+ let key = op.key;
333
+ await this.storage.setValue(key, op.value || Buffer.from(""));
334
+ }
335
+ }
336
+ };
337
+ await Promise.all(list(CONCURRENT_WRITE_COUNT).map(runThread));
338
+ }
339
+ // Just writes the transaction (in a format readDataState can read)
340
+ private async writeTransaction(transaction: Transaction & { seqNum: number; }): Promise<void> {
341
+ function ellipsize(text: string, length: number) {
342
+ if (text.length <= length) return text;
343
+ return text.slice(0, length - 3) + "...";
344
+ }
345
+ let niceThreadId = decodeNodeId(getOwnNodeIdAssert())?.threadId.slice(0, 4) || "unknown";
346
+ function debugFileInfo(file: string) {
347
+ let obj = pathValueArchives.decodeDataPath(file);
348
+ if (!obj.seqNum) {
349
+ return "X";
350
+ }
351
+ return obj.seqNum + "@" + obj.threadId.slice(0, 4);
352
+ }
353
+
354
+ let creates = transaction.ops.filter(a => a.type === "create");
355
+ let deletes = transaction.ops.filter(a => a.type === "delete");
356
+ let name = toFileNameKVP({
357
+ tSeqNum: transaction.seqNum + "",
358
+ tWriteTime: Date.now() + "",
359
+ thread: niceThreadId,
360
+ cCount: transaction.ops.filter(a => a.type === "create").length + "",
361
+ dCount: transaction.ops.filter(a => a.type === "delete").length + "",
362
+ create: ellipsize(creates.map(a => debugFileInfo(a.key)).join(","), 50),
363
+ delete: ellipsize(deletes.map(a => debugFileInfo(a.key)).join(","), 50),
364
+ });
365
+
366
+ if (LOG) {
367
+ console.log(`Writing transaction ${transaction.seqNum} with ${transaction.ops.length} ops`);
368
+ }
369
+
370
+ let key = `transaction_${name}.transaction`;
371
+ let strippedTransaction: Transaction = { ops: transaction.ops.map(a => ({ ...a, value: undefined })) };
372
+ let value = Buffer.from(JSON.stringify(strippedTransaction));
373
+ await this.storage.setValue(key, value);
374
+ }
375
+ private async readDataState(): Promise<{
376
+ rawDataFiles: FileInfo[];
377
+ /** Confirmed FileInfos are === the FileInfos in rawDataFiles */
378
+ confirmedDataFiles: FileInfo[];
379
+ transactions: (Transaction & {
380
+ seqNum: number;
381
+ writeTime: number;
382
+ createTime: number;
383
+ source: FileInfo;
384
+ })[];
385
+ }> {
386
+ let bufferCache = new Map<string, Buffer>();
387
+ const tryToRead = async () => {
388
+ let files = await this.storage.getKeys();
389
+
390
+ let transactions: (Transaction & {
391
+ seqNum: number;
392
+ writeTime: number;
393
+ createTime: number;
394
+ source: FileInfo;
395
+ })[] = [];
396
+ for (let file of files) {
397
+ if (!file.file.endsWith(".transaction")) continue;
398
+ let tFile = file;
399
+ let buffer = await this.storage.getValue(tFile.file);
400
+ if (!buffer) return false;
401
+ bufferCache.set(tFile.file, buffer);
402
+ let transaction: Transaction | undefined;
403
+ try {
404
+ transaction = JSON.parse(buffer.toString());
405
+ } catch (e: any) {
406
+ // Maybe it is actively being written to? Or it is just broken. Skip it.
407
+ console.error(`Failed to parse transaction. Ignoring, ${tFile.file}, ${e.stack}`);
408
+ }
409
+ if (!transaction) continue;
410
+
411
+ let { name } = parsePath(tFile.file);
412
+ let kvp = parseFileNameKVP(name);
413
+ transactions.push({
414
+ ...transaction,
415
+ seqNum: Number(kvp.tSeqNum) || 0,
416
+ writeTime: Number(kvp.tWriteTime),
417
+ createTime: tFile.createTime,
418
+ source: tFile,
419
+ });
420
+ }
421
+
422
+ // Check all of our files to see if any have changed (or if there are new files).
423
+ // Because we don't reuse file names, or change them, it means if a file exists
424
+ // it must not have changed (if it was deleted and recreated... then we reuse
425
+ // a file name, so there is no way for the state to have changed).
426
+ // - Of course, if a file was created and deleted while we weren't watching we will miss that,
427
+ // but validity should/can only depend on the current state, not past event transitions
428
+ // (it pretty much only depends on files existing).
429
+ {
430
+ let filesVerify = await this.storage.getKeys();
431
+ let filesVerifySet = new Set(filesVerify.map(a => a.file));
432
+ // If it changes while reading, read again. Otherwise, if there were no changes while reading,
433
+ // we know the state was all the state we read, at one time (maybe not now, but at an instant
434
+ // in the past).
435
+ if (filesVerify.length !== files.length || files.some(a => !filesVerifySet.has(a.file))) {
436
+ let message = "Files changed while reading. Retrying.";
437
+ for (let file of files) {
438
+ if (!filesVerifySet.has(file.file)) {
439
+ message += `\n Missing file: ${file.file}`;
440
+ }
441
+ }
442
+ let fileSet = new Set(files.map(a => a.file));
443
+ for (let file of filesVerify) {
444
+ if (!fileSet.has(file.file)) {
445
+ message += `\n New file: ${file.file}`;
446
+ }
447
+ }
448
+ console.warn(message);
449
+ logNodeStats(`archives|TΔ Atomic Retry`, formatNumber)(1);
450
+ return false;
451
+ }
452
+ }
453
+
454
+ let existingFiles = new Map(files.map(a => [a.file, a]));
455
+
456
+ let currentDataFiles = new Map<string, FileInfo>();
457
+ for (let file of files) {
458
+ if (!(
459
+ file.file.endsWith(".locked")
460
+ || file.file.endsWith(".confirm")
461
+ || file.file.endsWith(".transaction")
462
+ || file.file.endsWith(".reason")
463
+ )) {
464
+ currentDataFiles.set(file.file, file);
465
+ continue;
466
+ }
467
+ if (!file.file.endsWith(".locked")) continue;
468
+ let confirmKey = this.getConfirmKey(file.file);
469
+ let confirmFile = existingFiles.get(confirmKey);
470
+ if (confirmFile) {
471
+ currentDataFiles.set(file.file, file);
472
+ }
473
+ }
474
+
475
+ diskLog(`Read archive state`, {
476
+ rawFilesCount: files.length,
477
+ confirmedCount: currentDataFiles.size,
478
+ rawFiles: files.map(a => a.file),
479
+ confirmedFiles: Array.from(currentDataFiles.values()).map(a => a.file),
480
+ });
481
+
482
+ return {
483
+ rawDataFiles: files,
484
+ confirmedDataFiles: Array.from(currentDataFiles.values()),
485
+ transactions,
486
+ };
487
+ };
488
+
489
+ let startTime = Date.now();
490
+ while (true) {
491
+ let result = await tryToRead();
492
+ if (result) {
493
+ if (LOG) {
494
+ let timeToRead = Date.now() - startTime;
495
+ console.log(`Read data state in ${formatTime(timeToRead)}`);
496
+ }
497
+ return result;
498
+ }
499
+ }
500
+ }
501
+ // #endregion
502
+
503
+ private isTransactionValid(transaction: Transaction, dataFiles: FileInfo[], rawDataFiles: FileInfo[]): boolean {
504
+ let override = this.isTransactionValidOverride;
505
+ if (override) return override(transaction, dataFiles, rawDataFiles);
506
+ if (transaction.lockedFilesMustEqual) {
507
+ let newLockedFiles = new Set(dataFiles.map(a => a.file).filter(a => a.endsWith(".locked")));
508
+ let mustEqualSet = new Set(transaction.lockedFilesMustEqual);
509
+ if (mustEqualSet.size !== newLockedFiles.size) return false;
510
+ if (Array.from(mustEqualSet).some(a => !newLockedFiles.has(a))) return false;
511
+ }
512
+ let existingFiles = new Set(dataFiles.map(a => a.file));
513
+ let existingRawFiles = new Set(rawDataFiles.map(a => a.file));
514
+ return transaction.ops.every(a => {
515
+ const type = a.type;
516
+ // Create files might disappear if we hang and they get cleaned up
517
+ if (type === "create") return existingRawFiles.has(a.key);
518
+ if (type === "delete") return existingFiles.has(a.key);
519
+ let unhandled: never = type;
520
+ throw new Error(`Unhandled type: ${unhandled}`);
521
+ });
522
+ }
523
+
524
+ // NOTE: If the transaction was applied long in the past, this will return false. HOWEVER, this is fine,
525
+ // as if it has been applied, when we look around again, we will no longer see it
526
+ private wasTransactionApplied(
527
+ transaction: Transaction,
528
+ confirmedDataFiles: FileInfo[],
529
+ rawDataFiles: FileInfo[]
530
+ ) {
531
+ let confirmedKeys = new Set(Array.from(confirmedDataFiles).map(a => a.file));
532
+ let rawLookup = new Set(Array.from(rawDataFiles).map(a => a.file));
533
+ // If any creates are not confirmed, it must not have been applied
534
+ if (transaction.ops.some(a => a.type === "create" && rawLookup.has(a.key) && !confirmedKeys.has(a.key))) {
535
+ if (LOG) {
536
+ console.log(`Transaction has pending confirmations of creates.`);
537
+ let pending = transaction.ops.filter(a => a.type === "create" && rawLookup.has(a.key) && !confirmedKeys.has(a.key));
538
+ for (let p of pending) {
539
+ console.log(` ${p.key}`);
540
+ }
541
+ }
542
+ return false;
543
+ }
544
+ // If any deletes still exist, it must not have been applied
545
+ if (transaction.ops.some(a => a.type === "delete" && confirmedKeys.has(a.key))) {
546
+ if (LOG) {
547
+ console.log(`transaction has pending deletes`);
548
+ }
549
+ return false;
550
+ }
551
+ return true;
552
+ }
553
+ private async applyTransaction(transaction: Transaction): Promise<void> {
554
+ let createCount = transaction.ops.filter(a => a.type === "create").length;
555
+ let deleteCount = transaction.ops.filter(a => a.type === "delete").length;
556
+ let lockedFiles = transaction.lockedFilesMustEqual?.length;
557
+ if (LOG) {
558
+ console.log(`Applying transaction with ${createCount} creates and ${deleteCount} deletes. ${lockedFiles !== undefined && `Lock state depends on ${lockedFiles} files` || ""}`);
559
+ }
560
+ logNodeStats(`archives|TΔ Apply`, formatNumber)(1);
561
+ let opsRemaining = transaction.ops.slice();
562
+ // NOTE: Order doesn't matter here. If anything is reading the values
563
+ // 1) If it runs after we start, it will see our transaction and apply it
564
+ // 2) If it runs before we start, if it can't find some Buffers, it will abort and
565
+ // reread all of the files, until it can read all of the Buffers without having
566
+ // a delete happen.
567
+ const runThread = async () => {
568
+ while (true) {
569
+ let op = opsRemaining.pop();
570
+ if (!op) return;
571
+ if (op.type === "create") {
572
+ await this.createConfirm(op.key);
573
+ } else if (op.type === "delete") {
574
+ await this.deleteDataFile(op.key, `transaction (${getOwnNodeId()})`);
575
+ } else {
576
+ let unhandled: never = op;
577
+ throw new Error(`Unhandled type: ${unhandled}`);
578
+ }
579
+ }
580
+ };
581
+ await Promise.all(list(CONCURRENT_WRITE_COUNT).map(runThread));
582
+
583
+ if (LOG) {
584
+ console.log(`Applied transaction with ${createCount} creates and ${deleteCount} deletes. ${lockedFiles !== undefined && `Lock state depends on ${lockedFiles} files` || ""}`);
585
+ }
586
+ }
587
+
588
+ /** Only returns data files (no transaction files, or confirmations).
589
+ * - Might run a transaction
590
+ */
591
+ public async getFiles(): Promise<FileInfo[]> {
592
+ let obj = await this.getFilesBase();
593
+ logNodeStateStats(`ArchiveLock Data File`, formatNumber)(obj.dataFiles.length);
594
+ return obj.dataFiles;
595
+ }
596
+ private transactionAppliedCount = new Map<number, number>();
597
+ private async getFilesBase(): Promise<{
598
+ dataFiles: FileInfo[];
599
+ rawDataFiles: FileInfo[];
600
+ nextSeqNum: number;
601
+ }> {
602
+ let readTime = Date.now();
603
+ let dataState = await this.readDataState();
604
+
605
+ // - Sort transactions by [-seqNum, createTime]
606
+ let transactions = dataState.transactions.slice();
607
+ transactions.sort((a, b) => {
608
+ if (a.seqNum !== b.seqNum) return b.seqNum - a.seqNum;
609
+ return a.createTime - b.createTime;
610
+ });
611
+ {
612
+ // - Also sort by [-seqNum, writeTime]. If we find the order is different, warn,
613
+ // as it might be indicative of a hanging storage writer (although more likely
614
+ // it is due to a hanging write of ours).
615
+ let transactionsTest = dataState.transactions.slice();
616
+ transactionsTest.sort((a, b) => {
617
+ if (a.seqNum !== b.seqNum) return b.seqNum - a.seqNum;
618
+ return a.writeTime - b.writeTime;
619
+ });
620
+ if (transactions.map(a => a.seqNum).join(",") !== transactionsTest.map(a => a.seqNum).join(",")) {
621
+ console.warn("Transaction order is different when sorting by writeTime. This is likely due to a hanging write.");
622
+ logNodeStats(`archives|TΔ Possible Hanging Write`, formatNumber)(1);
623
+ }
624
+ }
625
+
626
+ // IMPORTANT! DO NOT check for isTransactionValid here! This must be done BEFORE
627
+ // we write, otherwise partial applications of transactions will invalidate the transaction
628
+ // itself, which breaks things!
629
+ // - And checking before we write is safe, because we use the next seqNum: if we write
630
+ // a transaction and it is applied, it must mean no transactions snuck in between,
631
+ // so the transaction is still as valid as before it was written!
632
+
633
+ // Initialization code / lock reset recovery code
634
+ if (transactions.length === 0) {
635
+ return {
636
+ dataFiles: dataState.confirmedDataFiles,
637
+ rawDataFiles: dataState.rawDataFiles,
638
+ nextSeqNum: 1,
639
+ };
640
+ }
641
+
642
+ let activeT = transactions[0];
643
+ // Wait until activeTransaction is old enough to be applied,
644
+ // and then rerun the function (if transactions have appeared which are older,
645
+ // which is possible, as storage might give reads out of order)
646
+ let threshold = activeT.createTime + this.storage.propagationTime;
647
+ if (Date.now() < threshold) {
648
+ let waitTime = threshold - Date.now();
649
+ if (LOG) {
650
+ console.log(`Waiting ${formatTime(waitTime)} for transaction ${activeT.seqNum} to settle.`);
651
+ }
652
+ await new Promise(resolve => setTimeout(resolve, waitTime));
653
+ return this.getFilesBase();
654
+ }
655
+
656
+ let applyCount = this.transactionAppliedCount.get(activeT.seqNum) || 0;
657
+
658
+ // IF the transaction is already applied, THEN we can return the data state!
659
+ if (
660
+ applyCount > MAX_APPLY_TRIES
661
+ || this.wasTransactionApplied(activeT, dataState.confirmedDataFiles, dataState.rawDataFiles)
662
+ ) {
663
+ // Delete any transactions with a seqNum < activeTransaction.seqNum
664
+ // (otherwise our transactions stick around forever)
665
+ // - We can't delete the transaction we applied, otherwise the next transaction won't
666
+ // have a seqNum to compare against!
667
+ // - Don't delete their created files. If they were applied, we need those, if not
668
+ // the old orphaned file cleanup will clean them up.
669
+ for (let t of transactions) {
670
+ if (t.seqNum < activeT.seqNum) {
671
+ await this.storage.deleteKey(t.source.file);
672
+ }
673
+ }
674
+
675
+ // Delete any create files that are WAY too old, and not confirmed
676
+ {
677
+ let oldThreshold = readTime - DEAD_CREATE_THRESHOLD;
678
+ let veryOldFiles = dataState.rawDataFiles.filter(x => x.file.endsWith(".locked")).filter(a => a.createTime < oldThreshold);
679
+ let confirmedLookup = new Set(dataState.confirmedDataFiles.map(a => a));
680
+ let unconfirmedOldFiles = veryOldFiles.filter(a => !confirmedLookup.has(a));
681
+
682
+ if (unconfirmedOldFiles.length > 0) {
683
+ let doubleCheck = await this.readDataState();
684
+ let doubleCheckLookup = new Set(doubleCheck.confirmedDataFiles.map(a => a));
685
+ let doubleCheckDataFiles = new Set(doubleCheck.rawDataFiles.map(a => a.file));
686
+ // IMPORTANT! Make sure they still aren't confirmed AND that the rawDataFiles exist. This helps prevent cases
687
+ // where Backblaze returns no files (which it might be doing, as multiple times this code has
688
+ // triggered without this check and deleted all of our files...)
689
+ let unconfirmedOldFiles2 = veryOldFiles.filter(a => !doubleCheckLookup.has(a) && doubleCheckDataFiles.has(a.file));
690
+ console.warn(red(`Deleted ${unconfirmedOldFiles2.length} very old unconfirmed files`));
691
+ if (LOG) {
692
+ logNodeStats(`archives|TΔ Delete Old Rejected File`, formatNumber)(unconfirmedOldFiles2.length);
693
+ }
694
+ // At this point the file was already very old when we started reading, so it is not part of the active transaction.
695
+ for (let file of unconfirmedOldFiles2) {
696
+ await this.deleteDataFile(file.file, `old unconfirmed file (${getOwnNodeId()}, ${process.argv[1]})`);
697
+ }
698
+ }
699
+ }
700
+
701
+
702
+ // Delete any confirmations that have no corresponding file, and are very old
703
+ {
704
+ let oldThreshold = readTime - DEAD_CREATE_THRESHOLD;
705
+ let usedConfirmations = new Set(dataState.confirmedDataFiles.map(a => this.getConfirmKey(a.file)));
706
+ let oldEnoughConfirms = dataState.rawDataFiles.filter(x => x.file.endsWith(".confirm") && x.createTime < oldThreshold);
707
+ let deprecatedFiles = oldEnoughConfirms.filter(a => !usedConfirmations.has(a.file));
708
+ if (deprecatedFiles.length > 0) {
709
+ if (LOG) {
710
+ console.warn(red(`Deleted ${deprecatedFiles.length} / ${oldEnoughConfirms.length} confirmations, for not having corresponding data files`));
711
+ logNodeStats(`archives|TΔ Delete Deprecated Confirm`, formatNumber)(deprecatedFiles.length);
712
+ }
713
+ for (let file of deprecatedFiles) {
714
+ await this.storage.deleteKey(file.file);
715
+ }
716
+ }
717
+ }
718
+
719
+ // Delete some debug files
720
+ {
721
+ for (let file of dataState.rawDataFiles.filter(x => x.file.endsWith(".reason"))) {
722
+ await this.storage.deleteKey(file.file);
723
+ }
724
+ }
725
+
726
+ return {
727
+ dataFiles: Array.from(dataState.confirmedDataFiles),
728
+ rawDataFiles: dataState.rawDataFiles,
729
+ nextSeqNum: activeT.seqNum + 1
730
+ };
731
+ }
732
+
733
+ {
734
+ console.log(`Applying transaction ${activeT.seqNum}, cur try ${applyCount} / ${MAX_APPLY_TRIES}`);
735
+ }
736
+ await this.applyTransaction(activeT);
737
+ this.transactionAppliedCount.set(activeT.seqNum, applyCount + 1);
738
+
739
+ // Run again, until we can be reasonably sure activeT isn't changing. We can be wrong,
740
+ // though, which will just result in any transaction depending on activeT being
741
+ // rejected, and having to be inserted again.
742
+ return this.getFilesBase();
743
+ }
744
+
745
+ /** If any deleted files were deleted by other transactions, then we will be rejected. */
746
+ public async addTransaction(transaction: Transaction): Promise<"accepted" | "rejected"> {
747
+ if (LOG) {
748
+ let dels = transaction.ops.filter(a => a.type === "delete").length;
749
+ let creates = transaction.ops.filter(a => a.type === "create").length;
750
+ let createBytes = transaction.ops.map(a => a.type === "create" && a.value?.length || 0).reduce((a, b) => a + b, 0);
751
+ console.log(blue(`Starting transaction with ${creates} creates and ${dels} deletes, ${formatNumber(createBytes)}B`));
752
+ }
753
+ transaction = { ...transaction, ops: transaction.ops.slice() };
754
+ function normalizePath(path: string) {
755
+ // Replace duplicate slashes with a single slash
756
+ return path.replaceAll(/\/+/g, "/");
757
+ }
758
+ for (let op of transaction.ops) {
759
+ if (op.type === "create") {
760
+ op.key += ".locked";
761
+ }
762
+ // NOTE: If we fail to normalize, we might think values are deleted which exist, or that
763
+ // creates are gone when they are actually there.
764
+ op.key = normalizePath(op.key);
765
+ }
766
+ // We check for valid AFTER applying the latest transaction. Then when we
767
+ // write, if anything changes, it will be from a transaction which superseded us,
768
+ // which will cause our transaction to never apply. Otherwise... we MUST still
769
+ // be valid!
770
+
771
+ logNodeStats(`archives|TΔ Create File`, formatNumber)(1);
772
+ await this.prepareTransaction(transaction);
773
+
774
+ while (true) {
775
+ let beforeData = await this.getFilesBase();
776
+ if (!this.isTransactionValid(transaction, beforeData.dataFiles, beforeData.rawDataFiles)) {
777
+ logNodeStats(`archives|TΔ Rejected`, formatNumber)(1);
778
+ if (LOG) {
779
+ console.log(red(`Finished transaction with rejection, ${transaction.ops.length} ops`));
780
+ }
781
+ return "rejected";
782
+ }
783
+
784
+ await this.writeTransaction({ ...transaction, seqNum: beforeData.nextSeqNum });
785
+
786
+ let afterData = await this.getFilesBase();
787
+ if (this.wasTransactionApplied(transaction, afterData.dataFiles, afterData.rawDataFiles)) {
788
+ logNodeStats(`archives|TΔ Accepted`, formatNumber)(1);
789
+ if (LOG) {
790
+ console.log(green(`Finished transaction with ${transaction.ops.length} ops`));
791
+ }
792
+ return "accepted";
793
+ }
794
+ }
795
+ }
796
+ }
797
+
798
+ function parsePath(path: string): { dir: string; name: string } {
799
+ path = path.replaceAll("\\", "/");
800
+ let lastSlash = path.lastIndexOf("/");
801
+ if (lastSlash === -1) return { dir: "", name: path };
802
+ return { dir: path.slice(0, lastSlash + 1), name: path.slice(lastSlash + 1) };
803
+ }
804
+
805
+ // Spaces in keys and values are replaced with "_" (they are not preserved)
806
+ function toFileNameKVP(kvp: { [key: string]: string }): string {
807
+ function s(v: string) {
808
+ return v.replaceAll(" ", "_");
809
+ }
810
+ return " " + Object.entries(kvp).map(([key, value]) => `${s(key)}=${s(value)}`).join(" ") + " ";
811
+ }
812
+ function parseFileNameKVP(fileName: string): { [key: string]: string } {
813
+ let parts = fileName.trim().split(" ");
814
+ let obj: { [key: string]: string } = {};
815
+ for (let part of parts) {
816
+ let [key, value] = part.split("=");
817
+ obj[key] = value || key;
818
+ }
819
+ return obj;
820
+ }
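A quick round trip of the file-name KVP helpers above, with made-up values, shows the encoding used for transaction file names (spaces inside keys or values are replaced with "_" and are not recovered):

// Round-trip sketch with made-up values:
const encoded = toFileNameKVP({ tSeqNum: "7", thread: "ab12", cCount: "2" });
// encoded === " tSeqNum=7 thread=ab12 cCount=2 "
const decoded = parseFileNameKVP(encoded);
// decoded deep-equals { tSeqNum: "7", thread: "ab12", cCount: "2" }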