querysub 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (169)
  1. package/.dependency-cruiser.js +304 -0
  2. package/.eslintrc.js +51 -0
  3. package/.github/copilot-instructions.md +1 -0
  4. package/.vscode/settings.json +25 -0
  5. package/bin/deploy.js +4 -0
  6. package/bin/function.js +4 -0
  7. package/bin/server.js +4 -0
  8. package/costsBenefits.txt +112 -0
  9. package/deploy.ts +3 -0
  10. package/inject.ts +1 -0
  11. package/package.json +60 -0
  12. package/prompts.txt +54 -0
  13. package/spec.txt +820 -0
  14. package/src/-a-archives/archiveCache.ts +913 -0
  15. package/src/-a-archives/archives.ts +148 -0
  16. package/src/-a-archives/archivesBackBlaze.ts +792 -0
  17. package/src/-a-archives/archivesDisk.ts +418 -0
  18. package/src/-a-archives/copyLocalToBackblaze.ts +24 -0
  19. package/src/-a-auth/certs.ts +517 -0
  20. package/src/-a-auth/der.ts +122 -0
  21. package/src/-a-auth/ed25519.ts +1015 -0
  22. package/src/-a-auth/node-forge-ed25519.d.ts +17 -0
  23. package/src/-b-authorities/dnsAuthority.ts +203 -0
  24. package/src/-b-authorities/emailAuthority.ts +57 -0
  25. package/src/-c-identity/IdentityController.ts +200 -0
  26. package/src/-d-trust/NetworkTrust2.ts +150 -0
  27. package/src/-e-certs/EdgeCertController.ts +288 -0
  28. package/src/-e-certs/certAuthority.ts +192 -0
  29. package/src/-f-node-discovery/NodeDiscovery.ts +543 -0
  30. package/src/-g-core-values/NodeCapabilities.ts +134 -0
  31. package/src/-g-core-values/oneTimeForward.ts +91 -0
  32. package/src/-h-path-value-serialize/PathValueSerializer.ts +769 -0
  33. package/src/-h-path-value-serialize/stringSerializer.ts +176 -0
  34. package/src/0-path-value-core/LoggingClient.tsx +24 -0
  35. package/src/0-path-value-core/NodePathAuthorities.ts +978 -0
  36. package/src/0-path-value-core/PathController.ts +1 -0
  37. package/src/0-path-value-core/PathValueCommitter.ts +565 -0
  38. package/src/0-path-value-core/PathValueController.ts +231 -0
  39. package/src/0-path-value-core/archiveLocks/ArchiveLocks.ts +154 -0
  40. package/src/0-path-value-core/archiveLocks/ArchiveLocks2.ts +820 -0
  41. package/src/0-path-value-core/archiveLocks/archiveSnapshots.ts +180 -0
  42. package/src/0-path-value-core/debugLogs.ts +90 -0
  43. package/src/0-path-value-core/pathValueArchives.ts +483 -0
  44. package/src/0-path-value-core/pathValueCore.ts +2217 -0
  45. package/src/1-path-client/RemoteWatcher.ts +558 -0
  46. package/src/1-path-client/pathValueClientWatcher.ts +702 -0
  47. package/src/2-proxy/PathValueProxyWatcher.ts +1857 -0
  48. package/src/2-proxy/archiveMoveHarness.ts +376 -0
  49. package/src/2-proxy/garbageCollection.ts +753 -0
  50. package/src/2-proxy/pathDatabaseProxyBase.ts +37 -0
  51. package/src/2-proxy/pathValueProxy.ts +139 -0
  52. package/src/2-proxy/schema2.ts +518 -0
  53. package/src/3-path-functions/PathFunctionHelpers.ts +129 -0
  54. package/src/3-path-functions/PathFunctionRunner.ts +619 -0
  55. package/src/3-path-functions/PathFunctionRunnerMain.ts +67 -0
  56. package/src/3-path-functions/deployBlock.ts +10 -0
  57. package/src/3-path-functions/deployCheck.ts +7 -0
  58. package/src/3-path-functions/deployMain.ts +160 -0
  59. package/src/3-path-functions/pathFunctionLoader.ts +282 -0
  60. package/src/3-path-functions/syncSchema.ts +475 -0
  61. package/src/3-path-functions/tests/functionsTest.ts +135 -0
  62. package/src/3-path-functions/tests/rejectTest.ts +77 -0
  63. package/src/4-dom/css.tsx +29 -0
  64. package/src/4-dom/cssTypes.d.ts +212 -0
  65. package/src/4-dom/qreact.tsx +2322 -0
  66. package/src/4-dom/qreactTest.tsx +417 -0
  67. package/src/4-querysub/Querysub.ts +877 -0
  68. package/src/4-querysub/QuerysubController.ts +620 -0
  69. package/src/4-querysub/copyEvent.ts +0 -0
  70. package/src/4-querysub/permissions.ts +289 -0
  71. package/src/4-querysub/permissionsShared.ts +1 -0
  72. package/src/4-querysub/querysubPrediction.ts +525 -0
  73. package/src/5-diagnostics/FullscreenModal.tsx +67 -0
  74. package/src/5-diagnostics/GenericFormat.tsx +165 -0
  75. package/src/5-diagnostics/Modal.tsx +79 -0
  76. package/src/5-diagnostics/Table.tsx +183 -0
  77. package/src/5-diagnostics/TimeGrouper.tsx +114 -0
  78. package/src/5-diagnostics/diskValueAudit.ts +216 -0
  79. package/src/5-diagnostics/memoryValueAudit.ts +442 -0
  80. package/src/5-diagnostics/nodeMetadata.ts +135 -0
  81. package/src/5-diagnostics/qreactDebug.tsx +309 -0
  82. package/src/5-diagnostics/shared.ts +26 -0
  83. package/src/5-diagnostics/synchronousLagTracking.ts +47 -0
  84. package/src/TestController.ts +35 -0
  85. package/src/allowclient.flag +0 -0
  86. package/src/bits.ts +86 -0
  87. package/src/buffers.ts +69 -0
  88. package/src/config.ts +53 -0
  89. package/src/config2.ts +48 -0
  90. package/src/diagnostics/ActionsHistory.ts +56 -0
  91. package/src/diagnostics/NodeViewer.tsx +503 -0
  92. package/src/diagnostics/SizeLimiter.ts +62 -0
  93. package/src/diagnostics/TimeDebug.tsx +18 -0
  94. package/src/diagnostics/benchmark.ts +139 -0
  95. package/src/diagnostics/errorLogs/ErrorLogController.ts +515 -0
  96. package/src/diagnostics/errorLogs/ErrorLogCore.ts +274 -0
  97. package/src/diagnostics/errorLogs/LogClassifiers.tsx +302 -0
  98. package/src/diagnostics/errorLogs/LogFilterUI.tsx +84 -0
  99. package/src/diagnostics/errorLogs/LogNotify.tsx +101 -0
  100. package/src/diagnostics/errorLogs/LogTimeSelector.tsx +724 -0
  101. package/src/diagnostics/errorLogs/LogViewer.tsx +757 -0
  102. package/src/diagnostics/errorLogs/hookErrors.ts +60 -0
  103. package/src/diagnostics/errorLogs/logFiltering.tsx +149 -0
  104. package/src/diagnostics/heapTag.ts +13 -0
  105. package/src/diagnostics/listenOnDebugger.ts +77 -0
  106. package/src/diagnostics/logs/DiskLoggerPage.tsx +572 -0
  107. package/src/diagnostics/logs/ObjectDisplay.tsx +165 -0
  108. package/src/diagnostics/logs/ansiFormat.ts +108 -0
  109. package/src/diagnostics/logs/diskLogGlobalContext.ts +38 -0
  110. package/src/diagnostics/logs/diskLogger.ts +305 -0
  111. package/src/diagnostics/logs/diskShimConsoleLogs.ts +32 -0
  112. package/src/diagnostics/logs/injectFileLocationToConsole.ts +50 -0
  113. package/src/diagnostics/logs/logGitHashes.ts +30 -0
  114. package/src/diagnostics/managementPages.tsx +289 -0
  115. package/src/diagnostics/periodic.ts +89 -0
  116. package/src/diagnostics/runSaturationTest.ts +416 -0
  117. package/src/diagnostics/satSchema.ts +64 -0
  118. package/src/diagnostics/trackResources.ts +82 -0
  119. package/src/diagnostics/watchdog.ts +55 -0
  120. package/src/errors.ts +132 -0
  121. package/src/forceProduction.ts +3 -0
  122. package/src/fs.ts +72 -0
  123. package/src/heapDumps.ts +666 -0
  124. package/src/https.ts +2 -0
  125. package/src/inject.ts +1 -0
  126. package/src/library-components/ATag.tsx +84 -0
  127. package/src/library-components/Button.tsx +344 -0
  128. package/src/library-components/ButtonSelector.tsx +64 -0
  129. package/src/library-components/DropdownCustom.tsx +151 -0
  130. package/src/library-components/DropdownSelector.tsx +32 -0
  131. package/src/library-components/Input.tsx +334 -0
  132. package/src/library-components/InputLabel.tsx +198 -0
  133. package/src/library-components/InputPicker.tsx +125 -0
  134. package/src/library-components/LazyComponent.tsx +62 -0
  135. package/src/library-components/MeasureHeightCSS.tsx +48 -0
  136. package/src/library-components/MeasuredDiv.tsx +47 -0
  137. package/src/library-components/ShowMore.tsx +51 -0
  138. package/src/library-components/SyncedController.ts +171 -0
  139. package/src/library-components/TimeRangeSelector.tsx +407 -0
  140. package/src/library-components/URLParam.ts +263 -0
  141. package/src/library-components/colors.tsx +14 -0
  142. package/src/library-components/drag.ts +114 -0
  143. package/src/library-components/icons.tsx +692 -0
  144. package/src/library-components/niceStringify.ts +50 -0
  145. package/src/library-components/renderToString.ts +52 -0
  146. package/src/misc/PromiseRace.ts +101 -0
  147. package/src/misc/color.ts +30 -0
  148. package/src/misc/getParentProcessId.cs +53 -0
  149. package/src/misc/getParentProcessId.ts +53 -0
  150. package/src/misc/hash.ts +83 -0
  151. package/src/misc/ipPong.js +13 -0
  152. package/src/misc/networking.ts +2 -0
  153. package/src/misc/random.ts +45 -0
  154. package/src/misc.ts +19 -0
  155. package/src/noserverhotreload.flag +0 -0
  156. package/src/path.ts +226 -0
  157. package/src/persistentLocalStore.ts +37 -0
  158. package/src/promise.ts +15 -0
  159. package/src/server.ts +73 -0
  160. package/src/src.d.ts +1 -0
  161. package/src/test/heapProcess.ts +36 -0
  162. package/src/test/mongoSatTest.tsx +55 -0
  163. package/src/test/satTest.ts +193 -0
  164. package/src/test/test.tsx +552 -0
  165. package/src/zip.ts +92 -0
  166. package/src/zipThreaded.ts +106 -0
  167. package/src/zipThreadedWorker.js +19 -0
  168. package/tsconfig.json +27 -0
  169. package/yarnSpec.txt +56 -0
package/src/0-path-value-core/pathValueCore.ts
@@ -0,0 +1,2217 @@
1
+ import debugbreak from "debugbreak";
2
+ import { SocketFunction } from "socket-function/SocketFunction";
3
+ import { cache, cacheArgsEqual, lazy } from "socket-function/src/caching";
4
+ import { addEpsilons, getBufferFraction, minusEpsilon } from "../bits";
5
+ import { errorToUndefined, ignoreErrors, logErrors } from "../errors";
6
+ import { appendToPathStr, getParentPathStr, getPathDepth, getPathFromStr, getPathIndexAssert, getPathStr, hack_getPackedPathSuffix, hack_setPackedPathSuffix, hack_stripPackedPath, removePathLastPart, rootPathStr } from "../path";
7
+ import { measureBlock, measureFnc, measureWrap } from "socket-function/src/profiling/measure";
8
+ import { IdentityController_getOwnPubKeyShort, debugNodeId } from "../-c-identity/IdentityController";
9
+ import { pathValueArchives } from "./pathValueArchives";
10
+ import { blue, magenta, red, yellow } from "socket-function/src/formatting/logColors";
11
+ import { PathValueController, pathValueCommitter } from "./PathValueController";
12
+ import { batchFunction, delay, runInfinitePoll } from "socket-function/src/batching";
13
+ import { ActionsHistory } from "../diagnostics/ActionsHistory";
14
+ import { markArrayAsSplitable } from "socket-function/src/fixLargeNetworkCalls";
15
+ import { registerDynamicResource, registerMapArrayResource, registerResource } from "../diagnostics/trackResources";
16
+ import { binarySearchIndex, isNode, isNodeTrue, promiseObj, timeInHour, timeInMinute, timeInSecond } from "socket-function/src/misc";
17
+ import { isNodeTrusted, isTrusted, isTrustedByNode } from "../-d-trust/NetworkTrust2";
18
+ import { AuthorityPath, LOCAL_DOMAIN_PATH, pathValueAuthority2 } from "./NodePathAuthorities";
19
+ import { pathValueSerializer } from "../-h-path-value-serialize/PathValueSerializer";
20
+ import { isNoNetwork } from "../config";
21
+ import { formatTime } from "socket-function/src/formatting/format";
22
+ import { getOwnNodeId, isOwnNodeId } from "../-f-node-discovery/NodeDiscovery";
23
+ import { getNodeIdDomain, getNodeIdDomainMaybeUndefined, getNodeIdIP } from "socket-function/src/nodeCache";
24
+ import { getMachineId } from "../-a-auth/certs";
25
+ import { PromiseRace } from "../../src/misc/PromiseRace";
26
+
27
+ import yargs from "yargs";
28
+ import { sha256 } from "js-sha256";
29
+ import { PromiseObj } from "../promise";
30
+ import { ClientWatcher } from "../1-path-client/pathValueClientWatcher";
31
+ import { debugLog, isDebugLogEnabled } from "./debugLogs";
32
+ import { diskLog } from "../diagnostics/logs/diskLogger";
33
+
34
+ let yargObj = isNodeTrue() && yargs(process.argv)
35
+ .option("noarchive", { type: "boolean", alias: ["noarchives"], desc: "Don't save writes to disk. Still reads from disk." })
36
+ .argv || {}
37
+ ;
38
+
39
+ /** We don't ingest changes older than this. However, once changes are ingested,
40
+ * we don't usually verify them, and if we did, it would be against a higher value
41
+ * (but NOT MAX_CHANGE_AGE, as that is too high).
42
+ */
43
+ export const MAX_ACCEPTED_CHANGE_AGE = 1000 * 30;
44
+ /** We reject changes older than this. */
45
+ //export let MAX_ACCEPTED_CHANGE_AGE = 1000 * 60 * 2;
46
+
47
+ /** After this amount of time, we assume no new changes will be created
48
+ * (which means changes will no longer become rejected)
49
+ * (Should be a bit longer than MAX_ACCEPTED_CHANGE_AGE, as there might be a delay
50
+ * in processing changes, so it might be past MAX_ACCEPTED_CHANGE_AGE, but we might
51
+ * still receive changes in the pipeline).
52
+ * NOTE: This is now MUCH higher than MAX_ACCEPTED_CHANGE_AGE, because of the case of the fast writer and slow reader.
53
+ * The slow reader can easily never get the read finished before the writer updates it. We mask the issue by moving
54
+ * the slow reader farther in the past, which helps the case where it is only reading, although it causes some instability
55
+ * if the function also writes to values (as it can cause rejections that go back as far into the past as it is writing).
56
+ * Of course this won't always work, but it should solve most cases (the true solution is just running those functions
57
+ * on the same thread, but then we take a massive speed hit, and the fast writer starts to lag).
58
+ * - MAX_ACCEPTED_CHANGE_AGE should probably become even shorter, and maybe even dynamic, as in games where
59
+ * seeing rejections going back 60s would be really annoying.
60
+ * NOTE: Any values before age time are subject to being received out of order. AFTER this time,
61
+ * every authority should have all values, and therefore, all reads should be able to see
62
+ * data in the correct (global) order.
63
+ */
64
+ export const MAX_CHANGE_AGE = MAX_ACCEPTED_CHANGE_AGE * 2;
65
+
66
+ /** Extra time we keep clientside prediction rejections for, to give us time to receive the actual values. */
67
+ export const CLIENTSIDE_PREDICT_LEEWAY = 500;
68
+
69
+ /** Any PathValues which take longer than this to write should be rejected, so... we have
70
+ * to write well before this time.
71
+ * - This has to be at least MAX_CHANGE_AGE * 4.5 + the time to serialize and
72
+ * send our data to remote storage.
73
+ */
74
+ export const ARCHIVE_FLUSH_LIMIT = Math.max(MAX_CHANGE_AGE * 10, timeInMinute * 60);
75
+
76
+ /** After this we start considering reading values and GCing them. Before this we
77
+ * just merge the values, not really processing them.
78
+ */
79
+ export const VALUE_GC_THRESHOLD = ARCHIVE_FLUSH_LIMIT * 3;
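For orientation, the limits above work out to the following concrete values (a sketch; it assumes timeInMinute and timeInSecond from socket-function/src/misc are 60,000 ms and 1,000 ms respectively):

// MAX_ACCEPTED_CHANGE_AGE = 30_000 ms                                     (30 seconds)
// MAX_CHANGE_AGE          = 30_000 * 2 = 60_000 ms                        (1 minute)
// ARCHIVE_FLUSH_LIMIT     = max(60_000 * 10, 60_000 * 60) = 3_600_000 ms  (1 hour)
// VALUE_GC_THRESHOLD      = 3_600_000 * 3 = 10_800_000 ms                 (3 hours)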
80
+
81
+ // Only start using ARCHIVE_FLUSH_LIMIT after this time.
82
+ // Before this date, we moved some files around, so their timestamps are too old.
83
+ // Once we upload to backblaze we MIGHT have to reset this, but... really, any future moves
84
+ // should change the type (the limit is only for 'genesis' type files).
85
+ export const ARCHIVE_FLUSH_LIMIT_START = +new Date("2024-07-14");
86
+
87
+ /** The maximum time we assume a completed archive write will take to be visible to any reader.
88
+ * - We also assume any files with file.writeTime + MAX_ARCHIVE_PROPAGATION_DELAY < Date.now()
89
+ * will be visible to all readers.
90
+ */
91
+ export const MAX_ARCHIVE_PROPAGATION_DELAY = 1000 * 10;
92
+
93
+ /** The time we wait until we clean up undefined values from memory. */
94
+ export const UNDEFINED_MEMORY_CLEANUP_DELAY = MAX_CHANGE_AGE * 2;
95
+
96
+ export const ARCHIVE_LOOP_DELAY = MAX_CHANGE_AGE * 2;
97
+
98
+ /** Both of these are just targets, we might exceed them. */
99
+ export const FILE_VALUE_COUNT_LIMIT = 50_000;
100
+ export const FILE_SIZE_LIMIT = 50_000_000;
101
+
102
+ export const THREAD_START_TIME = Date.now();
103
+
104
+ /** On startup we are more lenient waiting for things, in case many services are started
105
+ * at the same time. But... eventually, things need to be loaded, and we have to start
106
+ * throwing errors
107
+ */
108
+ export const STARTUP_CUTOFF_TIME = THREAD_START_TIME + timeInSecond * 30;
109
+
110
+ /** After this time, we assume an authority is dead, and stop waiting for it to start up.
111
+ * - Most of this time is spent loading the database, which can easily take minutes.
112
+ * - Authority startups should be rare, so this can be somewhat long.
113
+ * - We are smart about ignoring nodes which are not responding to commands,
114
+ * so it should be rare for zombie nodes to exist, but... it could happen.
115
+ */
116
+ export const MAX_ACCEPTED_AUTHORITY_STARTUP_TIME = timeInMinute * 30;
117
+
118
+
119
+ // NOTE: `time` may be a bit off, as it must be unique, so if we create an identical value we
120
+ // will increment it by epsilon (addEpsilons). This gives us 4096 values per millisecond before
121
+ // we start to drift, which... is way more than enough (4M values per second would be a LOT of writes).
122
+ // `creatorId` is the first 62 bits (can't use 64 as !== with double is not the same as uint64)
123
+ // of the public key of the caller. This can be validated (by IdentityController right now),
124
+ // but as only trusted nodes create writes we don't really need to validate it. There is a new
125
+ // creatorId per process, which is few enough so that we shouldn't ever have to worry about
126
+ // collisions.
127
+ // NOTE: Time is unique for each set of changes, and therefore for each path, BUT, it is NOT
128
+ // unique across paths (which should be fine, as different paths should never experience contention).
129
+ // - Values with === Time should also have === readLocks?
130
+ export type Time = {
131
+ time: number;
132
+ // NOTE: Version is required, so that FunctionRunner can update a value without
133
+ // changing the time order by too much. In most cases it will be 0.
134
+ // IMPORTANT! version should be an integer (it can be negative, just no decimals).
135
+ version: number;
136
+ creatorId: number;
137
+ };
138
+ export const epochTime: Time = { time: 0, version: 0, creatorId: 0 };
139
+ Object.freeze(epochTime);
140
+
141
+ /** `timeMinusEpsilon(time)` < time BUT, there is never a case where
142
+ * `timeMinusEpsilon(time) < otherTime < time` (there is no gap between the times)
143
+ */
144
+ export function timeMinusEpsilon(time: Time) {
145
+ return { time: time.time, version: time.version, creatorId: minusEpsilon(time.creatorId) };
146
+ }
147
+
148
+ function validateLockCount(value: PathValue) {
149
+ if (value.lockCount === 0 && value.locks.length > 0) {
150
+ throw new Error(`lockCount is 0, but locks are present (there are ${value.locks.length}), the value is invalid, lockCount MUST be set correctly`);
151
+ }
152
+ }
153
+
154
+ // The startTime MUST have a valid value. For `startTime < other.time < endTime`
155
+ // then there CANNOT be a valid value (if there is, it means there was contention,
156
+ // and the lock is rejected, invalidating the write that uses it).
157
+ // NOTE: endTime might NOT equal PathValue.time, due to reusing readLocks for the same
158
+ // write (but NOT reusing time, because time must be unique).
159
+ // 'Locks' the path for the duration of the startTime/endTime. startTime is the time
160
+ // the value read was written at, which is required as we don't provide a value
161
+ // for the ReadLock, so we need a way to determine which value we are locking
162
+ // (and if it exists!).
163
+ // - The fact that the duration of the lock is longer than required is fine. We are
164
+ // extending it from the read time to the write time, which is usually far in the past.
165
+ // And the only way the extended duration would cause the lock to break, is if
166
+ // the value changes away and then changes back again... which is a rare case anyways!
167
+ export type ReadLock = {
168
+ path: string;
169
+ // Exclusive (for ranges), often === the time of the value read
170
+ // === (for single values), often === the time of the value read, but can be any value,
171
+ // as long as it exists. UNLESS readIsTransparent, then it must either not exist,
172
+ // or be a transparent write.
173
+ startTime: Time;
174
+ // Exclusive (for ranges), often === the time we are writing at
175
+ endTime: Time;
176
+ // We don't store the ENTIRE value because that would result in sending the entire
177
+ // program state per write (although I guess if you only read numbers ReadLock
178
+ // is pretty big anyways, but... eh...). HOWEVER, sending whether we are reading
179
+ // undefined is useful, and makes garbage collection possible.
180
+ readIsTranparent: boolean;
181
+
182
+ /** A clientside only value to cause rejected values to be used as valid values for a bit, to fix
183
+ * the case where we receive the function evaluation before the replacement values (which is
184
+ * actually fairly common, due to how permissions work, so that existing paths are faster,
185
+ * but new paths are slower).
186
+ */
187
+ keepRejectedUntil?: number;
188
+ };
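To make the startTime/endTime convention concrete, here is a minimal sketch of the lock a read-then-write might attach, using the Time and ReadLock types above (the path and creatorId are hypothetical; real times come from getNextTime()):

// We read the value written at readTime, then wrote our result at writeTime.
// The lock asserts no other valid value may land strictly between the two times;
// if one does, there was contention and the dependent write is rejected.
const readTime: Time = { time: 1_720_000_000_000, version: 0, creatorId: 42 };
const writeTime: Time = { time: 1_720_000_005_000, version: 0, creatorId: 42 };
const exampleLock: ReadLock = {
    path: "user.alice.score",
    startTime: readTime,      // the time of the value we read
    endTime: writeTime,       // the time we are writing at
    readIsTranparent: false,  // we read a real value, not undefined/transparent
};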
189
+ // NOTE: WriteState is a pared down PathValue for ReadLocks to consume.
190
+ // 1) This helps keep them separate, as values you read are very different from values
191
+ // you watch or are an authority on.
192
+ // 2) This greatly reduces data sent over the wire.
193
+ // 3) WriteStates can be thrown away after a period of time, as we can assume values are valid
194
+ // once they are old enough (as if they are old and invalid no one would be using them, and
195
+ // everyone would know to stop using them a long time ago). PathValues cannot, as they store
196
+ // data, which cannot be inferred, but as WriteStates don't store data, and only matter
197
+ // if they are inValid (or cause another value to be invalid), it works fine...
198
+ export type WriteState = {
199
+ path: string;
200
+ time: Time;
201
+ isValid: boolean;
202
+ // We don't store the ENTIRE value, because that would be inefficient. But... to make
203
+ // garbage collection possible, we store if it is `undefined`.
204
+ // NOTE: We could also start to store simple values here, such as numbers. This would allow
205
+ // dumb writers that don't want to check if their value exists to write without breaking
206
+ // reads (ex, multiple might write database.name = "example" at once).
207
+ // - HOWEVER, writers should just be smart, as it causes a lot of thrashing and work
208
+ // on all the clients per write, even if nothing changes!
209
+ isTransparent: boolean;
210
+ };
211
+
212
+ // NOTE: Object values ARE allowed here. They are effectively immutable though, so it is recommended
213
+ // clients recursively freeze them after they deserialize them.
214
+ export type Value = unknown;
215
+ export type PathValue = {
216
+ path: string;
217
+ /** @deprecated ALWAYS access value using pathValueSerializer.getPathValue(), unless making a shallow copy. */
218
+ value: Value;
219
+ time: Time;
220
+ /** @deprecated NOT deprecated, just remember when you set this ALWAYS set lockCount, otherwise
221
+ * valid checking breaks!
222
+ * IMPORTANT! DO NOT check for locks.length === 0. Locks are lost during serialization, so this
223
+ * will be irrelevant. lockCount on the other hand, should be accurate.
224
+ */
225
+ locks: ReadLock[];
226
+
227
+ /** Should be set EVERY time we have locks. IF lockCount === 0, then we know there are no locks
228
+ * (this is preserved through serialization, even when locks aren't).
229
+ */
230
+ lockCount: number;
231
+
232
+ /** value === undefined
233
+ * - MUST be set if value === undefined
234
+ * - We only GC values if they aren't eclipsing any values (or if we also remove the values they are eclipsing
235
+ * at the same time, in an atomic manner).
236
+ * - Makes reading more efficient (so we don't have to worry about lazily reading values), and makes our
237
+ * intentions more clear.
238
+ */
239
+ canGCValue?: boolean;
240
+
241
+ /** Event paths are temporary paths which are never used after a certain period of time
242
+ * (NOT paths which are written to frequently, those should be normal writes).
243
+ * Characteristics of event paths:
244
+ * 1) Not stored on disk
245
+ * 2) Not readable after `time.time + MAX_CHANGE_AGE`
246
+ * 3) Dropped from storage and watches after time: `time.time + MAX_CHANGE_AGE`
247
+ * NOTE: This is per path, not per value. This means events MUST have unique paths,
248
+ * as newer values will be deleted based on when the first use of the path occurred.
249
+ * NOTE: Non-recursive, so it has to be on the exact paths
250
+ */
251
+ event?: boolean;
252
+
253
+ /*
254
+ - This means it can be read through. Such as "undefined" OR a special object indicator.
255
+ - Conventional accesses consider these equivalent to having no value, special accesses such as
256
+ Object.keys() use these to record objects inside of lookups.
257
+ - An optimization, to save time deserializing the value (however now that we have it, it's required,
258
+ and MUST be set for undefined values).
259
+ - Is compatible with other undefined values when locks are involved (for example, if you require
260
+ the value to be undefined, and you see an isTransparent, that's fine, and your lock will
261
+ be considered valid).
262
+ */
263
+ isTransparent?: boolean;
264
+
265
+
266
+ /** Authorities will clobber this, but non-authorities might use this?
267
+ * - This is mostly an in-memory value, and not meant for storage or transmission.
268
+ * IMPORTANT! The ONLY reason a PathValue can be rejected is if one of its locks is rejected
269
+ * (due to a conflict, or that lock value being rejected itself). See call prediction for
270
+ * how this can be done to allow arbitrary rejection (as in, locking a large time range,
271
+ * on a unique path, and then writing to that path when we want to cause a rejection).
272
+ */
273
+ valid?: boolean;
274
+
275
+ // #region Serialization Only
276
+
277
+ /** ONLY for use by pathValueSerializer */
278
+ isValueLazy?: boolean;
279
+
280
+ source?: string;
281
+
282
+ /** Used internally by archive management services. */
283
+ updateCount?: number;
284
+
285
+ // #endregion
286
+ };
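As a minimal sketch (hypothetical path and creatorId), a lock-free write built against this type could look like the following; since there are no locks, lockCount is 0, and canGCValue is only needed when value is undefined:

const exampleWrite: PathValue = {
    path: "user.alice.score",
    value: 123,
    time: { time: Date.now(), version: 0, creatorId: 42 },  // normally from getNextTime()
    locks: [],
    lockCount: 0,
};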
287
+
288
+ export type NodeId = string;
289
+
290
+ export type WatchConfig = {
291
+ paths: string[];
292
+ parentPaths: string[];
293
+ };
294
+
295
+ export type PathValueSnapshot = {
296
+ values: { [path: string]: PathValue[] };
297
+ };
298
+
299
+
300
+ export function getNextTime(): Time {
301
+ return {
302
+ time: getNextTimeInternal(),
303
+ version: 0,
304
+ creatorId: getCreatorId(),
305
+ };
306
+ }
307
+ export function compareTime(lhs: Time, rhs: Time) {
308
+ let diff = lhs.time - rhs.time;
309
+ if (diff !== 0) return diff;
310
+ diff = lhs.version - rhs.version;
311
+ if (diff !== 0) return diff;
312
+ return lhs.creatorId - rhs.creatorId;
313
+ }
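compareTime defines the global ordering used throughout this file; for instance, the per-path arrays in AuthorityPathValueStorage below are kept newest-first, which is simply a descending sort by this comparator. A small sketch:

// Sort PathValues newest-first (the ordering the storage below maintains).
function sortNewestFirst(values: PathValue[]): PathValue[] {
    return [...values].sort((a, b) => compareTime(b.time, a.time));
}
// Ties on `time` fall through to `version`, then to `creatorId`, so two writes from
// different creators in the same millisecond still get a stable, total order.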
314
+
315
+ export function getCreatorId() {
316
+ return IdentityController_getOwnPubKeyShort();
317
+ }
318
+ let prevTime = 0;
319
+ function getNextTimeInternal() {
320
+ let time = Date.now();
321
+ if (time <= prevTime) {
322
+ time = addEpsilons(prevTime, 1);
323
+ }
324
+ prevTime = time;
325
+ return time;
326
+ }
327
+
328
+ export let debugRejections = false;
329
+ export function enableDebugRejections() {
330
+ debugRejections = true;
331
+ }
332
+ export function disableDebugRejections() {
333
+ debugRejections = false;
334
+ }
335
+ export let isCoreQuiet = true;
336
+ export function quietCoreMode() {
337
+ isCoreQuiet = true;
338
+ }
339
+ export function debugCoreMode() {
340
+ isCoreQuiet = false;
341
+ }
342
+
343
+ export function timeHash(time: Time) {
344
+ return time.creatorId + "_" + time.time;
345
+ }
346
+ export function getLockHash(lock: ReadLock) {
347
+ // NOTE: startTime might be 0, in which case the path is required in the hash.
348
+ return timeHash(lock.startTime) + "_" + timeHash(lock.endTime) + "_" + lock.path;
349
+ }
350
+
351
+ export function debugPathValuePath(pathValue: { time: Time; path: string }): string {
352
+ // Log the raw path, so it can show up in searches
353
+ return `(${debugTime(pathValue.time)}) ${pathValue.path}`;
354
+ }
355
+ export function debugPathValue(pathValue: PathValue, write?: boolean): string {
356
+ return `${debugPathValuePath(pathValue)} ${write ? "=" : "==="} ${String(pathValueSerializer.getPathValue(pathValue))}`;
357
+ }
358
+ export function debugTime(time: Time): string {
359
+ return `${time.time}[${time.version}]@${time.creatorId.toString().slice(4, 12)}`;
360
+ }
361
+
362
+ export function filterChildPaths(parentPath: string, packedSuffix: string, paths: Set<string> | undefined) {
363
+ if (!packedSuffix || !paths) return paths;
364
+ return filterChildPathsBase(parentPath, packedSuffix, paths);
365
+ }
366
+
367
+ /** Returns a number between 0 (inclusive) and 1 (exclusive)
368
+ * - See matchesParentRangeFilter, matchesParentRangeFilterPart, and filterChildPaths.
369
+ * FOR INTERNAL USE ONLY!
370
+ */
371
+ export function __getRoutingHash(key: string): number {
372
+ let hash = sha256(key);
373
+ return getBufferFraction(Buffer.from(hash, "hex"));
374
+ }
375
+ // NOTE: Assumes fullPath is already a child of parentPath, and only checks for hash
376
+ export function matchesParentRangeFilter(config: {
377
+ parentPath: string;
378
+ fullPath: string;
379
+ start: number;
380
+ end: number;
381
+ }) {
382
+ if (config.start === 0 && config.end === 1) return true;
383
+ let part = getPathIndexAssert(config.fullPath, getPathDepth(config.parentPath));
384
+ let hash = __getRoutingHash(part);
385
+ return config.start <= hash && hash < config.end;
386
+ }
387
+ export function matchesParentRangeFilterPart(config: {
388
+ part: string;
389
+ start: number;
390
+ end: number;
391
+ }) {
392
+ if (config.start === 0 && config.end === 1) return true;
393
+ let hash = __getRoutingHash(config.part);
394
+ return config.start <= hash && hash < config.end;
395
+ }
396
+
397
+ let getCompressNetworkBase = () => false;
398
+ export const registerGetCompressNetwork = (fnc: () => boolean) => { getCompressNetworkBase = fnc; };
399
+ export function getCompressNetwork() {
400
+ return getCompressNetworkBase();
401
+ }
402
+
403
+ let getCompressDiskBase = () => false;
404
+ export const registerGetCompressDisk = (fnc: () => boolean) => { getCompressDiskBase = fnc; };
405
+ export function getCompressDisk() {
406
+ return getCompressDiskBase();
407
+ }
408
+
409
+ let isOurPrediction = (value: PathValue) => false;
410
+ export const registerIsOurPrediction = (fnc: (value: PathValue) => boolean) => { isOurPrediction = fnc; };
411
+
412
+ const filterChildPathsBase = measureWrap(
413
+ function filterChildPathsBase(parentPath: string, packedSuffix: string, paths: Set<string>): Set<string> {
414
+ let [startFractionStr, endFractionStr] = packedSuffix.split("|");
415
+ let startFraction = Number(startFractionStr);
416
+ let endFraction = Number(endFractionStr);
417
+
418
+ let depth = getPathDepth(parentPath);
419
+
420
+ let filtered = new Set<string>();
421
+ for (let path of paths) {
422
+ let key = getPathIndexAssert(path, depth);
423
+ let hash = __getRoutingHash(key);
424
+ if (startFraction <= hash && hash < endFraction) {
425
+ filtered.add(path);
426
+ }
427
+ }
428
+ //console.log(`Filtered ${paths.size} paths to ${filtered.size} paths`);
429
+ return filtered;
430
+ }
431
+ );
432
+ export function encodeParentFilter(config: {
433
+ path: string;
434
+ startFraction: number;
435
+ endFraction: number;
436
+ }) {
437
+ return hack_setPackedPathSuffix(config.path, `${config.startFraction}|${config.endFraction}`);
438
+ }
439
+ export function decodeParentFilter(path: string): { start: number, end: number } | undefined {
440
+ let packedSuffix = hack_getPackedPathSuffix(path);
441
+ if (!packedSuffix) return undefined;
442
+ let [startStr, endStr] = packedSuffix.split("|");
443
+ return { start: Number(startStr), end: Number(endStr) };
444
+
445
+ }
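The two helpers above are symmetric: the fraction range is packed onto the path as a `start|end` suffix and recovered later. A quick sketch of the round trip (the parent path is hypothetical):

const packed = encodeParentFilter({ path: "user", startFraction: 0.25, endFraction: 0.5 });
const range = decodeParentFilter(packed);   // { start: 0.25, end: 0.5 }
// A child key under that parent is then kept only when
// 0.25 <= __getRoutingHash(key) < 0.5 (see matchesParentRangeFilterPart above).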
446
+
447
+ class AuthorityPathValueStorage {
448
+ // path => sorted by -time (so newest are first)
449
+ // NOTE: We automatically drop historical values that can no longer influence
450
+ // the most current state.
451
+ private values = registerMapArrayResource("paths|authorityPathValueStorage", new Map<string, PathValue[]>());
452
+ // parent => child paths
453
+ private parentPathLookup = registerResource("paths|parentPathLookup", new Map<string, Set<string>>());
454
+ private isSyncedCache = registerResource("paths|isSyncedCache", new Set<string>());
455
+ private parentsSynced = registerResource("paths|parentsSynced", new Map<string,
456
+ true |
457
+ // If set, contains the nodes we have yet to sync before we can consider the parent synced.
458
+ Set<string>
459
+ >());
460
+
461
+ public DEBUG_UNWATCH = false;
462
+
463
+ private overrides = new Set<Map<string, PathValue>>();
464
+
465
+ /** The latest times of paths that have been deleted (but aren't necessarily still deleted) */
466
+ private lastDeleteAt = new Map<string, Time>();
467
+ private lastDeleteAtOld = new Map<string, Time>();
468
+ private lastDeleteAtOlder = new Map<string, Time>();
469
+ private discardOldLastDeletes = (() => {
470
+ const self = this;
471
+ runInfinitePoll(ARCHIVE_FLUSH_LIMIT, function discardOldLastDeletes() {
472
+ self.lastDeleteAtOlder = self.lastDeleteAtOld;
473
+ self.lastDeleteAtOld = self.lastDeleteAt;
474
+ self.lastDeleteAt = new Map();
475
+ });
476
+ })();
477
+
478
+ // NOTE: All of our functions only return valid values (unless otherwise specified).
479
+ // - epochTime reads the latest value, which is important, as we may receive values
480
+ // with time > Date.now(), in the case that our clock is behind the server.
481
+
482
+ public getValue(path: string): unknown {
483
+ return pathValueSerializer.getPathValue(this.getValueAtTime(path));
484
+ }
485
+ public getValueExactIgnoreInvalid(path: string, time: Time): PathValue | undefined {
486
+ let value = this.getValueAtTime(path, time, true);
487
+ if (value && compareTime(value.time, time) === 0) {
488
+ return value;
489
+ }
490
+ return undefined;
491
+ }
492
+
493
+ /** time === undefined gets the latest value */
494
+ public getValueAtTime(
495
+ path: string,
496
+ // Include all values with other.time <= time, returning the one with the highest time
497
+ // (either that is valid, or, if readInvalid is true, just the highest time).
498
+ time?: Time,
499
+ readInvalid = false
500
+ ): PathValue | undefined {
501
+ let overrideValue: PathValue | undefined;
502
+ if (this.overrides.size > 0) {
503
+ for (let override of this.overrides) {
504
+ let value = override.get(path);
505
+ if (value) {
506
+ if (
507
+ (!time || compareTime(value.time, time) <= 0)
508
+ && (!overrideValue || compareTime(value.time, overrideValue.time) > 0)
509
+ ) {
510
+ overrideValue = value;
511
+ }
512
+ }
513
+ }
514
+ }
515
+
516
+ let values = this.values.get(path);
517
+ if (values) {
518
+ for (let value of values) {
519
+ if (!value.valid && !readInvalid) continue;
520
+ if (!time || compareTime(value.time, time) <= 0) {
521
+ if (overrideValue && compareTime(overrideValue.time, value.time) >= 0) {
522
+ return overrideValue;
523
+ }
524
+ return value;
525
+ }
526
+ }
527
+ }
528
+ if (overrideValue) return overrideValue;
529
+ return this.getValueOrEpochIfSynced(path);
530
+ }
531
+ public getValueBeforeTime(path: string, time: Time): PathValue | undefined {
532
+ return this.getValueAtTime(path, timeMinusEpsilon(time));
533
+ }
534
+ public getValueOrEpochIfSynced(path: string): PathValue | undefined {
535
+ if (this.isSynced(path)) {
536
+ return { path, valid: true, time: epochTime, locks: [], lockCount: 0, value: undefined, canGCValue: true, isTransparent: true };
537
+ }
538
+ return undefined;
539
+ }
540
+ // Returns at least one value (if any are available)
541
+ public getValuePlusHistory(path: string, time?: number): PathValue[] {
542
+ if (this.overrides.size > 0) {
543
+ throw new Error(`AuthorityPathValueStorage.getValuePlusHistory support for temporaryOverride not implemented yet.`);
544
+ }
545
+ let valuesToReturn: PathValue[] = [];
546
+ let values = this.values.get(path);
547
+ if (values) {
548
+ for (let value of values) {
549
+ if (!value.valid) continue;
550
+ if (valuesToReturn.length > 0 && (!time || value.time.time < time)) break;
551
+ valuesToReturn.push(value);
552
+ }
553
+ }
554
+ return valuesToReturn;
555
+ }
556
+
557
+ // TODO: Optimize this via an index
558
+ /** NOTE: Returns paths that have undefined values. If you want to exclude those
559
+ * you will need to check the values yourself (as this function does not accept a time argument).
560
+ * TODO: We SHOULD probably check for undefined values here... as it is surely faster than checking
561
+ * anywhere else, as we already have the values array!
562
+ */
563
+ @measureFnc
564
+ public getPathsFromParent(parentPath: string): Set<string> | undefined {
565
+ let packedSuffix = hack_getPackedPathSuffix(parentPath);
566
+ parentPath = hack_stripPackedPath(parentPath);
567
+ let result = this.parentPathLookup.get(parentPath);
568
+ return filterChildPaths(parentPath, packedSuffix, result);
569
+ }
570
+
571
+ @measureFnc
572
+ private addParentPath(path: string) {
573
+ let parentPath = getParentPathStr(path);
574
+ let childPaths = this.parentPathLookup.get(parentPath);
575
+ if (!childPaths) {
576
+ childPaths = new Set();
577
+ this.parentPathLookup.set(parentPath, childPaths);
578
+ }
579
+ childPaths.add(path);
580
+ }
581
+
582
+ /** Called by PathWatcher / ourself when paths are unwatched / destroyed */
583
+ public markPathAsUnwatched(path: string) {
584
+ // If we are the authority we:
585
+ // 1) Need to maintain the values for other nodes
586
+ // 2) Don't need to worry about the values getting out of date,
587
+ // because we will get any new values whether we want to or not!
588
+ if (pathValueAuthority2.isSelfAuthority(path)) return;
589
+
590
+ if (this.DEBUG_UNWATCH) {
591
+ console.log(blue(`Unsyncing path at ${Date.now()}`), path);
592
+ }
593
+
594
+ this.isSyncedCache.delete(path);
595
+ this.removePathFromStorage(path, "unwatched");
596
+ writeValidStorage.deleteRemovedPath(path);
597
+ }
598
+ public markParentPathAsUnwatched(path: string) {
599
+ // NOTE: I don't think we have to handle the case where we are the authority,
600
+ // because we don't delete any actual data here.
601
+ this.parentsSynced.delete(path);
602
+ }
603
+
604
+ /** Used for atomic operations, to ensure a path is stable enough (and to check that it hasn't
605
+ * changed during the course of an operation).
606
+ */
607
+ public getLastChangedTime(path: string): Time | undefined {
608
+ let time = this.values.get(path)?.[0]?.time;
609
+ let time0 = this.lastDeleteAt.get(path);
610
+ let time1 = this.lastDeleteAtOld.get(path);
611
+ let time2 = this.lastDeleteAtOlder.get(path);
612
+
613
+ if (!time || (time0 && compareTime(time0, time) > 0)) time = time0;
614
+ if (!time || (time1 && compareTime(time1, time) > 0)) time = time1;
615
+ if (!time || (time2 && compareTime(time2, time) > 0)) time = time2;
616
+
617
+ return time;
618
+ }
619
+
620
+
621
+ @measureFnc
622
+ public async getSnapshot(authority: AuthorityPath): Promise<PathValueSnapshot> {
623
+ let snapshot: PathValueSnapshot = {
624
+ values: Object.create(null),
625
+ };
626
+ let now = Date.now();
627
+ let count = 0;
628
+ for (let [path, values] of this.values) {
629
+ count++;
630
+ if (count > 10_000) {
631
+ await delay("paintLoop");
632
+ count = 0;
633
+ }
634
+ // NOTE: We can't return extra values, as they might be outdated values which were
635
+ // set to undefined, and then deleted, so returning them will create spurious values.
636
+ if (!pathValueAuthority2.isInAuthority(authority, path)) continue;
637
+ this.clearRedundantOldValues(values, now);
638
+ if (values.length === 0) continue;
639
+ snapshot.values[path] = values;
640
+ }
641
+ return snapshot;
642
+ }
643
+ @measureFnc
644
+ public ingestSnapshot(snapshot: PathValueSnapshot) {
645
+ // NOTE: It is important that we don't try to archive these values, as they either came
646
+ // from the archive, or have already been archived.
647
+
648
+ let now = Date.now();
649
+
650
+ for (let [path, values] of Object.entries(snapshot.values)) {
651
+ let existingValues = this.values.get(path);
652
+ for (let value of values) {
653
+ if (value.canGCValue) {
654
+ this.possiblyUndefinedPaths().value.add(value.path);
655
+ }
656
+ if (value.event) {
657
+ this.eventPaths().value.add(value.path);
658
+ }
659
+ }
660
+ if (!existingValues) {
661
+ this.values.set(path, values);
662
+ this.addParentPath(path);
663
+ } else {
664
+ let allValues = existingValues.concat(values);
665
+ allValues.sort((a, b) => compareTime(b.time, a.time));
666
+ // Remove duplicates
667
+ for (let i = 0; i < allValues.length - 1; i++) {
668
+ if (allValues[i].time.time === allValues[i + 1].time.time &&
669
+ allValues[i].time.creatorId === allValues[i + 1].time.creatorId) {
670
+ allValues.splice(i, 1);
671
+ i--;
672
+ }
673
+ }
674
+ values = allValues;
675
+ }
676
+ this.clearRedundantOldValues(values, now);
677
+ }
678
+ }
679
+
680
+ public resetForInitialTrigger(path: string) {
681
+ if (isDebugLogEnabled()) {
682
+ debugLog("REMOVE FOR INITIAL SYNC", { path });
683
+ }
684
+ this.removePathFromStorage(path, "reset for initial trigger");
685
+ }
686
+
687
+ public DEBUG_hasAnyValues(path: string) {
688
+ let values = this.values.get(path);
689
+ if (!values) return false;
690
+ if (values.length === 1 && compareTime(values[0].time, epochTime) === 0) {
691
+ return false;
692
+ }
693
+ return true;
694
+ }
695
+ public isSynced(path: string) {
696
+ if (pathValueAuthority2.isSelfAuthority(path)) return true;
697
+
698
+ if (this.isSyncedCache.has(path)) return true;
699
+
700
+ // It is only synced if we have values that we DIDN'T create (aka, predictions)
701
+ let values = this.values.get(path);
702
+ if (values?.some(x => !isOurPrediction(x))) {
703
+ if (!isCoreQuiet && isNode()) {
704
+ //console.log(`Marking path as synced`, path);
705
+ }
706
+ this.isSyncedCache.add(path);
707
+ return true;
708
+ }
709
+ return false;
710
+ }
711
+ public isParentSynced(path: string) {
712
+ // We have to check a test child path to see if we are a self authority
713
+ if (pathValueAuthority2.isSelfAuthority(appendToPathStr(path, ""))) return true;
714
+
715
+ return this.parentsSynced.get(path) === true;
716
+ }
717
+
718
+ private getMultiNodesForParent: (path: string) => Map<string, unknown> | undefined = () => undefined;
719
+ public setGetMultiNodesForParent(fnc: (path: string) => Map<string, unknown> | undefined) {
720
+ this.getMultiNodesForParent = fnc;
721
+ }
722
+ @measureFnc
723
+ public ingestValues(newValues: PathValue[], parentsSynced: string[] | undefined, parentSyncedSources: Map<string, string[]> | undefined) {
724
+ let now = Date.now();
725
+
726
+ if (isDebugLogEnabled()) {
727
+ for (let value of newValues) {
728
+ debugLog("INGEST VALUE", { path: value.path, time: value.time.time });
729
+ }
730
+ }
731
+
732
+ if (parentsSynced) {
733
+ for (let parentPath of parentsSynced) {
734
+ let prevSynced = this.parentsSynced.get(parentPath);
735
+ if (prevSynced === true) continue;
736
+ let remoteNodeIds = this.getMultiNodesForParent(parentPath);
737
+ let sourceNodeIds = parentSyncedSources?.get(parentPath);
738
+ // NOTE: If there is only 1 authority, assume we received data from it, as who else would we receive it from?
739
+ if (!remoteNodeIds || remoteNodeIds.size === 1 || !sourceNodeIds) {
740
+ this.parentsSynced.set(parentPath, true);
741
+ } else {
742
+ if (!prevSynced) {
743
+ prevSynced = new Set(remoteNodeIds.keys());
744
+ this.parentsSynced.set(parentPath, prevSynced);
745
+ }
746
+ for (let sourceNodeId of sourceNodeIds) {
747
+ prevSynced.delete(sourceNodeId);
748
+ }
749
+ if (prevSynced.size === 0) {
750
+ this.parentsSynced.set(parentPath, true);
751
+ }
752
+ }
753
+ }
754
+ }
755
+
756
+ for (let i = 0; i < newValues.length; i++) {
757
+ let value = newValues[i];
758
+ if (value.event) {
759
+ this.eventPaths().value.add(value.path);
760
+ }
761
+ if (value.canGCValue) {
762
+ this.possiblyUndefinedPaths().value.add(value.path);
763
+ }
764
+
765
+ if (!value.event && !value.path.startsWith(LOCAL_DOMAIN_PATH)) {
766
+ let authorityPath = pathValueAuthority2.getSelfArchiveAuthority(value.path);
767
+
768
+ // Store in pending archive lookup
769
+ if (authorityPath) {
770
+ let values = this.pendingArchiveValues.get(authorityPath);
771
+ if (!values) {
772
+ values = [];
773
+ this.pendingArchiveValues.set(authorityPath, values);
774
+ }
775
+ // If our value has no locks it can never be rejected, so if it is the latest, don't bother to keep the others
776
+ if (value.lockCount === 0 && values.length > 0 && values.every(x => compareTime(x.time, value.time) < 0)) {
777
+ validateLockCount(value);
778
+ if (values.length > 1) {
779
+ values.splice(1, values.length - 1);
780
+ }
781
+ values[0] = value;
782
+ } else {
783
+ values.push(value);
784
+ }
785
+ }
786
+ }
787
+
788
+ // Store in global lookup
789
+ {
790
+ let specialGoldenCase = false;
791
+ let values = this.values.get(value.path);
792
+ if (!values) {
793
+ values = [];
794
+ this.values.set(value.path, values);
795
+ this.addParentPath(value.path);
796
+ }
797
+ let insertIndex = values.findIndex(x => compareTime(x.time, value.time) <= 0);
798
+ if (insertIndex < 0) {
799
+ values.push(value);
800
+ } else {
801
+ let prev = values[insertIndex];
802
+ if (prev && compareTime(prev.time, value.time) === 0) {
803
+ // Just update the valid state (we likely aren't an authority on the path anyways)
804
+ prev.valid = value.valid;
805
+ } else {
806
+ // Special case values that always have no locks, by just setting the value, and never splicing
807
+ if (insertIndex === 0 && value.lockCount === 0 && values.length <= 1) {
808
+ validateLockCount(value);
809
+ if (!isCoreQuiet) {
810
+ if (values.length === 1) {
811
+ console.log(`Clobbering ${debugPathValuePath(values[0])} with ${debugPathValuePath(value)}`);
812
+ }
813
+ }
814
+ specialGoldenCase = true;
815
+ values[0] = value;
816
+ } else {
817
+ values.splice(insertIndex, 0, value);
818
+ }
819
+ }
820
+ }
821
+ if (!specialGoldenCase) {
822
+ this.clearRedundantOldValues(values, now);
823
+ }
824
+ }
825
+ }
826
+
827
+ // NOTE: Archiving takes a long time (we buffer values for at least a few minutes), so we can't
828
+ // wait for them to archive. We don't retry on storage failure, because... we should have enough
829
+ // nodes that received it to have the values committed in the long term. Unless we are the only
830
+ // node, in which case redundancy clearly isn't that important anyway (also, if we are only running
831
+ // one node we are likely using the local disk, so writes really shouldn't fail! And if they do,
832
+ // it is likely because the disk is full, and so retrying wouldn't help anyways!)
833
+
834
+ this.archiveLoop();
835
+ }
836
+
837
+ // authorityPath => values
838
+ private pendingArchiveValues = registerMapArrayResource("pendingArchiveValues", new Map<AuthorityPath, PathValue[]>());
839
+ private shuttingDown = new PromiseObj<"shutdown">();
840
+ private pendingShutdownWrites: PromiseObj | undefined;
841
+
842
+ private archiveLoop = lazy(() => {
843
+ // No need to even run the archive loop in the browser...
844
+ if (!isNode()) return;
845
+ if (yargObj.noarchive) return;
846
+ let self = this;
847
+ runInfinitePoll(ARCHIVE_LOOP_DELAY, async function archiveLoop() {
848
+ if (self.shuttingDown.resolved) {
849
+ return;
850
+ }
851
+ await self.innerArchiveLoop();
852
+ });
853
+ });
854
+ private async innerArchiveLoop() {
855
+ let pending = this.pendingArchiveValues;
856
+ this.pendingArchiveValues = new Map();
857
+ if (pending.size === 0) return;
858
+
859
+ // Wait long enough for all values to be old enough to have a fixed valid state.
860
+ // Also wait some extra time randomly, to reduce the chances of archive being
861
+ // called at the same time on multiple nodes at once.
862
+ let delayTime = MAX_CHANGE_AGE * 2 + Math.random() * MAX_CHANGE_AGE * 0.5;
863
+ if (this.shuttingDown.resolved) {
864
+ delayTime = 0;
865
+ }
866
+
867
+ if (!isCoreQuiet || this.shuttingDown.resolved) {
868
+ console.log(blue(`Archiving ${pending.size} paths in ${formatTime(delayTime)}`));
869
+ }
870
+
871
+ let inShutdown = await PromiseRace([this.shuttingDown.promise, delay(delayTime)]);
872
+ if (inShutdown === "shutdown") {
873
+ this.pendingShutdownWrites = new PromiseObj();
874
+ }
875
+
876
+ try {
877
+ for (let [authorityPath, allValues] of pending) {
878
+ let validValues = allValues
879
+ .filter(x => x.valid)
880
+ // ALSO, don't archive epochTime values...
881
+ .filter(x => x.time.time)
882
+ ;
883
+ await pathValueArchives.archiveValues(authorityPath, validValues);
884
+ }
885
+ } finally {
886
+ this.pendingShutdownWrites?.resolve();
887
+ }
888
+
889
+ }
890
+ public async onShutdown() {
891
+ if (yargObj.noarchive) return;
892
+ this.shuttingDown.resolve("shutdown");
893
+ await pathValueAuthority2.waitUntilRoutingIsReady();
894
+ // If there are other nodes, we CAN'T just archive our values, as some may be rejected in the future.
895
+ let nodes = pathValueAuthority2.getReadNodes(rootPathStr);
896
+ let otherReadNodes = nodes.filter(x => !isOwnNodeId(x));
897
+ if (otherReadNodes.length > 0 && this.pendingArchiveValues.size > 0) {
898
+ console.log(`Other authorities (${otherReadNodes.length}), discarding ${this.pendingArchiveValues.size} pending writes`);
899
+ }
900
+ if (otherReadNodes.length === 0) {
901
+ // Wait for the PromiseRace to finish
902
+ await delay("afterpromises");
903
+ // Wait for current loop to finish
904
+ await this.pendingShutdownWrites?.promise;
905
+ // Run another loop, in case a loop wasn't running
906
+ await this.innerArchiveLoop();
907
+ }
908
+ }
909
+
910
+
911
+ // NOTE: I'm not sure we really need to call this. It gets called per write, which should be sufficient.
912
+ // The only case we leak is if there are a lot of writes on a value, then none. But that shouldn't
913
+ // really happen too often (and the real issue is not moving values from memory => storage, which when we do,
914
+ // we will call this anyways).
915
+ // NOTE: values are sorted by -time (so newest are first)
916
+ public clearRedundantOldValues(values: PathValue[], now: number) {
917
+ // NOTE: We can also delete golden rejections BEFORE the valid golden value. But...
918
+ // that is less important, as there are likely to be few rejected values, so this
919
+ // is really just designed for removing older golden values (which are probably valid).
920
+
921
+ // NOTE: We clear values as soon as we can to make FunctionRunners run faster, as any database
922
+ // with many writes will almost certainly have many FunctionRunner calls, which all need to synchronize
923
+ // as much history as we have. So... the less history, the faster FunctionRunners can sync. AND syncing
924
+ // data is the true bottleneck, as it requires network traffic, vs this, which is just manipulating
925
+ // some values in memory...
926
+
927
+ let goldenTime = now - MAX_CHANGE_AGE;
928
+ // NOTE: Values with no locks are implicitly golden, as they can't be rejected.
929
+ let firstGoldenIndex = values.findIndex(x => x.time.time < goldenTime || x.lockCount === 0);
930
+ if (firstGoldenIndex < 0) return;
931
+ // Special case: if everything is golden, the latest value is undefined, and it is behind the GC
932
+ // point, then delete everything, so constantly changing keys doesn't result in us leaking memory forever.
933
+ if (
934
+ firstGoldenIndex === 0
935
+ && values[0].canGCValue
936
+ && values[0].time.time < now - VALUE_GC_THRESHOLD
937
+ ) {
938
+ if (!isCoreQuiet) {
939
+ console.log(`GCing ALL ${debugPathValuePath(values[0])}`);
940
+ }
941
+ // Mark the value as synced though, as synced means we have received values for this path,
942
+ // and while the values are undefined and so we are deleting them... we have definitely
943
+ // received values!
944
+ let path = values[0].path;
945
+ this.isSyncedCache.add(path);
946
+ this.removePathFromStorage(path, "value is undefined and golden");
947
+ values.splice(0, values.length);
948
+ return;
949
+ }
950
+ let goldenCount = values.length - firstGoldenIndex;
951
+ // We can only compress golden values, so if there is only 1 golden, then we can't compress it
952
+ // (as we can't remove it, as the live value might become rejected)
953
+ if (goldenCount < 2) return;
954
+
955
+ if (firstGoldenIndex === 0) {
956
+ // Remove everything but the latest value
957
+ let removed = values.splice(1, values.length - 1);
958
+ if (!isCoreQuiet) {
959
+ console.log(`GCing everything older than ${debugPathValuePath(values[0])}`);
960
+ for (let value of removed) {
961
+ console.log(` ${debugPathValuePath(value)}`);
962
+ }
963
+ }
964
+ } else {
965
+ // Delete everything that is golden, except for the most recent VALID value.
966
+ let validGoldenIndex = values.findIndex(x => x.valid && x.time.time < goldenTime);
967
+ if (validGoldenIndex >= 0 && values.length > validGoldenIndex) {
968
+ for (let i = values.length - 1; i > validGoldenIndex; i--) {
969
+ let gced = values.pop();
970
+ if (!isCoreQuiet && gced) {
971
+ console.log(`GCing ${debugPathValuePath(gced)}`);
972
+ }
973
+ }
974
+ }
975
+ }
976
+ }
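As a concrete reading of the rules above (using the 60-second MAX_CHANGE_AGE from the top of the file): if a path holds, newest first, a 5-second-old value with locks, a 90-second-old valid value, and a 120-second-old valid value, only the two older values are past the golden cutoff, so firstGoldenIndex is 1. Everything after the newest golden valid value (the 90-second-old one) is popped, leaving two values: the live value is kept because it could still be rejected, and the 90-second-old value is kept as the fallback it would be rejected to.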
977
+
978
+ private removePathFromStorage(path: string, reason: string) {
979
+ let values = this.values.get(path);
980
+ if (values?.length) {
981
+ this.lastDeleteAt.set(path, values[0].time);
982
+ }
983
+ this.values.delete(path);
984
+
985
+ if (isDebugLogEnabled()) {
986
+ debugLog("REMOVE PATH", { path, reason });
987
+ }
988
+
989
+ // NOTE: parentPathLookup is important because clients often use Object.keys() to get paths,
990
+ // and then watch all of them (which incurs constant permissions checks, etc). While isSyncedCache
991
+ // and parentsSynced are just in memory and are never iterated, so extra values there are far less important...
992
+ let parentPath = getParentPathStr(path);
993
+ let childPaths = this.parentPathLookup.get(parentPath);
994
+ if (childPaths) {
995
+ childPaths.delete(path);
996
+ if (childPaths.size === 0) {
997
+ this.parentPathLookup.delete(parentPath);
998
+ }
999
+ }
1000
+ }
1001
+
1002
+ private possiblyUndefinedPaths = lazy((): { value: Set<string> } => {
1003
+ let pathsToCheckPointer = { value: new Set<string>() };
1004
+
1005
+ runInfinitePoll(UNDEFINED_MEMORY_CLEANUP_DELAY, () => {
1006
+ let now = Date.now();
1007
+ let deadTime = now - UNDEFINED_MEMORY_CLEANUP_DELAY;
1008
+ let pathsToCheck = pathsToCheckPointer.value;
1009
+ let nextPathsToCheck = new Set<string>();
1010
+ pathsToCheckPointer.value = nextPathsToCheck;
1011
+ for (let path of pathsToCheck) {
1012
+ let values = this.values.get(path);
1013
+ if (!values?.length) continue;
1014
+ let currentValue = values[0];
1015
+ if (!currentValue.canGCValue) continue;
1016
+
1017
+ // NOTE: We can delete invalid values just the same as valid values, so we don't
1018
+ // even check the valid state
1019
+
1020
+ if (currentValue.time.time >= deadTime) {
1021
+ // Give it some time, it will probably stay deleted, but will be old enough
1022
+ // to delete the next time we check (or have a value, and then we will stop checking it).
1023
+ nextPathsToCheck.add(path);
1024
+ continue;
1025
+ }
1026
+
1027
+ // NOTE: We have to set isSynced otherwise our render functions won't think they can render
1028
+ // (also proxies will fail to forward epoch values at all).
1029
+ if (pathWatcher.isWatching(path)) {
1030
+ this.isSyncedCache.add(path);
1031
+ }
1032
+
1033
+ // Deleting the value is equivalent to keeping it, so... just delete it to save space
1034
+ // - We don't need to worry about new locks on the value, as any new locks will be assumed
1035
+ // valid, due to the fact that if a value is old enough, it is either valid,
1036
+ // or it is old enough that the server will know it is invalid, and not send it.
1037
+ this.removePathFromStorage(path, "undefined and golden from loop");
1038
+ }
1039
+ });
1040
+ return pathsToCheckPointer;
1041
+ });
1042
+
1043
+ private eventPaths = lazy((): { value: Set<string> } => {
1044
+ let pathsToCheckPointer = { value: new Set<string>() };
1045
+
1046
+ let garbageCollectTime = MAX_CHANGE_AGE * 2;
1047
+ runInfinitePoll(garbageCollectTime, () => {
1048
+ let deadTime = Date.now() - garbageCollectTime;
1049
+ let pathsToCheck = pathsToCheckPointer.value;
1050
+ let nextPathsToCheck = new Set<string>();
1051
+ pathsToCheckPointer.value = nextPathsToCheck;
1052
+ let pathsToClear = new Set<string>();
1053
+ for (let path of pathsToCheck) {
1054
+ let values = this.values.get(path);
1055
+ if (!values) {
1056
+ pathsToClear.add(path);
1057
+ continue;
1058
+ }
1059
+ let value = values[values.length - 1];
1060
+ if (!value) {
1061
+ pathsToClear.add(path);
1062
+ continue;
1063
+ }
1064
+ if (value.time.time < deadTime) {
1065
+ pathsToClear.add(path);
1066
+ } else {
1067
+ // Give it more time
1068
+ nextPathsToCheck.add(path);
1069
+ }
1070
+ }
1071
+
1072
+ if (pathsToClear.size > 0) {
1073
+ for (let path of pathsToClear) {
1074
+ this.markPathAsUnwatched(path);
1075
+ // I'm not sure if removing it as a parent is needed, or... maybe it is needed,
1076
+ // and this isn't enough of a check?
1077
+ this.markParentPathAsUnwatched(path);
1078
+ // And... remove from storage, in case we are the authority and wouldn't normally remove it.
1079
+ this.removePathFromStorage(path, "event path");
1080
+ }
1081
+ pathWatcher.unwatchEventPaths(pathsToClear);
1082
+ for (let callback of this.eventRemovalsCallbacks) {
1083
+ callback(pathsToClear);
1084
+ }
1085
+ }
1086
+ });
1087
+ return pathsToCheckPointer;
1088
+ });
1089
+
1090
+ private eventRemovalsCallbacks = new Set<(removedPaths: Set<string>) => void>();
1091
+ public watchEventRemovals(callback: (removedPaths: Set<string>) => void) {
1092
+ this.eventRemovalsCallbacks.add(callback);
1093
+ }
1094
+
1095
+ /** All overrides are assumed to be valid. */
1096
+ public temporaryOverride<T>(values: PathValue[] | undefined, code: () => T): T {
1097
+ if (!values) return code();
1098
+ let overrideMap = new Map(values.map(x => [x.path, x]));
1099
+ this.overrides.add(overrideMap);
1100
+ try {
1101
+ return code();
1102
+ } finally {
1103
+ this.overrides.delete(overrideMap);
1104
+ }
1105
+ }
1106
+
1107
+ /** For value auditing. Might change to expose less, so don't use this for anything except auditing. */
1108
+ public __auditValues() {
1109
+ return this.values;
1110
+ }
1111
+ }
1112
+ export const authorityStorage = new AuthorityPathValueStorage();
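
A minimal, self-contained sketch of the override mechanics temporaryOverride uses: reads consult a stack of override maps before falling back to the base storage, and the override only exists while the callback runs. The SketchValue type and OverrideSketch class below are illustrative stand-ins, not part of this package:

type SketchValue = { path: string; value: unknown };

class OverrideSketch {
    private base = new Map<string, SketchValue>();
    private overrides = new Set<Map<string, SketchValue>>();

    // Reads check the override maps first, then fall back to base storage.
    public read(path: string): SketchValue | undefined {
        for (let overrideMap of this.overrides) {
            let hit = overrideMap.get(path);
            if (hit) return hit;
        }
        return this.base.get(path);
    }

    // Same shape as temporaryOverride: the override is only visible while code() runs.
    public temporaryOverride<T>(values: SketchValue[] | undefined, code: () => T): T {
        if (!values) return code();
        let overrideMap = new Map<string, SketchValue>();
        for (let value of values) overrideMap.set(value.path, value);
        this.overrides.add(overrideMap);
        try {
            return code();
        } finally {
            this.overrides.delete(overrideMap);
        }
    }
}

// Usage: the overridden value is visible only inside the callback.
let overrideSketch = new OverrideSketch();
let seen = overrideSketch.temporaryOverride([{ path: "a.b", value: 1 }], () => overrideSketch.read("a.b"));
// seen?.value === 1, while overrideSketch.read("a.b") afterwards is undefined again.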
1113
+
1114
+
1115
+ // If PathValue[], it is a local watch
1116
+ export type WriteCallback = NodeId | PathValue[];
1117
+
1118
+ // isOwnNodeId(nodeId) means localOnValueCallback is called (instead of PathValueController.forwardWrites)
1119
+ export type PathWatcherCallback = NodeId;
1120
+ class PathWatcher {
1121
+ private watchers = registerResource("paths|PathWatcher.watchers", new Map<string, Set<PathWatcherCallback>>());
1122
+ // realPath => packedPath => { watchers, start, end }
1123
+ private parentWatchers = registerResource("paths|parentWatchers", new Map<string,
1124
+ // packedPath
1125
+ Map<string, {
1126
+ watchers: Set<PathWatcherCallback>;
1127
+ start: number;
1128
+ end: number;
1129
+ }>
1130
+ >());
1131
+ private watchersToPaths = registerResource("paths|watchersToPaths", new Map<PathWatcherCallback, {
1132
+ paths: Set<string>;
1133
+ parents: Set<string>;
1134
+ }>());
1135
+
1136
+ /** Automatically unwatches on callback disconnect */
1137
+ @measureFnc
1138
+ public watchPath(config: WatchConfig & {
1139
+ callback: PathWatcherCallback;
1140
+ debugName?: string;
1141
+ noInitialTrigger?: boolean;
1142
+ /** Force trigger, even if we are already watching the values. */
1143
+ initialTrigger?: boolean;
1144
+ }) {
1145
+ if (config.callback) {
1146
+ this.ensureUnwatchingOnDisconnect(config.callback);
1147
+ }
1148
+
1149
+ let newPathsWatched = new Set<string>();
1150
+ let newParentsWatched = new Set<string>();
1151
+
1152
+ for (let path of config.paths) {
1153
+ let watchers = this.watchers.get(path);
1154
+ if (!watchers) {
1155
+ watchers = new Set();
1156
+ this.watchers.set(path, watchers);
1157
+ }
1158
+ if (watchers.has(config.callback)) continue;
1159
+ newPathsWatched.add(path);
1160
+ watchers.add(config.callback);
1161
+
1162
+ let watchObj = this.watchersToPaths.get(config.callback);
1163
+ if (!watchObj) {
1164
+ watchObj = { paths: new Set(), parents: new Set() };
1165
+ this.watchersToPaths.set(config.callback, watchObj);
1166
+ }
1167
+ watchObj.paths.add(path);
1168
+ }
1169
+ for (let path of config.parentPaths) {
1170
+ let basePath = hack_stripPackedPath(path);
1171
+ let watchersObj = this.parentWatchers.get(basePath);
1172
+ if (!watchersObj) {
1173
+ watchersObj = new Map();
1174
+ this.parentWatchers.set(basePath, watchersObj);
1175
+ }
1176
+ let obj = watchersObj.get(path);
1177
+ if (!obj) {
1178
+ let range = decodeParentFilter(path) || { start: 0, end: 1 };
1179
+ obj = { watchers: new Set(), start: range.start, end: range.end };
1180
+ watchersObj.set(path, obj);
1181
+ }
1182
+ if (obj.watchers.has(config.callback)) continue;
1183
+ obj.watchers.add(config.callback);
1184
+
1185
+ newParentsWatched.add(path);
1186
+
1187
+ let watchObj = this.watchersToPaths.get(config.callback);
1188
+ if (!watchObj) {
1189
+ watchObj = { paths: new Set(), parents: new Set() };
1190
+ this.watchersToPaths.set(config.callback, watchObj);
1191
+ }
1192
+ watchObj.parents.add(path);
1193
+ }
1194
+
1195
+ if (newPathsWatched.size > 0 || newParentsWatched.size > 0) {
1196
+ diskLog(`New PathValue watches`, {
1197
+ newPathsWatched: newPathsWatched.size,
1198
+ newParentsWatched: newParentsWatched.size,
1199
+ });
1200
+ }
1201
+
1202
+ // Treat everything as a new watch
1203
+ if (config.initialTrigger) {
1204
+ newPathsWatched = new Set(config.paths);
1205
+ newParentsWatched = new Set(config.parentPaths);
1206
+ }
1207
+
1208
+ if (!config.noInitialTrigger) {
1209
+ // Trigger all initial values (for paths we have synced)
1210
+ let initialValues = new Set<PathValue>();
1211
+ let newPaths = new Set(Array.from(newPathsWatched).filter(x => authorityStorage.isSynced(x)));
1212
+ for (let path of newParentsWatched) {
1213
+ // If we haven't synced the parent, we can't forward it. This is important for proxies
1214
+ // (when they receive the parent synced they will forward the values).
1215
+ if (!authorityStorage.isParentSynced(path)) continue;
1216
+ let paths = authorityStorage.getPathsFromParent(path);
1217
+ if (paths) {
1218
+ for (let path of paths) {
1219
+ newPaths.add(path);
1220
+ }
1221
+ }
1222
+ }
1223
+ if (newPaths.size > 0 || newParentsWatched.size > 0) {
1224
+ for (let path of newPaths) {
1225
+ let history = authorityStorage.getValuePlusHistory(path);
1226
+ for (let value of history) {
1227
+ initialValues.add(value);
1228
+ }
1229
+ if (history.length === 0) {
1230
+ // We always have to send something, or the client won't know when they are ready to show the data!
1231
+ initialValues.add({ path, value: undefined, canGCValue: true, valid: true, time: epochTime, locks: [], lockCount: 0, event: false });
1232
+ }
1233
+ }
1234
+ this.triggerLatestWatcher(
1235
+ config.callback,
1236
+ Array.from(initialValues),
1237
+ config.parentPaths,
1238
+ config.initialTrigger ? "initialTrigger" : undefined,
1239
+ );
1240
+ }
1241
+ }
1242
+ }
1243
+ // NOTE: Automatically unwatches from remoteWatcher on all paths that are fully unwatched
1244
+ @measureFnc
1245
+ public unwatchPath(config: WatchConfig & { callback: PathWatcherCallback; }) {
1246
+ const { callback } = config;
1247
+
1248
+ let fullyUnwatched: WatchConfig = {
1249
+ paths: [],
1250
+ parentPaths: [],
1251
+ };
1252
+
1253
+ let pathsWatched = this.watchersToPaths.get(callback);
1254
+
1255
+ for (let path of config.paths) {
1256
+ if (pathsWatched) {
1257
+ pathsWatched.paths.delete(path);
1258
+ }
1259
+
1260
+ let watchers = this.watchers.get(path);
1261
+ if (!watchers) continue;
1262
+ watchers.delete(callback);
1263
+ if (watchers.size === 0) {
1264
+ this.watchers.delete(path);
1265
+ fullyUnwatched.paths.push(path);
1266
+ authorityStorage.markPathAsUnwatched(path);
1267
+ }
1268
+ }
1269
+ for (let path of config.parentPaths) {
1270
+ if (pathsWatched) {
1271
+ pathsWatched.parents.delete(path);
1272
+ }
1273
+
1274
+ let unpackedPath = hack_stripPackedPath(path);
1275
+
1276
+ let watchersObj = this.parentWatchers.get(unpackedPath);
1277
+ if (!watchersObj) continue;
1278
+ let obj = watchersObj.get(path);
1279
+ if (!obj) continue;
1280
+ obj.watchers.delete(callback);
1281
+
1282
+ if (obj.watchers.size === 0) {
1283
+ watchersObj.delete(path);
1284
+ this.parentWatchers.delete(path);
1285
+ fullyUnwatched.parentPaths.push(path);
1286
+ authorityStorage.markParentPathAsUnwatched(path);
1287
+
1288
+ if (watchersObj.size === 0) {
1289
+ this.parentWatchers.delete(unpackedPath);
1290
+ }
1291
+ }
1292
+ }
1293
+
1294
+ if (fullyUnwatched.paths.length > 0 || fullyUnwatched.parentPaths.length > 0) {
1295
+ diskLog(`Unwatched PathValue watches`, {
1296
+ unwatchedPaths: fullyUnwatched.paths.length,
1297
+ unwatchedParents: fullyUnwatched.parentPaths.length,
1298
+ });
1299
+ for (let unwatchCallback of this.unwatchedCallbacks) {
1300
+ unwatchCallback(fullyUnwatched);
1301
+ }
1302
+ }
1303
+ }
1304
+
1305
+ public unwatchEventPaths(paths: Set<string>) {
1306
+ for (let path of paths) {
1307
+ let watchers = this.watchers.get(path);
1308
+ if (watchers) {
1309
+ this.watchers.delete(path);
1310
+ for (let watcher of watchers) {
1311
+ let watchObj = this.watchersToPaths.get(watcher);
1312
+ if (watchObj) {
1313
+ watchObj.paths.delete(path);
1314
+ }
1315
+ }
1316
+ }
1317
+ let parentWatchers = this.parentWatchers.get(path);
1318
+ if (parentWatchers) {
1319
+ this.parentWatchers.delete(path);
1320
+ for (let { watchers } of parentWatchers.values()) {
1321
+ for (let watcher of watchers) {
1322
+ let watchObj = this.watchersToPaths.get(watcher);
1323
+ if (watchObj) {
1324
+ watchObj.parents.delete(path);
1325
+ }
1326
+ }
1327
+ }
1328
+ }
1329
+ }
1330
+ }
1331
+
1332
+ public triggerValuesChanged(valuesChanged: Set<PathValue>, parentsSynced?: string[], initialTriggers?: { values: Set<PathValue>; parentPaths: Set<string> }) {
1333
+ let changedPerCallbacks = this.getWatchers(valuesChanged, parentsSynced);
1334
+ for (let [watch, changes] of changedPerCallbacks) {
1335
+ if (initialTriggers) {
1336
+ let valuesFromInitialTrigger = new Set<PathValue>();
1337
+ let parentsFromInitialTrigger = new Set<string>();
1338
+ let valuesOther = new Set<PathValue>();
1339
+ let parentsOther = new Set<string>();
1340
+ for (let value of changes) {
1341
+ if (initialTriggers.values.has(value)) {
1342
+ valuesFromInitialTrigger.add(value);
1343
+ } else {
1344
+ valuesOther.add(value);
1345
+ }
1346
+ }
1347
+ for (let parent of initialTriggers.parentPaths) {
1348
+ parentsFromInitialTrigger.add(parent);
1349
+ }
1350
+ if (parentsSynced) {
1351
+ for (let parent of parentsSynced) {
1352
+ if (!initialTriggers.parentPaths.has(parent)) {
1353
+ parentsOther.add(parent);
1354
+ }
1355
+ }
1356
+ }
1357
+
1358
+ if (valuesFromInitialTrigger.size > 0 || parentsFromInitialTrigger.size > 0) {
1359
+ this.triggerLatestWatcher(watch, Array.from(valuesFromInitialTrigger), Array.from(parentsFromInitialTrigger), "initialTrigger");
1360
+ }
1361
+ if (valuesOther.size > 0 || parentsOther.size > 0) {
1362
+ this.triggerLatestWatcher(watch, Array.from(valuesOther), Array.from(parentsOther));
1363
+ }
1364
+ } else {
1365
+ this.triggerLatestWatcher(watch, Array.from(changes), parentsSynced);
1366
+ }
1367
+ }
1368
+ }
1369
+ public getWatchers<T extends { path: string }>(valuesChanged: Set<T>, parentsSynced?: string[]) {
1370
+ let changedPerCallbacks: Map<PathWatcherCallback, Set<T>> = new Map();
1371
+ for (let value of valuesChanged) {
1372
+ let path = value.path;
1373
+ let latestWatches = this.watchers.get(path);
1374
+
1375
+ function triggerNodeChanged(watcher: NodeId) {
1376
+ let changes = changedPerCallbacks.get(watcher);
1377
+ if (!changes) {
1378
+ changes = new Set();
1379
+ changedPerCallbacks.set(watcher, changes);
1380
+ }
1381
+ changes.add(value);
1382
+ }
1383
+
1384
+ if (latestWatches) {
1385
+ for (let watch of latestWatches) {
1386
+ triggerNodeChanged(watch);
1387
+ }
1388
+ }
1389
+
1390
+ let parentPath = getParentPathStr(path);
1391
+ let latestParentWatches = this.parentWatchers.get(parentPath);
1392
+ if (latestParentWatches) {
1393
+ let pathRoutingHash: number | undefined = undefined;
1394
+ for (let { start, end, watchers } of latestParentWatches.values()) {
1395
+ if (!matchesParentRangeFilter({
1396
+ parentPath,
1397
+ fullPath: path,
1398
+ start,
1399
+ end,
1400
+ })) {
1401
+ continue;
1402
+ }
1403
+ for (let watch of watchers) {
1404
+ triggerNodeChanged(watch);
1405
+ }
1406
+ }
1407
+ }
1408
+ }
1409
+ for (let parentPath of parentsSynced ?? []) {
1410
+ let latestParentWatches = this.parentWatchers.get(parentPath);
1411
+ if (latestParentWatches) {
1412
+ for (let { watchers } of latestParentWatches.values()) {
1413
+ for (let watcher of watchers) {
1414
+ let changes = changedPerCallbacks.get(watcher);
1415
+ if (!changes) {
1416
+ changes = new Set();
1417
+ changedPerCallbacks.set(watcher, changes);
1418
+ }
1419
+ }
1420
+ }
1421
+ }
1422
+ }
1423
+ return changedPerCallbacks;
1424
+ }
1425
+
1426
+ public isWatching(path: string): boolean {
1427
+ return this.watchers.has(path) || this.parentWatchers.has(getParentPathStr(path));
1428
+ }
1429
+
1430
+ @measureFnc
1431
+ private triggerLatestWatcher(
1432
+ watcher: PathWatcherCallback,
1433
+ changes: PathValue[],
1434
+ parentPaths?: string[],
1435
+ initialTrigger?: "initialTrigger"
1436
+ ) {
1437
+ if (isOwnNodeId(watcher)) {
1438
+ for (let callback of this.localTriggerCallbacks) {
1439
+ callback(changes, parentPaths ?? []);
1440
+ }
1441
+ } else {
1442
+ if (!isCoreQuiet) {
1443
+ console.log(`(${Date.now()}) Sending values to client: ${changes.length} (${watcher})`);
1444
+ }
1445
+ this.ensureUnwatchingOnDisconnect(watcher);
1446
+ ignoreErrors((async () => {
1447
+ let allowSource = await isNodeTrusted(watcher) || getNodeIdIP(watcher) === "127.0.0.1";
1448
+ let buffers = await pathValueSerializer.serialize(changes, {
1449
+ noLocks: true,
1450
+ compress: getCompressNetwork(),
1451
+ stripSource: !allowSource,
1452
+ });
1453
+
1454
+ if (isDebugLogEnabled()) {
1455
+ for (let pathValue of changes) {
1456
+
1457
+ debugLog("SEND VALUE", { path: pathValue.path, time: pathValue.time.time, nodeId: debugNodeId(watcher), transparent: pathValue.isTransparent, canGC: pathValue.canGCValue });
1458
+ }
1459
+ }
1460
+ await PathValueController.nodes[watcher].forwardWrites(
1461
+ buffers,
1462
+ parentPaths,
1463
+ undefined,
1464
+ initialTrigger,
1465
+ );
1466
+ })());
1467
+ }
1468
+ }
1469
+
1470
+ private ensureUnwatchingOnDisconnect = cache((nodeId: string) => {
1471
+ if (isOwnNodeId(nodeId)) return;
1472
+ SocketFunction.onNextDisconnect(nodeId, async () => {
1473
+ this.ensureUnwatchingOnDisconnect.clear(nodeId);
1474
+
1475
+ // Wait a while, so Querysub doesn't thrash when clients connect and disconnect
1476
+ // TODO: We should be smarter about our wait time, waiting less if we have a lot of resource
1477
+ // pressure, and more (potentially forever), if we don't have much resource pressure
1478
+ // (memory, cpu, and network).
1479
+ setTimeout(() => {
1480
+ let watches = this.watchersToPaths.get(nodeId);
1481
+ this.watchersToPaths.delete(nodeId);
1482
+ if (watches) {
1483
+ this.unwatchPath({ paths: Array.from(watches.paths), parentPaths: Array.from(watches.parents), callback: nodeId });
1484
+ }
1485
+ }, MAX_CHANGE_AGE);
1486
+ });
1487
+ });
1488
+
1489
+ private localTriggerCallbacks = new Set<(changes: PathValue[], parentPaths: string[]) => void>();
1490
+ public watchAllLocalTriggers(callback: (changes: PathValue[], parentPaths: string[]) => void) {
1491
+ this.localTriggerCallbacks.add(callback);
1492
+ }
1493
+
1494
+ public unwatchedCallbacks = new Set<(config: WatchConfig) => void>();
1495
+ public watchUnwatched(callback: (config: WatchConfig) => void) {
1496
+ this.unwatchedCallbacks.add(callback);
1497
+ }
1498
+
1499
+ public getAllParentWatches() {
1500
+ return Array.from(this.parentWatchers.keys());
1501
+ }
1502
+ }
1503
+ export const pathWatcher = new PathWatcher();
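
A self-contained sketch of the fan-out PathWatcher performs: exact-path watchers fire for their own path, and parent watchers fire for any direct child of the watched parent. The types, the dot-separated path format, and getParentPath below are simplifications of getParentPathStr and the real watcher maps (which track NodeId callbacks and range filters):

type SketchWatcher = (path: string) => void;

function getParentPath(path: string): string {
    let parts = path.split(".");
    parts.pop();
    return parts.join(".");
}

class PathWatcherSketch {
    private watchers = new Map<string, Set<SketchWatcher>>();
    private parentWatchers = new Map<string, Set<SketchWatcher>>();

    public watchPath(path: string, watcher: SketchWatcher) {
        let set = this.watchers.get(path) ?? new Set<SketchWatcher>();
        this.watchers.set(path, set);
        set.add(watcher);
    }
    public watchParentPath(parentPath: string, watcher: SketchWatcher) {
        let set = this.parentWatchers.get(parentPath) ?? new Set<SketchWatcher>();
        this.parentWatchers.set(parentPath, set);
        set.add(watcher);
    }
    // Mirrors getWatchers: a change triggers the path's own watchers and its parent's watchers.
    public trigger(path: string) {
        for (let watcher of this.watchers.get(path) ?? []) watcher(path);
        for (let watcher of this.parentWatchers.get(getParentPath(path)) ?? []) watcher(path);
    }
}

// Usage: a parent watch on "users" sees a write to "users.alice" without watching it directly.
let watcherSketch = new PathWatcherSketch();
watcherSketch.watchParentPath("users", path => console.log(`child changed: ${path}`));
watcherSketch.trigger("users.alice");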
1504
+
1505
+
1506
+ class WriteValidStorage {
1507
+ // We could have a more efficient storage for non-range, but... we need range storage anyways,
1508
+ // so... this is fine.
1509
+ // (Not sorted)
1510
+ // - May have duplicates for the same path (for lockless writes), always with the same valid state.
1511
+ // path => WriteState[]
1512
+ // WriteState is UNSORTED (it is only kept around for MAX_CHANGE_AGE, so this should be the fastest way to do it)
1513
+ private validStorage = registerMapArrayResource("validStorage", new Map<string, WriteState[]>());
1514
+
1515
+ private delayedInvalidate = ((lock: ReadLock, targetTime: number, valueGroupForPredictionHack: PathValue[]) => {
1516
+ const tryNow = () => {
1517
+ if (Date.now() < targetTime) {
1518
+ setTimeout(tryNow, targetTime - Date.now() + 50);
1519
+ return;
1520
+ }
1521
+ pathValueCommitter.ingestValidStates(valueGroupForPredictionHack.map(x => ({
1522
+ path: x.path,
1523
+ isValid: true,
1524
+ time: x.time,
1525
+ isTransparent: x.isTransparent || false,
1526
+ })), undefined, "recomputeValidState");
1527
+ };
1528
+ tryNow();
1529
+ });
1530
+
1531
+ public getWriteState(readLock: ReadLock, now: number): WriteState {
1532
+ let isValid = this.isLockValid(readLock, now);
1533
+ return { path: readLock.path, isValid, time: readLock.startTime, isTransparent: readLock.readIsTranparent };
1534
+ }
1535
+ public isLockValid(readLock: ReadLock, now: number, valueGroupForPredictionHack?: PathValue[]): boolean {
1536
+ let isValid = this.isLockValidBase(readLock, now);
1537
+ if (!isValid && readLock.keepRejectedUntil && now < readLock.keepRejectedUntil && valueGroupForPredictionHack) {
1538
+ isValid = true;
1539
+ // TODO: Test this code. We haven't run it, as it is a bit annoying to set up (we need to predict
1540
+ // writes to a new location). It will be pretty obvious if it is failing, but... I don't think it will?
1541
+ console.log(yellow(`Delaying rejection of ${readLock.path} to try to fix out of order prediction invalidation`));
1542
+ // Trigger a recheck when the keepRejectedUntil time is reached
1543
+ this.delayedInvalidate(readLock, readLock.keepRejectedUntil, valueGroupForPredictionHack);
1544
+ }
1545
+ return isValid;
1546
+ }
1547
+ public isLockValidBase(readLock: ReadLock, now: number): boolean {
1548
+ // If it is old enough it must be valid, otherwise how would a client NOT know it was invalid
1549
+ // (assuming clients clear their reads after being disconnected for long enough).
1550
+ if (readLock.startTime.time < now - MAX_CHANGE_AGE) {
1551
+ return true;
1552
+ }
1553
+
1554
+ let values = this.validStorage.get(readLock.path);
1555
+ let time = readLock.startTime;
1556
+ if (readLock.readIsTranparent) {
1557
+ // EDIT: Actually... isn't this done so that rejections can cascade from other nodes, with our node
1558
+ // not even evaluating the actual rejection reason, but just directly using the reject value?
1559
+ // // EDIT: I *think* that the range check should be sufficient for undefined read locks.
1560
+ // // If that is the case, remove isUndefinedAtLock entirely.
1561
+ // // - The reason the range check is good enough is that it is checking for values. Actually,
1562
+ // // if we always checked for values the range check would always be sufficient. HOWEVER,
1563
+ // // the range + valid check is an optimization, so locks don't need to store their values.
1564
+ // // But in this case, with the weird stuff we are doing with predictions... and the fact that
1565
+ // // we already store the value... we simply don't need the check.
1566
+ // return true;
1567
+ return this.isUndefinedAtLock(readLock);
1568
+ }
1569
+ if (!values) {
1570
+ // We have no record of any values on this path, so it must have never been committed!
1571
+ return false;
1572
+ }
1573
+ let value = values.find(x => compareTime(x.time, time) === 0);
1574
+ if (!value) {
1575
+ // If it is golden, it was probably just removed, so assume it was accepted
1576
+ // (nodes only ask for valid states of values they created, or that were
1577
+ // just created, so this is fine)
1578
+ if (time.time < now - MAX_CHANGE_AGE) {
1579
+ return true;
1580
+ }
1581
+ return false;
1582
+ }
1583
+ return value.isValid;
1584
+ }
1585
+ private isUndefinedAtLock(lock: ReadLock): boolean {
1586
+ let values = this.validStorage.get(lock.path);
1587
+ let time = lock.startTime;
1588
+ if (values) {
1589
+ // Find the write at read time
1590
+ let latestValue: WriteState | undefined;
1591
+ for (let value of values) {
1592
+ // Skip invalid
1593
+ if (!value.isValid) continue;
1594
+ // Skip writes AFTER our read (but not ON our read, as then we skip our read itself!)
1595
+ if (compareTime(value.time, time) > 0) continue;
1596
+ // Take the newest write that would affect us
1597
+ if (!latestValue || compareTime(value.time, latestValue.time) > 0) {
1598
+ latestValue = value;
1599
+ }
1600
+ }
1601
+ if (latestValue && !latestValue.isTransparent) {
1602
+ return false;
1603
+ }
1604
+ }
1605
+ return true;
1606
+ }
1607
+
1608
+ private getWritesInRange(lock: ReadLock): WriteState[] | undefined {
1609
+ let values = this.validStorage.get(lock.path);
1610
+ if (!values) return undefined;
1611
+ let times: WriteState[] | undefined;
1612
+ for (let value of values) {
1613
+ if (!value.isValid) continue;
1614
+ // NOTE: This is the same comparison as in getValidStateChangedTriggers
1615
+ if (compareTime(lock.startTime, value.time) < 0 && compareTime(value.time, lock.endTime) < 0) {
1616
+ if (!times) times = [];
1617
+ times.push(value);
1618
+ }
1619
+ }
1620
+ return times;
1621
+ }
1622
+
1623
+ // Returns true if the valid state changed
1624
+ public setWriteValidStateValue(pathValue: PathValue): boolean {
1625
+ this.ensureGarbageCollectOldState();
1626
+
1627
+ let time = pathValue.time;
1628
+ // Clone time, so we aren't referencing any of the original object. This is very important for garbage collection.
1629
+ time = { time: time.time, version: time.version, creatorId: time.creatorId, };
1630
+ return this.setWriteValidState({
1631
+ path: pathValue.path,
1632
+ isValid: pathValue.valid || false,
1633
+ time: time,
1634
+ isTransparent: pathValue.isTransparent || false,
1635
+ });
1636
+ }
1637
+ public setWriteValidState(write: WriteState, lockless?: boolean): boolean {
1638
+ this.ensureGarbageCollectOldState();
1639
+
1640
+ // If lockless then it starts valid, and always stays valid.
1641
+ if (!lockless) {
1642
+ let authorityValue = authorityStorage.getValueExactIgnoreInvalid(write.path, write.time);
1643
+ if (authorityValue) {
1644
+ authorityValue.valid = write.isValid;
1645
+ if (!isCoreQuiet) {
1646
+ console.log(`Setting valid state of ${debugPathValuePath(authorityValue)} to ${write.isValid}`);
1647
+ }
1648
+ } else {
1649
+ if (isNode()) {
1650
+ console.error(`Setting valid state of ${write.path}@${debugTime(write.time)} to ${write.isValid}, but the ValuePath was not found. If the ValuePath is found later, it might not have the valid state set correctly.`);
1651
+ }
1652
+ }
1653
+ }
1654
+
1655
+ let path = write.path;
1656
+ let time = write.time;
1657
+ let isValid = write.isValid;
1658
+ let values = this.validStorage.get(path);
1659
+ if (!values) {
1660
+ values = [];
1661
+ this.validStorage.set(path, values);
1662
+ }
1663
+ // NOTE: We always have to search, even if lockless, so we can return false when the isValid state hasn't changed.
1664
+ // Search from the end, as it is most likely time will have been recently added
1665
+ let index = values.length - 1;
1666
+ while (index >= 0) {
1667
+ let diff = compareTime(values[index].time, time);
1668
+ if (diff === 0) {
1669
+ break;
1670
+ }
1671
+ index--;
1672
+ }
1673
+ if (index >= 0) {
1674
+ if (values[index].isValid === isValid) {
1675
+ return false;
1676
+ }
1677
+ values[index].isValid = isValid;
1678
+ } else {
1679
+ values.push(write);
1680
+ }
1681
+ return true;
1682
+ }
1683
+
1684
+
1685
+ /** SOMEWHAT of a hack. Updates the input values' valid states from cache. Needed, as it is possible for valid states
1686
+ to be processed before PathValues, which would cause the PathValues to have outdated valid states when received.
1687
+ Valid states are always correct, and always updated (if they are sent at all), so we will use the latest valid
1688
+ state instead of latest write state.
1689
+ */
1690
+ @measureFnc
1691
+ public updateValidStatesFromCache(values: PathValue[]) {
1692
+ for (let value of values) {
1693
+ let states = this.validStorage.get(value.path);
1694
+ if (!states) continue;
1695
+ let state = states.find(x => compareTime(x.time, value.time) === 0);
1696
+ if (!state) continue;
1697
+ value.valid = state.isValid;
1698
+ }
1699
+ }
1700
+
1701
+ // NOTE: Mutates the input PathValue.valid states to be the new states
1702
+ // NOTE: Only mutates paths we are an authority on
1703
+ // NOTE: Also sets up watches, so the valid states will update when their readLocks change
1704
+ // (including talking to remote watchers, etc, etc)
1705
+ @measureFnc
1706
+ public computeValidStates(values: PathValue[], now: number, alreadyWatching?: "alreadyWatching"): WriteState[] {
1707
+ // Calculate valid states
1708
+ // - For things we are not an authority on, assumes their state is true.
1709
+ // - If we AREN'T an authority, whomever sent us the values will send us new
1710
+ // values if these become invalid.
1711
+ // - Also, if we created the values, we will get sent valid states if
1712
+ // they become invalid.
1713
+ // - We don't need to watch locks for pathValues, as any valid states changes
1714
+ // automatically impact any values that use them (we only need to watch locks
1715
+ // for values we are computing).
1716
+ // - For everything we ARE an authority on, computes them
1717
+ // - Also sets up watches for computed values, so the valid states will stay up to date).
1718
+
1719
+ let changes: WriteState[] = [];
1720
+
1721
+ // If alreadyWatching... then these are triggers caused by a local lock watch. In which case,
1722
+ // we just want to know whether the values changed (as in, for call prediction).
1723
+ if (!alreadyWatching) {
1724
+ measureBlock(function computeValidStates_getSelfValues() {
1725
+ let selfValues: PathValue[] = [];
1726
+ for (let value of values) {
1727
+ if (pathValueAuthority2.isSelfAuthority(value.path)) {
1728
+ selfValues.push(value);
1729
+ } else {
1730
+ let changed = writeValidStorage.setWriteValidStateValue(value);
1731
+ if (changed) {
1732
+ changes.push({ path: value.path, isValid: value.valid || false, time: value.time, isTransparent: value.isTransparent || false });
1733
+ }
1734
+ }
1735
+ }
1736
+ values = selfValues;
1737
+ });
1738
+ }
1739
+
1740
+ if (values.length === 0) return changes;
1741
+
1742
+ // Watch the value locks, for all SELF VALUES
1743
+ // (watches remote locks if necessary)
1744
+ if (!alreadyWatching) {
1745
+ lockWatcher.watchValueLocks(values);
1746
+ }
1747
+ let lockGroups = new Map<ReadLock[], PathValue[]>();
1748
+ for (let value of values) {
1749
+ if (value.lockCount === 0) {
1750
+ validateLockCount(value);
1751
+ let change: WriteState = {
1752
+ path: value.path,
1753
+ isValid: true,
1754
+ time: value.time,
1755
+ isTransparent: value.isTransparent || false
1756
+ };
1757
+ let changed = writeValidStorage.setWriteValidState(change, true);
1758
+ if (changed) {
1759
+ changes.push(change);
1760
+ }
1761
+ continue;
1762
+ }
1763
+ let lockGroup = lockGroups.get(value.locks);
1764
+ if (!lockGroup) {
1765
+ lockGroup = [];
1766
+ lockGroups.set(value.locks, lockGroup);
1767
+ }
1768
+ lockGroup.push(value);
1769
+ }
1770
+ for (let [locks, valueGroup] of lockGroups) {
1771
+ if (valueGroup.length === 0) continue;
1772
+
1773
+ let rejected = false;
1774
+ for (let lock of locks) {
1775
+ if (!this.isLockValid(lock, now)) {
1776
+ rejected = true;
1777
+
1778
+ // This is good... unless it happens A LOT. Then the app needs to change (unless it
1779
+ // is a game, or something with realtime competition, in which case this is probably unavoidable).
1780
+ if ((!isCoreQuiet || !isNode()) && debugRejections || valueGroup.length > 0 && lock.endTime.version !== Number.MAX_SAFE_INTEGER && lock.startTime.version !== -2) {
1781
+ if (!isNode()) {
1782
+ debugger;
1783
+ }
1784
+ let timeToReject = now - valueGroup[0].time.time;
1785
+ console.log(red(`!!! VALUE REJECTED DUE TO USING MISSING / REJECTED READ!!!(rejected after ${timeToReject}ms at ${Date.now()})`));
1786
+ console.log(red(`${debugTime(valueGroup[0].time)} (write)`));
1787
+ console.log(red(` rejected as the server could not find: `));
1788
+ if (lock.readIsTranparent) {
1789
+ console.log(red(`${debugTime(lock.startTime)} to ${debugTime(lock.endTime)} ${getPathFromStr(lock.path).join(".")} `));
1790
+ } else {
1791
+ console.log(red(`${debugTime(lock.startTime)} ${getPathFromStr(lock.path).join(".")} `));
1792
+ }
1793
+ if (lock.readIsTranparent) {
1794
+ console.log(red(` (read was undefined, so presumably a value exists which the writer missed)`));
1795
+ }
1796
+ console.log(yellow(`Full list of writes rejected: `));
1797
+ for (let pathValue of valueGroup) {
1798
+ console.log(yellow(debugPathValue(pathValue)));
1799
+ }
1800
+
1801
+ if (debugRejections) {
1802
+ debugbreak(2);
1803
+ debugger;
1804
+ this.isLockValid(lock, now);
1805
+ debugger;
1806
+ }
1807
+ }
1808
+ break;
1809
+ }
1810
+ let inRange = this.getWritesInRange(lock);
1811
+ if (inRange?.length) {
1812
+ // If all the offending writes are undefined (or invalid), then we can ignore the contention
1813
+ if (lock.readIsTranparent && inRange.every(x => x.isTransparent || !x.isValid)) {
1814
+ continue;
1815
+ }
1816
+ rejected = true;
1817
+ // This is good... unless it happens A LOT. Then the app needs to change (unless it
1818
+ // is a game, or something with realtime competition, in which case this is probably unavoidable).
1819
+ if (
1820
+ (!isCoreQuiet || !isNode())
1821
+ // This special version indicates clientside prediction (which SHOULD be rejected).
1822
+ && lock.endTime.version !== Number.MAX_SAFE_INTEGER
1823
+ || debugRejections
1824
+ ) {
1825
+ let changed = valueGroup.filter(x => x.valid);
1826
+ if (changed.length > 0) {
1827
+ let timeToReject = now - changed[0].time.time;
1828
+ console.log(red(`!!! LOCK CONTENTION FIXED VIA REJECTION OF VALUE!!!(rejected after ${timeToReject}ms)`));
1829
+ for (let pathValue of changed) {
1830
+ console.log(red(debugPathValue(pathValue)));
1831
+ }
1832
+ console.log(red(` (write rejected due to original read not noticing value at: ${lock.path})`));
1833
+ console.log(red(` (original read from: ${debugTime(lock.startTime)}`));
1834
+ console.log(red(` (at time: ${debugTime(lock.endTime)}`));
1835
+ console.log(red(` (current time: ${Date.now()})`));
1836
+ for (let lockFailed of inRange) {
1837
+ console.log(red(` (conflict write at: ${debugTime(lockFailed.time)}`));
1838
+ }
1839
+ if (debugRejections) {
1840
+ debugbreak(2);
1841
+ debugger;
1842
+ this.getWritesInRange(lock);
1843
+ debugger;
1844
+ }
1845
+ }
1846
+ }
1847
+ break;
1848
+ }
1849
+ }
1850
+ // NOTE: The if statement is equivalent to this one line, we just iterate so we can
1851
+ // have better debug info, and rejection metrics tracking.
1852
+ //values.forEach(value => value.valid = !rejected);
1853
+ if (rejected) {
1854
+ for (let pathValue of valueGroup) {
1855
+ pathValue.valid = false;
1856
+ rejections.value++;
1857
+ }
1858
+ }
1859
+
1860
+ for (let value of valueGroup) {
1861
+ let change: WriteState = {
1862
+ path: value.path,
1863
+ isValid: value.valid || false,
1864
+ time: value.time,
1865
+ isTransparent: value.isTransparent || false
1866
+ };
1867
+ let changed = writeValidStorage.setWriteValidState(change);
1868
+ if (changed) {
1869
+ changes.push(change);
1870
+ }
1871
+ }
1872
+ }
1873
+
1874
+ return changes;
1875
+ }
1876
+
1877
+ public deleteRemovedPath(path: string) {
1878
+ this.validStorage.delete(path);
1879
+ }
1880
+
1881
+ private ensureGarbageCollectOldState = lazy(() => runInfinitePoll(MAX_CHANGE_AGE, () => {
1882
+ // Clear all valid states after they are old enough. We will just tell anyone who asks that
1883
+ // they are valid, because... they must be if someone still cares about them (as either they have been
1884
+ // syncing for a long time, and so they must have never gotten an invalid message and it is too late
1885
+ // for that to change now, or... they are new, which means they will either be given old valid values,
1886
+ // or new values which may be invalid but are too new for us to garbage collect).
1887
+ let garbageCollectTime = Date.now() - MAX_CHANGE_AGE;
1888
+ for (let [key, values] of this.validStorage) {
1889
+ values = values.filter(x => x.time.time > garbageCollectTime);
1890
+ if (values.length === 0) {
1891
+ this.validStorage.delete(key);
1892
+ } else {
1893
+ this.validStorage.set(key, values);
1894
+ }
1895
+ }
1896
+ }));
1897
+ }
1898
+ export const writeValidStorage = new WriteValidStorage();
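
A simplified, self-contained sketch of the aging rule isLockValidBase relies on: a read lock older than MAX_CHANGE_AGE is assumed valid, since any rejection would have had to reach the reader before then; otherwise the recorded write state (if any) decides. The constant, the types, and the direct time/creatorId comparison below are simplifications of the real MAX_CHANGE_AGE, WriteState, and compareTime:

const SKETCH_MAX_CHANGE_AGE = 60_000; // illustrative value only

type SketchTime = { time: number; creatorId: number };
type SketchWriteState = { time: SketchTime; isValid: boolean };

function isLockValidSketch(
    lockStart: SketchTime,
    knownWrites: SketchWriteState[] | undefined,
    now: number
): boolean {
    // Old enough => assumed valid; it is too late for an invalidation to reach anyone who cares.
    if (lockStart.time < now - SKETCH_MAX_CHANGE_AGE) return true;
    // Otherwise we need a recorded write state at exactly the lock's start time.
    let match = knownWrites?.find(x => x.time.time === lockStart.time && x.time.creatorId === lockStart.creatorId);
    return match ? match.isValid : false;
}

// Usage: with no recorded write, a recent lock is rejected but an old one is accepted.
isLockValidSketch({ time: Date.now() - 1_000, creatorId: 1 }, undefined, Date.now());   // false
isLockValidSketch({ time: Date.now() - 120_000, creatorId: 1 }, undefined, Date.now()); // true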
1899
+
1900
+ let rejections = { value: 0 };
1901
+ export function internalGetRejections(): { value: number } {
1902
+ return rejections;
1903
+ }
1904
+ export function internalTestResetRejections() {
1905
+ rejections.value = 0;
1906
+ }
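
An illustrative test-style use of the rejection counter exported above. The relative import specifier is an assumption about where this module lives, and expectNoRejections is a hypothetical helper, not part of the package:

import { internalGetRejections, internalTestResetRejections } from "./pathValueCore";

// Resets the counter, runs the scenario, and fails if any writes were rejected during it.
async function expectNoRejections(run: () => Promise<void>) {
    internalTestResetRejections();
    await run();
    let rejected = internalGetRejections().value;
    if (rejected > 0) {
        throw new Error(`Expected no rejected writes, but ${rejected} were rejected`);
    }
}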
1907
+
1908
+ class LockWatcher {
1909
+ @measureFnc
1910
+ public watchValueLocks(values: PathValue[]): void {
1911
+ let { newRemoteLocks } = lockWatchDeduper.localWatchNewLocks({ values: values });
1912
+ if (newRemoteLocks.size > 0) {
1913
+ logErrors(this.watchRemoteLocks(newRemoteLocks));
1914
+ }
1915
+ }
1916
+ private watchRemoteLocks = batchFunction(
1917
+ { delay: "afterio" },
1918
+ async (locksBatched: Set<ReadLock>[]) => {
1919
+ let locks = new Set(locksBatched.flatMap(x => Array.from(x)));
1920
+ let locksByAuthority = new Map<string, Set<ReadLock>>();
1921
+ for (let lock of locks) {
1922
+ if (pathValueAuthority2.isSelfAuthority(lock.path)) continue;
1923
+
1924
+ let authorityId = await pathValueAuthority2.getSingleReadNodePromise(lock.path);
1925
+ // If they don't trust us, we just have to assume the value we read is correct.
1926
+ if (!await isTrustedByNode(authorityId)) continue;
1927
+ let locks = locksByAuthority.get(authorityId);
1928
+ if (!locks) {
1929
+ locks = new Set();
1930
+ locksByAuthority.set(authorityId, locks);
1931
+ }
1932
+ locks.add(lock);
1933
+ }
1934
+
1935
+ for (let [authorityId, locks] of locksByAuthority) {
1936
+ this.watchLocksOnAuthority(authorityId, locks);
1937
+ }
1938
+ }
1939
+ );
1940
+ private watchLocksOnAuthority(authorityId: string, locks: Set<ReadLock>) {
1941
+ if (!Array.from(locks).some(x => lockWatchDeduper.isLockStillWatched(x))) return;
1942
+
1943
+ let alreadyReconnected = false;
1944
+ const reconnectWatches = () => {
1945
+ if (alreadyReconnected) return;
1946
+ alreadyReconnected = true;
1947
+ if (!Array.from(locks).some(x => lockWatchDeduper.isLockStillWatched(x))) return;
1948
+ logErrors(this.watchRemoteLocks(locks));
1949
+ };
1950
+ this.onNextDisconnectList(authorityId).add(reconnectWatches);
1951
+ setTimeout(() => {
1952
+ this.onNextDisconnectList(authorityId).delete(reconnectWatches);
1953
+ }, MAX_CHANGE_AGE * 2);
1954
+
1955
+ let connection = PathValueController.nodes[authorityId].watchLockValid(
1956
+ Array.from(locks.values())
1957
+ );
1958
+ logErrors(connection);
1959
+ connection.catch(() => reconnectWatches());
1960
+ }
1961
+
1962
+ private onNextDisconnectList = cache((nodeId: string) => {
1963
+ let callbacks = new Set<() => void>();
1964
+ SocketFunction.onNextDisconnect(nodeId, () => {
1965
+ this.onNextDisconnectList.clear(nodeId);
1966
+ for (let callback of callbacks) {
1967
+ callback();
1968
+ }
1969
+ });
1970
+ return callbacks;
1971
+ });
1972
+ }
1973
+ export const lockWatcher = new LockWatcher();
1974
+
1975
+ class LockToCallbackLookup {
1976
+ // time.creatorId => time => callbacks
1977
+ private validWatchers = registerResource("paths|validWatchers", new Map<number, Map<number, Set<WriteCallback>>>());
1978
+
1979
+ // path => sorted by endTime
1980
+ private validRangeWatchers = registerResource("paths|validRangeWatchers", new Map<string, {
1981
+ startTime: Time;
1982
+ endTime: Time;
1983
+ // Called back if there is a valid value between startTime (exclusive) and endTime (exclusive)
1984
+ callback: WriteCallback;
1985
+ }[]>());
1986
+
1987
+ /** Watches both the startTime and the range */
1988
+ public watchLock(lock: ReadLock, callback: WriteCallback) {
1989
+ this.ensureGarbageCollectLoop();
1990
+ // Add valid watcher
1991
+ if (lock.endTime.time > 0) {
1992
+ const time = lock.startTime;
1993
+ let maps = this.validWatchers.get(time.creatorId);
1994
+ if (!maps) {
1995
+ maps = new Map();
1996
+ this.validWatchers.set(time.creatorId, maps);
1997
+ }
1998
+ let callbacksList = maps.get(time.time);
1999
+ if (!callbacksList) {
2000
+ callbacksList = new Set();
2001
+ maps.set(time.time, callbacksList);
2002
+ }
2003
+ callbacksList.add(callback);
2004
+ }
2005
+ // Add valid range watcher
2006
+ if (compareTime(lock.startTime, lock.endTime) !== 0) {
2007
+ let { path, startTime, endTime } = lock;
2008
+ let watchers = this.validRangeWatchers.get(path);
2009
+ if (!watchers) {
2010
+ watchers = [];
2011
+ this.validRangeWatchers.set(path, watchers);
2012
+ }
2013
+ // Find the index to insert into, maintaining a sort by time
2014
+ let index = -1;
2015
+ for (let i = watchers.length - 1; i >= 0; i--) {
2016
+ if (watchers[i].endTime.time < endTime.time) {
2017
+ index = i + 1;
2018
+ break;
2019
+ }
2020
+ }
2021
+ if (index === -1) {
2022
+ index = watchers.length;
2023
+ }
2024
+ watchers.splice(index, 0, { startTime, endTime, callback });
2025
+ }
2026
+ }
2027
+
2028
+ public garbageCollectOldWatchers() {
2029
+ const disposeTime = Date.now() - MAX_CHANGE_AGE * 2;
2030
+
2031
+ for (let [path, times] of this.validRangeWatchers) {
2032
+ let expiredIndex = times.findIndex(x => x.endTime.time < disposeTime);
2033
+ if (expiredIndex >= 0) {
2034
+ times.splice(expiredIndex, times.length - expiredIndex);
2035
+ }
2036
+ }
2037
+
2038
+ for (let [key, maps] of this.validWatchers) {
2039
+ for (let time of maps.keys()) {
2040
+ if (time < disposeTime) {
2041
+ maps.delete(time);
2042
+ if (maps.size === 0) {
2043
+ this.validWatchers.delete(key);
2044
+ }
2045
+ }
2046
+ }
2047
+ }
2048
+
2049
+ for (let [path, watchers] of this.validRangeWatchers) {
2050
+ for (let i = watchers.length - 1; i >= 0; i--) {
2051
+ let watcher = watchers[i];
2052
+ if (watcher.endTime.time < disposeTime) {
2053
+ watchers.splice(i, 1);
2054
+ }
2055
+ }
2056
+ if (watchers.length === 0) {
2057
+ this.validRangeWatchers.delete(path);
2058
+ }
2059
+ }
2060
+ }
2061
+
2062
+ public getValidStateChangedTriggers(
2063
+ remoteValue: WriteState
2064
+ ): WriteCallback[] {
2065
+ let allCallbacks: WriteCallback[] = [];
2066
+ let validMaps = this.validWatchers.get(remoteValue.time.creatorId);
2067
+ if (validMaps) {
2068
+ let callbacks = validMaps.get(remoteValue.time.time);
2069
+ if (callbacks) {
2070
+ for (let callbackList of callbacks) {
2071
+ allCallbacks.push(callbackList);
2072
+ }
2073
+ }
2074
+ }
2075
+ let rangeWatchers = this.validRangeWatchers.get(remoteValue.path);
2076
+ if (rangeWatchers) {
2077
+ for (let watcher of rangeWatchers) {
2078
+ if (
2079
+ // NOTE: This is the same comparison as in getWritesInRange
2080
+ // If the time is > startTime (exclusive, as startTime MUST exist, as we depend on it)
2081
+ compareTime(watcher.startTime, remoteValue.time) < 0
2082
+ // And < endTime (exclusive, as endTime is assumed to exist, as it is probably when we are writing!)
2083
+ && compareTime(remoteValue.time, watcher.endTime) < 0
2084
+ ) {
2085
+ allCallbacks.push(watcher.callback);
2086
+ }
2087
+ }
2088
+ }
2089
+ return allCallbacks;
2090
+ }
2091
+
2092
+ private ensureGarbageCollectLoop = lazy(() => {
2093
+ runInfinitePoll(MAX_CHANGE_AGE, () => lockToCallback.garbageCollectOldWatchers());
2094
+ });
2095
+
2096
+
2097
+ }
2098
+ export const lockToCallback = new LockToCallbackLookup();
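
A self-contained sketch of the exclusive range comparison shared by getWritesInRange and getValidStateChangedTriggers: a write at exactly the lock's startTime or endTime does not count, only writes strictly between them. The simplified time type and comparison below stand in for the real Time and compareTime:

type SketchRangeTime = { time: number; version: number };

function compareTimeSketch(a: SketchRangeTime, b: SketchRangeTime): number {
    return a.time - b.time || a.version - b.version;
}

// Same comparison shape as the real code: strictly after start AND strictly before end.
function isConflictingWrite(start: SketchRangeTime, end: SketchRangeTime, write: SketchRangeTime): boolean {
    return compareTimeSketch(start, write) < 0 && compareTimeSketch(write, end) < 0;
}

// The read itself (at start) and the pending write (at end) are excluded:
isConflictingWrite({ time: 10, version: 0 }, { time: 20, version: 0 }, { time: 10, version: 0 }); // false
isConflictingWrite({ time: 10, version: 0 }, { time: 20, version: 0 }, { time: 15, version: 0 }); // true
isConflictingWrite({ time: 10, version: 0 }, { time: 20, version: 0 }, { time: 20, version: 0 }); // false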
2099
+
2100
+ // NOTE: We assume there will be no equivalent but !== PathValues. We also assume
2101
+ // there will be no equivalent but !== Locks. Fair assumptions, which save so
2102
+ // much time it is probably worth maintaining them. AND, if they are invalidated,
2103
+ // the only penalty is a memory leak, which isn't so bad (arguably having identical
2104
+ // values with different instances is a memory leak too).
2105
+ class LockWatchDeduper {
2106
+ private watchedRemoteLocks = registerResource("paths|watchedRemoteLocks", new Map<ReadLock[], Set<PathValue>>());
2107
+ private watchedLockFlat = registerResource("paths|watchedLockFlat", new Set<ReadLock>());
2108
+
2109
+ /** Calls validWatcher with any new readLocks, using the values as the callback.
2110
+ * SHOULD ONLY BE USED IF YOU ARE WATCHING THE LOCKS, aka, if you are
2111
+ * the authority on the values. Otherwise, you are NOT watching readLocks,
2112
+ * you are watching the valid state, via the actual authority of the paths.
2113
+ */
2114
+ public localWatchNewLocks(config: { values: PathValue[]; }): {
2115
+ newRemoteLocks: Set<ReadLock>;
2116
+ } {
2117
+ this.ensureLockCleanupLoop();
2118
+
2119
+ let byLock = new Map<ReadLock[], PathValue[]>();
2120
+ for (let value of config.values) {
2121
+ validateLockCount(value);
2122
+ if (value.lockCount === 0) continue;
2123
+ let values = byLock.get(value.locks);
2124
+ if (!values) {
2125
+ values = [];
2126
+ byLock.set(value.locks, values);
2127
+ }
2128
+ values.push(value);
2129
+ }
2130
+
2131
+ let newLocks: ReadLock[] = [];
2132
+ for (let value of config.values) {
2133
+ validateLockCount(value);
2134
+ if (value.lockCount === 0) continue;
2135
+ let values = this.watchedRemoteLocks.get(value.locks);
2136
+ if (!values) {
2137
+ values = new Set();
2138
+ this.watchedRemoteLocks.set(value.locks, values);
2139
+ newLocks.push(...value.locks);
2140
+ }
2141
+ values.add(value);
2142
+ }
2143
+ for (let locks of byLock.keys()) {
2144
+ for (let lock of locks) {
2145
+ this.watchedLockFlat.add(lock);
2146
+ }
2147
+ }
2148
+ let newRemoteLocks = new Set<ReadLock>();
2149
+
2150
+ for (let [locks, values] of byLock) {
2151
+ for (let lock of locks) {
2152
+ // NOTE: We watch LOCAL_DOMAIN paths, as they might depend on a remote value, which is invalidated.
2153
+ // This can happen for routing, where you click something like "go to main node", before
2154
+ // the remote node list is loaded. You will go to the wrong page, BUT, once the remote node list
2155
+ // is loaded, it should reject (and then, ideally we rerun the function clientside on detection
2156
+ // of the rejection).
2157
+
2158
+ if (!pathValueAuthority2.isSelfAuthority(lock.path)) {
2159
+ newRemoteLocks.add(lock);
2160
+ }
2161
+ // NOTE: I BELIEVE this is valid, even if we don't own the locks. We might have some
2162
+ // unnecessary cascading, but... if we depended on a value we will have it, and
2163
+ // if we are missing any range lock conflicts we will know about them due to
2164
+ // the remote locks. AND, any values we have are synced, so we don't need to worry about
2165
+ // any range lock conflicts themselves being rejected, missing that rejection, and so
2166
+ // incorrectly invalidating valid values that we just received.
2167
+ lockToCallback.watchLock(lock, values);
2168
+ }
2169
+ }
2170
+
2171
+ return { newRemoteLocks };
2172
+ }
2173
+
2174
+ private ensureLockCleanupLoop = lazy(() => {
2175
+ runInfinitePoll(MAX_CHANGE_AGE, () => this.cleanupDeadLocks());
2176
+ });
2177
+
2178
+ @measureFnc
2179
+ private cleanupDeadLocks() {
2180
+ let deadTime = Date.now() - MAX_CHANGE_AGE;
2181
+ for (let [locks, values] of this.watchedRemoteLocks) {
2182
+ let isDead = true;
2183
+ for (let value of values) {
2184
+ if (value.time.time > deadTime) {
2185
+ isDead = false;
2186
+ break;
2187
+ }
2188
+ }
2189
+ if (isDead) {
2190
+ for (let lock of locks) {
2191
+ this.watchedLockFlat.delete(lock);
2192
+ }
2193
+ this.watchedRemoteLocks.delete(locks);
2194
+ }
2195
+ }
2196
+ }
2197
+
2198
+ public isLockStillWatched(lock: ReadLock): boolean {
2199
+ return this.watchedLockFlat.has(lock);
2200
+ }
2201
+ }
2202
+ export const lockWatchDeduper = new LockWatchDeduper();
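
A self-contained sketch of the identity assumption LockWatchDeduper depends on: deduplication is keyed on the ReadLock[] array instance itself, so it only collapses values that share the exact same locks array (===); equal contents in a different array are treated as new. The SketchLock type and DedupSketch class are illustrative only:

type SketchLock = { path: string };

class DedupSketch {
    private watched = new Map<SketchLock[], Set<string>>();

    // Returns true only the first time this exact array instance is seen.
    public addValue(locks: SketchLock[], valuePath: string): boolean {
        let isNew = !this.watched.has(locks);
        let values = this.watched.get(locks) ?? new Set<string>();
        this.watched.set(locks, values);
        values.add(valuePath);
        return isNew;
    }
}

let sharedLocks: SketchLock[] = [{ path: "a.b" }];
let dedupSketch = new DedupSketch();
dedupSketch.addValue(sharedLocks, "x");        // true: new locks array
dedupSketch.addValue(sharedLocks, "y");        // false: same array instance, deduped
dedupSketch.addValue([{ path: "a.b" }], "z");  // true: equal contents but a different instance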
2203
+
2204
+ // Async import, as we are not allowed synchronous imports of values in
2205
+ // higher tiers than us. We can't expose a callback, as nothing else
2206
+ // can trigger the import of memoryValueAudit for all clients as consistently
2207
+ // as the core.
2208
+ setImmediate(() => {
2209
+ logErrors(import("../5-diagnostics/memoryValueAudit").then(x => x.startMemoryAuditLoop()));
2210
+ logErrors(import("../5-diagnostics/diskValueAudit").then(x => x.startDiskAuditLoop()));
2211
+ logErrors(import("../5-diagnostics/synchronousLagTracking").then(x => x.trackSynchronousLag()));
2212
+ });
2213
+
2214
+ export { pathValueArchives };
2215
+
2216
+ (globalThis as any).core = module.exports;
2217
+ (globalThis as any).SocketFunction = SocketFunction;