querysub 0.373.0 → 0.374.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/-f-node-discovery/NodeDiscovery.ts +2 -2
- package/src/0-path-value-core/PathValueCommitter.ts +1 -1
- package/src/0-path-value-core/PathValueController.ts +2 -2
- package/src/0-path-value-core/archiveLocks/ArchiveLocks2.ts +12 -12
- package/src/0-path-value-core/auditLogs.ts +1 -1
- package/src/0-path-value-core/pathValueCore.ts +2 -2
- package/src/3-path-functions/PathFunctionRunner.ts +1 -1
- package/src/3-path-functions/PathFunctionRunnerMain.ts +1 -1
- package/src/4-dom/qreact.tsx +2 -2
- package/src/4-querysub/QuerysubController.ts +1 -1
- package/src/5-diagnostics/diskValueAudit.ts +1 -1
- package/src/diagnostics/logs/diskLogger.ts +1 -1
- package/src/diagnostics/logs/diskShimConsoleLogs.ts +2 -0
- package/src/diagnostics/watchdog.ts +1 -1
- package/src/user-implementation/userData.ts +3 -3
package/package.json
CHANGED
|
@@ -185,7 +185,7 @@ function addNodeIdBase(nodeId: string) {
|
|
|
185
185
|
function setNodeIds(nodeIds: string[]) {
|
|
186
186
|
nodeIds = nodeIds.filter(x => x !== SPECIAL_NODE_ID_FOR_UNMOUNTED_NODE);
|
|
187
187
|
|
|
188
|
-
|
|
188
|
+
console.info("setNodeIds", { nodeIds });
|
|
189
189
|
// Also try all localhost ports, if we are developing and not in public mode
|
|
190
190
|
if (isNode() && !isPublic() && isDevDebugbreak()) {
|
|
191
191
|
let ports = new Set(nodeIds.map(nodeId => decodeNodeId(nodeId)?.port).filter(isDefined));
|
|
@@ -350,7 +350,7 @@ async function runHeartbeatAuditLoop() {
|
|
|
350
350
|
}
|
|
351
351
|
} else {
|
|
352
352
|
deadCount.delete(nodeId);
|
|
353
|
-
|
|
353
|
+
console.info("Read node heartbeat", { nodeId, lastTime });
|
|
354
354
|
}
|
|
355
355
|
}
|
|
356
356
|
if (pendingDeadCount) {
|
|
@@ -177,7 +177,7 @@ class PathValueCommitter {
|
|
|
177
177
|
markArrayAsSplitable(values);
|
|
178
178
|
const { Querysub } = await import("../4-querysub/Querysub");
|
|
179
179
|
let serializedValues = await pathValueSerializer.serialize(values, { compress: Querysub.COMPRESS_NETWORK });
|
|
180
|
-
|
|
180
|
+
console.info("Send PathValues to server", { valueCount: values.length, targetId: otherAuthority, });
|
|
181
181
|
let forwardPromise = PathValueController.nodes[otherAuthority].forwardWrites(
|
|
182
182
|
serializedValues,
|
|
183
183
|
undefined,
|
|
@@ -86,9 +86,9 @@ class PathValueControllerBase {
|
|
|
86
86
|
auditLog("RECEIVE VALUE", { path: value.path, time: value.time.time, sourceNodeId });
|
|
87
87
|
}
|
|
88
88
|
}
|
|
89
|
-
|
|
89
|
+
console.info("Received PathValues via forwardWrites", { valueCount: values.length, callerId, });
|
|
90
90
|
for (let value of values) {
|
|
91
|
-
|
|
91
|
+
console.info("Received PathValue for path", { path: value.path, time: value.time.time, callerId });
|
|
92
92
|
}
|
|
93
93
|
|
|
94
94
|
if (isCoreQuiet) {
|
|
@@ -309,7 +309,7 @@ class TransactionLocker {
|
|
|
309
309
|
}
|
|
310
310
|
public async createConfirm(key: string) {
|
|
311
311
|
let path = this.getConfirmKey(key);
|
|
312
|
-
|
|
312
|
+
console.info("Creating confirmation for ${key}");
|
|
313
313
|
await this.storage.setValue(path, Buffer.from(""));
|
|
314
314
|
return path;
|
|
315
315
|
}
|
|
@@ -362,7 +362,7 @@ class TransactionLocker {
|
|
|
362
362
|
delete: ellipsize(deletes.map(a => debugFileInfo(a.key)).join(","), 50),
|
|
363
363
|
});
|
|
364
364
|
|
|
365
|
-
|
|
365
|
+
console.info("Writing transaction", {
|
|
366
366
|
name,
|
|
367
367
|
ops: transaction.ops.length,
|
|
368
368
|
});
|
|
@@ -488,7 +488,7 @@ class TransactionLocker {
|
|
|
488
488
|
}
|
|
489
489
|
}
|
|
490
490
|
|
|
491
|
-
|
|
491
|
+
console.info("Read archive state", {
|
|
492
492
|
rawFilesCount: files.length,
|
|
493
493
|
confirmedCount: currentDataFiles.size,
|
|
494
494
|
rawFiles: files.map(a => a.file),
|
|
@@ -507,7 +507,7 @@ class TransactionLocker {
|
|
|
507
507
|
let result = await tryToRead();
|
|
508
508
|
if (result) {
|
|
509
509
|
let timeToRead = Date.now() - startTime;
|
|
510
|
-
|
|
510
|
+
console.info(`Read data state in ${formatTime(timeToRead)}`);
|
|
511
511
|
return result;
|
|
512
512
|
}
|
|
513
513
|
}
|
|
@@ -546,7 +546,7 @@ class TransactionLocker {
|
|
|
546
546
|
let rawLookup = new Set(Array.from(rawDataFiles).map(a => a.file));
|
|
547
547
|
// If any creates are not confirmed, it must not have been applied
|
|
548
548
|
if (transaction.ops.some(a => a.type === "create" && rawLookup.has(a.key) && !confirmedKeys.has(a.key))) {
|
|
549
|
-
|
|
549
|
+
console.info(`Transaction not applied (has pending confirmations of creates)`, {
|
|
550
550
|
keys: transaction.ops
|
|
551
551
|
.filter(a => a.type === "create" && rawLookup.has(a.key) && !confirmedKeys.has(a.key))
|
|
552
552
|
.map(a => a.key)
|
|
@@ -555,7 +555,7 @@ class TransactionLocker {
|
|
|
555
555
|
}
|
|
556
556
|
// If any deletes still exist, it must not have been applied
|
|
557
557
|
if (transaction.ops.some(a => a.type === "delete" && confirmedKeys.has(a.key))) {
|
|
558
|
-
|
|
558
|
+
console.info(`Transaction not applied (has pending deletes)`, {
|
|
559
559
|
keys: transaction.ops
|
|
560
560
|
.filter(a => a.type === "delete" && confirmedKeys.has(a.key))
|
|
561
561
|
.map(a => a.key)
|
|
@@ -568,7 +568,7 @@ class TransactionLocker {
|
|
|
568
568
|
let createCount = transaction.ops.filter(a => a.type === "create").length;
|
|
569
569
|
let deleteCount = transaction.ops.filter(a => a.type === "delete").length;
|
|
570
570
|
let lockedFiles = transaction.lockedFilesMustEqual?.length;
|
|
571
|
-
|
|
571
|
+
console.info(`Applying transaction with ${createCount} file creates and ${deleteCount} file deletes. ${lockedFiles !== undefined && `Lock state depends on ${lockedFiles} files` || ""}`, {
|
|
572
572
|
transactions: transaction.ops.map(x => JSON.stringify(x)),
|
|
573
573
|
});
|
|
574
574
|
logNodeStats(`archives|TΔ Apply`, formatNumber, 1);
|
|
@@ -594,7 +594,7 @@ class TransactionLocker {
|
|
|
594
594
|
};
|
|
595
595
|
await Promise.all(list(CONCURRENT_WRITE_COUNT).map(runThread));
|
|
596
596
|
|
|
597
|
-
|
|
597
|
+
console.info(`Applied transaction with ${createCount} file creates and file ${deleteCount} deletes. ${lockedFiles !== undefined && `Lock state depends on ${lockedFiles} files` || ""}`, {
|
|
598
598
|
transactions: transaction.ops.map(x => JSON.stringify(x)),
|
|
599
599
|
});
|
|
600
600
|
}
|
|
@@ -660,7 +660,7 @@ class TransactionLocker {
|
|
|
660
660
|
let threshold = activeT.createTime + this.storage.propagationTime;
|
|
661
661
|
if (Date.now() < threshold) {
|
|
662
662
|
let waitTime = threshold - Date.now();
|
|
663
|
-
|
|
663
|
+
console.info(`Waiting ${formatTime(waitTime)} for transaction ${activeT.seqNum} to settle.`);
|
|
664
664
|
await new Promise(resolve => setTimeout(resolve, waitTime));
|
|
665
665
|
return this.getFilesBase();
|
|
666
666
|
}
|
|
@@ -768,7 +768,7 @@ class TransactionLocker {
|
|
|
768
768
|
let dels = transaction.ops.filter(a => a.type === "delete").length;
|
|
769
769
|
let creates = transaction.ops.filter(a => a.type === "create").length;
|
|
770
770
|
let createBytes = transaction.ops.map(a => a.type === "create" && a.value?.length || 0).reduce((a, b) => a + b, 0);
|
|
771
|
-
|
|
771
|
+
console.info(`Starting transaction with ${creates} file creates and ${dels} file deletes, ${formatNumber(createBytes)}B`, {
|
|
772
772
|
createFilesNames: transaction.ops.filter(a => a.type === "create").map(a => a.key),
|
|
773
773
|
deleteFilesNames: transaction.ops.filter(a => a.type === "delete").map(a => a.key),
|
|
774
774
|
});
|
|
@@ -797,7 +797,7 @@ class TransactionLocker {
|
|
|
797
797
|
let beforeData = await this.getFilesBase();
|
|
798
798
|
if (!this.isTransactionValid(transaction, beforeData.dataFiles, beforeData.rawDataFiles)) {
|
|
799
799
|
logNodeStats(`archives|TΔ Rejected`, formatNumber, 1);
|
|
800
|
-
|
|
800
|
+
console.info(`Finished transaction with rejection, ${transaction.ops.length} ops`);
|
|
801
801
|
return "rejected";
|
|
802
802
|
}
|
|
803
803
|
|
|
@@ -806,7 +806,7 @@ class TransactionLocker {
|
|
|
806
806
|
let afterData = await this.getFilesBase();
|
|
807
807
|
if (this.wasTransactionApplied(transaction, afterData.dataFiles, afterData.rawDataFiles)) {
|
|
808
808
|
logNodeStats(`archives|TΔ Accepted`, formatNumber, 1);
|
|
809
|
-
|
|
809
|
+
console.info(`Finished transaction with ${transaction.ops.length} ops`);
|
|
810
810
|
return "accepted";
|
|
811
811
|
}
|
|
812
812
|
}
|
|
@@ -58,7 +58,7 @@ function debugLogBase(type: string, values: { [key: string]: unknown }) {
|
|
|
58
58
|
return;
|
|
59
59
|
}
|
|
60
60
|
if (isDiskAudit()) {
|
|
61
|
-
|
|
61
|
+
console.info(type, values, { "diskAudit": true });
|
|
62
62
|
}
|
|
63
63
|
let newEntry: DebugLog = { type, time: Date.now(), values };
|
|
64
64
|
logHistory.push(newEntry);
|
|
@@ -1261,7 +1261,7 @@ class PathWatcher {
|
|
|
1261
1261
|
auditLog("new non-local WATCH PARENT", { path, watcher: config.callback });
|
|
1262
1262
|
}
|
|
1263
1263
|
}
|
|
1264
|
-
|
|
1264
|
+
console.info(`New PathValue watches`, {
|
|
1265
1265
|
newPathsWatched: newPathsWatched.size,
|
|
1266
1266
|
newParentsWatched: newParentsWatched.size,
|
|
1267
1267
|
});
|
|
@@ -1394,7 +1394,7 @@ class PathWatcher {
|
|
|
1394
1394
|
}
|
|
1395
1395
|
|
|
1396
1396
|
if (fullyUnwatched.paths.length > 0 || fullyUnwatched.parentPaths.length > 0) {
|
|
1397
|
-
|
|
1397
|
+
console.info(`Unwatched PathValue watches`, {
|
|
1398
1398
|
unwatchedPaths: fullyUnwatched.paths.length,
|
|
1399
1399
|
unwatchedParents: fullyUnwatched.parentPaths.length,
|
|
1400
1400
|
});
|
|
@@ -689,7 +689,7 @@ export class PathFunctionRunner {
|
|
|
689
689
|
let syncTime = wallTime - evalTime;
|
|
690
690
|
|
|
691
691
|
|
|
692
|
-
|
|
692
|
+
console.info("Finished FunctionRunner function", {
|
|
693
693
|
...callPath, argsEncoded: "", functionSpec,
|
|
694
694
|
wallTime, syncTime, evalTime,
|
|
695
695
|
loops: runCount,
|
|
@@ -41,7 +41,7 @@ async function main() {
|
|
|
41
41
|
await Querysub.hostService("PathFunctionRunnerMain");
|
|
42
42
|
|
|
43
43
|
if (!isPublic()) {
|
|
44
|
-
IndexedLogs.runLogMoveLoop();
|
|
44
|
+
void IndexedLogs.runLogMoveLoop();
|
|
45
45
|
}
|
|
46
46
|
|
|
47
47
|
// Use a fairly high stick time (the default is 10s), because having wait to sync data is very slow,
|
package/src/4-dom/qreact.tsx
CHANGED
|
@@ -2119,7 +2119,7 @@ function updateDOMNodeFields(domNode: DOMNode, vNode: VirtualDOM, prevVNode: Vir
|
|
|
2119
2119
|
if (name === "blur") {
|
|
2120
2120
|
let target = args[0].currentTarget as HTMLElement;
|
|
2121
2121
|
if (!target.getAttribute("data-blur-on-unmount") && !target.isConnected) {
|
|
2122
|
-
|
|
2122
|
+
console.info("Ignoring blur for disconnected element. You can use data-blur-on-unmount to re-enable blurs on this element.", target);
|
|
2123
2123
|
return;
|
|
2124
2124
|
}
|
|
2125
2125
|
}
|
|
@@ -2428,7 +2428,7 @@ function blurFixOnMouseDownHack(event: MouseEvent) {
|
|
|
2428
2428
|
|
|
2429
2429
|
// Looks like we are going to blur, so blur now
|
|
2430
2430
|
if (selected instanceof HTMLElement && !selected.hasAttribute("data-no-early-blur")) {
|
|
2431
|
-
|
|
2431
|
+
console.info(`Simulating early blur to prevent blur from firing after mousedown. This solves a problem where mousedown changes the UI, and then the blur fires on the wrong element. You can use data-no-early-blur to opt-out of this feature`, selected);
|
|
2432
2432
|
selected.blur();
|
|
2433
2433
|
}
|
|
2434
2434
|
}
|
|
@@ -480,7 +480,7 @@ export class QuerysubControllerBase {
|
|
|
480
480
|
// make the UI look cleaner (instead of showing stale values, it shows nothing)?
|
|
481
481
|
let undefinedValues: PathValue[] = newPathsNotAllowed.map(path => ({ path, value: undefined, canGCValue: true, isTransparent: true, time: epochTime, locks: [], lockCount: 0, valid: true, event: false }));
|
|
482
482
|
|
|
483
|
-
|
|
483
|
+
console.info("Disallowing PathValue watches due to disallowed permissions", { count: newPathsNotAllowed.length, callerId });
|
|
484
484
|
|
|
485
485
|
ignoreErrors(pathValueSerializer.serialize(undefinedValues, { compress: Querysub.COMPRESS_NETWORK }).then(buffers =>
|
|
486
486
|
PathValueController.nodes[callerId].forwardWrites(
|
|
@@ -188,7 +188,7 @@ async function checkAuthority(authority: AuthorityPath, threshold: number) {
|
|
|
188
188
|
console.error(`Value mismatch between disk and memory for ${formatNumber(changedValues.size)} values. Ex: ${firstValue.path}`);
|
|
189
189
|
}
|
|
190
190
|
if (removedValues.size > 0) {
|
|
191
|
-
|
|
191
|
+
console.info(green(`Removing ${formatNumber(removedValues.size)} paths from memory which have been GCed on the disk.`));
|
|
192
192
|
}
|
|
193
193
|
if (changedValues.size > 0 || removedValues.size > 0) {
|
|
194
194
|
let allValues = new Set([...changedValues, ...removedValues]);
|
|
@@ -146,7 +146,7 @@ void Promise.resolve().then(() => {
|
|
|
146
146
|
|
|
147
147
|
|
|
148
148
|
const logDiskDontShim = logDisk;
|
|
149
|
-
/**
|
|
149
|
+
/** @deprecated Don't call this directly; call console.info instead, which our shim will prevent from logging to the console while still calling logDisk. */
|
|
150
150
|
export function logDisk(type: "log" | "warn" | "info" | "error", ...args: unknown[]) {
|
|
151
151
|
if (!isNode()) return;
|
|
152
152
|
try {
|
|
@@ -55,6 +55,8 @@ export function shimConsoleLogs() {
|
|
|
55
55
|
// Some arguments might throw if accessed (as they might be proxies), so
|
|
56
56
|
// catch and ignore errors
|
|
57
57
|
}
|
|
58
|
+
// NOTE: Info really has absolutely no purpose. There's no reason to use info instead of log, so we're going to give it a purpose. Infos are not going to be shown in the console and are only going to be logged to disk. This helps fix our shimming issue, where when we call log disk directly, we lose out on the source file information.
|
|
59
|
+
if (fncName === "info") return;
|
|
58
60
|
return originalFnc(...args);
|
|
59
61
|
};
|
|
60
62
|
}
|
|
@@ -72,7 +72,7 @@ function logProfileMeasuresTimingsNow() {
|
|
|
72
72
|
measureObj = startMeasure();
|
|
73
73
|
function diskLogMeasureObj(table: FormattedMeasureTable | undefined) {
|
|
74
74
|
if (!table) return;
|
|
75
|
-
|
|
75
|
+
console.info(table.title, { entries: table.entries });
|
|
76
76
|
}
|
|
77
77
|
diskLogMeasureObj(logMeasureTable(profile, {
|
|
78
78
|
name: `watchdog at ${new Date().toLocaleString()}`,
|
|
@@ -626,7 +626,7 @@ function sendLoginEmail(config: {
|
|
|
626
626
|
);
|
|
627
627
|
if (alreadyAllowed) {
|
|
628
628
|
data().machineSecure[machineId].userId = userId;
|
|
629
|
-
|
|
629
|
+
console.info(`User ${userId} already allowed for current ip and machine id, no need to send email.`);
|
|
630
630
|
return;
|
|
631
631
|
}
|
|
632
632
|
}
|
|
@@ -818,14 +818,14 @@ function inviteUser(config: { email: string }) {
|
|
|
818
818
|
Querysub.ignorePermissionsChecks(() => {
|
|
819
819
|
let curUserObj = getUserObjAssert();
|
|
820
820
|
if (config.email in curUserObj.invitedUsers2) {
|
|
821
|
-
|
|
821
|
+
console.info(`User ${config.email} already invited`);
|
|
822
822
|
return;
|
|
823
823
|
}
|
|
824
824
|
// If the user already exists, don't invite
|
|
825
825
|
const { email } = config;
|
|
826
826
|
let userId = atomic(data().secure.emailToUserId[email]) || createNewUserId();
|
|
827
827
|
if (userId in data().users) {
|
|
828
|
-
|
|
828
|
+
console.info(`User ${userId} already exists, no need to invite`);
|
|
829
829
|
return;
|
|
830
830
|
}
|
|
831
831
|
|