querysub 0.356.0 → 0.357.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70) hide show
  1. package/.cursorrules +8 -0
  2. package/bin/movelogs.js +4 -0
  3. package/package.json +12 -6
  4. package/scripts/postinstall.js +23 -0
  5. package/src/-a-archives/archiveCache.ts +10 -12
  6. package/src/-a-archives/archives.ts +29 -0
  7. package/src/-a-archives/archivesBackBlaze.ts +60 -12
  8. package/src/-a-archives/archivesDisk.ts +27 -8
  9. package/src/-a-archives/archivesLimitedCache.ts +21 -0
  10. package/src/-a-archives/archivesMemoryCache.ts +350 -0
  11. package/src/-a-archives/archivesPrivateFileSystem.ts +22 -0
  12. package/src/-g-core-values/NodeCapabilities.ts +3 -0
  13. package/src/0-path-value-core/auditLogs.ts +5 -1
  14. package/src/0-path-value-core/pathValueCore.ts +7 -7
  15. package/src/4-dom/qreact.tsx +1 -0
  16. package/src/4-querysub/Querysub.ts +1 -5
  17. package/src/config.ts +5 -0
  18. package/src/diagnostics/MachineThreadInfo.tsx +235 -0
  19. package/src/diagnostics/NodeViewer.tsx +3 -2
  20. package/src/diagnostics/logs/FastArchiveAppendable.ts +79 -42
  21. package/src/diagnostics/logs/FastArchiveController.ts +102 -63
  22. package/src/diagnostics/logs/FastArchiveViewer.tsx +36 -8
  23. package/src/diagnostics/logs/IndexedLogs/BufferIndex.ts +461 -0
  24. package/src/diagnostics/logs/IndexedLogs/BufferIndexCPP.cpp +327 -0
  25. package/src/diagnostics/logs/IndexedLogs/BufferIndexCPP.d.ts +18 -0
  26. package/src/diagnostics/logs/IndexedLogs/BufferIndexCPP.js +1 -0
  27. package/src/diagnostics/logs/IndexedLogs/BufferIndexHelpers.ts +140 -0
  28. package/src/diagnostics/logs/IndexedLogs/BufferIndexLogsOptimizationConstants.ts +22 -0
  29. package/src/diagnostics/logs/IndexedLogs/BufferIndexWAT.wat +1145 -0
  30. package/src/diagnostics/logs/IndexedLogs/BufferIndexWAT.wat.d.ts +178 -0
  31. package/src/diagnostics/logs/IndexedLogs/BufferListStreamer.ts +206 -0
  32. package/src/diagnostics/logs/IndexedLogs/BufferUnitIndex.ts +719 -0
  33. package/src/diagnostics/logs/IndexedLogs/BufferUnitSet.ts +146 -0
  34. package/src/diagnostics/logs/IndexedLogs/FilePathSelector.tsx +408 -0
  35. package/src/diagnostics/logs/IndexedLogs/FindProgressTracker.ts +45 -0
  36. package/src/diagnostics/logs/IndexedLogs/IndexedLogs.ts +598 -0
  37. package/src/diagnostics/logs/IndexedLogs/LogStreamer.ts +47 -0
  38. package/src/diagnostics/logs/IndexedLogs/LogViewer3.tsx +702 -0
  39. package/src/diagnostics/logs/IndexedLogs/TimeFileTree.ts +236 -0
  40. package/src/diagnostics/logs/IndexedLogs/binding.gyp +23 -0
  41. package/src/diagnostics/logs/IndexedLogs/moveIndexLogsToPublic.ts +221 -0
  42. package/src/diagnostics/logs/IndexedLogs/moveLogsEntry.ts +10 -0
  43. package/src/diagnostics/logs/LogViewer2.tsx +120 -55
  44. package/src/diagnostics/logs/TimeRangeSelector.tsx +5 -2
  45. package/src/diagnostics/logs/diskLogger.ts +32 -48
  46. package/src/diagnostics/logs/errorNotifications/ErrorNotificationController.ts +3 -2
  47. package/src/diagnostics/logs/errorNotifications/errorDigests.tsx +1 -0
  48. package/src/diagnostics/logs/lifeCycleAnalysis/LifeCyclePages.tsx +150 -0
  49. package/src/diagnostics/logs/lifeCycleAnalysis/lifeCycles.tsx +132 -15
  50. package/src/diagnostics/logs/lifeCycleAnalysis/test.ts +180 -0
  51. package/src/diagnostics/logs/lifeCycleAnalysis/test.wat +106 -0
  52. package/src/diagnostics/logs/lifeCycleAnalysis/test.wat.d.ts +2 -0
  53. package/src/diagnostics/logs/lifeCycleAnalysis/testHoist.ts +5 -0
  54. package/src/diagnostics/logs/logViewerExtractField.ts +2 -3
  55. package/src/diagnostics/managementPages.tsx +10 -0
  56. package/src/diagnostics/trackResources.ts +1 -1
  57. package/src/misc/lz4_wasm_nodejs.d.ts +34 -0
  58. package/src/misc/lz4_wasm_nodejs.js +178 -0
  59. package/src/misc/lz4_wasm_nodejs_bg.js +94 -0
  60. package/src/misc/lz4_wasm_nodejs_bg.wasm +0 -0
  61. package/src/misc/lz4_wasm_nodejs_bg.wasm.d.ts +15 -0
  62. package/src/storage/CompressedStream.ts +13 -0
  63. package/src/storage/LZ4.ts +32 -0
  64. package/src/storage/ZSTD.ts +10 -0
  65. package/src/wat/watCompiler.ts +1716 -0
  66. package/src/wat/watGrammar.pegjs +93 -0
  67. package/src/wat/watHandler.ts +179 -0
  68. package/src/wat/watInstructions.txt +707 -0
  69. package/src/zip.ts +3 -89
  70. package/src/diagnostics/logs/lifeCycleAnalysis/spec.md +0 -125
@@ -0,0 +1,235 @@
1
+ import { SocketFunction } from "socket-function/SocketFunction";
2
+ import { NodeCapabilitiesController } from "../-g-core-values/NodeCapabilities";
3
+ import { qreact } from "../4-dom/qreact";
4
+ import { getSyncedController } from "../library-components/SyncedController";
5
+ import { isManagementUser } from "../-0-hooks/hooks";
6
+ import { assertIsManagementUser } from "./managementPages";
7
+ import { getAllNodeIds } from "../-f-node-discovery/NodeDiscovery";
8
+ import { getNodeIdDomain, getNodeIdLocation, getNodeIdDomainMaybeUndefined } from "socket-function/src/nodeCache";
9
+ import { assertIsNetworkTrusted } from "../-d-trust/NetworkTrust2";
10
+ import { runPromise } from "../functional/runCommand";
11
+ import { errorToUndefinedSilent } from "../errors";
12
+ import { decodeNodeId } from "../-a-auth/certs";
13
+ import { keyBy, keyByArray, timeInSecond, timeoutToUndefined, timeoutToUndefinedSilent } from "socket-function/src/misc";
14
+ import dns from "dns";
15
+ import { isDefined } from "../misc";
16
+ import { getDebuggerUrl } from "./listenOnDebugger";
17
+ import { Button } from "../library-components/Button";
18
+ import { Querysub } from "../4-querysub/QuerysubController";
19
+ import { NodeViewerController } from "./NodeViewer";
20
+ import { showFullscreenModal } from "../5-diagnostics/FullscreenModal";
21
+ import { css } from "../4-dom/css";
22
+ import { formatVeryNiceDateTime } from "socket-function/src/formatting/format";
23
+
24
+ module.hotreload = true;
25
+ module.noserverhotreload = false;
26
+
27
+ type NodeSpecialInfo = {
28
+ nodeId: string;
29
+ machineId: string;
30
+ threadId: string;
31
+ ip: string;
32
+ port: number;
33
+ hostname: string;
34
+ entrypoint: string;
35
+ aliveTime: number;
36
+ inspectURL: string;
37
+ };
38
+
39
+ export class MachineThreadInfo extends qreact.Component<{
40
+ machineId: string;
41
+ threadId?: string;
42
+ onlyShowThread?: boolean;
43
+ }> {
44
+ static renderInProgress = true;
45
+
46
+ handleAttach = async (threadInfo: NodeSpecialInfo | undefined) => {
47
+ Querysub.onCommitFinished(async () => {
48
+ const url = await NodeViewerController.nodes[SocketFunction.getBrowserNodeId()].getExternalInspectURL(threadInfo?.nodeId || "");
49
+ window.open(url, "_blank");
50
+ });
51
+ };
52
+
53
+ handleRefresh = async () => {
54
+ Querysub.onCommitFinished(async () => {
55
+ MachineThreadInfoController(SocketFunction.browserNodeId()).getAllInfo.refresh();
56
+ });
57
+ };
58
+
59
+ showExpandedView = (machineInfo: { hostname: string; ip: string; port: number; } | undefined, threadInfo: NodeSpecialInfo | undefined) => {
60
+ showFullscreenModal({
61
+ content: <div className={css.vbox(16).pad2(24)}>
62
+ <div className={css.vbox(12)}>
63
+ <h2>Machine & Thread Info</h2>
64
+
65
+ <div className={css.vbox(8)}>
66
+ <div className={css.vbox(4)}>
67
+ <strong>Machine ID</strong>
68
+ <div>{this.props.machineId}</div>
69
+ </div>
70
+
71
+ {this.props.threadId && (
72
+ <div className={css.vbox(4)}>
73
+ <strong>Thread ID</strong>
74
+ <div>{this.props.threadId}</div>
75
+ </div>
76
+ )}
77
+
78
+ {machineInfo && (
79
+ <>
80
+ <div className={css.vbox(4)}>
81
+ <strong>Hostname</strong>
82
+ <div>{machineInfo.hostname}</div>
83
+ </div>
84
+
85
+ <div className={css.vbox(4)}>
86
+ <strong>IP:Port</strong>
87
+ <div>{machineInfo.ip}:{machineInfo.port}</div>
88
+ </div>
89
+ </>
90
+ )}
91
+
92
+ {threadInfo && (
93
+ <>
94
+ <div className={css.vbox(4)}>
95
+ <strong>Entry Point</strong>
96
+ <div>{threadInfo.entrypoint}</div>
97
+ </div>
98
+
99
+ <div className={css.vbox(4)}>
100
+ <strong>Start Time</strong>
101
+ <div>{formatVeryNiceDateTime(threadInfo.aliveTime)}</div>
102
+ </div>
103
+
104
+ {threadInfo.inspectURL && (
105
+ <div className={css.vbox(4)}>
106
+ <strong>Inspector URL</strong>
107
+ <a href={threadInfo.inspectURL} target="_blank">{threadInfo.inspectURL}</a>
108
+ </div>
109
+ )}
110
+ </>
111
+ )}
112
+ </div>
113
+
114
+ <div className={css.hbox(12)}>
115
+ <Button onClick={() => this.handleAttach(threadInfo)}>
116
+ Attach Debugger
117
+ </Button>
118
+
119
+ <Button onClick={this.handleRefresh}>
120
+ Refresh
121
+ </Button>
122
+
123
+ {threadInfo?.nodeId && (
124
+ <Button onClick={() => {
125
+ window.location.hash = `#/diagnostics/logs/${threadInfo.nodeId}`;
126
+ }}>
127
+ View Logs
128
+ </Button>
129
+ )}
130
+ </div>
131
+ </div>
132
+ </div>
133
+ });
134
+ };
135
+
136
+ render() {
137
+ const info = MachineThreadInfoController(SocketFunction.browserNodeId()).getAllInfo();
138
+ // NOTE: Only these fields can be used from the machine info. Otherwise, you're just accessing our random thread. But for these fields, they're the same for everything with the same machine ID. So they're safe to access, even if our thread no longer exists.
139
+ const machineInfo = info?.byMachine.get(this.props.machineId) as undefined | {
140
+ hostname: string;
141
+ ip: string;
142
+ port: number;
143
+ };
144
+ const threadInfo = info?.byThread.get(this.props.threadId || "");
145
+
146
+ return (
147
+ <div
148
+ className={css.button.hbox(4).pad2(4).hsla(0, 0, 100, 0.5)}
149
+ onClick={(e) => {
150
+ e.stopPropagation();
151
+ this.showExpandedView(machineInfo, threadInfo);
152
+ }}
153
+ >
154
+ {!this.props.onlyShowThread && <span title={this.props.machineId}>
155
+ {machineInfo?.hostname || this.props.machineId}
156
+ </span>}
157
+ {!this.props.onlyShowThread && this.props.threadId && " | "}
158
+ {this.props.threadId && <span title={this.props.threadId}>
159
+ {/* TODO: Create a mapping from entrypoints to nice names. The full folder path will likely be too large, but just the filename is too similar (it's server.ts for both the PathValueServer and the application server). */}
160
+ {threadInfo?.entrypoint || this.props.threadId}
161
+ </span>}
162
+ </div>
163
+ );
164
+ }
165
+ }
166
+
167
+ const aliveTime = Date.now();
168
+
169
+ class MachineThreadInfoBase {
170
+ public async getAllInfo(): Promise<{
171
+ byMachine: Map<string, NodeSpecialInfo>;
172
+ byThread: Map<string, NodeSpecialInfo>;
173
+ }> {
174
+ let nodeIds = await getAllNodeIds();
175
+ let specialInfos = await Promise.all(nodeIds.map(async (nodeId): Promise<NodeSpecialInfo | undefined> => {
176
+ const nodeParts = decodeNodeId(nodeId);
177
+ if (!nodeParts) return undefined;
178
+
179
+ const location = getNodeIdLocation(nodeId);
180
+ if (!location) return undefined;
181
+
182
+ const domain = getNodeIdDomainMaybeUndefined(nodeId);
183
+ if (!domain) return undefined;
184
+
185
+ let ip = "";
186
+ const ipResult = await dns.promises.lookup(domain);
187
+ if (ipResult) {
188
+ ip = ipResult.address;
189
+ }
190
+
191
+ const nodeInfo = await MachineThreadInfoController(nodeId).getNodeInfo.promise();
192
+
193
+ return {
194
+ nodeId: nodeId,
195
+ machineId: nodeParts.machineId,
196
+ threadId: nodeParts.threadId,
197
+ ip: ip,
198
+ port: location.port,
199
+ hostname: nodeInfo?.hostname || "",
200
+ entrypoint: nodeInfo?.entrypoint || "",
201
+ aliveTime: nodeInfo?.aliveTime || 0,
202
+ inspectURL: nodeInfo?.inspectURL || "",
203
+ };
204
+ }).map(x => timeoutToUndefinedSilent(timeInSecond * 5, x)));
205
+
206
+ const validInfos = specialInfos.filter(isDefined);
207
+
208
+ return {
209
+ byMachine: keyBy(validInfos, info => info.machineId),
210
+ byThread: keyBy(validInfos, info => info.threadId),
211
+ };
212
+ }
213
+
214
+ // This function is only intended to be called between nodes
215
+ public async getNodeInfo() {
216
+ const hostname = await errorToUndefinedSilent(runPromise("hostname"));
217
+ return {
218
+ hostname: hostname?.trim() || "",
219
+ entrypoint: process.argv[1] || "",
220
+ aliveTime: aliveTime,
221
+ inspectURL: await getDebuggerUrl(),
222
+ };
223
+ }
224
+
225
+ }
226
+ const MachineThreadInfoController = getSyncedController(SocketFunction.register(
227
+ "MachineThreadInfoController-019c88cb-a16f-7219-9a5e-08919bd1475b",
228
+ new MachineThreadInfoBase(),
229
+ () => ({
230
+ getAllInfo: { hooks: [assertIsManagementUser] },
231
+ getNodeInfo: { hooks: [assertIsNetworkTrusted] },
232
+ }),
233
+ () => ({
234
+ })
235
+ ));
@@ -22,7 +22,7 @@ import { encodeFormattedSelector, errorMessage, formatValue, toSpaceCase } from
22
22
  import { ValueAuditController } from "../../src/5-diagnostics/memoryValueAudit";
23
23
  import { getExternalIP } from "../../src/misc/networking";
24
24
  import dns from "dns";
25
- import { getNodeIdDomain, getNodeIdIP } from "socket-function/src/nodeCache";
25
+ import { getNodeIdDomain, getNodeIdDomainMaybeUndefined, getNodeIdIP } from "socket-function/src/nodeCache";
26
26
  import ws from "ws";
27
27
  import https from "https";
28
28
  import debugbreak from "debugbreak";
@@ -344,7 +344,8 @@ class NodeViewerControllerBase {
344
344
  }
345
345
 
346
346
  public async getNodeIP(nodeId: string) {
347
- let domain = nodeId.split(":")[0];
347
+ let domain = getNodeIdDomainMaybeUndefined(nodeId);
348
+ if (!domain) return undefined;
348
349
  // Resolve the domain to an IP
349
350
  let result = await dns.promises.lookup(domain);
350
351
  return result.address;
@@ -12,7 +12,7 @@ import fs from "fs";
12
12
  import { MaybePromise, canHaveChildren } from "socket-function/src/types";
13
13
  import { formatNumber, formatTime } from "socket-function/src/formatting/format";
14
14
  import { cache, lazy } from "socket-function/src/caching";
15
- import { getArchives, nestArchives } from "../../-a-archives/archives";
15
+ import { Archives, getArchives, nestArchives } from "../../-a-archives/archives";
16
16
  import { Zip } from "../../zip";
17
17
  import { SocketFunction } from "socket-function/SocketFunction";
18
18
  import { assertIsManagementUser } from "../managementPages";
@@ -26,7 +26,7 @@ import { getPathIndex, getPathStr2 } from "../../path";
26
26
  import { onNextPaint } from "../../functional/onNextPaint";
27
27
  import { getArchivesBackblazePrivateImmutable, getArchivesBackblazePublicImmutable } from "../../-a-archives/archivesBackBlaze";
28
28
  import { httpsRequest } from "socket-function/src/https";
29
- import { getDomain } from "../../config";
29
+ import { getDomain, isPublic } from "../../config";
30
30
  import { getIPDomain } from "../../-e-certs/EdgeCertController";
31
31
  import { getArchivesPrivateFileSystem } from "../../-a-archives/archivesPrivateFileSystem";
32
32
  import { createArchivesLimitedCache } from "../../-a-archives/archivesLimitedCache";
@@ -36,6 +36,7 @@ import { blue, magenta } from "socket-function/src/formatting/logColors";
36
36
  import { FileMetadata, FastArchiveAppendableControllerBase, FastArchiveAppendableController, getFileMetadataHash } from "./FastArchiveController";
37
37
  import { fsExistsAsync } from "../../fs";
38
38
  import { ScanFnc } from "./FastArchiveViewer";
39
+ import { getArchivesLocal } from "../../-a-archives/archivesDisk";
39
40
 
40
41
  // NOTE: In a single command line micro-test it looks like we can write about 40K writes of 500 per once, when using 10X parallel, on a fairly potato server. We should probably batch though, and only do 1X parallel.
41
42
  /*
@@ -59,7 +60,7 @@ const UNCOMPRESSED_LOG_FILE_WARN_THRESHOLD = 1024 * 1024 * 512;
59
60
  const UNCOMPRESSED_LOG_FILE_STOP_THRESHOLD = 1024 * 1024 * 1024 * 2;
60
61
 
61
62
  // Add a large wait, due to daylight saving time, or whatever
62
- const UPLOAD_THRESHOLD = timeInHour * 3;
63
+ const UPLOAD_THRESHOLD = timeInHour * 1.5;
63
64
  const DEAD_TIMEOUT = timeInHour * 6;
64
65
  const DELETE_TIMEOUT = timeInHour * 12;
65
66
 
@@ -68,6 +69,9 @@ const MAX_WORK_PER_PAINT = 40;
68
69
  const ON_DATA_BATCH_COUNT = 1024 * 10;
69
70
  //const ON_DATA_BATCH_COUNT = 1;
70
71
 
72
+ // NOTE: This used to be small to keep writes atomic, but that caused lag — and atomic writes are unnecessary here: we use one file per thread, so this is the only writer.
73
+ const WRITE_CHUNK_SIZE = 1024 * 1024 * 16;
74
+
71
75
 
72
76
 
73
77
  const MAX_LOCAL_CACHED_FILES = 1000 * 10;
@@ -113,6 +117,15 @@ export function getFileTimeStamp(path: string): {
113
117
  dateStr += ":00:00.000Z";
114
118
  }
115
119
  let startTime = new Date(dateStr).getTime();
120
+ if (!startTime) {
121
+ let pathParts = path.split("/");
122
+ let year = parseInt(pathParts[0], 10);
123
+ let month = parseInt(pathParts[1], 10) - 1;
124
+ let day = parseInt(pathParts[2], 10);
125
+ let hour = parseInt(pathParts[3], 10);
126
+ let hourStart = Date.UTC(year, month, day, hour);
127
+ startTime = hourStart;
128
+ }
116
129
  return {
117
130
  startTime,
118
131
  endTime: startTime + timeInHour,
@@ -148,8 +161,13 @@ export class FastArchiveAppendable<Datum> {
148
161
  }
149
162
  }
150
163
 
151
- public getArchives = lazy(() => {
152
- let archives = getArchivesBackblazePrivateImmutable(getDomain());
164
+ public getArchives = cache((forceGetPublic: boolean) => {
165
+ let archives: Archives;
166
+ if (!isPublic() && !forceGetPublic) {
167
+ archives = getArchivesLocal(getDomain());
168
+ } else {
169
+ archives = getArchivesBackblazePrivateImmutable(getDomain());
170
+ }
153
171
  return nestArchives("fast-logs/" + this.rootPath, archives);
154
172
  });
155
173
 
@@ -201,8 +219,7 @@ export class FastArchiveAppendable<Datum> {
201
219
  }
202
220
  }
203
221
 
204
- // NOTE: This is disk writing, which should be fast, but if it's slow we might be able to remove the measureWrap (as technically spending 50% of our time writing to the disk is fine, and won't lag anything).
205
- @measureFnc
222
+ // NOTE: No timing on this as it's just waiting on the disk. It's not actually slowing anything down.
206
223
  public async flushNow(now = Date.now()) {
207
224
 
208
225
  await appendableSerialLock(async () => {
@@ -226,35 +243,35 @@ export class FastArchiveAppendable<Datum> {
226
243
  // File not existing is fine
227
244
  }
228
245
 
229
- // NOTE: We can't use anything but JSON, as we need it to be scannable before decoding it (otherwise scanning takes 100X longer)
230
- let writeData = this.pendingWriteQueue.map(v => {
231
- let buffer = Buffer.from(JSON.stringify(v));
232
- this.escapeDelimitter(buffer);
233
- return buffer;
234
- });
235
- this.pendingWriteQueue = [];
246
+ let chunks: Buffer[][] = [];
247
+ measureBlock(() => {
248
+ // NOTE: We can't use anything but JSON, as we need it to be scannable before decoding it (otherwise scanning takes 100X longer)
249
+ let writeData = this.pendingWriteQueue.map(v => {
250
+ let buffer = Buffer.from(JSON.stringify(v));
251
+ this.escapeDelimitter(buffer);
252
+ return buffer;
253
+ });
254
+ this.pendingWriteQueue = [];
236
255
 
237
- // Apparently, anything more than this and our writes might not be atomic
238
- const WRITE_ATOMIC_LIMIT = 4096;
256
+ // Group lines into WRITE_ATOMIC_LIMIT byte chunks
239
257
 
240
- // Group lines into WRITE_ATOMIC_LIMIT byte chunks
241
- let chunks: Buffer[][] = [];
242
- let currentChunk: Buffer[] = [];
243
- let currentSize = 0;
244
- for (let line of writeData) {
245
- if (currentSize + line.length + objectDelimitterBuffer.length > WRITE_ATOMIC_LIMIT && currentChunk.length > 0) {
258
+ let currentChunk: Buffer[] = [];
259
+ let currentSize = 0;
260
+ for (let line of writeData) {
261
+ if (currentSize + line.length + objectDelimitterBuffer.length > WRITE_CHUNK_SIZE && currentChunk.length > 0) {
262
+ chunks.push(currentChunk);
263
+ currentChunk = [];
264
+ currentSize = 0;
265
+ }
266
+ currentChunk.push(line);
267
+ currentSize += line.length;
268
+ currentChunk.push(objectDelimitterBuffer);
269
+ currentSize += objectDelimitterBuffer.length;
270
+ }
271
+ if (currentChunk.length > 0) {
246
272
  chunks.push(currentChunk);
247
- currentChunk = [];
248
- currentSize = 0;
249
273
  }
250
- currentChunk.push(line);
251
- currentSize += line.length;
252
- currentChunk.push(objectDelimitterBuffer);
253
- currentSize += objectDelimitterBuffer.length;
254
- }
255
- if (currentChunk.length > 0) {
256
- chunks.push(currentChunk);
257
- }
274
+ }, `FastArchiveAppendable|serialize log data`);
258
275
 
259
276
  for (let chunk of chunks) {
260
277
  await fs.promises.appendFile(localCachePath, Buffer.concat(chunk));
@@ -286,8 +303,13 @@ export class FastArchiveAppendable<Datum> {
286
303
  let rootCacheFolder = this.baseGetLocalPathRoot();
287
304
  if (!await fsExistsAsync(rootCacheFolder)) return;
288
305
  console.log(magenta(`Moving old logs to Backblaze from ${rootCacheFolder}`));
306
+ // TODO: Investigate why rootCacheFolder can contain "undefined/undefined/undefined/" — possibly caused by hot reloading?
307
+ if (rootCacheFolder.includes("undefined/undefined/undefined/")) {
308
+ require("debugbreak")(2);
309
+ debugger;
310
+ }
289
311
 
290
- let archives = this.getArchives();
312
+ let archives = this.getArchives(false);
291
313
  async function moveLogsForFolder(threadId: string) {
292
314
  let threadDir = rootCacheFolder + threadId + "/";
293
315
  if (!await fsExistsAsync(threadDir)) return;
@@ -361,11 +383,13 @@ export class FastArchiveAppendable<Datum> {
361
383
  endTime: number;
362
384
  };
363
385
  cacheBust: number;
386
+ forceGetPublic?: boolean;
364
387
  scanFnc?: ScanFnc;
365
388
  getWantData?: (file: FileMetadata) => Promise<((posStart: number, posEnd: number, data: Buffer) => boolean) | undefined>;
366
389
  onData: (datum: Datum[], file: FileMetadata) => void;
367
390
  // Called after onData
368
391
  onStats?: (stats: DatumStats, file: FileMetadata) => void;
392
+ onError?: (error: Error, file: FileMetadata) => void;
369
393
  onFinish?: () => void;
370
394
 
371
395
  onProgress?: (progress: {
@@ -379,10 +403,14 @@ export class FastArchiveAppendable<Datum> {
379
403
  };
380
404
  stopSynchronize: () => void;
381
405
  } | "cancelled"> {
382
- let { onData, onStats } = config;
406
+ let { onData, onStats, onError } = config;
383
407
  // Create unique client sync ID upfront
384
408
  let syncId = nextId();
385
409
 
410
+ console.log(`Synchronizing ${this.rootPath} with syncId ${syncId}`, config);
411
+
412
+ let isPublicValue = isPublic() || config.forceGetPublic || false;
413
+
386
414
  // Register progress callback immediately so we can receive progress during setup
387
415
  // - It also helps with cancellation
388
416
  FastArchiveAppendableControllerBase.progressCallbacks.set(syncId, config.onProgress ?? (() => { }));
@@ -400,6 +428,7 @@ export class FastArchiveAppendable<Datum> {
400
428
 
401
429
  let parametersHash = sha256(JSON.stringify({
402
430
  range: config.range,
431
+ isPublicValue,
403
432
  cacheBust: config.cacheBust,
404
433
  version: 1,
405
434
  })) + ".parameters";
@@ -506,6 +535,7 @@ export class FastArchiveAppendable<Datum> {
506
535
  syncId,
507
536
  range: config.range,
508
537
  rootPath: this.rootPath,
538
+ forceGetPublic: config.forceGetPublic,
509
539
  });
510
540
  syncResult.createTime = Date.now();
511
541
  await findingFiles(syncResult.files.length, syncResult.files.length, true);
@@ -618,18 +648,13 @@ export class FastArchiveAppendable<Datum> {
618
648
  })();
619
649
 
620
650
  // Fetch the file data in a streaming manner
621
- let urlObj = new URL(file.url);
622
- urlObj.searchParams.set("cacheBust", config.cacheBust.toString());
623
- if (file.nodeId && downloadSyncId) {
624
- let args = JSON.parse(urlObj.searchParams.get("args") || "");
625
- args[0] = downloadSyncId;
626
- urlObj.searchParams.set("args", JSON.stringify(args));
627
- }
628
- let url = urlObj.toString();
629
651
 
630
652
  // TODO: Stream from the local cache instead? It should be possible, we can get the total size, and read chunks.
631
653
  let hash = getFileMetadataHash(file) + ".file";
632
654
  let contents = await localCache.get(hash);
655
+ if (contents?.length !== file.size) {
656
+ contents = undefined;
657
+ }
633
658
  if (stoppedPromise.resolveCalled) return;
634
659
  await runInner(async () => {
635
660
  if (contents?.length) {
@@ -639,6 +664,15 @@ export class FastArchiveAppendable<Datum> {
639
664
  await onFetchedData(data);
640
665
  }
641
666
  } else {
667
+ let urlObj = new URL(file.url);
668
+ urlObj.searchParams.set("cacheBust", config.cacheBust.toString());
669
+ urlObj.searchParams.set("isPublic", isPublicValue.toString());
670
+ if (file.nodeId && downloadSyncId) {
671
+ let args = JSON.parse(urlObj.searchParams.get("args") || "");
672
+ args[0] = downloadSyncId;
673
+ urlObj.searchParams.set("args", JSON.stringify(args));
674
+ }
675
+ let url = urlObj.toString();
642
676
  const response = await fetch(url);
643
677
  if (!response.ok) {
644
678
  throw new Error(`Failed to fetch ${url}: ${response.status} ${response.statusText}`);
@@ -693,6 +727,9 @@ export class FastArchiveAppendable<Datum> {
693
727
  } catch (e: any) {
694
728
  console.warn(`Failed to download and parse file ${file.path}:\n${e.stack}`);
695
729
  await failedFiles(1, 1, true);
730
+ if (onError) {
731
+ onError(e, file);
732
+ }
696
733
  }
697
734
  if (stoppedPromise.resolveCalled) return;
698
735
  await fileProgress(1, 0, true);