querysub 0.355.0 → 0.357.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70) hide show
  1. package/.cursorrules +8 -0
  2. package/bin/movelogs.js +4 -0
  3. package/package.json +12 -6
  4. package/scripts/postinstall.js +23 -0
  5. package/src/-a-archives/archiveCache.ts +10 -12
  6. package/src/-a-archives/archives.ts +29 -0
  7. package/src/-a-archives/archivesBackBlaze.ts +60 -12
  8. package/src/-a-archives/archivesDisk.ts +27 -8
  9. package/src/-a-archives/archivesLimitedCache.ts +21 -0
  10. package/src/-a-archives/archivesMemoryCache.ts +350 -0
  11. package/src/-a-archives/archivesPrivateFileSystem.ts +22 -0
  12. package/src/-g-core-values/NodeCapabilities.ts +3 -0
  13. package/src/0-path-value-core/auditLogs.ts +5 -1
  14. package/src/0-path-value-core/pathValueCore.ts +7 -7
  15. package/src/4-dom/qreact.tsx +1 -0
  16. package/src/4-querysub/Querysub.ts +1 -5
  17. package/src/config.ts +5 -0
  18. package/src/diagnostics/MachineThreadInfo.tsx +235 -0
  19. package/src/diagnostics/NodeViewer.tsx +3 -2
  20. package/src/diagnostics/logs/FastArchiveAppendable.ts +79 -42
  21. package/src/diagnostics/logs/FastArchiveController.ts +102 -63
  22. package/src/diagnostics/logs/FastArchiveViewer.tsx +36 -8
  23. package/src/diagnostics/logs/IndexedLogs/BufferIndex.ts +461 -0
  24. package/src/diagnostics/logs/IndexedLogs/BufferIndexCPP.cpp +327 -0
  25. package/src/diagnostics/logs/IndexedLogs/BufferIndexCPP.d.ts +18 -0
  26. package/src/diagnostics/logs/IndexedLogs/BufferIndexCPP.js +1 -0
  27. package/src/diagnostics/logs/IndexedLogs/BufferIndexHelpers.ts +140 -0
  28. package/src/diagnostics/logs/IndexedLogs/BufferIndexLogsOptimizationConstants.ts +22 -0
  29. package/src/diagnostics/logs/IndexedLogs/BufferIndexWAT.wat +1145 -0
  30. package/src/diagnostics/logs/IndexedLogs/BufferIndexWAT.wat.d.ts +178 -0
  31. package/src/diagnostics/logs/IndexedLogs/BufferListStreamer.ts +206 -0
  32. package/src/diagnostics/logs/IndexedLogs/BufferUnitIndex.ts +719 -0
  33. package/src/diagnostics/logs/IndexedLogs/BufferUnitSet.ts +146 -0
  34. package/src/diagnostics/logs/IndexedLogs/FilePathSelector.tsx +408 -0
  35. package/src/diagnostics/logs/IndexedLogs/FindProgressTracker.ts +45 -0
  36. package/src/diagnostics/logs/IndexedLogs/IndexedLogs.ts +598 -0
  37. package/src/diagnostics/logs/IndexedLogs/LogStreamer.ts +47 -0
  38. package/src/diagnostics/logs/IndexedLogs/LogViewer3.tsx +702 -0
  39. package/src/diagnostics/logs/IndexedLogs/TimeFileTree.ts +236 -0
  40. package/src/diagnostics/logs/IndexedLogs/binding.gyp +23 -0
  41. package/src/diagnostics/logs/IndexedLogs/moveIndexLogsToPublic.ts +221 -0
  42. package/src/diagnostics/logs/IndexedLogs/moveLogsEntry.ts +10 -0
  43. package/src/diagnostics/logs/LogViewer2.tsx +120 -55
  44. package/src/diagnostics/logs/TimeRangeSelector.tsx +5 -2
  45. package/src/diagnostics/logs/diskLogger.ts +32 -48
  46. package/src/diagnostics/logs/errorNotifications/ErrorNotificationController.ts +3 -2
  47. package/src/diagnostics/logs/errorNotifications/errorDigests.tsx +1 -0
  48. package/src/diagnostics/logs/lifeCycleAnalysis/LifeCyclePages.tsx +150 -0
  49. package/src/diagnostics/logs/lifeCycleAnalysis/lifeCycles.tsx +133 -0
  50. package/src/diagnostics/logs/lifeCycleAnalysis/test.ts +180 -0
  51. package/src/diagnostics/logs/lifeCycleAnalysis/test.wat +106 -0
  52. package/src/diagnostics/logs/lifeCycleAnalysis/test.wat.d.ts +2 -0
  53. package/src/diagnostics/logs/lifeCycleAnalysis/testHoist.ts +5 -0
  54. package/src/diagnostics/logs/logViewerExtractField.ts +2 -3
  55. package/src/diagnostics/managementPages.tsx +11 -1
  56. package/src/diagnostics/trackResources.ts +1 -1
  57. package/src/misc/lz4_wasm_nodejs.d.ts +34 -0
  58. package/src/misc/lz4_wasm_nodejs.js +178 -0
  59. package/src/misc/lz4_wasm_nodejs_bg.js +94 -0
  60. package/src/misc/lz4_wasm_nodejs_bg.wasm +0 -0
  61. package/src/misc/lz4_wasm_nodejs_bg.wasm.d.ts +15 -0
  62. package/src/storage/CompressedStream.ts +13 -0
  63. package/src/storage/LZ4.ts +32 -0
  64. package/src/storage/ZSTD.ts +10 -0
  65. package/src/wat/watCompiler.ts +1716 -0
  66. package/src/wat/watGrammar.pegjs +93 -0
  67. package/src/wat/watHandler.ts +179 -0
  68. package/src/wat/watInstructions.txt +707 -0
  69. package/src/zip.ts +3 -89
  70. package/src/diagnostics/logs/lifeCycleAnalysis/spec.md +0 -125
package/.cursorrules CHANGED
@@ -4,6 +4,12 @@ Always directly set the state (this.state.x = 1), and never use setState.
4
4
 
5
5
  Unless schema values use atomic or type they will always be returned as a value (as a proxy). Use "in" to check if a value is in a t.lookup.
6
6
 
7
+ Use double quotes.
8
+
9
+ When running a command in the current project, don't "cd" to it. It's redundant, and won't work.
10
+
11
+ Don't use redundant comments. If it's a single line and the function name says the same thing that the comment is going to say, you don't need the comment.
12
+
7
13
  NEVER EVER pass state to qreact.Component as a template parameter. It should ALSO be declared like so (inside the class):
8
14
  state = t.state({
9
15
  num: t.number,
@@ -23,6 +29,8 @@ NEVER EVER pass state to qreact.Component as a template parameter. It should ALS
23
29
 
24
30
  Try not to use "null", and instead always use "undefined".
25
31
 
32
+ Never try to add dynamic pluralization in the UI, just use an s. If you add dynamic pluralization, if the code ever gets localized, all of your changes have to be undone, and you just made localization much harder.
33
+
26
34
  Never use the ternary operator. Instead, do this: "x ? y : z" => "x && y || z".
27
35
 
28
36
  If are inside an async Event Handlers, you need to use... Querysub.onCommitFinished(() => ...). and put the async code inside the callback. Then when you set state, you need to put the state setting code inside of Querysub.commit(() => ...).
@@ -0,0 +1,4 @@
1
+ #!/usr/bin/env node
2
+
3
+ require("typenode");
4
+ require("../src/diagnostics/logs/IndexedLogs/moveLogsEntry");
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "querysub",
3
- "version": "0.355.0",
3
+ "version": "0.357.0",
4
4
  "main": "index.js",
5
5
  "license": "MIT",
6
6
  "note1": "note on node-forge fork, see https://github.com/digitalbazaar/forge/issues/744 for details",
@@ -12,11 +12,12 @@
12
12
  "servershardedtest": "yarn server --authority ./pathremain.json & yarn server --authority ./patha.json & yarn server --authority ./pathb.json & yarn server --authority ./pathc.json & yarn server --authority ./pathd.json",
13
13
  "type": "yarn tsc --noEmit",
14
14
  "depend": "yarn --silent depcruise src --include-only \"^src\" --config --output-type dot | dot -T svg > dependency-graph.svg",
15
- "test": "yarn typenode ./test.ts",
16
- "test3": "yarn typenode ./src/test/test.tsx --local",
17
- "test2": "yarn typenode ./src/4-dom/qreactTest.tsx --local",
15
+ "t": "yarn typenode ./src/diagnostics/logs/lifeCycleAnalysis/testHoist.ts",
16
+ "test-wat": "yarn typenode ./src/wat/watCompiler.ts",
18
17
  "error-watch": "yarn typenode ./src/diagnostics/logs/errorNotifications/errorWatchEntry.tsx",
19
- "error-email": "yarn typenode ./src/diagnostics/logs/errorNotifications/errorDigestEntry.tsx"
18
+ "error-email": "yarn typenode ./src/diagnostics/logs/errorNotifications/errorDigestEntry.tsx",
19
+ "build-native": "cd src/diagnostics/logs/IndexedLogs && node-gyp rebuild",
20
+ "postinstall": "node scripts/postinstall.js"
20
21
  },
21
22
  "bin": {
22
23
  "deploy": "./bin/deploy.js",
@@ -32,12 +33,14 @@
32
33
  "gc-watch-public": "./bin/gc-watch-public.js",
33
34
  "join": "./bin/join.js",
34
35
  "join-public": "./bin/join-public.js",
36
+ "movelogs": "./bin/movelogs.js",
35
37
  "addsuperuser": "./bin/addsuperuser.js",
36
38
  "error-email": "./bin/error-email.js",
37
39
  "error-im": "./bin/error-im.js"
38
40
  },
39
41
  "dependencies": {
40
42
  "@types/fs-ext": "^2.0.3",
43
+ "@types/lz4": "^0.6.4",
41
44
  "@types/node-forge": "^1.3.1",
42
45
  "@types/pako": "^2.0.3",
43
46
  "@types/yargs": "^15.0.5",
@@ -48,9 +51,12 @@
48
51
  "fs-ext": "^2.0.0",
49
52
  "js-sha256": "https://github.com/sliftist/js-sha256",
50
53
  "js-sha512": "^0.9.0",
54
+ "lz4": "^0.6.5",
55
+ "lz4-wasm-nodejs": "^0.9.2",
51
56
  "node-forge": "https://github.com/sliftist/forge#e618181b469b07bdc70b968b0391beb8ef5fecd6",
52
57
  "pako": "^2.1.0",
53
- "socket-function": "^1.0.4",
58
+ "peggy": "^5.0.6",
59
+ "socket-function": "^1.1.2",
54
60
  "terser": "^5.31.0",
55
61
  "typesafecss": "^0.28.0",
56
62
  "yaml": "^2.5.0",
@@ -0,0 +1,23 @@
1
+ #!/usr/bin/env node
2
+
3
+ // Only run build-native if we're being installed as a dependency in someone else's project
4
+ // Not when we're the root project adding new dependencies
5
+ const path = require("path");
6
+ const { execSync } = require("child_process");
7
+
8
+ // Check if we're in node_modules (installed as a dependency)
9
+ const isInstalledAsDependency = __dirname.includes("node_modules");
10
+
11
+ if (isInstalledAsDependency) {
12
+ console.log("Running native build for querysub...");
13
+ try {
14
+ execSync("node-gyp rebuild", {
15
+ cwd: path.join(__dirname, "..", "src", "diagnostics", "logs", "IndexedLogs"),
16
+ stdio: "inherit"
17
+ });
18
+ } catch (err) {
19
+ console.log("Native build failed (this is okay, continuing...)");
20
+ }
21
+ } else {
22
+ console.log("Skipping native build (running in development)");
23
+ }
@@ -1,5 +1,5 @@
1
1
  import { getStorageDir, getSubFolder } from "../fs";
2
- import { Archives } from "./archives";
2
+ import { Archives, createArchivesOverride } from "./archives";
3
3
  import fs from "fs";
4
4
 
5
5
  import { list, nextId, timeInHour, timeInMinute } from "socket-function/src/misc";
@@ -492,9 +492,8 @@ export function wrapArchivesWithCache(archives: Archives): Archives & {
492
492
  function debugGetPath(key: string) {
493
493
  return getArchiveCachePath(archives, key);
494
494
  }
495
- return {
495
+ return createArchivesOverride(archives, {
496
496
  [cacheArchivesSymbol]: true,
497
- getDebugName: () => archives.getDebugName(),
498
497
  debugGetPath,
499
498
 
500
499
  get: async (fileName: string, config) => {
@@ -530,8 +529,13 @@ export function wrapArchivesWithCache(archives: Archives): Archives & {
530
529
  let metrics = await getDiskMetrics();
531
530
  const tempPath = cacheArchives2 + nextId() + TEMP_SUFFIX;
532
531
  await fs.promises.writeFile(tempPath, data);
533
- await metrics.addCacheFile(archives, fileName, tempPath);
534
532
  await archives.set(fileName, data);
533
+ await metrics.addCacheFile(archives, fileName, tempPath);
534
+ },
535
+ append: async (fileName: string, data: Buffer) => {
536
+ let metrics = await getDiskMetrics();
537
+ await archives.append(fileName, data);
538
+ await metrics.delCacheFile(archives, fileName);
535
539
  },
536
540
  setLargeFile,
537
541
  del: async (fileName: string) => {
@@ -539,10 +543,6 @@ export function wrapArchivesWithCache(archives: Archives): Archives & {
539
543
  await metrics.delCacheFile(archives, fileName);
540
544
  await archives.del(fileName);
541
545
  },
542
- getInfo: (fileName: string) => archives.getInfo(fileName),
543
- find: async (prefix: string, config) => archives.find(prefix, config),
544
- findInfo: async (prefix: string, config) => archives.findInfo(prefix, config),
545
- enableLogging: () => archives.enableLogging(),
546
546
  move: async (config) => {
547
547
  // Delete the file from ourselves, even if it is a self move, as not having
548
548
  // values is in the cache is the safest thing, but having extra values
@@ -556,11 +556,9 @@ export function wrapArchivesWithCache(archives: Archives): Archives & {
556
556
  }
557
557
  await archives.move(config);
558
558
  },
559
- copy: config => archives.copy(config),
560
- assertPathValid: (path: string) => archives.assertPathValid(path),
561
559
  getBaseArchives: () => archives.getBaseArchives?.() ?? ({ archives: archives, parentPath: "" }),
562
560
 
563
- async lockRegion(code) {
561
+ async lockRegion<T>(code: (fncs: LockFncs) => Promise<T>) {
564
562
  let locked: string[] = [];
565
563
  let fncs = {
566
564
  getPathAndLock: createGetPathAndLock(locked),
@@ -576,7 +574,7 @@ export function wrapArchivesWithCache(archives: Archives): Archives & {
576
574
  }
577
575
  }
578
576
  },
579
- };
577
+ });
580
578
  }
581
579
 
582
580
 
@@ -21,11 +21,14 @@ export interface Archives {
21
21
  set(path: string, data: Buffer): Promise<void>;
22
22
  del(path: string): Promise<void>;
23
23
 
24
+ append(path: string, data: Buffer): Promise<void>;
25
+
24
26
  getInfo(path: string): Promise<{ writeTime: number; size: number; } | undefined>;
25
27
 
26
28
  setLargeFile(config: { path: string; getNextData(): Promise<Buffer | undefined>; }): Promise<void>;
27
29
 
28
30
  // For example findFileNames("ips/")
31
+ /** Returns full paths. Returned paths WILL NOT end with / */
29
32
  find(
30
33
  prefix: string,
31
34
  config?: {
@@ -76,6 +79,31 @@ export interface Archives {
76
79
  }>;
77
80
  }
78
81
 
82
+ export function createArchivesOverride<T extends Partial<Archives>>(
83
+ baseArchives: Archives,
84
+ overrideArchives: T
85
+ ): T & Archives {
86
+ return {
87
+ getDebugName: () => baseArchives.getDebugName(),
88
+ get: (fileName: string, config) => baseArchives.get(fileName, config),
89
+ set: (fileName: string, data: Buffer) => baseArchives.set(fileName, data),
90
+ append: (fileName: string, data: Buffer) => baseArchives.append(fileName, data),
91
+ del: (fileName: string) => baseArchives.del(fileName),
92
+ getInfo: (fileName: string) => baseArchives.getInfo(fileName),
93
+ setLargeFile: (config) => baseArchives.setLargeFile(config),
94
+ find: async (prefix: string, config) => baseArchives.find(prefix, config),
95
+ findInfo: async (prefix: string, config) => baseArchives.findInfo(prefix, config),
96
+ enableLogging: () => baseArchives.enableLogging(),
97
+ move: (config) => baseArchives.move(config),
98
+ copy: (config) => baseArchives.copy(config),
99
+ assertPathValid: (path: string) => baseArchives.assertPathValid(path),
100
+ getBaseArchives: () => baseArchives.getBaseArchives?.() ?? ({ archives: baseArchives, parentPath: "" }),
101
+ getURL: baseArchives.getURL ? (filePath: string) => baseArchives.getURL!(filePath) : undefined,
102
+ getDownloadAuthorization: baseArchives.getDownloadAuthorization ? (config: { validDurationInSeconds: number }) => baseArchives.getDownloadAuthorization!(config) : undefined,
103
+ ...overrideArchives,
104
+ };
105
+ }
106
+
79
107
  export function nestArchives(path: string, archives: Archives): Archives {
80
108
  if (!path) return archives;
81
109
  if (!path.endsWith("/")) {
@@ -92,6 +120,7 @@ export function nestArchives(path: string, archives: Archives): Archives {
92
120
  get: (fileName: string, config) => archives.get(path + stripFilePrefix(fileName), config),
93
121
  setLargeFile: (config) => archives.setLargeFile({ ...config, path: path + stripFilePrefix(config.path) }),
94
122
  set: (fileName: string, data: Buffer) => archives.set(path + stripFilePrefix(fileName), data),
123
+ append: (fileName: string, data: Buffer) => archives.append(path + stripFilePrefix(fileName), data),
95
124
  del: (fileName: string) => archives.del(path + stripFilePrefix(fileName)),
96
125
  getInfo: (fileName: string) => archives.getInfo(path + stripFilePrefix(fileName)),
97
126
  find: async (prefix: string, config) => archives.find(path + prefix, config).then(x => x.map(x => x.slice(path.length))),
@@ -238,7 +238,7 @@ const getAPI = lazy(async () => {
238
238
  };
239
239
  action: string;
240
240
  uploadTimestamp: number;
241
- }>("b2_get_file_info", "POST");
241
+ }>("b2_get_file_info", "POST", "noAccountId");
242
242
 
243
243
  const listFileNames = createB2Function<{
244
244
  bucketId: string;
@@ -594,10 +594,41 @@ export class ArchivesBackblaze {
594
594
  };
595
595
  setTimeout(downloadPoll, 5000);
596
596
  let result = await this.apiRetryLogic(async (api) => {
597
+ let range = config?.range;
598
+ if (range) {
599
+ let fileInfo = await this.getInfo(fileName);
600
+ if (!fileInfo) throw new Error(`File ${fileName} not found`);
601
+ let rangeStart = range.start;
602
+ let rangeEnd = Math.min(range.end, fileInfo.size);
603
+ // NOTE: I think if we request nothing, it confuses Backblaze and ends up giving us the entire file.
604
+ if (rangeEnd <= rangeStart) return Buffer.alloc(0);
605
+ let result = await api.downloadFileByName({
606
+ bucketName: this.bucketName,
607
+ fileName,
608
+ range: { start: rangeStart, end: rangeEnd },
609
+ });
610
+ if (result.length !== rangeEnd - rangeStart) {
611
+ let afterLength = await this.getInfo(fileName);
612
+ if (afterLength && afterLength.size >= fileInfo.size) {
613
+ console.error(`Backblaze range download return the correct number of bytes. Tried to get ${rangeStart}-${rangeEnd}, but received ${rangeStart}-${rangeStart + result.length}. For file: ${fileName}`);
614
+ // I'm not sure if it's a bug where we get extra data if we try to read beyond the end of the file, or if the bug is due to some kind of lag that will resolve itself if we wait a little bit.
615
+ setTimeout(async () => {
616
+ let resultAgain = await api.downloadFileByName({
617
+ bucketName: this.bucketName,
618
+ fileName,
619
+ range: { start: rangeStart, end: rangeEnd },
620
+ });
621
+ devDebugbreak();
622
+ let didResultFixItSelf = resultAgain.length === rangeEnd - rangeStart;
623
+
624
+ console.log({ didResultFixItSelf }, resultAgain);
625
+ }, timeInMinute * 2);
626
+ }
627
+ }
628
+ }
597
629
  return await api.downloadFileByName({
598
630
  bucketName: this.bucketName,
599
631
  fileName,
600
- range: config?.range
601
632
  });
602
633
  });
603
634
  let timeStr = formatTime(Date.now() - time);
@@ -631,6 +662,14 @@ export class ArchivesBackblaze {
631
662
  }
632
663
 
633
664
  }
665
+ public async append(fileName: string, data: Buffer): Promise<void> {
666
+ throw new Error(`ArchivesBackblaze does not support append. Use set instead.`);
667
+ // this.log(`backblaze append (${formatNumber(data.length)}B) ${fileName}`);
668
+ // // Backblaze doesn't have native append, so we need to get, concatenate, and set
669
+ // let existing = await this.get(fileName);
670
+ // let newData = existing ? Buffer.concat([existing, data]) : data;
671
+ // await this.set(fileName, newData);
672
+ }
634
673
  public async del(fileName: string): Promise<void> {
635
674
  this.log(`backblaze delete ${fileName}`);
636
675
  try {
@@ -796,17 +835,26 @@ export class ArchivesBackblaze {
796
835
 
797
836
  public async getInfo(fileName: string): Promise<{ writeTime: number; size: number; } | undefined> {
798
837
  return await this.apiRetryLogic(async (api) => {
799
- let info = await api.listFileNames({ bucketId: this.bucketId, prefix: fileName, });
800
- let file = info.files.find(x => x.fileName === fileName);
801
- if (!file) {
802
- this.log(`Backblaze file not exists ${fileName}`);
803
- return undefined;
838
+ try {
839
+ // NOTE: Apparently, there's no other way to do this, as the file name does not equal the file ID, and get file info requires the file ID.
840
+ let info = await api.listFileNames({ bucketId: this.bucketId, prefix: fileName, maxFileCount: 1 });
841
+ let file = info.files.find(x => x.fileName === fileName && x.action === "upload");
842
+ if (!file) {
843
+ this.log(`Backblaze file not exists ${fileName}`);
844
+ return undefined;
845
+ }
846
+ this.log(`Backblaze file exists ${fileName}`);
847
+ return {
848
+ writeTime: file.uploadTimestamp,
849
+ size: file.contentLength,
850
+ };
851
+ } catch (e: any) {
852
+ if (e.stack.includes(`file_not_found`)) {
853
+ this.log(`Backblaze file not exists ${fileName}`);
854
+ return undefined;
855
+ }
856
+ throw e;
804
857
  }
805
- this.log(`Backblaze file exists ${fileName}`);
806
- return {
807
- writeTime: file.uploadTimestamp,
808
- size: file.contentLength,
809
- };
810
858
  });
811
859
  }
812
860
 
@@ -65,9 +65,13 @@ class ArchivesDisk {
65
65
  // Don't create the drive (and also only add up to the last path, via slicing (0, i)
66
66
  for (let i = 1; i < fileNameParts.length; i++) {
67
67
  let dir = this.LOCAL_ARCHIVE_FOLDER + fileNameParts.slice(0, i).join("/");
68
- if (!await fsExistsAsync(dir)) {
69
- await fs.promises.mkdir(dir);
70
- }
68
+ try {
69
+ if (!await fsExistsAsync(dir)) {
70
+ await fs.promises.mkdir(dir, {
71
+ recursive: true,
72
+ });
73
+ }
74
+ } catch { }
71
75
  }
72
76
  }
73
77
 
@@ -85,6 +89,19 @@ class ArchivesDisk {
85
89
  await fs.promises.writeFile(this.LOCAL_ARCHIVE_FOLDER + fileName, data);
86
90
  }
87
91
  @measureFnc
92
+ public async append(fileName: string, data: Buffer): Promise<void> {
93
+ await this.init();
94
+
95
+ this.log(blue(`Appending to file ${fileName} += ${data.length} bytes`));
96
+ if (storageDisabled) return;
97
+ fileName = escapeFileName(fileName);
98
+ await this.simulateLag();
99
+
100
+ await this.ensureDirsExist(fileName);
101
+
102
+ await fs.promises.appendFile(this.LOCAL_ARCHIVE_FOLDER + fileName, data);
103
+ }
104
+ @measureFnc
88
105
  public async del(fileName: string): Promise<void> {
89
106
  await this.init();
90
107
  this.log(blue(`Deleting file ${fileName}`));
@@ -93,7 +110,7 @@ class ArchivesDisk {
93
110
  await this.simulateLag();
94
111
  if (!await fsExistsAsync(this.LOCAL_ARCHIVE_FOLDER + fileName)) return;
95
112
  try {
96
- await fs.promises.unlink(this.LOCAL_ARCHIVE_FOLDER + fileName);
113
+ await fs.promises.rm(this.LOCAL_ARCHIVE_FOLDER + fileName, { recursive: true });
97
114
  } catch { }
98
115
  let dir = fileName.replaceAll("\\", "/").split("/").slice(0, -1).join("/");
99
116
  await this.gcDir(dir);
@@ -191,7 +208,8 @@ class ArchivesDisk {
191
208
  throw new Error(`File modified changed while reading file ${fileName}`);
192
209
  }
193
210
 
194
- return buffer.slice(0, read.bytesRead);
211
+ // Make sure we only take as much data as we want in case we are given extra data.
212
+ return buffer.slice(0, end - start);
195
213
  } catch (e: any) {
196
214
  if (e.code === "EMFILE") {
197
215
  if (retryCount > 0) {
@@ -221,7 +239,7 @@ class ArchivesDisk {
221
239
  await this.simulateLag();
222
240
  let fileNames: string[] = [];
223
241
  let folderNames: string[] = [];
224
- async function readDir(dir: string) {
242
+ async function readDir(dir: string, depthToRead: number) {
225
243
  if (!await fsExistsAsync(dir)) {
226
244
  return;
227
245
  }
@@ -236,7 +254,7 @@ class ArchivesDisk {
236
254
  try {
237
255
  if (fileObj.isDirectory()) {
238
256
  folderNames.push(dir + fileName);
239
- await readDir(dir + fileName + "/");
257
+ await readDir(dir + fileName + "/", depthToRead - 1);
240
258
  } else {
241
259
  fileNames.push(dir + fileName);
242
260
  }
@@ -250,7 +268,8 @@ class ArchivesDisk {
250
268
  if (pathParts.length > 1) {
251
269
  rootDir = pathParts.slice(0, -1).join("/") + "/";
252
270
  }
253
- await readDir(this.LOCAL_ARCHIVE_FOLDER + rootDir);
271
+ // We don't know if the prefix they gave us is the prefix of folders or if it is a folder itself. So we have to cut off the end and then read to a depth of 2 to make this work.
272
+ await readDir(this.LOCAL_ARCHIVE_FOLDER + rootDir, config?.shallow ? 2 : Number.MAX_SAFE_INTEGER);
254
273
 
255
274
  let results = config?.type === "folders" ? folderNames : fileNames;
256
275
 
@@ -153,6 +153,27 @@ class ArchivesLimitedCache {
153
153
  this.triggerIndexFlush();
154
154
  }
155
155
 
156
+ public async append(path: string, data: Buffer): Promise<void> {
157
+ await this.ensureInitialized();
158
+ await this.baseArchives.append(path, data);
159
+ // Update the cache info - increment size and update times
160
+ const info = this.cache.get(path);
161
+ const now = Date.now();
162
+ if (info) {
163
+ info.size += data.length;
164
+ info.writeTime = now;
165
+ info.accessTime = now;
166
+ } else {
167
+ // File didn't exist in cache, add it
168
+ this.cache.set(path, {
169
+ writeTime: now,
170
+ accessTime: now,
171
+ size: data.length,
172
+ });
173
+ }
174
+ this.triggerIndexFlush();
175
+ }
176
+
156
177
  public async del(path: string): Promise<void> {
157
178
  await this.ensureInitialized();
158
179
  await this.baseArchives.del(path);