querysub 0.357.0 → 0.359.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (25)
  1. package/.cursorrules +1 -0
  2. package/package.json +2 -1
  3. package/src/-a-archives/archivesDisk.ts +24 -6
  4. package/src/-a-archives/archivesMemoryCache.ts +41 -17
  5. package/src/deployManager/components/MachineDetailPage.tsx +45 -4
  6. package/src/deployManager/components/MachinesListPage.tsx +10 -2
  7. package/src/deployManager/components/ServiceDetailPage.tsx +13 -3
  8. package/src/deployManager/components/ServicesListPage.tsx +18 -6
  9. package/src/deployManager/machineApplyMainCode.ts +3 -3
  10. package/src/deployManager/machineSchema.ts +39 -0
  11. package/src/diagnostics/NodeViewer.tsx +2 -1
  12. package/src/diagnostics/logs/IndexedLogs/BufferIndex.ts +124 -123
  13. package/src/diagnostics/logs/IndexedLogs/BufferIndexHelpers.ts +83 -1
  14. package/src/diagnostics/logs/IndexedLogs/BufferListStreamer.ts +2 -0
  15. package/src/diagnostics/logs/IndexedLogs/BufferUnitIndex.ts +21 -24
  16. package/src/diagnostics/logs/IndexedLogs/BufferUnitSet.ts +1 -1
  17. package/src/diagnostics/logs/IndexedLogs/FilePathSelector.tsx +186 -25
  18. package/src/diagnostics/logs/IndexedLogs/IndexedLogs.ts +284 -195
  19. package/src/diagnostics/logs/IndexedLogs/LogViewer3.tsx +312 -108
  20. package/src/diagnostics/logs/IndexedLogs/TimeFileTree.ts +1 -1
  21. package/src/diagnostics/logs/IndexedLogs/moveIndexLogsToPublic.ts +37 -7
  22. package/src/diagnostics/logs/errorNotifications2/errorNotifications2.ts +0 -0
  23. package/src/diagnostics/logs/lifeCycleAnalysis/lifeCycles.tsx +62 -35
  24. package/src/diagnostics/logs/lifeCycleAnalysis/test.ts +0 -180
  25. package/src/functional/limitProcessing.ts +39 -0
package/src/diagnostics/logs/IndexedLogs/BufferIndex.ts
@@ -9,7 +9,7 @@ import { cacheArgsEqual, cacheLimited, cacheWeak, lazy } from "socket-function/s
  import { measureBlock, measureFnc, measureWrap } from "socket-function/src/profiling/measure";
  import { formatNumber, formatTime } from "socket-function/src/formatting/format";
  import { magenta, yellow } from "socket-function/src/formatting/logColors";
- import { Unit, getAllUnits, Reader, createMatchesPattern, createOffsetReader, splitOnWildcard, SearchParams } from "./BufferIndexHelpers";
+ import { Unit, getAllUnits, Reader, createMatchesPattern, createOffsetReader, splitOnWildcard, SearchParams, IndexedLogResults } from "./BufferIndexHelpers";
  import { UnitSet } from "./BufferUnitSet";
  import { BufferUnitIndex } from "./BufferUnitIndex";
  import { BufferListStreamer } from "./BufferListStreamer";
@@ -192,8 +192,8 @@ export class BufferIndex {
  };
  }

- // Rebuild index completely from data reader
- private static async rebuildIndexFromData(dataReader: Reader): Promise<Buffer> {
+ @measureFnc
+ private static async rebuildLocalIndexFromData(dataReader: Reader): Promise<Buffer> {
  let data = await dataReader.read(0, await dataReader.getLength());
  let dataBlocks = await BufferIndex.decodeAllBlocked(data);

@@ -210,9 +210,11 @@ export class BufferIndex {
  return Buffer.concat(parts);
  }

+ @measureFnc
  public static async fixPartialIndex(config: {
  index: Buffer;
  dataReader: Reader;
+ results: IndexedLogResults;
  }): Promise<Buffer> {
  let type = config.index[0];
  // Only streaming indexes can be partial — bulk indexes are written all at once.
@@ -223,7 +225,7 @@ export class BufferIndex {
  let decoded = decodeTypeHeader(index);
  if (!decoded) {
  // Index header is corrupted, regenerate complete index from data
- return await BufferIndex.rebuildIndexFromData(dataReader);
+ return await BufferIndex.rebuildLocalIndexFromData(dataReader);
  }
  try {
  let { headerContent, data: indexData } = decoded;
@@ -270,6 +272,7 @@ export class BufferIndex {
  let indexStreamer = indexStreamerType.createStreamer();
  // Continue from where the index left off
  for (let block of blocks) {
+ await config.results.limitGroup?.wait();
  try {
  let decompressedBlock = CompressedStream.decode(block);
  let blockBuffers = await blockStreamerType.getAllBlocks(decompressedBlock);
@@ -287,45 +290,120 @@ export class BufferIndex {
  } catch (e) {
  console.error(`Error fixing partial index. This SHOULDN'T error, but... we will just rebuild from the data and it should work: ${e}`);
  }
- return await BufferIndex.rebuildIndexFromData(dataReader);
+ return await BufferIndex.rebuildLocalIndexFromData(dataReader);
  }

  @measureFnc
- public static async find(config: {
+ private static async findLocal(config: {
  index: Buffer;
  dataReader: Reader;
-
  params: SearchParams;
-
  keepIterating: () => boolean;
  onResult: (match: Buffer) => void;
- }): Promise<{
- blocksChecked: number;
- blocksCheckedCompressedSize: number;
- blocksCheckedDecompressedSize: number;
+ results: IndexedLogResults;
+ allSearchUnits: Set<Unit>;
+ matchesPattern: (buffer: Buffer) => boolean;
+ }) {
+ let { index, dataReader, params, keepIterating, onResult, results, allSearchUnits, matchesPattern } = config;
+ // NOTE: Ironically, the stream type is actually the least efficient to read. Because there's no central index, it means that even if we do try to lazily read it, every single read call would likely have to scan through most of the file to find that specific block. However, this is fine. The stream type is mostly just used for pending files, which shouldn't be that large. And we still do only read the blocks when we have at least one match
+
+ let decoded = decodeTypeHeader(index);
+ if (!decoded) {
+ // Index is too corrupted, return empty results
+ return;
+ }
+ let { data: rawIndexData } = decoded;
+ let indexEntries = await indexStreamerType.getAllBlocks(rawIndexData);
+
+ const getDataBlocks = lazy(async (): Promise<Buffer[]> => {
+ return await measureBlock(async () => {
+ // NOTE: While this is somewhat inefficient, the fact that all these blocks are individually compressed makes this reasonable fast.
+ let length = await dataReader.getLength();
+ let dataIn = await dataReader.read(0, length);
+ let decoded = decodeTypeHeader(dataIn);
+ if (!decoded) return [];
+ let { data } = decoded;
+ return await dataStreamerType.getAllBlocks(data);
+ }, `BufferIndex|readLocalBlocks`);
+ });

- blocksWithErrors: string[];
+ let matchCount = 0;
+ let blockSearchTimeStart = Date.now();

+ results.totalBlockCount += indexEntries.length;
+ results.localBlockCount += indexEntries.length;

- isPending: boolean;
- indexCount: number;
- indexSize: number;
- totalBlockCount: number;
+ // Iterate newest-first so the caller gets the most recent matches first.
+ for (let i = indexEntries.length - 1; i >= 0; i--) {
+ if (matchCount >= params.limit || !config.keepIterating()) break;
+ await config.results.limitGroup?.wait();
+ const blockIndex = i;

- blockSearchTime: number;
- }> {
- let { index, dataReader, params } = config;
- let emptyResult = { blocksChecked: 0, blocksCheckedCompressedSize: 0, blocksCheckedDecompressedSize: 0, blocksWithErrors: [], isPending: false, indexCount: 0, totalBlockCount: 0, blockSearchTime: 0, indexSize: 0 };
+ // Each index entry is a UnitSet
+ let blockIndexData = indexEntries[i];

- // Handle empty or too small index buffer - rebuild from data
- if (index.length === 0) {
- index = await BufferIndex.rebuildIndexFromData(dataReader);
- if (index.length === 0) {
- return emptyResult;
+ // Check if this block contains all search units
+ let hasAllUnits = true;
+ for (let unit of allSearchUnits) {
+ if (!UnitSet.has(blockIndexData, unit)) {
+ hasAllUnits = false;
+ break;
+ }
+ }
+
+ results.localIndexesSearched += 1;
+ results.localIndexSize += blockIndexData.length;
+
+ if (!hasAllUnits) continue;
+
+ const dataBlocks = await getDataBlocks();
+
+ // Load and scan this block
+ try {
+ let blockCompressed = dataBlocks[blockIndex];
+ if (!blockCompressed) {
+ throw new Error(`Not enough blocks in data, have ${dataBlocks.length}, expected ${blockIndex + 1}`);
+ }
+ results.blocksCheckedCompressedSize += blockCompressed.length;
+ let blockData = CompressedStream.decode(blockCompressed);
+ results.blocksCheckedDecompressedSize += blockData.length;
+ let buffers = await blockStreamerType.getAllBlocks(blockData);
+ results.blockCheckedCount++;
+ results.localBlockCheckedCount++;
+
+ // Scan all buffers in this block
+ for (let bufferIndex = buffers.length - 1; bufferIndex >= 0; bufferIndex--) {
+ if (matchCount >= params.limit || !config.keepIterating()) break;
+ await config.results.limitGroup?.wait();
+
+ let buffer = buffers[bufferIndex];
+ if (matchesPattern(buffer)) {
+ config.onResult(buffer);
+ matchCount++;
+ }
+ }
+ } catch (e: any) {
+ // Skip corrupted block
+ results.blockErrors.push(`(for block ${blockIndex + 1} / ${dataBlocks.length}) ${String(e?.stack || e)}`);
+ console.warn(`Error decompressing block for search: ${e.stack || e}`);
+ continue;
  }
  }
+ results.blockSearchTime += Date.now() - blockSearchTimeStart;
+ }
+
+ @measureFnc
+ public static async find(config: {
+ index: Buffer;
+ dataReader: Reader;
+
+ params: SearchParams;

- let candidateCount = 0;
+ keepIterating: () => boolean;
+ onResult: (match: Buffer) => void;
+ results: IndexedLogResults;
+ }): Promise<void> {
+ let { index, dataReader, params, results } = config;

  // Create the pattern matcher once with pre-calculated segments
  const matchesPattern = createMatchesPattern(params.findBuffer, !!params.disableWildCards);
@@ -342,118 +420,41 @@ export class BufferIndex {
  }
  if (allSearchUnits.size === 0) {
  // Search pattern too short to use index, return empty results
- return emptyResult;
+ return;
  }
  }

  let type = index[0];
+ if (!type) {
+ type = await measureBlock(async () => {
+ return (await dataReader.read(0, 1))?.[0];
+ }, `BufferIndex|recover type from data`);
+ }

  if (type === STREAM_TYPE) {
-
- // Fix partial index before processing
- index = await BufferIndex.fixPartialIndex({ index, dataReader });
-
- // NOTE: Ironically, the stream type is actually the least efficient to read. Because there's no central index, it means that even if we do try to lazily read it, every single read call would likely have to scan through most of the file to find that specific block. However, this is fine. The stream type is mostly just used for pending files, which shouldn't be that large. And we still do only read the blocks when we have at least one match
-
- let decoded = decodeTypeHeader(index);
- if (!decoded) {
- // Index is too corrupted, return empty results
- return emptyResult;
- }
- let { data: rawIndexData } = decoded;
- let indexEntries = await indexStreamerType.getAllBlocks(rawIndexData);
-
- const getDataBlocks = lazy(async (): Promise<Buffer[]> => {
- // NOTE: While this is somewhat inefficient, the fact that all these blocks are individually compressed makes this reasonable fast.
- let length = await dataReader.getLength();
- let dataIn = await dataReader.read(0, length);
- let decoded = decodeTypeHeader(dataIn);
- if (!decoded) return [];
- let { data } = decoded;
- return await dataStreamerType.getAllBlocks(data);
- });
-
- let matchCount = 0;
- let blocksChecked = 0;
- let blocksCheckedCompressedSize = 0;
- let blocksCheckedDecompressedSize = 0;
- let blockSearchTime = 0;
- let blocksWithErrors: string[] = [];
-
- // Iterate newest-first so the caller gets the most recent matches first.
- for (let i = indexEntries.length - 1; i >= 0; i--) {
- if (matchCount >= params.limit || !config.keepIterating()) break;
- const blockIndex = i;
-
- // Each index entry is a UnitSet
- let blockIndexData = indexEntries[i];
-
- // Check if this block contains all search units
- let hasAllUnits = true;
- for (let unit of allSearchUnits) {
- if (!UnitSet.has(blockIndexData, unit)) {
- hasAllUnits = false;
- break;
- }
+ // Handle empty or too small index buffer - rebuild from data
+ if (index.length === 0) {
+ index = await BufferIndex.rebuildLocalIndexFromData(dataReader);
+ if (index.length === 0) {
+ return;
  }
+ }

- if (!hasAllUnits) continue;
-
- const dataBlocks = await getDataBlocks();
+ // Fix partial index before processing
+ index = await BufferIndex.fixPartialIndex({ index, dataReader, results });

- let blockSearchTimeStart = Date.now();
- // Load and scan this block
- try {
- let blockCompressed = dataBlocks[blockIndex];
- if (!blockCompressed) {
- throw new Error(`Not enough blocks in data, have ${dataBlocks.length}, expected ${blockIndex + 1}`);
- }
- blocksCheckedCompressedSize += blockCompressed.length;
- let blockData = CompressedStream.decode(blockCompressed);
- blocksCheckedDecompressedSize += blockData.length;
- let buffers = await blockStreamerType.getAllBlocks(blockData);
- blocksChecked++;
-
- // Scan all buffers in this block
- for (let bufferIndex = buffers.length - 1; bufferIndex >= 0; bufferIndex--) {
- if (matchCount >= params.limit || !config.keepIterating()) break;
-
- candidateCount++;
- let buffer = buffers[bufferIndex];
- if (matchesPattern(buffer)) {
- config.onResult(buffer);
- matchCount++;
- }
- }
- } catch (e: any) {
- // Skip corrupted block
- blocksWithErrors.push(`(for block ${blockIndex + 1} / ${dataBlocks.length}) ${String(e?.stack || e)}`);
- console.warn(`Error decompressing block for search: ${e.stack || e}`);
- continue;
- }
- blockSearchTime += Date.now() - blockSearchTimeStart;
- }
- let indexSize = indexEntries.map(x => x.length).reduce((a, b) => a + b, 0);
- return { blocksChecked, blocksCheckedCompressedSize, blocksCheckedDecompressedSize, blocksWithErrors, isPending: true, indexCount: indexEntries.length, indexSize, totalBlockCount: indexEntries.length, blockSearchTime };
+ await BufferIndex.findLocal({ index, dataReader, params, keepIterating: config.keepIterating, onResult: config.onResult, results, allSearchUnits, matchesPattern });
  } else if (type === BULK_TYPE) {
- let results = await BufferUnitIndex.find({
+ await BufferUnitIndex.find({
  params,
  index,
  reader: dataReader,
  keepIterating: config.keepIterating,
  onResult: config.onResult,
+ results,
  });
- return {
- blocksChecked: results.blocksChecked,
- blocksCheckedCompressedSize: results.blocksCheckedCompressedSize,
- blocksCheckedDecompressedSize: results.blocksCheckedDecompressedSize,
- totalBlockCount: results.totalBlockCount,
- blockSearchTime: results.blockSearchTime,
- blocksWithErrors: results.blocksWithErrors,
- isPending: false,
- indexCount: 1,
- indexSize: index.length,
- };
+ results.remoteIndexesSearched += 1;
+ results.remoteIndexSize += index.length;
  } else {
  throw new Error(`Unknown type in index file: ${type}`);
  }
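
Not part of the diff: a minimal calling sketch of the reworked find() above, which now returns void and accumulates its statistics onto a shared IndexedLogResults object instead of returning them. It assumes the createEmptyIndexedLogResults helper added in BufferIndexHelpers.ts (next file) and an index/dataReader/params triple obtained elsewhere; only fields visible in this diff are used.

    import { BufferIndex } from "./BufferIndex";
    import { Reader, SearchParams, createEmptyIndexedLogResults } from "./BufferIndexHelpers";

    // Sketch: search one index/data pair and collect matches.
    async function searchOneFile(index: Buffer, dataReader: Reader, params: SearchParams): Promise<Buffer[]> {
        const results = createEmptyIndexedLogResults();
        const matches: Buffer[] = [];
        await BufferIndex.find({
            index,
            dataReader,
            params,
            // A caller can stop the scan early by setting results.cancel.
            keepIterating: () => !results.cancel,
            onResult: match => {
                matches.push(match);
                results.matchCount++;
            },
            results,
        });
        // Stats such as results.blockCheckedCount, results.blockErrors, and
        // results.blockSearchTime are now read off the shared results object.
        return matches;
    }
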
package/src/diagnostics/logs/IndexedLogs/BufferIndexHelpers.ts
@@ -4,6 +4,9 @@ import { formatNumber, formatPercent } from "socket-function/src/formatting/form
  import { red } from "socket-function/src/formatting/logColors";
  import { MaybePromise } from "socket-function/src/types";
  import { TimeFilePathWithSize } from "./IndexedLogs";
+ import { LimitGroup } from "../../../functional/limitProcessing";
+
+ export const INDEX_EXTENSION = ".index";

  export type SearchParams = {
  startTime: number;
@@ -41,7 +44,6 @@ export class BufferReader implements Reader {
  }


- export const WILD_CARD_BYTE = 42;

  export function createOffsetReader(reader: Reader, offset: number): Reader {
  return {
@@ -81,6 +83,86 @@ export const getAllUnits = measureWrap(function getAllUnits(config: {
  return result;
  });

+
+ export type IndexedLogResults = {
+ matchCount: number;
+
+ // NOTE: A lot of the metadata won't be accurate if multiple searches happen at the same time. However, for debugging, it should be sufficient.
+ reads: {
+ cached: boolean;
+ remote: boolean;
+ count: number;
+ size: number;
+
+ totalSize: number;
+ totalCount: number;
+ }[];
+
+ totalLocalFiles: number;
+ totalBackblazeFiles: number;
+ localFilesSearched: number;
+ backblazeFilesSearched: number;
+
+ totalBlockCount: number;
+ blockCheckedCount: number;
+
+ remoteBlockCount: number;
+ localBlockCount: number;
+ remoteBlockCheckedCount: number;
+ localBlockCheckedCount: number;
+
+ blocksCheckedCompressedSize: number;
+ blocksCheckedDecompressedSize: number;
+ blockErrors: string[];
+
+ fileErrors: { error: string; path: string; }[];
+
+ remoteIndexesSearched: number;
+ remoteIndexSize: number;
+ localIndexesSearched: number;
+ localIndexSize: number;
+
+ timeToFirstMatch: number;
+ fileFindTime: number;
+ indexSearchTime: number;
+ blockSearchTime: number;
+
+ totalSearchTime: number;
+
+ cancel?: boolean;
+ limitGroup?: LimitGroup;
+ };
+ export function createEmptyIndexedLogResults(): IndexedLogResults {
+ return {
+ matchCount: 0, reads: [], totalLocalFiles: 0, totalBackblazeFiles: 0, localFilesSearched: 0, backblazeFilesSearched: 0, totalBlockCount: 0, blockCheckedCount: 0, remoteBlockCount: 0, localBlockCount: 0, remoteBlockCheckedCount: 0, localBlockCheckedCount: 0, blocksCheckedCompressedSize: 0, blocksCheckedDecompressedSize: 0, blockErrors: [], fileErrors: [], remoteIndexesSearched: 0, remoteIndexSize: 0, localIndexesSearched: 0, localIndexSize: 0, timeToFirstMatch: 0, fileFindTime: 0, indexSearchTime: 0, blockSearchTime: 0, totalSearchTime: 0, cancel: undefined, limitGroup: undefined,
+ };
+ }
+
+ export function addReadToResults(results: IndexedLogResults, read: {
+ cached: boolean;
+ remote: boolean;
+ count: number;
+ size: number;
+ }) {
+ let existingRead = results.reads.find(r => r.cached === read.cached && r.remote === read.remote);
+ if (!existingRead) {
+ existingRead = {
+ cached: read.cached,
+ remote: read.remote,
+ count: 0,
+ size: 0,
+ totalSize: 0,
+ totalCount: 0,
+ };
+ results.reads.push(existingRead);
+ }
+ existingRead.count += read.count;
+ existingRead.size += read.size;
+ return existingRead;
+ }
+
+
+ export const WILD_CARD_BYTE = 42;
  export function splitOnWildcard(buffer: Buffer): Buffer[] {
  let segments: Buffer[] = [];
  let start = 0;
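
Not part of the diff: a short usage sketch of the new addReadToResults helper above, with made-up byte counts. Reads are bucketed by their (cached, remote) combination and summed within each bucket.

    import { createEmptyIndexedLogResults, addReadToResults } from "./BufferIndexHelpers";

    const results = createEmptyIndexedLogResults();

    // Two uncached remote reads fall into the same bucket; count and size are summed.
    addReadToResults(results, { cached: false, remote: true, count: 1, size: 4096 });
    addReadToResults(results, { cached: false, remote: true, count: 1, size: 1024 });

    // A cached local read starts a separate bucket.
    addReadToResults(results, { cached: true, remote: false, count: 1, size: 512 });

    // results.reads now holds two entries:
    //   { cached: false, remote: true,  count: 2, size: 5120, totalSize: 0, totalCount: 0 }
    //   { cached: true,  remote: false, count: 1, size: 512,  totalSize: 0, totalCount: 0 }
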
package/src/diagnostics/logs/IndexedLogs/BufferListStreamer.ts
@@ -1,3 +1,4 @@
+ import { measureFnc } from "socket-function/src/profiling/measure";
  import { BufferReader, Reader } from "./BufferIndexHelpers";

  // Suffix layout (12 bytes), appended after each block's data:
@@ -120,6 +121,7 @@ export class BufferListStreamer {
  }
  }

+ @measureFnc
  public async getAllBlocks(buffer: Buffer): Promise<Buffer[]> {
  let blocks: Buffer[] = [];
  for await (let blockInfo of this.iterateBlocksReverse(buffer)) {
package/src/diagnostics/logs/IndexedLogs/BufferUnitIndex.ts
@@ -3,7 +3,7 @@
  import { LZ4 } from "../../../storage/LZ4";
  import { measureBlock, measureFnc } from "socket-function/src/profiling/measure";
  import { Zip } from "../../../zip";
- import { BufferReader, Reader, WILD_CARD_BYTE, createMatchesPattern, SearchParams } from "./BufferIndexHelpers";
+ import { BufferReader, Reader, WILD_CARD_BYTE, createMatchesPattern, SearchParams, IndexedLogResults } from "./BufferIndexHelpers";
  import { formatNumber, formatPercent } from "socket-function/src/formatting/format";
  import { lazy } from "socket-function/src/caching";
  import { list, sort } from "socket-function/src/misc";
@@ -458,15 +458,9 @@ export class BufferUnitIndex {
  onResult: (match: Buffer) => void;
  index: Buffer;
  reader: Reader;
- }): Promise<{
- blocksChecked: number;
- blocksCheckedCompressedSize: number;
- blocksCheckedDecompressedSize: number;
- totalBlockCount: number;
- blockSearchTime: number;
- blocksWithErrors: string[];
- }> {
- const { params, index, reader, keepIterating } = config;
+ results: IndexedLogResults;
+ }): Promise<void> {
+ const { params, index, reader, keepIterating, results } = config;

  // Split on wildcards if present
  function splitOnWildcard(buffer: Buffer): Buffer[] {
@@ -502,8 +496,8 @@
  intersectionSet = new Set([...intersectionSet].filter(x => currentSet.has(x)));
  }

- // Search first first, as moveLogsToPublic should have made it so this is the newest.
- return Array.from(intersectionSet).sort((a, b) => a - b);
+
+ return intersectionSet;
  }, `findCandidateBlocks`);


@@ -511,18 +505,18 @@

  const headerBuffer = await reader.read(0, 8);
  const blockCount = headerBuffer.readUInt32LE(4);
- let blocksCheckedCompressedSize = 0;
- let blocksCheckedDecompressedSize = 0;
+
+ results.totalBlockCount += blockCount;
+ results.remoteBlockCount += blockCount;

  // Read blocks and search for matches
- let blocksChecked = 0;
- let blocksWithErrors: string[] = [];
  let blockSearchTimeStart = Date.now();
  await measureBlock(async () => {
  let matchCount = 0;
  let matchCounts = list(blockCount).fill(0);

  const searchBlock = async (blockIndex: number) => {
+ if (!candidateBlocks.has(blockIndex)) return;
  // This is kind of a weird thing. Basically, because we search in parallel, we might search out of order. So we can only look at the counts before or at us, as if we match a whole bunch after us, but we should still keep going as our matches are going to take precedence.
  let stopIterating = () => {
  let countBefore = 0;
@@ -543,9 +537,10 @@
  }
  let obj = await this.getBlock(reader, blockIndex, debugOffsets);

- blocksCheckedDecompressedSize += obj.block.length;
- blocksCheckedCompressedSize += obj.compressedSize;
- blocksChecked++;
+ results.blocksCheckedDecompressedSize += obj.block.length;
+ results.blocksCheckedCompressedSize += obj.compressedSize;
+ results.blockCheckedCount++;
+ results.remoteBlockCheckedCount++;

  let blockReader = new BufferReader(obj.block);
  let bufferCount = await this.getBufferCountFromBlock(blockReader);
@@ -553,6 +548,7 @@
  // Check each buffer for a match
  for (let i = 0; i < bufferCount; i++) {
  if (stopIterating()) break;
+ await results.limitGroup?.wait();

  const buffer = await this.getBufferFromBlock(blockReader, i);
  if (matchesPattern(buffer)) {
@@ -563,7 +559,7 @@
  }
  } catch (e: any) {
  console.warn(`Error decompressing/searching block: ${e}`);
- blocksWithErrors.push(`(for block ${blockIndex + 1} / ${blockCount}, ${debugOffsets.startOffset}-${debugOffsets.endOffset}) ${String(e?.stack || e)}`);
+ results.blockErrors.push(`(for block ${blockIndex + 1} / ${blockCount}, ${debugOffsets.startOffset}-${debugOffsets.endOffset}) ${String(e?.stack || e)}`);
  }
  };

@@ -571,12 +567,13 @@
  { parallelCount: BufferUnitIndexParallelSearchCount },
  searchBlock
  );
- await Promise.all(list(blockCount).map(runSearchBlock));
+ // Search first first, as moveLogsToPublic should have made it so this is the newest.
+ let searchOrder = Array.from(candidateBlocks);
+ sort(searchOrder, x => x);
+ await Promise.all(searchOrder.map(runSearchBlock));

  }, `searchBlocks`);
- let blockSearchTime = Date.now() - blockSearchTimeStart;
-
- return { blocksChecked, blocksCheckedCompressedSize, blocksCheckedDecompressedSize, totalBlockCount: blockCount, blockSearchTime, blocksWithErrors };
+ results.blockSearchTime += Date.now() - blockSearchTimeStart;
  }

  private static findBlocks(config: {
package/src/diagnostics/logs/IndexedLogs/BufferUnitSet.ts
@@ -9,7 +9,7 @@ export class UnitSet {

  @measureFnc
  static encode(blocks: Buffer[][]): Buffer {
- const MAX_FILL_RATIO = 0.65;
+ const MAX_FILL_RATIO = 0.35;
  // First pass: count total units
  let totalUnits = 0;
  let totalUnits = 0;