cojson 0.13.11 → 0.13.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (72):
  1. package/.turbo/turbo-build.log +1 -1
  2. package/CHANGELOG.md +6 -0
  3. package/dist/CoValuesStore.d.ts +3 -1
  4. package/dist/CoValuesStore.d.ts.map +1 -1
  5. package/dist/CoValuesStore.js +7 -6
  6. package/dist/CoValuesStore.js.map +1 -1
  7. package/dist/PeerState.d.ts +0 -2
  8. package/dist/PeerState.d.ts.map +1 -1
  9. package/dist/PeerState.js +0 -1
  10. package/dist/PeerState.js.map +1 -1
  11. package/dist/SyncStateManager.js +2 -2
  12. package/dist/SyncStateManager.js.map +1 -1
  13. package/dist/coValueCore.js +2 -2
  14. package/dist/coValueCore.js.map +1 -1
  15. package/dist/coValueState.d.ts +21 -46
  16. package/dist/coValueState.d.ts.map +1 -1
  17. package/dist/coValueState.js +174 -246
  18. package/dist/coValueState.js.map +1 -1
  19. package/dist/coValues/group.js +2 -2
  20. package/dist/coValues/group.js.map +1 -1
  21. package/dist/exports.d.ts +2 -4
  22. package/dist/exports.d.ts.map +1 -1
  23. package/dist/exports.js +1 -2
  24. package/dist/exports.js.map +1 -1
  25. package/dist/localNode.d.ts.map +1 -1
  26. package/dist/localNode.js +20 -16
  27. package/dist/localNode.js.map +1 -1
  28. package/dist/sync.d.ts.map +1 -1
  29. package/dist/sync.js +32 -41
  30. package/dist/sync.js.map +1 -1
  31. package/dist/tests/coValueState.test.js +57 -104
  32. package/dist/tests/coValueState.test.js.map +1 -1
  33. package/dist/tests/group.test.js +1 -2
  34. package/dist/tests/group.test.js.map +1 -1
  35. package/dist/tests/messagesTestUtils.d.ts +4 -1
  36. package/dist/tests/messagesTestUtils.d.ts.map +1 -1
  37. package/dist/tests/messagesTestUtils.js +10 -0
  38. package/dist/tests/messagesTestUtils.js.map +1 -1
  39. package/dist/tests/sync.peerReconciliation.test.js +8 -8
  40. package/dist/tests/sync.peerReconciliation.test.js.map +1 -1
  41. package/dist/tests/sync.test.js +6 -4
  42. package/dist/tests/sync.test.js.map +1 -1
  43. package/package.json +1 -1
  44. package/src/CoValuesStore.ts +9 -6
  45. package/src/PeerState.ts +0 -2
  46. package/src/SyncStateManager.ts +2 -2
  47. package/src/coValueCore.ts +2 -2
  48. package/src/coValueState.ts +197 -317
  49. package/src/coValues/group.ts +2 -2
  50. package/src/exports.ts +0 -6
  51. package/src/localNode.ts +30 -21
  52. package/src/sync.ts +35 -43
  53. package/src/tests/coValueState.test.ts +55 -106
  54. package/src/tests/group.test.ts +2 -2
  55. package/src/tests/messagesTestUtils.ts +12 -1
  56. package/src/tests/sync.peerReconciliation.test.ts +8 -8
  57. package/src/tests/sync.test.ts +8 -23
  58. package/dist/storage/FileSystem.d.ts +0 -37
  59. package/dist/storage/FileSystem.d.ts.map +0 -1
  60. package/dist/storage/FileSystem.js +0 -48
  61. package/dist/storage/FileSystem.js.map +0 -1
  62. package/dist/storage/chunksAndKnownStates.d.ts +0 -7
  63. package/dist/storage/chunksAndKnownStates.d.ts.map +0 -1
  64. package/dist/storage/chunksAndKnownStates.js +0 -98
  65. package/dist/storage/chunksAndKnownStates.js.map +0 -1
  66. package/dist/storage/index.d.ts +0 -52
  67. package/dist/storage/index.d.ts.map +0 -1
  68. package/dist/storage/index.js +0 -335
  69. package/dist/storage/index.js.map +0 -1
  70. package/src/storage/FileSystem.ts +0 -113
  71. package/src/storage/chunksAndKnownStates.ts +0 -137
  72. package/src/storage/index.ts +0 -531
@@ -1,137 +0,0 @@
1
- import { MAX_RECOMMENDED_TX_SIZE } from "../coValueCore.js";
2
- import { RawCoID, SessionID } from "../ids.js";
3
- import { getPriorityFromHeader } from "../priority.js";
4
- import { CoValueKnownState, NewContentMessage } from "../sync.js";
5
- import { CoValueChunk } from "./index.js";
6
-
7
- export function contentSinceChunk(
8
- id: RawCoID,
9
- chunk: CoValueChunk,
10
- known?: CoValueKnownState,
11
- ): NewContentMessage[] {
12
- const newContentPieces: NewContentMessage[] = [];
13
-
14
- newContentPieces.push({
15
- id: id,
16
- action: "content",
17
- header: known?.header ? undefined : chunk.header,
18
- new: {},
19
- priority: getPriorityFromHeader(chunk.header),
20
- });
21
-
22
- for (const [sessionID, sessionsEntry] of Object.entries(
23
- chunk.sessionEntries,
24
- )) {
25
- for (const entry of sessionsEntry) {
26
- const knownStart = known?.sessions[sessionID as SessionID] || 0;
27
-
28
- if (entry.after + entry.transactions.length <= knownStart) {
29
- continue;
30
- }
31
-
32
- const actuallyNewTransactions = entry.transactions.slice(
33
- Math.max(0, knownStart - entry.after),
34
- );
35
-
36
- const newAfter =
37
- entry.after +
38
- (actuallyNewTransactions.length - entry.transactions.length);
39
-
40
- let newContentEntry = newContentPieces[0]?.new[sessionID as SessionID];
41
-
42
- if (!newContentEntry) {
43
- newContentEntry = {
44
- after: newAfter,
45
- lastSignature: entry.lastSignature,
46
- newTransactions: actuallyNewTransactions,
47
- };
48
- newContentPieces[0]!.new[sessionID as SessionID] = newContentEntry;
49
- } else {
50
- newContentEntry.newTransactions.push(...actuallyNewTransactions);
51
- newContentEntry.lastSignature = entry.lastSignature;
52
- }
53
- }
54
- }
55
-
56
- return newContentPieces;
57
- }
58
-
59
- export function chunkToKnownState(id: RawCoID, chunk: CoValueChunk) {
60
- const ourKnown: CoValueKnownState = {
61
- id,
62
- header: !!chunk.header,
63
- sessions: {},
64
- };
65
-
66
- for (const [sessionID, sessionEntries] of Object.entries(
67
- chunk.sessionEntries,
68
- )) {
69
- for (const entry of sessionEntries) {
70
- ourKnown.sessions[sessionID as SessionID] =
71
- entry.after + entry.transactions.length;
72
- }
73
- }
74
- return ourKnown;
75
- }
76
-
77
- export function mergeChunks(
78
- chunkA: CoValueChunk,
79
- chunkB: CoValueChunk,
80
- ): "nonContigous" | CoValueChunk {
81
- const header = chunkA.header || chunkB.header;
82
-
83
- const newSessions = { ...chunkA.sessionEntries };
84
- for (const sessionID in chunkB.sessionEntries) {
85
- // figure out if we can merge the chunks
86
- const sessionEntriesA = chunkA.sessionEntries[sessionID];
87
- const sessionEntriesB = chunkB.sessionEntries[sessionID]!;
88
-
89
- if (!sessionEntriesA) {
90
- newSessions[sessionID] = sessionEntriesB;
91
- continue;
92
- }
93
-
94
- const lastEntryOfA = sessionEntriesA[sessionEntriesA.length - 1]!;
95
- const firstEntryOfB = sessionEntriesB[0]!;
96
-
97
- if (
98
- lastEntryOfA.after + lastEntryOfA.transactions.length ===
99
- firstEntryOfB.after
100
- ) {
101
- const newEntries = [];
102
- let bytesSinceLastSignature = 0;
103
- for (const entry of sessionEntriesA.concat(sessionEntriesB)) {
104
- const entryByteLength = entry.transactions.reduce(
105
- (sum, tx) =>
106
- sum +
107
- (tx.privacy === "private"
108
- ? tx.encryptedChanges.length
109
- : tx.changes.length),
110
- 0,
111
- );
112
- if (
113
- newEntries.length === 0 ||
114
- bytesSinceLastSignature + entryByteLength > MAX_RECOMMENDED_TX_SIZE
115
- ) {
116
- newEntries.push({
117
- after: entry.after,
118
- lastSignature: entry.lastSignature,
119
- transactions: entry.transactions,
120
- });
121
- bytesSinceLastSignature = 0;
122
- } else {
123
- const lastNewEntry = newEntries[newEntries.length - 1]!;
124
- lastNewEntry.transactions.push(...entry.transactions);
125
- lastNewEntry.lastSignature = entry.lastSignature;
126
-
127
- bytesSinceLastSignature += entry.transactions.length;
128
- }
129
- }
130
- newSessions[sessionID] = newEntries;
131
- } else {
132
- return "nonContigous" as const;
133
- }
134
- }
135
-
136
- return { header, sessionEntries: newSessions };
137
- }
@@ -1,531 +0,0 @@
1
- import { CoID, RawCoValue } from "../coValue.js";
2
- import { CoValueHeader, Transaction } from "../coValueCore.js";
3
- import { Signature } from "../crypto/crypto.js";
4
- import { RawCoID } from "../ids.js";
5
- import { logger } from "../logger.js";
6
- import { connectedPeers } from "../streamUtils.js";
7
- import {
8
- CoValueKnownState,
9
- IncomingSyncStream,
10
- NewContentMessage,
11
- OutgoingSyncQueue,
12
- Peer,
13
- } from "../sync.js";
14
- import {
15
- BlockFilename,
16
- FileSystem,
17
- WalEntry,
18
- WalFilename,
19
- readChunk,
20
- readHeader,
21
- textDecoder,
22
- writeBlock,
23
- writeToWal,
24
- } from "./FileSystem.js";
25
- import {
26
- chunkToKnownState,
27
- contentSinceChunk,
28
- mergeChunks,
29
- } from "./chunksAndKnownStates.js";
30
- export type { BlockFilename, WalFilename } from "./FileSystem.js";
31
-
32
- const MAX_N_LEVELS = 3;
33
-
34
- export type CoValueChunk = {
35
- header?: CoValueHeader;
36
- sessionEntries: {
37
- [sessionID: string]: {
38
- after: number;
39
- lastSignature: Signature;
40
- transactions: Transaction[];
41
- }[];
42
- };
43
- };
44
-
45
- export class LSMStorage<WH, RH, FS extends FileSystem<WH, RH>> {
46
- currentWal: WH | undefined;
47
- coValues: {
48
- [id: RawCoID]: CoValueChunk | undefined;
49
- };
50
- fileCache: string[] | undefined;
51
- headerCache = new Map<
52
- BlockFilename,
53
- { [id: RawCoID]: { start: number; length: number } }
54
- >();
55
- blockFileHandles = new Map<
56
- BlockFilename,
57
- Promise<{ handle: RH; size: number }>
58
- >();
59
-
60
- constructor(
61
- public fs: FS,
62
- public fromLocalNode: IncomingSyncStream,
63
- public toLocalNode: OutgoingSyncQueue,
64
- ) {
65
- this.coValues = {};
66
- this.currentWal = undefined;
67
-
68
- let nMsg = 0;
69
-
70
- const processMessages = async () => {
71
- for await (const msg of fromLocalNode) {
72
- try {
73
- if (msg === "Disconnected" || msg === "PingTimeout") {
74
- throw new Error("Unexpected Disconnected message");
75
- }
76
- if (msg.action === "done") {
77
- return;
78
- }
79
-
80
- if (msg.action === "content") {
81
- await this.handleNewContent(msg);
82
- } else if (msg.action === "load" || msg.action === "known") {
83
- await this.sendNewContent(msg.id, msg, undefined);
84
- }
85
- } catch (e) {
86
- logger.error(`Error reading from localNode, handling msg`, {
87
- msg,
88
- err: e,
89
- });
90
- }
91
- nMsg++;
92
- }
93
- };
94
-
95
- processMessages().catch((e) =>
96
- logger.error("Error in processMessages in storage", { err: e }),
97
- );
98
-
99
- setTimeout(
100
- () =>
101
- this.compact().catch((e) => {
102
- logger.error("Error while compacting", { err: e });
103
- }),
104
- 20000,
105
- );
106
- }
107
-
108
- async sendNewContent(
109
- id: RawCoID,
110
- known: CoValueKnownState | undefined,
111
- asDependencyOf: RawCoID | undefined,
112
- ) {
113
- let coValue = this.coValues[id];
114
-
115
- if (!coValue) {
116
- coValue = await this.loadCoValue(id, this.fs);
117
- }
118
-
119
- if (!coValue) {
120
- this.toLocalNode
121
- .push({
122
- id: id,
123
- action: "known",
124
- header: false,
125
- sessions: {},
126
- asDependencyOf,
127
- })
128
- .catch((e) => logger.error("Error while pushing known", { err: e }));
129
-
130
- return;
131
- }
132
-
133
- if (!known?.header && coValue.header?.ruleset.type === "ownedByGroup") {
134
- await this.sendNewContent(
135
- coValue.header.ruleset.group,
136
- undefined,
137
- asDependencyOf || id,
138
- );
139
- } else if (!known?.header && coValue.header?.ruleset.type === "group") {
140
- const dependedOnAccountsAndGroups = new Set();
141
- for (const session of Object.values(coValue.sessionEntries)) {
142
- for (const entry of session) {
143
- for (const tx of entry.transactions) {
144
- if (tx.privacy === "trusting") {
145
- const parsedChanges = JSON.parse(tx.changes);
146
- for (const change of parsedChanges) {
147
- if (change.op === "set" && change.key.startsWith("co_")) {
148
- dependedOnAccountsAndGroups.add(change.key);
149
- }
150
- if (
151
- change.op === "set" &&
152
- change.key.startsWith("parent_co_")
153
- ) {
154
- dependedOnAccountsAndGroups.add(
155
- change.key.replace("parent_", ""),
156
- );
157
- }
158
- }
159
- }
160
- }
161
- }
162
- }
163
- for (const accountOrGroup of dependedOnAccountsAndGroups) {
164
- await this.sendNewContent(
165
- accountOrGroup as CoID<RawCoValue>,
166
- undefined,
167
- asDependencyOf || id,
168
- );
169
- }
170
- }
171
-
172
- const newContentMessages = contentSinceChunk(id, coValue, known).map(
173
- (message) => ({ ...message, asDependencyOf }),
174
- );
175
-
176
- const ourKnown: CoValueKnownState = chunkToKnownState(id, coValue);
177
-
178
- this.toLocalNode
179
- .push({
180
- action: "known",
181
- ...ourKnown,
182
- asDependencyOf,
183
- })
184
- .catch((e) => logger.error("Error while pushing known", { err: e }));
185
-
186
- for (const message of newContentMessages) {
187
- if (Object.keys(message.new).length === 0) continue;
188
- this.toLocalNode
189
- .push(message)
190
- .catch((e) =>
191
- logger.error("Error while pushing new content", { err: e }),
192
- );
193
- }
194
-
195
- this.coValues[id] = coValue;
196
- }
197
-
198
- async withWAL(handler: (wal: WH) => Promise<void>) {
199
- if (!this.currentWal) {
200
- this.currentWal = await this.fs.createFile(
201
- `wal-${Date.now()}-${Math.random().toString(36).slice(2)}.jsonl`,
202
- );
203
- }
204
- await handler(this.currentWal);
205
- }
206
-
207
- async handleNewContent(newContent: NewContentMessage) {
208
- const coValue = this.coValues[newContent.id];
209
-
210
- const newContentAsChunk: CoValueChunk = {
211
- header: newContent.header,
212
- sessionEntries: Object.fromEntries(
213
- Object.entries(newContent.new).map(([sessionID, newInSession]) => [
214
- sessionID,
215
- [
216
- {
217
- after: newInSession.after,
218
- lastSignature: newInSession.lastSignature,
219
- transactions: newInSession.newTransactions,
220
- },
221
- ],
222
- ]),
223
- ),
224
- };
225
-
226
- if (!coValue) {
227
- if (newContent.header) {
228
- await this.withWAL((wal) =>
229
- writeToWal(wal, this.fs, newContent.id, newContentAsChunk),
230
- );
231
-
232
- this.coValues[newContent.id] = newContentAsChunk;
233
- } else {
234
- logger.warn("Incontiguous incoming update for " + newContent.id);
235
- return;
236
- }
237
- } else {
238
- const merged = mergeChunks(coValue, newContentAsChunk);
239
- if (merged === "nonContigous") {
240
- console.warn(
241
- "Non-contigous new content for " + newContent.id,
242
- Object.entries(coValue.sessionEntries).map(([session, entries]) =>
243
- entries.map((entry) => ({
244
- session: session,
245
- after: entry.after,
246
- length: entry.transactions.length,
247
- })),
248
- ),
249
- Object.entries(newContentAsChunk.sessionEntries).map(
250
- ([session, entries]) =>
251
- entries.map((entry) => ({
252
- session: session,
253
- after: entry.after,
254
- length: entry.transactions.length,
255
- })),
256
- ),
257
- );
258
- } else {
259
- await this.withWAL((wal) =>
260
- writeToWal(wal, this.fs, newContent.id, newContentAsChunk),
261
- );
262
-
263
- this.coValues[newContent.id] = merged;
264
- }
265
- }
266
- }
267
-
268
- async getBlockHandle(
269
- blockFile: BlockFilename,
270
- fs: FS,
271
- ): Promise<{ handle: RH; size: number }> {
272
- if (!this.blockFileHandles.has(blockFile)) {
273
- this.blockFileHandles.set(blockFile, fs.openToRead(blockFile));
274
- }
275
-
276
- return this.blockFileHandles.get(blockFile)!;
277
- }
278
-
279
- async loadCoValue(id: RawCoID, fs: FS): Promise<CoValueChunk | undefined> {
280
- const files = this.fileCache || (await fs.listFiles());
281
- this.fileCache = files;
282
- const blockFiles = (
283
- files.filter((name) => name.startsWith("L")) as BlockFilename[]
284
- ).sort();
285
-
286
- let result;
287
-
288
- for (const blockFile of blockFiles) {
289
- let cachedHeader:
290
- | { [id: RawCoID]: { start: number; length: number } }
291
- | undefined = this.headerCache.get(blockFile);
292
-
293
- const { handle, size } = await this.getBlockHandle(blockFile, fs);
294
-
295
- if (!cachedHeader) {
296
- cachedHeader = {};
297
- const header = await readHeader(blockFile, handle, size, fs);
298
- for (const entry of header) {
299
- cachedHeader[entry.id] = {
300
- start: entry.start,
301
- length: entry.length,
302
- };
303
- }
304
-
305
- this.headerCache.set(blockFile, cachedHeader);
306
- }
307
- const headerEntry = cachedHeader[id];
308
-
309
- if (headerEntry) {
310
- const nextChunk = await readChunk(handle, headerEntry, fs);
311
- if (result) {
312
- const merged = mergeChunks(result, nextChunk);
313
-
314
- if (merged === "nonContigous") {
315
- console.warn(
316
- "Non-contigous chunks while loading " + id,
317
- result,
318
- nextChunk,
319
- );
320
- } else {
321
- result = merged;
322
- }
323
- } else {
324
- result = nextChunk;
325
- }
326
- }
327
-
328
- // await fs.close(handle);
329
- }
330
-
331
- return result;
332
- }
333
-
334
- async compact() {
335
- const fileNames = await this.fs.listFiles();
336
-
337
- const walFiles = fileNames.filter((name) =>
338
- name.startsWith("wal-"),
339
- ) as WalFilename[];
340
- walFiles.sort();
341
-
342
- const coValues = new Map<RawCoID, CoValueChunk>();
343
-
344
- if (walFiles.length === 0) return;
345
-
346
- const oldWal = this.currentWal;
347
- this.currentWal = undefined;
348
-
349
- if (oldWal) {
350
- await this.fs.close(oldWal);
351
- }
352
-
353
- for (const fileName of walFiles) {
354
- const { handle, size }: { handle: RH; size: number } =
355
- await this.fs.openToRead(fileName);
356
- if (size === 0) {
357
- await this.fs.close(handle);
358
- continue;
359
- }
360
- const bytes = await this.fs.read(handle, 0, size);
361
-
362
- const decoded = textDecoder.decode(bytes);
363
- const lines = decoded.split("\n");
364
-
365
- for (const line of lines) {
366
- if (line.length === 0) continue;
367
- const chunk = JSON.parse(line) as WalEntry;
368
-
369
- const existingChunk = coValues.get(chunk.id);
370
-
371
- if (existingChunk) {
372
- const merged = mergeChunks(existingChunk, chunk);
373
- if (merged === "nonContigous") {
374
- console.log(
375
- "Non-contigous chunks in " + chunk.id + ", " + fileName,
376
- existingChunk,
377
- chunk,
378
- );
379
- } else {
380
- coValues.set(chunk.id, merged);
381
- }
382
- } else {
383
- coValues.set(chunk.id, chunk);
384
- }
385
- }
386
-
387
- await this.fs.close(handle);
388
- }
389
-
390
- const highestBlockNumber = fileNames.reduce((acc, name) => {
391
- if (name.startsWith("L" + MAX_N_LEVELS)) {
392
- const num = parseInt(name.split("-")[1]!);
393
- if (num > acc) {
394
- return num;
395
- }
396
- }
397
- return acc;
398
- }, 0);
399
-
400
- await writeBlock(coValues, MAX_N_LEVELS, highestBlockNumber + 1, this.fs);
401
-
402
- for (const walFile of walFiles) {
403
- await this.fs.removeFile(walFile);
404
- }
405
- this.fileCache = undefined;
406
-
407
- const fileNames2 = await this.fs.listFiles();
408
-
409
- const blockFiles = (
410
- fileNames2.filter((name) => name.startsWith("L")) as BlockFilename[]
411
- ).sort();
412
-
413
- const blockFilesByLevelInOrder: {
414
- [level: number]: BlockFilename[];
415
- } = {};
416
-
417
- for (const blockFile of blockFiles) {
418
- const level = parseInt(blockFile.split("-")[0]!.slice(1));
419
- if (!blockFilesByLevelInOrder[level]) {
420
- blockFilesByLevelInOrder[level] = [];
421
- }
422
- blockFilesByLevelInOrder[level]!.push(blockFile);
423
- }
424
-
425
- for (let level = MAX_N_LEVELS; level > 0; level--) {
426
- const nBlocksDesired = Math.pow(2, level);
427
- const blocksInLevel = blockFilesByLevelInOrder[level];
428
-
429
- if (blocksInLevel && blocksInLevel.length > nBlocksDesired) {
430
- const coValues = new Map<RawCoID, CoValueChunk>();
431
-
432
- for (const blockFile of blocksInLevel) {
433
- const { handle, size }: { handle: RH; size: number } =
434
- await this.getBlockHandle(blockFile, this.fs);
435
-
436
- if (size === 0) {
437
- continue;
438
- }
439
- const header = await readHeader(blockFile, handle, size, this.fs);
440
- for (const entry of header) {
441
- const chunk = await readChunk(handle, entry, this.fs);
442
-
443
- const existingChunk = coValues.get(entry.id);
444
-
445
- if (existingChunk) {
446
- const merged = mergeChunks(existingChunk, chunk);
447
- if (merged === "nonContigous") {
448
- console.log(
449
- "Non-contigous chunks in " + entry.id + ", " + blockFile,
450
- existingChunk,
451
- chunk,
452
- );
453
- } else {
454
- coValues.set(entry.id, merged);
455
- }
456
- } else {
457
- coValues.set(entry.id, chunk);
458
- }
459
- }
460
- }
461
-
462
- let levelBelow = blockFilesByLevelInOrder[level - 1];
463
- if (!levelBelow) {
464
- levelBelow = [];
465
- blockFilesByLevelInOrder[level - 1] = levelBelow;
466
- }
467
-
468
- const highestBlockNumberInLevelBelow = levelBelow.reduce(
469
- (acc, name) => {
470
- const num = parseInt(name.split("-")[1]!);
471
- if (num > acc) {
472
- return num;
473
- }
474
- return acc;
475
- },
476
- 0,
477
- );
478
-
479
- const newBlockName = await writeBlock(
480
- coValues,
481
- level - 1,
482
- highestBlockNumberInLevelBelow + 1,
483
- this.fs,
484
- );
485
- levelBelow.push(newBlockName);
486
-
487
- // delete blocks that went into this one
488
- for (const blockFile of blocksInLevel) {
489
- const handle = await this.getBlockHandle(blockFile, this.fs);
490
- await this.fs.close(handle.handle);
491
- await this.fs.removeFile(blockFile);
492
- this.blockFileHandles.delete(blockFile);
493
- }
494
- }
495
- }
496
-
497
- setTimeout(
498
- () =>
499
- this.compact().catch((e) => {
500
- logger.error("Error while compacting", { err: e });
501
- }),
502
- 5000,
503
- );
504
- }
505
-
506
- static asPeer<WH, RH, FS extends FileSystem<WH, RH>>({
507
- fs,
508
- trace,
509
- localNodeName = "local",
510
- }: {
511
- fs: FS;
512
- trace?: boolean;
513
- localNodeName?: string;
514
- }): Peer {
515
- const [localNodeAsPeer, storageAsPeer] = connectedPeers(
516
- localNodeName,
517
- "storage",
518
- {
519
- peer1role: "client",
520
- peer2role: "storage",
521
- trace,
522
- crashOnClose: true,
523
- },
524
- );
525
-
526
- new LSMStorage(fs, localNodeAsPeer.incoming, localNodeAsPeer.outgoing);
527
-
528
- // return { ...storageAsPeer, priority: 200 };
529
- return storageAsPeer;
530
- }
531
- }