cojson 0.7.18 → 0.7.26

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,4 +1,3 @@
- import { Effect, Either, Queue, Stream, SynchronizedRef } from "effect";
  import { RawCoID } from "../ids.js";
  import { CoValueHeader, Transaction } from "../coValueCore.js";
  import { Signature } from "../crypto/crypto.js";
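
The dropped `effect` import is the theme of the entire diff: every `Effect.gen`/`yield*` pipeline in this file is rewritten below as plain `async`/`await`, and `Either`-style results give way to sentinel values and thrown errors. A minimal sketch of the before/after pattern (the `load` callback is a hypothetical stand-in, not code from the package):

```ts
import { Effect } from "effect";

// Before: control flow expressed as an Effect generator, run by the Effect runtime.
const before = (load: () => Effect.Effect<string, Error>) =>
    Effect.gen(function* () {
        const value = yield* load(); // sequences the effect
        return value.length;
    });

// After: the same sequencing with native promises.
const after = async (load: () => Promise<string>) => {
    const value = await load();
    return value.length;
};
```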
@@ -18,7 +17,6 @@ import {
  } from "./chunksAndKnownStates.js";
  import {
  BlockFilename,
- FSErr,
  FileSystem,
  WalEntry,
  WalFilename,
@@ -28,7 +26,9 @@ import {
  writeBlock,
  writeToWal,
  } from "./FileSystem.js";
- export type { FSErr, BlockFilename, WalFilename } from "./FileSystem.js";
+ export type { BlockFilename, WalFilename } from "./FileSystem.js";
+
+ const MAX_N_LEVELS = 3;

  export type CoValueChunk = {
  header?: CoValueHeader;
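
The new `MAX_N_LEVELS` constant drives the leveled compaction introduced further down: WAL contents are always flushed into blocks at level 3, and each level is folded into the level below once it holds more than 2^level blocks. Judging by how `compact` and `loadCoValue` parse names (`name.startsWith("L")`, `split("-")`), block filenames encode the level and a running block number as `L<level>-<number>…`; the exact suffix is not visible in this diff. A sketch of that naming scheme under those assumptions:

```ts
// Assumed block-filename shape, inferred from the parsing in compact():
// "L" + level, "-", monotonically increasing block number.
function parseBlockFilename(name: string): { level: number; blockNumber: number } {
    const [levelPart, numberPart] = name.split("-");
    return {
        level: parseInt(levelPart!.slice(1)), // "L3-12..." -> 3
        blockNumber: parseInt(numberPart!), // "L3-12..." -> 12
    };
}

// A level may hold 2^level blocks before it is compacted into the level
// below, so lower levels contain fewer but larger blocks.
const maxBlocksForLevel = (level: number) => Math.pow(2, level);
```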
@@ -42,410 +42,511 @@ export type CoValueChunk = {
  };

  export class LSMStorage<WH, RH, FS extends FileSystem<WH, RH>> {
- currentWal: SynchronizedRef.SynchronizedRef<WH | undefined>;
- coValues: SynchronizedRef.SynchronizedRef<{
+ currentWal: WH | undefined;
+ coValues: {
  [id: RawCoID]: CoValueChunk | undefined;
- }>;
+ };
  fileCache: string[] | undefined;
  headerCache = new Map<
  BlockFilename,
  { [id: RawCoID]: { start: number; length: number } }
  >();
+ blockFileHandles = new Map<
+ BlockFilename,
+ Promise<{ handle: RH; size: number }>
+ >();

  constructor(
  public fs: FS,
  public fromLocalNode: IncomingSyncStream,
  public toLocalNode: OutgoingSyncQueue,
  ) {
- this.coValues = SynchronizedRef.unsafeMake({});
- this.currentWal = SynchronizedRef.unsafeMake<WH | undefined>(undefined);
+ this.coValues = {};
+ this.currentWal = undefined;
+
+ let nMsg = 0;

- void this.fromLocalNode.pipe(
- Stream.runForEach((msg) =>
- Effect.gen(this, function* () {
+ const processMessages = async () => {
+ for await (const msg of fromLocalNode) {
+ console.log("Storage msg start", nMsg);
+ try {
+ if (msg === "Disconnected" || msg === "PingTimeout") {
+ throw new Error("Unexpected Disconnected message");
+ }
  if (msg.action === "done") {
  return;
  }

  if (msg.action === "content") {
- yield* this.handleNewContent(msg);
+ await this.handleNewContent(msg);
  } else {
- yield* this.sendNewContent(msg.id, msg, undefined);
+ await this.sendNewContent(msg.id, msg, undefined);
  }
- }),
- ),
- Effect.runPromise,
- );
+ } catch (e) {
+ console.error(
+ new Error(
+ `Error reading from localNode, handling msg\n\n${JSON.stringify(
+ msg,
+ (k, v) =>
+ k === "changes" || k === "encryptedChanges"
+ ? v.slice(0, 20) + "..."
+ : v,
+ )}`,
+ { cause: e },
+ ),
+ );
+ }
+ console.log("Storage msg end", nMsg);
+ nMsg++;
+ }
+ };

- setTimeout(() => this.compact(), 20000);
- }
+ processMessages().catch((e) =>
+ console.error("Error in processMessages in storage", e),
+ );

- sendNewContent(
- id: RawCoID,
- known: CoValueKnownState | undefined,
- asDependencyOf: RawCoID | undefined,
- ): Effect.Effect<void, FSErr> {
- return SynchronizedRef.updateEffect(this.coValues, (coValues) =>
- this.sendNewContentInner(coValues, id, known, asDependencyOf),
+ setTimeout(
+ () =>
+ this.compact().catch((e) => {
+ console.error("Error while compacting", e);
+ }),
+ 20000,
  );
  }

- private sendNewContentInner(
- coValues: { [id: `co_z${string}`]: CoValueChunk | undefined },
+ async sendNewContent(
  id: RawCoID,
  known: CoValueKnownState | undefined,
  asDependencyOf: RawCoID | undefined,
- ): Effect.Effect<
- { [id: `co_z${string}`]: CoValueChunk | undefined },
- FSErr,
- never
- > {
- return Effect.gen(this, function* () {
- let coValue = coValues[id];
-
- if (!coValue) {
- coValue = yield* this.loadCoValue(id, this.fs);
- }
+ ) {
+ let coValue = this.coValues[id];

- if (!coValue) {
- yield* Queue.offer(this.toLocalNode, {
+ if (!coValue) {
+ coValue = await this.loadCoValue(id, this.fs);
+ }
+
+ if (!coValue) {
+ this.toLocalNode
+ .push({
  id: id,
  action: "known",
  header: false,
  sessions: {},
  asDependencyOf,
- });
-
- return coValues;
- }
-
- if (
- !known?.header &&
- coValue.header?.ruleset.type === "ownedByGroup"
- ) {
- coValues = yield* this.sendNewContentInner(
- coValues,
- coValue.header.ruleset.group,
- undefined,
- asDependencyOf || id,
- );
- } else if (
- !known?.header &&
- coValue.header?.ruleset.type === "group"
- ) {
- const dependedOnAccounts = new Set();
- for (const session of Object.values(coValue.sessionEntries)) {
- for (const entry of session) {
- for (const tx of entry.transactions) {
- if (tx.privacy === "trusting") {
- const parsedChanges = JSON.parse(tx.changes);
- for (const change of parsedChanges) {
- if (
- change.op === "set" &&
- change.key.startsWith("co_")
- ) {
- dependedOnAccounts.add(change.key);
- }
+ })
+ .catch((e) => console.error("Error while pushing known", e));
+
+ return;
+ }
+
+ if (!known?.header && coValue.header?.ruleset.type === "ownedByGroup") {
+ await this.sendNewContent(
+ coValue.header.ruleset.group,
+ undefined,
+ asDependencyOf || id,
+ );
+ } else if (!known?.header && coValue.header?.ruleset.type === "group") {
+ const dependedOnAccounts = new Set();
+ for (const session of Object.values(coValue.sessionEntries)) {
+ for (const entry of session) {
+ for (const tx of entry.transactions) {
+ if (tx.privacy === "trusting") {
+ const parsedChanges = JSON.parse(tx.changes);
+ for (const change of parsedChanges) {
+ if (
+ change.op === "set" &&
+ change.key.startsWith("co_")
+ ) {
+ dependedOnAccounts.add(change.key);
  }
  }
  }
  }
  }
- for (const account of dependedOnAccounts) {
- coValues = yield* this.sendNewContentInner(
- coValues,
- account as CoID<RawCoValue>,
- undefined,
- asDependencyOf || id,
- );
- }
  }
+ for (const account of dependedOnAccounts) {
+ await this.sendNewContent(
+ account as CoID<RawCoValue>,
+ undefined,
+ asDependencyOf || id,
+ );
+ }
+ }

- const newContentMessages = contentSinceChunk(
- id,
- coValue,
- known,
- ).map((message) => ({ ...message, asDependencyOf }));
+ const newContentMessages = contentSinceChunk(id, coValue, known).map(
+ (message) => ({ ...message, asDependencyOf }),
+ );

- const ourKnown: CoValueKnownState = chunkToKnownState(id, coValue);
+ const ourKnown: CoValueKnownState = chunkToKnownState(id, coValue);

- yield* Queue.offer(this.toLocalNode, {
+ this.toLocalNode
+ .push({
  action: "known",
  ...ourKnown,
  asDependencyOf,
- });
+ })
+ .catch((e) => console.error("Error while pushing known", e));
+
+ for (const message of newContentMessages) {
+ if (Object.keys(message.new).length === 0) continue;
+ this.toLocalNode
+ .push(message)
+ .catch((e) =>
+ console.error("Error while pushing new content", e),
+ );
+ }

- for (const message of newContentMessages) {
- if (Object.keys(message.new).length === 0) continue;
- yield* Queue.offer(this.toLocalNode, message);
+ this.coValues[id] = coValue;
+ }
+
+ async withWAL(handler: (wal: WH) => Promise<void>) {
+ if (!this.currentWal) {
+ this.currentWal = await this.fs.createFile(
+ `wal-${Date.now()}-${Math.random()
+ .toString(36)
+ .slice(2)}.jsonl`,
+ );
+ }
+ await handler(this.currentWal);
+ }
+
+ async handleNewContent(newContent: NewContentMessage) {
+ const coValue = this.coValues[newContent.id];
+
+ const newContentAsChunk: CoValueChunk = {
+ header: newContent.header,
+ sessionEntries: Object.fromEntries(
+ Object.entries(newContent.new).map(
+ ([sessionID, newInSession]) => [
+ sessionID,
+ [
+ {
+ after: newInSession.after,
+ lastSignature: newInSession.lastSignature,
+ transactions: newInSession.newTransactions,
+ },
+ ],
+ ],
+ ),
+ ),
+ };
+
+ if (!coValue) {
+ if (newContent.header) {
+ // console.log("Creating in WAL", newContent.id);
+ await this.withWAL((wal) =>
+ writeToWal(wal, this.fs, newContent.id, newContentAsChunk),
+ );
+
+ this.coValues[newContent.id] = newContentAsChunk;
+ } else {
+ console.warn(
+ "Incontiguous incoming update for " + newContent.id,
+ );
+ return;
  }
+ } else {
+ const merged = mergeChunks(coValue, newContentAsChunk);
+ if (merged === "nonContigous") {
+ console.warn(
+ "Non-contigous new content for " + newContent.id,
+ Object.entries(coValue.sessionEntries).map(
+ ([session, entries]) =>
+ entries.map((entry) => ({
+ session: session,
+ after: entry.after,
+ length: entry.transactions.length,
+ })),
+ ),
+ Object.entries(newContentAsChunk.sessionEntries).map(
+ ([session, entries]) =>
+ entries.map((entry) => ({
+ session: session,
+ after: entry.after,
+ length: entry.transactions.length,
+ })),
+ ),
+ );
+ } else {
+ // console.log("Appending to WAL", newContent.id);
+ await this.withWAL((wal) =>
+ writeToWal(wal, this.fs, newContent.id, newContentAsChunk),
+ );

- return { ...coValues, [id]: coValue };
- });
+ this.coValues[newContent.id] = merged;
+ }
+ }
  }

- withWAL(
- handler: (wal: WH) => Effect.Effect<void, FSErr>,
- ): Effect.Effect<void, FSErr> {
- return SynchronizedRef.updateEffect(this.currentWal, (wal) =>
- Effect.gen(this, function* () {
- let newWal = wal;
- if (!newWal) {
- newWal = yield* this.fs.createFile(
- `wal-${new Date().toISOString()}-${Math.random()
- .toString(36)
- .slice(2)}.jsonl`,
- );
- }
- yield* handler(newWal);
- return newWal;
- }),
- );
+ async getBlockHandle(
+ blockFile: BlockFilename,
+ fs: FS,
+ ): Promise<{ handle: RH; size: number }> {
+ if (!this.blockFileHandles.has(blockFile)) {
+ this.blockFileHandles.set(blockFile, fs.openToRead(blockFile));
+ }
+
+ return this.blockFileHandles.get(blockFile)!;
  }

- handleNewContent(
- newContent: NewContentMessage,
- ): Effect.Effect<void, FSErr> {
- return SynchronizedRef.updateEffect(this.coValues, (coValues) =>
- Effect.gen(this, function* () {
- const coValue = coValues[newContent.id];
-
- const newContentAsChunk: CoValueChunk = {
- header: newContent.header,
- sessionEntries: Object.fromEntries(
- Object.entries(newContent.new).map(
- ([sessionID, newInSession]) => [
- sessionID,
- [
- {
- after: newInSession.after,
- lastSignature:
- newInSession.lastSignature,
- transactions:
- newInSession.newTransactions,
- },
- ],
- ],
- ),
- ),
- };
-
- if (!coValue) {
- if (newContent.header) {
- // console.log("Creating in WAL", newContent.id);
- yield* this.withWAL((wal) =>
- writeToWal(
- wal,
- this.fs,
- newContent.id,
- newContentAsChunk,
- ),
- );
+ async loadCoValue(id: RawCoID, fs: FS): Promise<CoValueChunk | undefined> {
+ const files = this.fileCache || (await fs.listFiles());
+ this.fileCache = files;
+ const blockFiles = (
+ files.filter((name) => name.startsWith("L")) as BlockFilename[]
+ ).sort();

- return {
- ...coValues,
- [newContent.id]: newContentAsChunk,
- };
- } else {
- // yield*
- // Effect.promise(() =>
- // this.toLocalNode.write({
- // action: "known",
- // id: newContent.id,
- // header: false,
- // sessions: {},
- // isCorrection: true,
- // })
- // )
- // );
- yield* Effect.logWarning(
- "Incontiguous incoming update for " + newContent.id,
+ let result;
+
+ for (const blockFile of blockFiles) {
+ let cachedHeader:
+ | { [id: RawCoID]: { start: number; length: number } }
+ | undefined = this.headerCache.get(blockFile);
+
+ const { handle, size } = await this.getBlockHandle(blockFile, fs);
+
+ // console.log("Attempting to load", id, blockFile);
+
+ if (!cachedHeader) {
+ cachedHeader = {};
+ const header = await readHeader(blockFile, handle, size, fs);
+ for (const entry of header) {
+ cachedHeader[entry.id] = {
+ start: entry.start,
+ length: entry.length,
+ };
+ }
+
+ this.headerCache.set(blockFile, cachedHeader);
+ }
+ const headerEntry = cachedHeader[id];
+
+ // console.log("Header entry", id, headerEntry);
+
+ if (headerEntry) {
+ const nextChunk = await readChunk(handle, headerEntry, fs);
+ if (result) {
+ const merged = mergeChunks(result, nextChunk);
+
+ if (merged === "nonContigous") {
+ console.warn(
+ "Non-contigous chunks while loading " + id,
+ result,
+ nextChunk,
  );
- return coValues;
+ } else {
+ result = merged;
  }
  } else {
- const merged = mergeChunks(coValue, newContentAsChunk);
- if (Either.isRight(merged)) {
- yield* Effect.logWarning(
- "Non-contigous new content for " + newContent.id,
- Object.entries(coValue.sessionEntries).map(
- ([session, entries]) =>
- entries.map((entry) => ({
- session: session,
- after: entry.after,
- length: entry.transactions.length,
- })),
- ),
- Object.entries(
- newContentAsChunk.sessionEntries,
- ).map(([session, entries]) =>
- entries.map((entry) => ({
- session: session,
- after: entry.after,
- length: entry.transactions.length,
- })),
- ),
- );
+ result = nextChunk;
+ }
+ }

- // yield* Effect.promise(() =>
- // this.toLocalNode.write({
- // action: "known",
- // ...chunkToKnownState(newContent.id, coValue),
- // isCorrection: true,
- // })
- // );
+ // await fs.close(handle);
+ }

- return coValues;
- } else {
- // console.log("Appending to WAL", newContent.id);
- yield* this.withWAL((wal) =>
- writeToWal(
- wal,
- this.fs,
- newContent.id,
- newContentAsChunk,
- ),
- );
+ return result;
+ }
+
+ async compact() {
+ const fileNames = await this.fs.listFiles();
+
+ const walFiles = fileNames.filter((name) =>
+ name.startsWith("wal-"),
+ ) as WalFilename[];
+ walFiles.sort();
+
+ const coValues = new Map<RawCoID, CoValueChunk>();
+
+ console.log("Compacting WAL files", walFiles);
+ if (walFiles.length === 0) return;
+
+ const oldWal = this.currentWal;
+ this.currentWal = undefined;

- return { ...coValues, [newContent.id]: merged.left };
+ if (oldWal) {
+ await this.fs.close(oldWal);
+ }
+
+ for (const fileName of walFiles) {
+ const { handle, size }: { handle: RH; size: number } =
+ await this.fs.openToRead(fileName);
+ if (size === 0) {
+ await this.fs.close(handle);
+ continue;
+ }
+ const bytes = await this.fs.read(handle, 0, size);
+
+ const decoded = textDecoder.decode(bytes);
+ const lines = decoded.split("\n");
+
+ for (const line of lines) {
+ if (line.length === 0) continue;
+ const chunk = JSON.parse(line) as WalEntry;
+
+ const existingChunk = coValues.get(chunk.id);
+
+ if (existingChunk) {
+ const merged = mergeChunks(existingChunk, chunk);
+ if (merged === "nonContigous") {
+ console.log(
+ "Non-contigous chunks in " +
+ chunk.id +
+ ", " +
+ fileName,
+ existingChunk,
+ chunk,
+ );
+ } else {
+ coValues.set(chunk.id, merged);
  }
+ } else {
+ coValues.set(chunk.id, chunk);
  }
- }),
- );
- }
+ }

- loadCoValue<WH, RH, FS extends FileSystem<WH, RH>>(
- id: RawCoID,
- fs: FS,
- ): Effect.Effect<CoValueChunk | undefined, FSErr> {
- // return _loadChunkFromWal(id, fs);
- return Effect.gen(this, function* () {
- const files = this.fileCache || (yield* fs.listFiles());
- this.fileCache = files;
- const blockFiles = files.filter((name) =>
- name.startsWith("hash_"),
- ) as BlockFilename[];
-
- for (const blockFile of blockFiles) {
- let cachedHeader:
- | { [id: RawCoID]: { start: number; length: number } }
- | undefined = this.headerCache.get(blockFile);
-
- const { handle, size } = yield* fs.openToRead(blockFile);
-
- // console.log("Attempting to load", id, blockFile);
-
- if (!cachedHeader) {
- cachedHeader = {};
- const header = yield* readHeader(
- blockFile,
- handle,
- size,
- fs,
- );
- for (const entry of header) {
- cachedHeader[entry.id] = {
- start: entry.start,
- length: entry.length,
- };
- }
+ await this.fs.close(handle);
+ }

- this.headerCache.set(blockFile, cachedHeader);
+ const highestBlockNumber = fileNames.reduce((acc, name) => {
+ if (name.startsWith("L" + MAX_N_LEVELS)) {
+ const num = parseInt(name.split("-")[1]!);
+ if (num > acc) {
+ return num;
  }
- const headerEntry = cachedHeader[id];
+ }
+ return acc;
+ }, 0);

- // console.log("Header entry", id, headerEntry);
+ console.log([...coValues.keys()], fileNames, highestBlockNumber);

- let result;
- if (headerEntry) {
- result = yield* readChunk(handle, headerEntry, fs);
- }
+ await writeBlock(
+ coValues,
+ MAX_N_LEVELS,
+ highestBlockNumber + 1,
+ this.fs,
+ );

- yield* fs.close(handle);
+ for (const walFile of walFiles) {
+ await this.fs.removeFile(walFile);
+ }
+ this.fileCache = undefined;

- return result;
- }
+ const fileNames2 = await this.fs.listFiles();

- return undefined;
- });
- }
+ const blockFiles = (
+ fileNames2.filter((name) => name.startsWith("L")) as BlockFilename[]
+ ).sort();

- async compact() {
- await Effect.runPromise(
- Effect.gen(this, function* () {
- const fileNames = yield* this.fs.listFiles();
+ const blockFilesByLevelInOrder: {
+ [level: number]: BlockFilename[];
+ } = {};

- const walFiles = fileNames.filter((name) =>
- name.startsWith("wal-"),
- ) as WalFilename[];
- walFiles.sort();
+ for (const blockFile of blockFiles) {
+ const level = parseInt(blockFile.split("-")[0]!.slice(1));
+ if (!blockFilesByLevelInOrder[level]) {
+ blockFilesByLevelInOrder[level] = [];
+ }
+ blockFilesByLevelInOrder[level]!.push(blockFile);
+ }

- const coValues = new Map<RawCoID, CoValueChunk>();
+ console.log(blockFilesByLevelInOrder);

- yield* Effect.log("Compacting WAL files", walFiles);
- if (walFiles.length === 0) return;
+ for (let level = MAX_N_LEVELS; level > 0; level--) {
+ const nBlocksDesired = Math.pow(2, level);
+ const blocksInLevel = blockFilesByLevelInOrder[level];

- yield* SynchronizedRef.updateEffect(this.currentWal, (wal) =>
- Effect.gen(this, function* () {
- if (wal) {
- yield* this.fs.close(wal);
- }
- return undefined;
- }),
- );
+ if (blocksInLevel && blocksInLevel.length > nBlocksDesired) {
+ console.log("Compacting blocks in level", level, blocksInLevel);
+
+ const coValues = new Map<RawCoID, CoValueChunk>();

- for (const fileName of walFiles) {
+ for (const blockFile of blocksInLevel) {
  const { handle, size }: { handle: RH; size: number } =
- yield* this.fs.openToRead(fileName);
+ await this.getBlockHandle(blockFile, this.fs);
+
  if (size === 0) {
- yield* this.fs.close(handle);
  continue;
  }
- const bytes = yield* this.fs.read(handle, 0, size);
-
- const decoded = textDecoder.decode(bytes);
- const lines = decoded.split("\n");
-
- for (const line of lines) {
- if (line.length === 0) continue;
- const chunk = JSON.parse(line) as WalEntry;
+ const header = await readHeader(
+ blockFile,
+ handle,
+ size,
+ this.fs,
+ );
+ for (const entry of header) {
+ const chunk = await readChunk(handle, entry, this.fs);

- const existingChunk = coValues.get(chunk.id);
+ const existingChunk = coValues.get(entry.id);

  if (existingChunk) {
  const merged = mergeChunks(existingChunk, chunk);
- if (Either.isRight(merged)) {
- yield* Effect.logWarning(
+ if (merged === "nonContigous") {
+ console.log(
  "Non-contigous chunks in " +
- chunk.id +
+ entry.id +
  ", " +
- fileName,
+ blockFile,
  existingChunk,
  chunk,
  );
  } else {
- coValues.set(chunk.id, merged.left);
+ coValues.set(entry.id, merged);
  }
  } else {
- coValues.set(chunk.id, chunk);
+ coValues.set(entry.id, chunk);
  }
  }
+ }

- yield* this.fs.close(handle);
+ let levelBelow = blockFilesByLevelInOrder[level - 1];
+ if (!levelBelow) {
+ levelBelow = [];
+ blockFilesByLevelInOrder[level - 1] = levelBelow;
  }

- yield* writeBlock(coValues, 0, this.fs);
- for (const walFile of walFiles) {
- yield* this.fs.removeFile(walFile);
+ const highestBlockNumberInLevelBelow = levelBelow.reduce(
+ (acc, name) => {
+ const num = parseInt(name.split("-")[1]!);
+ if (num > acc) {
+ return num;
+ }
+ return acc;
+ },
+ 0,
+ );
+
+ const newBlockName = await writeBlock(
+ coValues,
+ level - 1,
+ highestBlockNumberInLevelBelow + 1,
+ this.fs,
+ );
+ levelBelow.push(newBlockName);
+
+ // delete blocks that went into this one
+ for (const blockFile of blocksInLevel) {
+ const handle = await this.getBlockHandle(
+ blockFile,
+ this.fs,
+ );
+ await this.fs.close(handle.handle);
+ await this.fs.removeFile(blockFile);
+ this.blockFileHandles.delete(blockFile);
  }
- this.fileCache = undefined;
- }),
- );
+ }
+ }

- setTimeout(() => this.compact(), 5000);
+ setTimeout(
+ () =>
+ this.compact().catch((e) => {
+ console.error("Error while compacting", e);
+ }),
+ 5000,
+ );
  }

- static async asPeer<WH, RH, FS extends FileSystem<WH, RH>>({
+ static asPeer<WH, RH, FS extends FileSystem<WH, RH>>({
  fs,
  trace,
  localNodeName = "local",
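
A detail worth noting in the new `getBlockHandle` above: it memoizes the `Promise` returned by `fs.openToRead` rather than the resolved handle, so a second reader that arrives while the open is still in flight reuses the same pending promise instead of opening the file twice. The idiom in isolation (the names here are placeholders, not the package's API):

```ts
type Handle = { readonly fd: number }; // stand-in for the real handle type

const handles = new Map<string, Promise<Handle>>();

function getHandle(
    file: string,
    open: (f: string) => Promise<Handle>,
): Promise<Handle> {
    let pending = handles.get(file);
    if (!pending) {
        pending = open(file); // cached before it resolves
        handles.set(file, pending);
    }
    return pending;
}
```

One trade-off of this shape, which the diff's version shares: a rejected open stays cached until its entry is removed, as `compact` does with `blockFileHandles.delete(blockFile)` after merging blocks away.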
@@ -453,13 +554,15 @@ export class LSMStorage<WH, RH, FS extends FileSystem<WH, RH>> {
  fs: FS;
  trace?: boolean;
  localNodeName?: string;
- }): Promise<Peer> {
- const [localNodeAsPeer, storageAsPeer] = await Effect.runPromise(
- connectedPeers(localNodeName, "storage", {
+ }): Peer {
+ const [localNodeAsPeer, storageAsPeer] = connectedPeers(
+ localNodeName,
+ "storage",
+ {
  peer1role: "client",
  peer2role: "server",
  trace,
- }),
+ },
  );

  new LSMStorage(fs, localNodeAsPeer.incoming, localNodeAsPeer.outgoing);
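
With the Effect runtime gone, every fire-and-forget operation in the class gains an explicit `.catch`: the `processMessages` kickoff, each `toLocalNode.push`, and the rescheduled `compact` calls inside `setTimeout`. Without one, a rejection from a promise nobody awaits would surface as an unhandled rejection, which recent Node versions treat as fatal by default. The recurring shape, sketched with a hypothetical `task`:

```ts
// Self-rescheduling background job with explicit rejection handling,
// mirroring how compact() is re-armed after each run.
function schedule(task: () => Promise<void>, delayMs: number): void {
    setTimeout(
        () => task().catch((e) => console.error("Background task failed", e)),
        delayMs,
    );
}

schedule(async () => {
    // ... periodic work; call schedule(...) again at the end to re-arm ...
}, 5000);
```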