cojson 0.7.23 → 0.7.28

This diff represents the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
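In short: across this range, LSMStorage drops its dependency on the effect library (Effect, Either, Queue, Stream, SynchronizedRef, Deferred) and is rewritten in plain async/await. The Queue/Stream message handling becomes a single for-await loop with per-message try/catch logging, SynchronizedRef-held state (coValues, currentWal) becomes ordinary mutable fields, mergeChunks now signals failure with the string "nonContigous" instead of an Either, the scheduled compact() calls gain explicit .catch handlers, and compaction now evicts closed block-file handles from blockFileHandles.
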
@@ -1,4 +1,3 @@
- import { Effect, Either, Queue, Stream, SynchronizedRef, Deferred, } from "effect";
  import { connectedPeers } from "../streamUtils.js";
  import { chunkToKnownState, contentSinceChunk, mergeChunks, } from "./chunksAndKnownStates.js";
  import { readChunk, readHeader, textDecoder, writeBlock, writeToWal, } from "./FileSystem.js";
@@ -10,360 +9,336 @@ export class LSMStorage {
  this.toLocalNode = toLocalNode;
  this.headerCache = new Map();
  this.blockFileHandles = new Map();
- this.coValues = SynchronizedRef.unsafeMake({});
- this.currentWal = SynchronizedRef.unsafeMake(undefined);
- void this.fromLocalNode.pipe(Stream.runForEach((msg) => Effect.gen(this, function* () {
- if (msg.action === "done") {
- return;
- }
- if (msg.action === "content") {
- yield* this.handleNewContent(msg);
- }
- else {
- yield* this.sendNewContent(msg.id, msg, undefined);
+ this.coValues = {};
+ this.currentWal = undefined;
+ let nMsg = 0;
+ const processMessages = async () => {
+ for await (const msg of fromLocalNode) {
+ console.log("Storage msg start", nMsg);
+ try {
+ if (msg === "Disconnected" || msg === "PingTimeout") {
+ throw new Error("Unexpected Disconnected message");
+ }
+ if (msg.action === "done") {
+ return;
+ }
+ if (msg.action === "content") {
+ await this.handleNewContent(msg);
+ }
+ else {
+ await this.sendNewContent(msg.id, msg, undefined);
+ }
+ }
+ catch (e) {
+ console.error(new Error(`Error reading from localNode, handling msg\n\n${JSON.stringify(msg, (k, v) => k === "changes" || k === "encryptedChanges"
+ ? v.slice(0, 20) + "..."
+ : v)}`, { cause: e }));
+ }
+ console.log("Storage msg end", nMsg);
+ nMsg++;
  }
- })), Effect.runPromise);
- setTimeout(() => this.compact(), 20000);
- }
- sendNewContent(id, known, asDependencyOf) {
- return SynchronizedRef.updateEffect(this.coValues, (coValues) => this.sendNewContentInner(coValues, id, known, asDependencyOf));
+ };
+ processMessages().catch((e) => console.error("Error in processMessages in storage", e));
+ setTimeout(() => this.compact().catch((e) => {
+ console.error("Error while compacting", e);
+ }), 20000);
  }
- sendNewContentInner(coValues, id, known, asDependencyOf) {
- return Effect.gen(this, function* () {
- let coValue = coValues[id];
- if (!coValue) {
- coValue = yield* this.loadCoValue(id, this.fs);
- }
- if (!coValue) {
- yield* Queue.offer(this.toLocalNode, {
- id: id,
- action: "known",
- header: false,
- sessions: {},
- asDependencyOf,
- });
- return coValues;
- }
- if (!known?.header &&
- coValue.header?.ruleset.type === "ownedByGroup") {
- coValues = yield* this.sendNewContentInner(coValues, coValue.header.ruleset.group, undefined, asDependencyOf || id);
- }
- else if (!known?.header &&
- coValue.header?.ruleset.type === "group") {
- const dependedOnAccounts = new Set();
- for (const session of Object.values(coValue.sessionEntries)) {
- for (const entry of session) {
- for (const tx of entry.transactions) {
- if (tx.privacy === "trusting") {
- const parsedChanges = JSON.parse(tx.changes);
- for (const change of parsedChanges) {
- if (change.op === "set" &&
- change.key.startsWith("co_")) {
- dependedOnAccounts.add(change.key);
- }
+ async sendNewContent(id, known, asDependencyOf) {
+ let coValue = this.coValues[id];
+ if (!coValue) {
+ coValue = await this.loadCoValue(id, this.fs);
+ }
+ if (!coValue) {
+ this.toLocalNode
+ .push({
+ id: id,
+ action: "known",
+ header: false,
+ sessions: {},
+ asDependencyOf,
+ })
+ .catch((e) => console.error("Error while pushing known", e));
+ return;
+ }
+ if (!known?.header && coValue.header?.ruleset.type === "ownedByGroup") {
+ await this.sendNewContent(coValue.header.ruleset.group, undefined, asDependencyOf || id);
+ }
+ else if (!known?.header && coValue.header?.ruleset.type === "group") {
+ const dependedOnAccounts = new Set();
+ for (const session of Object.values(coValue.sessionEntries)) {
+ for (const entry of session) {
+ for (const tx of entry.transactions) {
+ if (tx.privacy === "trusting") {
+ const parsedChanges = JSON.parse(tx.changes);
+ for (const change of parsedChanges) {
+ if (change.op === "set" &&
+ change.key.startsWith("co_")) {
+ dependedOnAccounts.add(change.key);
  }
  }
  }
  }
  }
- for (const account of dependedOnAccounts) {
- coValues = yield* this.sendNewContentInner(coValues, account, undefined, asDependencyOf || id);
- }
  }
- const newContentMessages = contentSinceChunk(id, coValue, known).map((message) => ({ ...message, asDependencyOf }));
- const ourKnown = chunkToKnownState(id, coValue);
- yield* Queue.offer(this.toLocalNode, {
- action: "known",
- ...ourKnown,
- asDependencyOf,
- });
- for (const message of newContentMessages) {
- if (Object.keys(message.new).length === 0)
- continue;
- yield* Queue.offer(this.toLocalNode, message);
+ for (const account of dependedOnAccounts) {
+ await this.sendNewContent(account, undefined, asDependencyOf || id);
  }
- return { ...coValues, [id]: coValue };
- });
+ }
+ const newContentMessages = contentSinceChunk(id, coValue, known).map((message) => ({ ...message, asDependencyOf }));
+ const ourKnown = chunkToKnownState(id, coValue);
+ this.toLocalNode
+ .push({
+ action: "known",
+ ...ourKnown,
+ asDependencyOf,
+ })
+ .catch((e) => console.error("Error while pushing known", e));
+ for (const message of newContentMessages) {
+ if (Object.keys(message.new).length === 0)
+ continue;
+ this.toLocalNode
+ .push(message)
+ .catch((e) => console.error("Error while pushing new content", e));
+ }
+ this.coValues[id] = coValue;
  }
- withWAL(handler) {
- return SynchronizedRef.updateEffect(this.currentWal, (wal) => Effect.gen(this, function* () {
- let newWal = wal;
- if (!newWal) {
- newWal = yield* this.fs.createFile(`wal-${Date.now()}-${Math.random()
- .toString(36)
- .slice(2)}.jsonl`);
- }
- yield* handler(newWal);
- return newWal;
- }));
+ async withWAL(handler) {
+ if (!this.currentWal) {
+ this.currentWal = await this.fs.createFile(`wal-${Date.now()}-${Math.random()
+ .toString(36)
+ .slice(2)}.jsonl`);
+ }
+ await handler(this.currentWal);
  }
- handleNewContent(newContent) {
- return SynchronizedRef.updateEffect(this.coValues, (coValues) => Effect.gen(this, function* () {
- const coValue = coValues[newContent.id];
- const newContentAsChunk = {
- header: newContent.header,
- sessionEntries: Object.fromEntries(Object.entries(newContent.new).map(([sessionID, newInSession]) => [
- sessionID,
- [
- {
- after: newInSession.after,
- lastSignature: newInSession.lastSignature,
- transactions: newInSession.newTransactions,
- },
- ],
- ])),
- };
- if (!coValue) {
- if (newContent.header) {
- // console.log("Creating in WAL", newContent.id);
- yield* this.withWAL((wal) => writeToWal(wal, this.fs, newContent.id, newContentAsChunk));
- return {
- ...coValues,
- [newContent.id]: newContentAsChunk,
- };
- }
- else {
- // yield*
- // Effect.promise(() =>
- // this.toLocalNode.write({
- // action: "known",
- // id: newContent.id,
- // header: false,
- // sessions: {},
- // isCorrection: true,
- // })
- // )
- // );
- yield* Effect.logWarning("Incontiguous incoming update for " + newContent.id);
- return coValues;
- }
+ async handleNewContent(newContent) {
+ const coValue = this.coValues[newContent.id];
+ const newContentAsChunk = {
+ header: newContent.header,
+ sessionEntries: Object.fromEntries(Object.entries(newContent.new).map(([sessionID, newInSession]) => [
+ sessionID,
+ [
+ {
+ after: newInSession.after,
+ lastSignature: newInSession.lastSignature,
+ transactions: newInSession.newTransactions,
+ },
+ ],
+ ])),
+ };
+ if (!coValue) {
+ if (newContent.header) {
+ // console.log("Creating in WAL", newContent.id);
+ await this.withWAL((wal) => writeToWal(wal, this.fs, newContent.id, newContentAsChunk));
+ this.coValues[newContent.id] = newContentAsChunk;
  }
  else {
- const merged = mergeChunks(coValue, newContentAsChunk);
- if (Either.isRight(merged)) {
- yield* Effect.logWarning("Non-contigous new content for " + newContent.id, Object.entries(coValue.sessionEntries).map(([session, entries]) => entries.map((entry) => ({
- session: session,
- after: entry.after,
- length: entry.transactions.length,
- }))), Object.entries(newContentAsChunk.sessionEntries).map(([session, entries]) => entries.map((entry) => ({
- session: session,
- after: entry.after,
- length: entry.transactions.length,
- }))));
- // yield* Effect.promise(() =>
- // this.toLocalNode.write({
- // action: "known",
- // ...chunkToKnownState(newContent.id, coValue),
- // isCorrection: true,
- // })
- // );
- return coValues;
- }
- else {
- // console.log("Appending to WAL", newContent.id);
- yield* this.withWAL((wal) => writeToWal(wal, this.fs, newContent.id, newContentAsChunk));
- return { ...coValues, [newContent.id]: merged.left };
- }
+ console.warn("Incontiguous incoming update for " + newContent.id);
+ return;
  }
- }));
- }
- getBlockHandle(blockFile, fs) {
- return Effect.gen(this, function* () {
- let handleAndSize = this.blockFileHandles.get(blockFile);
- if (!handleAndSize) {
- handleAndSize = yield* Deferred.make();
- this.blockFileHandles.set(blockFile, handleAndSize);
- yield* Deferred.complete(handleAndSize, fs.openToRead(blockFile));
+ }
+ else {
+ const merged = mergeChunks(coValue, newContentAsChunk);
+ if (merged === "nonContigous") {
+ console.warn("Non-contigous new content for " + newContent.id, Object.entries(coValue.sessionEntries).map(([session, entries]) => entries.map((entry) => ({
+ session: session,
+ after: entry.after,
+ length: entry.transactions.length,
+ }))), Object.entries(newContentAsChunk.sessionEntries).map(([session, entries]) => entries.map((entry) => ({
+ session: session,
+ after: entry.after,
+ length: entry.transactions.length,
+ }))));
  }
- return yield* Deferred.await(handleAndSize);
- });
+ else {
+ // console.log("Appending to WAL", newContent.id);
+ await this.withWAL((wal) => writeToWal(wal, this.fs, newContent.id, newContentAsChunk));
+ this.coValues[newContent.id] = merged;
+ }
+ }
  }
- loadCoValue(id, fs) {
- return Effect.gen(this, function* () {
- const files = this.fileCache || (yield* fs.listFiles());
- this.fileCache = files;
- const blockFiles = files.filter((name) => name.startsWith("L")).sort();
- let result;
- for (const blockFile of blockFiles) {
- let cachedHeader = this.headerCache.get(blockFile);
- let handleAndSize = this.blockFileHandles.get(blockFile);
- if (!handleAndSize) {
- handleAndSize = yield* Deferred.make();
- this.blockFileHandles.set(blockFile, handleAndSize);
- yield* Deferred.complete(handleAndSize, fs.openToRead(blockFile));
- }
- const { handle, size } = yield* this.getBlockHandle(blockFile, fs);
- // console.log("Attempting to load", id, blockFile);
- if (!cachedHeader) {
- cachedHeader = {};
- const header = yield* readHeader(blockFile, handle, size, fs);
- for (const entry of header) {
- cachedHeader[entry.id] = {
- start: entry.start,
- length: entry.length,
- };
- }
- this.headerCache.set(blockFile, cachedHeader);
+ async getBlockHandle(blockFile, fs) {
+ if (!this.blockFileHandles.has(blockFile)) {
+ this.blockFileHandles.set(blockFile, fs.openToRead(blockFile));
+ }
+ return this.blockFileHandles.get(blockFile);
+ }
+ async loadCoValue(id, fs) {
+ const files = this.fileCache || (await fs.listFiles());
+ this.fileCache = files;
+ const blockFiles = files.filter((name) => name.startsWith("L")).sort();
+ let result;
+ for (const blockFile of blockFiles) {
+ let cachedHeader = this.headerCache.get(blockFile);
+ const { handle, size } = await this.getBlockHandle(blockFile, fs);
+ // console.log("Attempting to load", id, blockFile);
+ if (!cachedHeader) {
+ cachedHeader = {};
+ const header = await readHeader(blockFile, handle, size, fs);
+ for (const entry of header) {
+ cachedHeader[entry.id] = {
+ start: entry.start,
+ length: entry.length,
+ };
  }
- const headerEntry = cachedHeader[id];
- // console.log("Header entry", id, headerEntry);
- if (headerEntry) {
- const nextChunk = yield* readChunk(handle, headerEntry, fs);
- if (result) {
- const merged = mergeChunks(result, nextChunk);
- if (Either.isRight(merged)) {
- yield* Effect.logWarning("Non-contigous chunks while loading " + id, result, nextChunk);
- }
- else {
- result = merged.left;
- }
+ this.headerCache.set(blockFile, cachedHeader);
+ }
+ const headerEntry = cachedHeader[id];
+ // console.log("Header entry", id, headerEntry);
+ if (headerEntry) {
+ const nextChunk = await readChunk(handle, headerEntry, fs);
+ if (result) {
+ const merged = mergeChunks(result, nextChunk);
+ if (merged === "nonContigous") {
+ console.warn("Non-contigous chunks while loading " + id, result, nextChunk);
  }
  else {
- result = nextChunk;
+ result = merged;
  }
  }
- // yield* fs.close(handle);
+ else {
+ result = nextChunk;
+ }
  }
- return result;
- });
+ // await fs.close(handle);
+ }
+ return result;
  }
  async compact() {
- await Effect.runPromise(Effect.gen(this, function* () {
- const fileNames = yield* this.fs.listFiles();
- const walFiles = fileNames.filter((name) => name.startsWith("wal-"));
- walFiles.sort();
- const coValues = new Map();
- yield* Effect.log("Compacting WAL files", walFiles);
- if (walFiles.length === 0)
- return;
- yield* SynchronizedRef.updateEffect(this.currentWal, (wal) => Effect.gen(this, function* () {
- if (wal) {
- yield* this.fs.close(wal);
- }
- return undefined;
- }));
- for (const fileName of walFiles) {
- const { handle, size } = yield* this.fs.openToRead(fileName);
- if (size === 0) {
- yield* this.fs.close(handle);
+ const fileNames = await this.fs.listFiles();
+ const walFiles = fileNames.filter((name) => name.startsWith("wal-"));
+ walFiles.sort();
+ const coValues = new Map();
+ console.log("Compacting WAL files", walFiles);
+ if (walFiles.length === 0)
+ return;
+ const oldWal = this.currentWal;
+ this.currentWal = undefined;
+ if (oldWal) {
+ await this.fs.close(oldWal);
+ }
+ for (const fileName of walFiles) {
+ const { handle, size } = await this.fs.openToRead(fileName);
+ if (size === 0) {
+ await this.fs.close(handle);
+ continue;
+ }
+ const bytes = await this.fs.read(handle, 0, size);
+ const decoded = textDecoder.decode(bytes);
+ const lines = decoded.split("\n");
+ for (const line of lines) {
+ if (line.length === 0)
  continue;
- }
- const bytes = yield* this.fs.read(handle, 0, size);
- const decoded = textDecoder.decode(bytes);
- const lines = decoded.split("\n");
- for (const line of lines) {
- if (line.length === 0)
- continue;
- const chunk = JSON.parse(line);
- const existingChunk = coValues.get(chunk.id);
- if (existingChunk) {
- const merged = mergeChunks(existingChunk, chunk);
- if (Either.isRight(merged)) {
- yield* Effect.logWarning("Non-contigous chunks in " +
- chunk.id +
- ", " +
- fileName, existingChunk, chunk);
- }
- else {
- coValues.set(chunk.id, merged.left);
- }
+ const chunk = JSON.parse(line);
+ const existingChunk = coValues.get(chunk.id);
+ if (existingChunk) {
+ const merged = mergeChunks(existingChunk, chunk);
+ if (merged === "nonContigous") {
+ console.log("Non-contigous chunks in " +
+ chunk.id +
+ ", " +
+ fileName, existingChunk, chunk);
  }
  else {
- coValues.set(chunk.id, chunk);
+ coValues.set(chunk.id, merged);
  }
  }
- yield* this.fs.close(handle);
- }
- const highestBlockNumber = fileNames.reduce((acc, name) => {
- if (name.startsWith("L" + MAX_N_LEVELS)) {
- const num = parseInt(name.split("-")[1]);
- if (num > acc) {
- return num;
- }
+ else {
+ coValues.set(chunk.id, chunk);
  }
- return acc;
- }, 0);
- console.log([...coValues.keys()], fileNames, highestBlockNumber);
- yield* writeBlock(coValues, MAX_N_LEVELS, highestBlockNumber + 1, this.fs);
- for (const walFile of walFiles) {
- yield* this.fs.removeFile(walFile);
  }
- this.fileCache = undefined;
- const fileNames2 = yield* this.fs.listFiles();
- const blockFiles = fileNames2.filter((name) => name.startsWith("L")).sort();
- const blockFilesByLevelInOrder = {};
- for (const blockFile of blockFiles) {
- const level = parseInt(blockFile.split("-")[0].slice(1));
- if (!blockFilesByLevelInOrder[level]) {
- blockFilesByLevelInOrder[level] = [];
+ await this.fs.close(handle);
+ }
+ const highestBlockNumber = fileNames.reduce((acc, name) => {
+ if (name.startsWith("L" + MAX_N_LEVELS)) {
+ const num = parseInt(name.split("-")[1]);
+ if (num > acc) {
+ return num;
  }
- blockFilesByLevelInOrder[level].push(blockFile);
  }
- console.log(blockFilesByLevelInOrder);
- for (let level = MAX_N_LEVELS; level > 0; level--) {
- const nBlocksDesired = Math.pow(2, level);
- const blocksInLevel = blockFilesByLevelInOrder[level];
- if (blocksInLevel &&
- blocksInLevel.length > nBlocksDesired) {
- yield* Effect.log("Compacting blocks in level", level, blocksInLevel);
- const coValues = new Map();
- for (const blockFile of blocksInLevel) {
- const { handle, size, } = yield* this.getBlockHandle(blockFile, this.fs);
- if (size === 0) {
- continue;
- }
- const header = yield* readHeader(blockFile, handle, size, this.fs);
- for (const entry of header) {
- const chunk = yield* readChunk(handle, entry, this.fs);
- const existingChunk = coValues.get(entry.id);
- if (existingChunk) {
- const merged = mergeChunks(existingChunk, chunk);
- if (Either.isRight(merged)) {
- yield* Effect.logWarning("Non-contigous chunks in " +
- entry.id +
- ", " +
- blockFile, existingChunk, chunk);
- }
- else {
- coValues.set(entry.id, merged.left);
- }
+ return acc;
+ }, 0);
+ console.log([...coValues.keys()], fileNames, highestBlockNumber);
+ await writeBlock(coValues, MAX_N_LEVELS, highestBlockNumber + 1, this.fs);
+ for (const walFile of walFiles) {
+ await this.fs.removeFile(walFile);
+ }
+ this.fileCache = undefined;
+ const fileNames2 = await this.fs.listFiles();
+ const blockFiles = fileNames2.filter((name) => name.startsWith("L")).sort();
+ const blockFilesByLevelInOrder = {};
+ for (const blockFile of blockFiles) {
+ const level = parseInt(blockFile.split("-")[0].slice(1));
+ if (!blockFilesByLevelInOrder[level]) {
+ blockFilesByLevelInOrder[level] = [];
+ }
+ blockFilesByLevelInOrder[level].push(blockFile);
+ }
+ console.log(blockFilesByLevelInOrder);
+ for (let level = MAX_N_LEVELS; level > 0; level--) {
+ const nBlocksDesired = Math.pow(2, level);
+ const blocksInLevel = blockFilesByLevelInOrder[level];
+ if (blocksInLevel && blocksInLevel.length > nBlocksDesired) {
+ console.log("Compacting blocks in level", level, blocksInLevel);
+ const coValues = new Map();
+ for (const blockFile of blocksInLevel) {
+ const { handle, size } = await this.getBlockHandle(blockFile, this.fs);
+ if (size === 0) {
+ continue;
+ }
+ const header = await readHeader(blockFile, handle, size, this.fs);
+ for (const entry of header) {
+ const chunk = await readChunk(handle, entry, this.fs);
+ const existingChunk = coValues.get(entry.id);
+ if (existingChunk) {
+ const merged = mergeChunks(existingChunk, chunk);
+ if (merged === "nonContigous") {
+ console.log("Non-contigous chunks in " +
+ entry.id +
+ ", " +
+ blockFile, existingChunk, chunk);
  }
  else {
- coValues.set(entry.id, chunk);
+ coValues.set(entry.id, merged);
  }
  }
- }
- let levelBelow = blockFilesByLevelInOrder[level - 1];
- if (!levelBelow) {
- levelBelow = [];
- blockFilesByLevelInOrder[level - 1] = levelBelow;
- }
- const highestBlockNumberInLevelBelow = levelBelow.reduce((acc, name) => {
- const num = parseInt(name.split("-")[1]);
- if (num > acc) {
- return num;
+ else {
+ coValues.set(entry.id, chunk);
  }
- return acc;
- }, 0);
- const newBlockName = yield* writeBlock(coValues, level - 1, highestBlockNumberInLevelBelow + 1, this.fs);
- levelBelow.push(newBlockName);
- // delete blocks that went into this one
- for (const blockFile of blocksInLevel) {
- const handle = yield* this.getBlockHandle(blockFile, this.fs);
- yield* this.fs.close(handle.handle);
- yield* this.fs.removeFile(blockFile);
  }
  }
+ let levelBelow = blockFilesByLevelInOrder[level - 1];
+ if (!levelBelow) {
+ levelBelow = [];
+ blockFilesByLevelInOrder[level - 1] = levelBelow;
+ }
+ const highestBlockNumberInLevelBelow = levelBelow.reduce((acc, name) => {
+ const num = parseInt(name.split("-")[1]);
+ if (num > acc) {
+ return num;
+ }
+ return acc;
+ }, 0);
+ const newBlockName = await writeBlock(coValues, level - 1, highestBlockNumberInLevelBelow + 1, this.fs);
+ levelBelow.push(newBlockName);
+ // delete blocks that went into this one
+ for (const blockFile of blocksInLevel) {
+ const handle = await this.getBlockHandle(blockFile, this.fs);
+ await this.fs.close(handle.handle);
+ await this.fs.removeFile(blockFile);
+ this.blockFileHandles.delete(blockFile);
+ }
  }
- }));
- setTimeout(() => this.compact(), 5000);
+ }
+ setTimeout(() => this.compact().catch((e) => {
+ console.error("Error while compacting", e);
+ }), 5000);
  }
- static async asPeer({ fs, trace, localNodeName = "local", }) {
- const [localNodeAsPeer, storageAsPeer] = await Effect.runPromise(connectedPeers(localNodeName, "storage", {
+ static asPeer({ fs, trace, localNodeName = "local", }) {
+ const [localNodeAsPeer, storageAsPeer] = connectedPeers(localNodeName, "storage", {
  peer1role: "client",
  peer2role: "server",
  trace,
- }));
+ });
  new LSMStorage(fs, localNodeAsPeer.incoming, localNodeAsPeer.outgoing);
  // return { ...storageAsPeer, priority: 200 };
  return storageAsPeer;
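
The recurring pattern in the diff above is the same in every method: state that previously lived in a SynchronizedRef and was updated through Effect.gen generators becomes a plain class field mutated by an async method, and fire-and-forget call sites attach .catch so rejections no longer flow through Effect. A minimal sketch of that shape, using a hypothetical Wal type and createFile stub rather than the real cojson FileSystem API:

// Minimal sketch of the 0.7.28 pattern (hypothetical Wal type and createFile
// stub, for illustration only; not the real cojson FileSystem API).
interface Wal {
    name: string;
}

async function createFile(name: string): Promise<Wal> {
    // Stand-in for fs.createFile, which returns a writable file handle.
    return { name };
}

class WalHolder {
    // 0.7.23 kept this in SynchronizedRef.unsafeMake(undefined); 0.7.28 makes
    // it a plain field, relying on the single for-await message loop to keep
    // updates sequential instead of serializing them through the ref.
    private currentWal: Wal | undefined;

    // Mirrors the shape of the new withWAL: lazily create a WAL file on first
    // use, then hand it to the caller's handler.
    async withWAL(handler: (wal: Wal) => Promise<void>): Promise<void> {
        if (!this.currentWal) {
            this.currentWal = await createFile(
                `wal-${Date.now()}-${Math.random().toString(36).slice(2)}.jsonl`,
            );
        }
        await handler(this.currentWal);
    }
}

// Fire-and-forget call sites attach .catch, as the diff does for push(),
// processMessages(), and the scheduled compact() calls.
new WalHolder()
    .withWAL(async (wal) => console.log("writing to", wal.name))
    .catch((e) => console.error("Error while writing to WAL", e));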