cojson 0.7.23 → 0.7.26
This diff shows the changes between publicly released versions of the package, as they appear in the supported public registries, and is provided for informational purposes only.
- package/.turbo/turbo-build.log +2 -2
- package/.turbo/turbo-test.log +264 -339
- package/CHANGELOG.md +6 -0
- package/dist/coValueCore.js +8 -1
- package/dist/coValueCore.js.map +1 -1
- package/dist/index.js +4 -4
- package/dist/index.js.map +1 -1
- package/dist/localNode.js +3 -0
- package/dist/localNode.js.map +1 -1
- package/dist/storage/FileSystem.js +48 -62
- package/dist/storage/FileSystem.js.map +1 -1
- package/dist/storage/chunksAndKnownStates.js +2 -3
- package/dist/storage/chunksAndKnownStates.js.map +1 -1
- package/dist/storage/index.js +285 -310
- package/dist/storage/index.js.map +1 -1
- package/dist/streamUtils.js +34 -32
- package/dist/streamUtils.js.map +1 -1
- package/dist/sync.js +48 -35
- package/dist/sync.js.map +1 -1
- package/dist/tests/account.test.js +2 -3
- package/dist/tests/account.test.js.map +1 -1
- package/dist/tests/sync.test.js +183 -182
- package/dist/tests/sync.test.js.map +1 -1
- package/package.json +4 -4
- package/src/coValueCore.ts +8 -1
- package/src/index.ts +5 -10
- package/src/localNode.ts +4 -0
- package/src/storage/FileSystem.ts +83 -110
- package/src/storage/chunksAndKnownStates.ts +3 -4
- package/src/storage/index.ts +391 -491
- package/src/streamUtils.ts +46 -48
- package/src/sync.ts +68 -73
- package/src/tests/account.test.ts +5 -8
- package/src/tests/sync.test.ts +731 -798
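The headline change in this version range is a migration of the storage layer off the `effect` runtime onto plain Promises and async iteration: the `Effect`, `Either`, `Queue`, `Stream`, `SynchronizedRef`, and `Deferred` imports are dropped, generator-based `yield*` pipelines become `async`/`await` methods, and queue writes become fire-and-forget `.push(...).catch(...)` calls. A condensed before/after of the recurring pattern, excerpted verbatim from the `storage/index.js` diff below:

```diff
-            yield* Queue.offer(this.toLocalNode, message);
+            this.toLocalNode
+                .push(message)
+                .catch((e) => console.error("Error while pushing new content", e));
```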
package/dist/storage/index.js (CHANGED)
```diff
@@ -1,4 +1,3 @@
-import { Effect, Either, Queue, Stream, SynchronizedRef, Deferred, } from "effect";
 import { connectedPeers } from "../streamUtils.js";
 import { chunkToKnownState, contentSinceChunk, mergeChunks, } from "./chunksAndKnownStates.js";
 import { readChunk, readHeader, textDecoder, writeBlock, writeToWal, } from "./FileSystem.js";
@@ -10,360 +9,336 @@ export class LSMStorage {
         this.toLocalNode = toLocalNode;
         this.headerCache = new Map();
         this.blockFileHandles = new Map();
-        this.coValues =
-        this.currentWal =
-[… 9 truncated deleted lines …]
+        this.coValues = {};
+        this.currentWal = undefined;
+        let nMsg = 0;
+        const processMessages = async () => {
+            for await (const msg of fromLocalNode) {
+                console.log("Storage msg start", nMsg);
+                try {
+                    if (msg === "Disconnected" || msg === "PingTimeout") {
+                        throw new Error("Unexpected Disconnected message");
+                    }
+                    if (msg.action === "done") {
+                        return;
+                    }
+                    if (msg.action === "content") {
+                        await this.handleNewContent(msg);
+                    }
+                    else {
+                        await this.sendNewContent(msg.id, msg, undefined);
+                    }
+                }
+                catch (e) {
+                    console.error(new Error(`Error reading from localNode, handling msg\n\n${JSON.stringify(msg, (k, v) => k === "changes" || k === "encryptedChanges"
+                        ? v.slice(0, 20) + "..."
+                        : v)}`, { cause: e }));
+                }
+                console.log("Storage msg end", nMsg);
+                nMsg++;
             }
-        }
-[… 4 truncated deleted lines …]
+        };
+        processMessages().catch((e) => console.error("Error in processMessages in storage", e));
+        setTimeout(() => this.compact().catch((e) => {
+            console.error("Error while compacting", e);
+        }), 20000);
     }
-[… 23 truncated deleted lines …]
-        for (const
-        for (const
-[… 6 truncated deleted lines …]
-        dependedOnAccounts.add(change.key);
-        }
+    async sendNewContent(id, known, asDependencyOf) {
+        let coValue = this.coValues[id];
+        if (!coValue) {
+            coValue = await this.loadCoValue(id, this.fs);
+        }
+        if (!coValue) {
+            this.toLocalNode
+                .push({
+                    id: id,
+                    action: "known",
+                    header: false,
+                    sessions: {},
+                    asDependencyOf,
+                })
+                .catch((e) => console.error("Error while pushing known", e));
+            return;
+        }
+        if (!known?.header && coValue.header?.ruleset.type === "ownedByGroup") {
+            await this.sendNewContent(coValue.header.ruleset.group, undefined, asDependencyOf || id);
+        }
+        else if (!known?.header && coValue.header?.ruleset.type === "group") {
+            const dependedOnAccounts = new Set();
+            for (const session of Object.values(coValue.sessionEntries)) {
+                for (const entry of session) {
+                    for (const tx of entry.transactions) {
+                        if (tx.privacy === "trusting") {
+                            const parsedChanges = JSON.parse(tx.changes);
+                            for (const change of parsedChanges) {
+                                if (change.op === "set" &&
+                                    change.key.startsWith("co_")) {
+                                    dependedOnAccounts.add(change.key);
                                 }
                             }
                         }
                     }
                 }
-            for (const account of dependedOnAccounts) {
-                coValues = yield* this.sendNewContentInner(coValues, account, undefined, asDependencyOf || id);
-            }
             }
-        const
-[… 1 truncated deleted line …]
-        yield* Queue.offer(this.toLocalNode, {
-            action: "known",
-            ...ourKnown,
-            asDependencyOf,
-        });
-        for (const message of newContentMessages) {
-            if (Object.keys(message.new).length === 0)
-                continue;
-            yield* Queue.offer(this.toLocalNode, message);
+            for (const account of dependedOnAccounts) {
+                await this.sendNewContent(account, undefined, asDependencyOf || id);
             }
-[… 1 truncated deleted line …]
-        });
+        }
+        const newContentMessages = contentSinceChunk(id, coValue, known).map((message) => ({ ...message, asDependencyOf }));
+        const ourKnown = chunkToKnownState(id, coValue);
+        this.toLocalNode
+            .push({
+                action: "known",
+                ...ourKnown,
+                asDependencyOf,
+            })
+            .catch((e) => console.error("Error while pushing known", e));
+        for (const message of newContentMessages) {
+            if (Object.keys(message.new).length === 0)
+                continue;
+            this.toLocalNode
+                .push(message)
+                .catch((e) => console.error("Error while pushing new content", e));
+        }
+        this.coValues[id] = coValue;
     }
-    withWAL(handler) {
-[… 6 truncated deleted lines …]
-        }
-        yield* handler(newWal);
-        return newWal;
-    }));
+    async withWAL(handler) {
+        if (!this.currentWal) {
+            this.currentWal = await this.fs.createFile(`wal-${Date.now()}-${Math.random()
+                .toString(36)
+                .slice(2)}.jsonl`);
+        }
+        await handler(this.currentWal);
     }
-    handleNewContent(newContent) {
-[… 15 truncated deleted lines …]
-        if (
-[… 3 truncated deleted lines …]
-            return {
-                ...coValues,
-                [newContent.id]: newContentAsChunk,
-            };
-        }
-        else {
-            // yield*
-            // Effect.promise(() =>
-            // this.toLocalNode.write({
-            // action: "known",
-            // id: newContent.id,
-            // header: false,
-            // sessions: {},
-            // isCorrection: true,
-            // })
-            // )
-            // );
-            yield* Effect.logWarning("Incontiguous incoming update for " + newContent.id);
-            return coValues;
-        }
+    async handleNewContent(newContent) {
+        const coValue = this.coValues[newContent.id];
+        const newContentAsChunk = {
+            header: newContent.header,
+            sessionEntries: Object.fromEntries(Object.entries(newContent.new).map(([sessionID, newInSession]) => [
+                sessionID,
+                [
+                    {
+                        after: newInSession.after,
+                        lastSignature: newInSession.lastSignature,
+                        transactions: newInSession.newTransactions,
+                    },
+                ],
+            ])),
+        };
+        if (!coValue) {
+            if (newContent.header) {
+                // console.log("Creating in WAL", newContent.id);
+                await this.withWAL((wal) => writeToWal(wal, this.fs, newContent.id, newContentAsChunk));
+                this.coValues[newContent.id] = newContentAsChunk;
             }
             else {
-[… 2 truncated deleted lines …]
-            yield* Effect.logWarning("Non-contigous new content for " + newContent.id, Object.entries(coValue.sessionEntries).map(([session, entries]) => entries.map((entry) => ({
-                session: session,
-                after: entry.after,
-                length: entry.transactions.length,
-            }))), Object.entries(newContentAsChunk.sessionEntries).map(([session, entries]) => entries.map((entry) => ({
-                session: session,
-                after: entry.after,
-                length: entry.transactions.length,
-            }))));
-            // yield* Effect.promise(() =>
-            // this.toLocalNode.write({
-            // action: "known",
-            // ...chunkToKnownState(newContent.id, coValue),
-            // isCorrection: true,
-            // })
-            // );
-            return coValues;
-        }
-        else {
-            // console.log("Appending to WAL", newContent.id);
-            yield* this.withWAL((wal) => writeToWal(wal, this.fs, newContent.id, newContentAsChunk));
-            return { ...coValues, [newContent.id]: merged.left };
-        }
+                console.warn("Incontiguous incoming update for " + newContent.id);
+                return;
             }
-    }
-[… 8 truncated deleted lines …]
+        }
+        else {
+            const merged = mergeChunks(coValue, newContentAsChunk);
+            if (merged === "nonContigous") {
+                console.warn("Non-contigous new content for " + newContent.id, Object.entries(coValue.sessionEntries).map(([session, entries]) => entries.map((entry) => ({
+                    session: session,
+                    after: entry.after,
+                    length: entry.transactions.length,
+                }))), Object.entries(newContentAsChunk.sessionEntries).map(([session, entries]) => entries.map((entry) => ({
+                    session: session,
+                    after: entry.after,
+                    length: entry.transactions.length,
+                }))));
             }
-[… 2 truncated deleted lines …]
+            else {
+                // console.log("Appending to WAL", newContent.id);
+                await this.withWAL((wal) => writeToWal(wal, this.fs, newContent.id, newContentAsChunk));
+                this.coValues[newContent.id] = merged;
+            }
+        }
     }
-[… 23 truncated deleted lines …]
-            };
-        }
-        this.headerCache.set(blockFile, cachedHeader);
+    async getBlockHandle(blockFile, fs) {
+        if (!this.blockFileHandles.has(blockFile)) {
+            this.blockFileHandles.set(blockFile, fs.openToRead(blockFile));
+        }
+        return this.blockFileHandles.get(blockFile);
+    }
+    async loadCoValue(id, fs) {
+        const files = this.fileCache || (await fs.listFiles());
+        this.fileCache = files;
+        const blockFiles = files.filter((name) => name.startsWith("L")).sort();
+        let result;
+        for (const blockFile of blockFiles) {
+            let cachedHeader = this.headerCache.get(blockFile);
+            const { handle, size } = await this.getBlockHandle(blockFile, fs);
+            // console.log("Attempting to load", id, blockFile);
+            if (!cachedHeader) {
+                cachedHeader = {};
+                const header = await readHeader(blockFile, handle, size, fs);
+                for (const entry of header) {
+                    cachedHeader[entry.id] = {
+                        start: entry.start,
+                        length: entry.length,
+                    };
                 }
-[… 10 truncated deleted lines …]
-                        result = merged.left;
-                    }
+                this.headerCache.set(blockFile, cachedHeader);
+            }
+            const headerEntry = cachedHeader[id];
+            // console.log("Header entry", id, headerEntry);
+            if (headerEntry) {
+                const nextChunk = await readChunk(handle, headerEntry, fs);
+                if (result) {
+                    const merged = mergeChunks(result, nextChunk);
+                    if (merged === "nonContigous") {
+                        console.warn("Non-contigous chunks while loading " + id, result, nextChunk);
                     }
                     else {
-                        result =
+                        result = merged;
                     }
                 }
-[… 1 truncated deleted line …]
+                else {
+                    result = nextChunk;
+                }
             }
-[… 1 truncated deleted line …]
-        }
+            // await fs.close(handle);
+        }
+        return result;
     }
     async compact() {
-        await
-[… 12 truncated deleted lines …]
-        })
-[… 4 truncated deleted lines …]
+        const fileNames = await this.fs.listFiles();
+        const walFiles = fileNames.filter((name) => name.startsWith("wal-"));
+        walFiles.sort();
+        const coValues = new Map();
+        console.log("Compacting WAL files", walFiles);
+        if (walFiles.length === 0)
+            return;
+        const oldWal = this.currentWal;
+        this.currentWal = undefined;
+        if (oldWal) {
+            await this.fs.close(oldWal);
+        }
+        for (const fileName of walFiles) {
+            const { handle, size } = await this.fs.openToRead(fileName);
+            if (size === 0) {
+                await this.fs.close(handle);
+                continue;
+            }
+            const bytes = await this.fs.read(handle, 0, size);
+            const decoded = textDecoder.decode(bytes);
+            const lines = decoded.split("\n");
+            for (const line of lines) {
+                if (line.length === 0)
                     continue;
-[… 1 truncated deleted line …]
-                const
-[… 7 truncated deleted lines …]
-                if (existingChunk) {
-                    const merged = mergeChunks(existingChunk, chunk);
-                    if (Either.isRight(merged)) {
-                        yield* Effect.logWarning("Non-contigous chunks in " +
-                            chunk.id +
-                            ", " +
-                            fileName, existingChunk, chunk);
-                    }
-                    else {
-                        coValues.set(chunk.id, merged.left);
-                    }
+                const chunk = JSON.parse(line);
+                const existingChunk = coValues.get(chunk.id);
+                if (existingChunk) {
+                    const merged = mergeChunks(existingChunk, chunk);
+                    if (merged === "nonContigous") {
+                        console.log("Non-contigous chunks in " +
+                            chunk.id +
+                            ", " +
+                            fileName, existingChunk, chunk);
                     }
                     else {
-                        coValues.set(chunk.id,
+                        coValues.set(chunk.id, merged);
                     }
                 }
-[… 2 truncated deleted lines …]
-        const highestBlockNumber = fileNames.reduce((acc, name) => {
-            if (name.startsWith("L" + MAX_N_LEVELS)) {
-                const num = parseInt(name.split("-")[1]);
-                if (num > acc) {
-                    return num;
-                }
+                else {
+                    coValues.set(chunk.id, chunk);
                 }
-            return acc;
-        }, 0);
-        console.log([...coValues.keys()], fileNames, highestBlockNumber);
-        yield* writeBlock(coValues, MAX_N_LEVELS, highestBlockNumber + 1, this.fs);
-        for (const walFile of walFiles) {
-            yield* this.fs.removeFile(walFile);
             }
-        this.
-[… 6 truncated deleted lines …]
-            blockFilesByLevelInOrder[level] = [];
+            await this.fs.close(handle);
+        }
+        const highestBlockNumber = fileNames.reduce((acc, name) => {
+            if (name.startsWith("L" + MAX_N_LEVELS)) {
+                const num = parseInt(name.split("-")[1]);
+                if (num > acc) {
+                    return num;
                 }
-            blockFilesByLevelInOrder[level].push(blockFile);
             }
-[… 28 truncated deleted lines …]
+            return acc;
+        }, 0);
+        console.log([...coValues.keys()], fileNames, highestBlockNumber);
+        await writeBlock(coValues, MAX_N_LEVELS, highestBlockNumber + 1, this.fs);
+        for (const walFile of walFiles) {
+            await this.fs.removeFile(walFile);
+        }
+        this.fileCache = undefined;
+        const fileNames2 = await this.fs.listFiles();
+        const blockFiles = fileNames2.filter((name) => name.startsWith("L")).sort();
+        const blockFilesByLevelInOrder = {};
+        for (const blockFile of blockFiles) {
+            const level = parseInt(blockFile.split("-")[0].slice(1));
+            if (!blockFilesByLevelInOrder[level]) {
+                blockFilesByLevelInOrder[level] = [];
+            }
+            blockFilesByLevelInOrder[level].push(blockFile);
+        }
+        console.log(blockFilesByLevelInOrder);
+        for (let level = MAX_N_LEVELS; level > 0; level--) {
+            const nBlocksDesired = Math.pow(2, level);
+            const blocksInLevel = blockFilesByLevelInOrder[level];
+            if (blocksInLevel && blocksInLevel.length > nBlocksDesired) {
+                console.log("Compacting blocks in level", level, blocksInLevel);
+                const coValues = new Map();
+                for (const blockFile of blocksInLevel) {
+                    const { handle, size } = await this.getBlockHandle(blockFile, this.fs);
+                    if (size === 0) {
+                        continue;
+                    }
+                    const header = await readHeader(blockFile, handle, size, this.fs);
+                    for (const entry of header) {
+                        const chunk = await readChunk(handle, entry, this.fs);
+                        const existingChunk = coValues.get(entry.id);
+                        if (existingChunk) {
+                            const merged = mergeChunks(existingChunk, chunk);
+                            if (merged === "nonContigous") {
+                                console.log("Non-contigous chunks in " +
+                                    entry.id +
+                                    ", " +
+                                    blockFile, existingChunk, chunk);
                             }
                             else {
-                                coValues.set(entry.id,
+                                coValues.set(entry.id, merged);
                             }
                         }
-[… 2 truncated deleted lines …]
-                    if (!levelBelow) {
-                        levelBelow = [];
-                        blockFilesByLevelInOrder[level - 1] = levelBelow;
-                    }
-                    const highestBlockNumberInLevelBelow = levelBelow.reduce((acc, name) => {
-                        const num = parseInt(name.split("-")[1]);
-                        if (num > acc) {
-                            return num;
+                        else {
+                            coValues.set(entry.id, chunk);
                         }
-                        return acc;
-                    }, 0);
-                    const newBlockName = yield* writeBlock(coValues, level - 1, highestBlockNumberInLevelBelow + 1, this.fs);
-                    levelBelow.push(newBlockName);
-                    // delete blocks that went into this one
-                    for (const blockFile of blocksInLevel) {
-                        const handle = yield* this.getBlockHandle(blockFile, this.fs);
-                        yield* this.fs.close(handle.handle);
-                        yield* this.fs.removeFile(blockFile);
                     }
                 }
+                let levelBelow = blockFilesByLevelInOrder[level - 1];
+                if (!levelBelow) {
+                    levelBelow = [];
+                    blockFilesByLevelInOrder[level - 1] = levelBelow;
+                }
+                const highestBlockNumberInLevelBelow = levelBelow.reduce((acc, name) => {
+                    const num = parseInt(name.split("-")[1]);
+                    if (num > acc) {
+                        return num;
+                    }
+                    return acc;
+                }, 0);
+                const newBlockName = await writeBlock(coValues, level - 1, highestBlockNumberInLevelBelow + 1, this.fs);
+                levelBelow.push(newBlockName);
+                // delete blocks that went into this one
+                for (const blockFile of blocksInLevel) {
+                    const handle = await this.getBlockHandle(blockFile, this.fs);
+                    await this.fs.close(handle.handle);
+                    await this.fs.removeFile(blockFile);
+                    this.blockFileHandles.delete(blockFile);
+                }
             }
-        }
-        setTimeout(() => this.compact()
+        }
+        setTimeout(() => this.compact().catch((e) => {
+            console.error("Error while compacting", e);
+        }), 5000);
     }
-    static
-    const [localNodeAsPeer, storageAsPeer] =
+    static asPeer({ fs, trace, localNodeName = "local", }) {
+        const [localNodeAsPeer, storageAsPeer] = connectedPeers(localNodeName, "storage", {
             peer1role: "client",
             peer2role: "server",
             trace,
-        })
+        });
         new LSMStorage(fs, localNodeAsPeer.incoming, localNodeAsPeer.outgoing);
         // return { ...storageAsPeer, priority: 200 };
         return storageAsPeer;
```
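To summarize the storage design visible above: WAL files named `wal-<timestamp>-<random>.jsonl` hold one JSON-encoded chunk per line, block files named `L<level>-<number>` hold sorted chunks behind a header, and `compact()` first folds all WALs into a fresh block at level `MAX_N_LEVELS`, then, whenever a level holds more than 2^level blocks, merges that level's blocks into a single block one level down and deletes the inputs (now also evicting their cached read handles via `this.blockFileHandles.delete(blockFile)`, which the deleted 0.7.23 code shown here did not do). The sketch below reconstructs the filesystem surface that `LSMStorage` drives, inferred only from the calls visible in this diff; the authoritative definitions live in `package/src/storage/FileSystem.ts` (also changed in this release), so treat these names and shapes as approximations:

```ts
// Approximate FS surface used by LSMStorage, inferred from this diff (not authoritative).
interface ReadHandle {
    handle: unknown; // opaque handle passed back into read()/close()
    size: number;    // file size in bytes; used to read block headers and skip empty WALs
}

interface BlockFilesystem {
    listFiles(): Promise<string[]>;                     // enumerate WAL ("wal-*") and block ("L*") files
    createFile(filename: string): Promise<unknown>;     // writable handle, e.g. for a new WAL
    openToRead(filename: string): Promise<ReadHandle>;  // cached per block file by getBlockHandle()
    read(handle: unknown, offset: number, length: number): Promise<Uint8Array>;
    close(handle: unknown): Promise<void>;
    removeFile(filename: string): Promise<void>;
}
```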