cojson 0.7.18 → 0.7.26
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +2 -2
- package/.turbo/turbo-lint.log +0 -4
- package/.turbo/turbo-test.log +262 -336
- package/CHANGELOG.md +12 -0
- package/dist/coValueCore.js +8 -1
- package/dist/coValueCore.js.map +1 -1
- package/dist/index.js +4 -4
- package/dist/index.js.map +1 -1
- package/dist/localNode.js +3 -0
- package/dist/localNode.js.map +1 -1
- package/dist/storage/FileSystem.js +48 -57
- package/dist/storage/FileSystem.js.map +1 -1
- package/dist/storage/chunksAndKnownStates.js +2 -3
- package/dist/storage/chunksAndKnownStates.js.map +1 -1
- package/dist/storage/index.js +295 -215
- package/dist/storage/index.js.map +1 -1
- package/dist/streamUtils.js +34 -30
- package/dist/streamUtils.js.map +1 -1
- package/dist/sync.js +48 -40
- package/dist/sync.js.map +1 -1
- package/dist/tests/account.test.js +2 -3
- package/dist/tests/account.test.js.map +1 -1
- package/dist/tests/sync.test.js +183 -182
- package/dist/tests/sync.test.js.map +1 -1
- package/package.json +4 -4
- package/src/coValueCore.ts +8 -1
- package/src/index.ts +5 -10
- package/src/localNode.ts +4 -0
- package/src/storage/FileSystem.ts +85 -105
- package/src/storage/chunksAndKnownStates.ts +3 -4
- package/src/storage/index.ts +413 -310
- package/src/streamUtils.ts +47 -43
- package/src/sync.ts +68 -76
- package/src/tests/account.test.ts +3 -4
- package/src/tests/sync.test.ts +731 -798
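
The headline change in the `src/storage/index.ts` diff below is the removal of the `effect` dependency: the `import { Effect, Either, Queue, Stream, SynchronizedRef } from "effect"` import and the `FSErr` error type are dropped, and the `Effect.gen`/`yield*` pipelines are rewritten as plain `async`/`await` methods whose failures are caught and logged instead of being threaded through a typed error channel (the other storage and sync files in the list above presumably change in step). A minimal sketch of the migration pattern, using an illustrative mini-API rather than cojson's real one:

```ts
// Hypothetical mini-API, for illustration only.
interface MiniFS {
    listFiles(): Promise<string[]>;
}

class MiniStorage {
    constructor(public fs: MiniFS) {}

    // 0.7.18 style, for comparison (required the "effect" package, and
    // a listFiles that returned an Effect instead of a Promise):
    //   loadFiles(): Effect.Effect<string[], FSErr> {
    //       return Effect.gen(this, function* () {
    //           return yield* this.fs.listFiles();
    //       });
    //   }

    // 0.7.26 style: plain promises; errors are logged at the edges
    // rather than typed as FSErr.
    async loadFiles(): Promise<string[] | undefined> {
        try {
            return await this.fs.listFiles();
        } catch (e) {
            console.error("Error listing files", e);
            return undefined;
        }
    }
}
```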
package/src/storage/index.ts
CHANGED
```diff
@@ -1,4 +1,3 @@
-import { Effect, Either, Queue, Stream, SynchronizedRef } from "effect";
 import { RawCoID } from "../ids.js";
 import { CoValueHeader, Transaction } from "../coValueCore.js";
 import { Signature } from "../crypto/crypto.js";
@@ -18,7 +17,6 @@ import {
 } from "./chunksAndKnownStates.js";
 import {
     BlockFilename,
-    FSErr,
     FileSystem,
     WalEntry,
     WalFilename,
@@ -28,7 +26,9 @@ import {
     writeBlock,
     writeToWal,
 } from "./FileSystem.js";
-export type {
+export type { BlockFilename, WalFilename } from "./FileSystem.js";
+
+const MAX_N_LEVELS = 3;

 export type CoValueChunk = {
     header?: CoValueHeader;
@@ -42,410 +42,511 @@ export type CoValueChunk = {
 };

 export class LSMStorage<WH, RH, FS extends FileSystem<WH, RH>> {
-    currentWal:
-    coValues:
+    currentWal: WH | undefined;
+    coValues: {
         [id: RawCoID]: CoValueChunk | undefined;
-    }
+    };
     fileCache: string[] | undefined;
     headerCache = new Map<
         BlockFilename,
         { [id: RawCoID]: { start: number; length: number } }
     >();
+    blockFileHandles = new Map<
+        BlockFilename,
+        Promise<{ handle: RH; size: number }>
+    >();

     constructor(
         public fs: FS,
         public fromLocalNode: IncomingSyncStream,
         public toLocalNode: OutgoingSyncQueue,
     ) {
-        this.coValues =
-        this.currentWal =
+        this.coValues = {};
+        this.currentWal = undefined;
+
+        let nMsg = 0;

-
-
-
+        const processMessages = async () => {
+            for await (const msg of fromLocalNode) {
+                console.log("Storage msg start", nMsg);
+                try {
+                    if (msg === "Disconnected" || msg === "PingTimeout") {
+                        throw new Error("Unexpected Disconnected message");
+                    }
                     if (msg.action === "done") {
                         return;
                     }

                     if (msg.action === "content") {
-
+                        await this.handleNewContent(msg);
                     } else {
-
+                        await this.sendNewContent(msg.id, msg, undefined);
                     }
-        })
-
-
-
+                } catch (e) {
+                    console.error(
+                        new Error(
+                            `Error reading from localNode, handling msg\n\n${JSON.stringify(
+                                msg,
+                                (k, v) =>
+                                    k === "changes" || k === "encryptedChanges"
+                                        ? v.slice(0, 20) + "..."
+                                        : v,
+                            )}`,
+                            { cause: e },
+                        ),
+                    );
+                }
+                console.log("Storage msg end", nMsg);
+                nMsg++;
+            }
+        };

-
-
+        processMessages().catch((e) =>
+            console.error("Error in processMessages in storage", e),
+        );

-
-
-
-
-
-
-            this.sendNewContentInner(coValues, id, known, asDependencyOf),
+        setTimeout(
+            () =>
+                this.compact().catch((e) => {
+                    console.error("Error while compacting", e);
+                }),
+            20000,
         );
     }

-
-        coValues: { [id: `co_z${string}`]: CoValueChunk | undefined },
+    async sendNewContent(
         id: RawCoID,
         known: CoValueKnownState | undefined,
         asDependencyOf: RawCoID | undefined,
-    )
-
-        FSErr,
-        never
-    > {
-        return Effect.gen(this, function* () {
-            let coValue = coValues[id];
-
-            if (!coValue) {
-                coValue = yield* this.loadCoValue(id, this.fs);
-            }
+    ) {
+        let coValue = this.coValues[id];

-
-
+        if (!coValue) {
+            coValue = await this.loadCoValue(id, this.fs);
+        }
+
+        if (!coValue) {
+            this.toLocalNode
+                .push({
                 id: id,
                 action: "known",
                 header: false,
                 sessions: {},
                 asDependencyOf,
-        })
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            for (const change of parsedChanges) {
-                if (
-                    change.op === "set" &&
-                    change.key.startsWith("co_")
-                ) {
-                    dependedOnAccounts.add(change.key);
-                }
+                })
+                .catch((e) => console.error("Error while pushing known", e));
+
+            return;
+        }
+
+        if (!known?.header && coValue.header?.ruleset.type === "ownedByGroup") {
+            await this.sendNewContent(
+                coValue.header.ruleset.group,
+                undefined,
+                asDependencyOf || id,
+            );
+        } else if (!known?.header && coValue.header?.ruleset.type === "group") {
+            const dependedOnAccounts = new Set();
+            for (const session of Object.values(coValue.sessionEntries)) {
+                for (const entry of session) {
+                    for (const tx of entry.transactions) {
+                        if (tx.privacy === "trusting") {
+                            const parsedChanges = JSON.parse(tx.changes);
+                            for (const change of parsedChanges) {
+                                if (
+                                    change.op === "set" &&
+                                    change.key.startsWith("co_")
+                                ) {
+                                    dependedOnAccounts.add(change.key);
                                 }
                             }
                         }
                     }
                 }
-            for (const account of dependedOnAccounts) {
-                coValues = yield* this.sendNewContentInner(
-                    coValues,
-                    account as CoID<RawCoValue>,
-                    undefined,
-                    asDependencyOf || id,
-                );
-            }
             }
+            for (const account of dependedOnAccounts) {
+                await this.sendNewContent(
+                    account as CoID<RawCoValue>,
+                    undefined,
+                    asDependencyOf || id,
+                );
+            }
+        }

-
-
-
-            known,
-        ).map((message) => ({ ...message, asDependencyOf }));
+        const newContentMessages = contentSinceChunk(id, coValue, known).map(
+            (message) => ({ ...message, asDependencyOf }),
+        );

-
+        const ourKnown: CoValueKnownState = chunkToKnownState(id, coValue);

-
+        this.toLocalNode
+            .push({
             action: "known",
             ...ourKnown,
             asDependencyOf,
-        })
+            })
+            .catch((e) => console.error("Error while pushing known", e));
+
+        for (const message of newContentMessages) {
+            if (Object.keys(message.new).length === 0) continue;
+            this.toLocalNode
+                .push(message)
+                .catch((e) =>
+                    console.error("Error while pushing new content", e),
+                );
+        }

-
-
-
+        this.coValues[id] = coValue;
+    }
+
+    async withWAL(handler: (wal: WH) => Promise<void>) {
+        if (!this.currentWal) {
+            this.currentWal = await this.fs.createFile(
+                `wal-${Date.now()}-${Math.random()
+                    .toString(36)
+                    .slice(2)}.jsonl`,
+            );
+        }
+        await handler(this.currentWal);
+    }
+
+    async handleNewContent(newContent: NewContentMessage) {
+        const coValue = this.coValues[newContent.id];
+
+        const newContentAsChunk: CoValueChunk = {
+            header: newContent.header,
+            sessionEntries: Object.fromEntries(
+                Object.entries(newContent.new).map(
+                    ([sessionID, newInSession]) => [
+                        sessionID,
+                        [
+                            {
+                                after: newInSession.after,
+                                lastSignature: newInSession.lastSignature,
+                                transactions: newInSession.newTransactions,
+                            },
+                        ],
+                    ],
+                ),
+            ),
+        };
+
+        if (!coValue) {
+            if (newContent.header) {
+                // console.log("Creating in WAL", newContent.id);
+                await this.withWAL((wal) =>
+                    writeToWal(wal, this.fs, newContent.id, newContentAsChunk),
+                );
+
+                this.coValues[newContent.id] = newContentAsChunk;
+            } else {
+                console.warn(
+                    "Incontiguous incoming update for " + newContent.id,
+                );
+                return;
             }
+        } else {
+            const merged = mergeChunks(coValue, newContentAsChunk);
+            if (merged === "nonContigous") {
+                console.warn(
+                    "Non-contigous new content for " + newContent.id,
+                    Object.entries(coValue.sessionEntries).map(
+                        ([session, entries]) =>
+                            entries.map((entry) => ({
+                                session: session,
+                                after: entry.after,
+                                length: entry.transactions.length,
+                            })),
+                    ),
+                    Object.entries(newContentAsChunk.sessionEntries).map(
+                        ([session, entries]) =>
+                            entries.map((entry) => ({
+                                session: session,
+                                after: entry.after,
+                                length: entry.transactions.length,
+                            })),
+                    ),
+                );
+            } else {
+                // console.log("Appending to WAL", newContent.id);
+                await this.withWAL((wal) =>
+                    writeToWal(wal, this.fs, newContent.id, newContentAsChunk),
+                );

-
-
+                this.coValues[newContent.id] = merged;
+            }
+        }
     }

-
-
-
-
-
-
-
-
-
-                    .toString(36)
-                    .slice(2)}.jsonl`,
-            );
-        }
-        yield* handler(newWal);
-        return newWal;
-    }),
-    );
+    async getBlockHandle(
+        blockFile: BlockFilename,
+        fs: FS,
+    ): Promise<{ handle: RH; size: number }> {
+        if (!this.blockFileHandles.has(blockFile)) {
+            this.blockFileHandles.set(blockFile, fs.openToRead(blockFile));
+        }
+
+        return this.blockFileHandles.get(blockFile)!;
     }

-
-
-
-
-
-
-
-        const newContentAsChunk: CoValueChunk = {
-            header: newContent.header,
-            sessionEntries: Object.fromEntries(
-                Object.entries(newContent.new).map(
-                    ([sessionID, newInSession]) => [
-                        sessionID,
-                        [
-                            {
-                                after: newInSession.after,
-                                lastSignature:
-                                    newInSession.lastSignature,
-                                transactions:
-                                    newInSession.newTransactions,
-                            },
-                        ],
-                    ],
-                ),
-            ),
-        };
-
-        if (!coValue) {
-            if (newContent.header) {
-                // console.log("Creating in WAL", newContent.id);
-                yield* this.withWAL((wal) =>
-                    writeToWal(
-                        wal,
-                        this.fs,
-                        newContent.id,
-                        newContentAsChunk,
-                    ),
-                );
+    async loadCoValue(id: RawCoID, fs: FS): Promise<CoValueChunk | undefined> {
+        const files = this.fileCache || (await fs.listFiles());
+        this.fileCache = files;
+        const blockFiles = (
+            files.filter((name) => name.startsWith("L")) as BlockFilename[]
+        ).sort();

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        let result;
+
+        for (const blockFile of blockFiles) {
+            let cachedHeader:
+                | { [id: RawCoID]: { start: number; length: number } }
+                | undefined = this.headerCache.get(blockFile);
+
+            const { handle, size } = await this.getBlockHandle(blockFile, fs);
+
+            // console.log("Attempting to load", id, blockFile);
+
+            if (!cachedHeader) {
+                cachedHeader = {};
+                const header = await readHeader(blockFile, handle, size, fs);
+                for (const entry of header) {
+                    cachedHeader[entry.id] = {
+                        start: entry.start,
+                        length: entry.length,
+                    };
+                }
+
+                this.headerCache.set(blockFile, cachedHeader);
+            }
+            const headerEntry = cachedHeader[id];
+
+            // console.log("Header entry", id, headerEntry);
+
+            if (headerEntry) {
+                const nextChunk = await readChunk(handle, headerEntry, fs);
+                if (result) {
+                    const merged = mergeChunks(result, nextChunk);
+
+                    if (merged === "nonContigous") {
+                        console.warn(
+                            "Non-contigous chunks while loading " + id,
+                            result,
+                            nextChunk,
                         );
-
+                    } else {
+                        result = merged;
                     }
                 } else {
-
-
-
-                    "Non-contigous new content for " + newContent.id,
-                    Object.entries(coValue.sessionEntries).map(
-                        ([session, entries]) =>
-                            entries.map((entry) => ({
-                                session: session,
-                                after: entry.after,
-                                length: entry.transactions.length,
-                            })),
-                    ),
-                    Object.entries(
-                        newContentAsChunk.sessionEntries,
-                    ).map(([session, entries]) =>
-                        entries.map((entry) => ({
-                            session: session,
-                            after: entry.after,
-                            length: entry.transactions.length,
-                        })),
-                    ),
-                );
+                    result = nextChunk;
+                }
+            }

-
-
-            // action: "known",
-            // ...chunkToKnownState(newContent.id, coValue),
-            // isCorrection: true,
-            // })
-            // );
+            // await fs.close(handle);
+        }

-
-
-
-
-
-
-
-
-
-
-
+        return result;
+    }
+
+    async compact() {
+        const fileNames = await this.fs.listFiles();
+
+        const walFiles = fileNames.filter((name) =>
+            name.startsWith("wal-"),
+        ) as WalFilename[];
+        walFiles.sort();
+
+        const coValues = new Map<RawCoID, CoValueChunk>();
+
+        console.log("Compacting WAL files", walFiles);
+        if (walFiles.length === 0) return;
+
+        const oldWal = this.currentWal;
+        this.currentWal = undefined;

-
+        if (oldWal) {
+            await this.fs.close(oldWal);
+        }
+
+        for (const fileName of walFiles) {
+            const { handle, size }: { handle: RH; size: number } =
+                await this.fs.openToRead(fileName);
+            if (size === 0) {
+                await this.fs.close(handle);
+                continue;
+            }
+            const bytes = await this.fs.read(handle, 0, size);
+
+            const decoded = textDecoder.decode(bytes);
+            const lines = decoded.split("\n");
+
+            for (const line of lines) {
+                if (line.length === 0) continue;
+                const chunk = JSON.parse(line) as WalEntry;
+
+                const existingChunk = coValues.get(chunk.id);
+
+                if (existingChunk) {
+                    const merged = mergeChunks(existingChunk, chunk);
+                    if (merged === "nonContigous") {
+                        console.log(
+                            "Non-contigous chunks in " +
+                                chunk.id +
+                                ", " +
+                                fileName,
+                            existingChunk,
+                            chunk,
+                        );
+                    } else {
+                        coValues.set(chunk.id, merged);
                     }
+                } else {
+                    coValues.set(chunk.id, chunk);
                 }
-            }
-        );
-    }
+            }

-
-
-        fs: FS,
-    ): Effect.Effect<CoValueChunk | undefined, FSErr> {
-        // return _loadChunkFromWal(id, fs);
-        return Effect.gen(this, function* () {
-            const files = this.fileCache || (yield* fs.listFiles());
-            this.fileCache = files;
-            const blockFiles = files.filter((name) =>
-                name.startsWith("hash_"),
-            ) as BlockFilename[];
-
-            for (const blockFile of blockFiles) {
-                let cachedHeader:
-                    | { [id: RawCoID]: { start: number; length: number } }
-                    | undefined = this.headerCache.get(blockFile);
-
-                const { handle, size } = yield* fs.openToRead(blockFile);
-
-                // console.log("Attempting to load", id, blockFile);
-
-                if (!cachedHeader) {
-                    cachedHeader = {};
-                    const header = yield* readHeader(
-                        blockFile,
-                        handle,
-                        size,
-                        fs,
-                    );
-                    for (const entry of header) {
-                        cachedHeader[entry.id] = {
-                            start: entry.start,
-                            length: entry.length,
-                        };
-                    }
+            await this.fs.close(handle);
+        }

-
+        const highestBlockNumber = fileNames.reduce((acc, name) => {
+            if (name.startsWith("L" + MAX_N_LEVELS)) {
+                const num = parseInt(name.split("-")[1]!);
+                if (num > acc) {
+                    return num;
                 }
-
+            }
+            return acc;
+        }, 0);

-
+        console.log([...coValues.keys()], fileNames, highestBlockNumber);

-
-
-
-
+        await writeBlock(
+            coValues,
+            MAX_N_LEVELS,
+            highestBlockNumber + 1,
+            this.fs,
+        );

-
+        for (const walFile of walFiles) {
+            await this.fs.removeFile(walFile);
+        }
+        this.fileCache = undefined;

-
-        }
+        const fileNames2 = await this.fs.listFiles();

-
-
-
+        const blockFiles = (
+            fileNames2.filter((name) => name.startsWith("L")) as BlockFilename[]
+        ).sort();

-
-
-
-        const fileNames = yield* this.fs.listFiles();
+        const blockFilesByLevelInOrder: {
+            [level: number]: BlockFilename[];
+        } = {};

-
-
-
-
+        for (const blockFile of blockFiles) {
+            const level = parseInt(blockFile.split("-")[0]!.slice(1));
+            if (!blockFilesByLevelInOrder[level]) {
+                blockFilesByLevelInOrder[level] = [];
+            }
+            blockFilesByLevelInOrder[level]!.push(blockFile);
+        }

-
+        console.log(blockFilesByLevelInOrder);

-
-
+        for (let level = MAX_N_LEVELS; level > 0; level--) {
+            const nBlocksDesired = Math.pow(2, level);
+            const blocksInLevel = blockFilesByLevelInOrder[level];

-
-
-
-
-            }
-            return undefined;
-        }),
-    );
+            if (blocksInLevel && blocksInLevel.length > nBlocksDesired) {
+                console.log("Compacting blocks in level", level, blocksInLevel);
+
+                const coValues = new Map<RawCoID, CoValueChunk>();

-            for (const
+                for (const blockFile of blocksInLevel) {
                     const { handle, size }: { handle: RH; size: number } =
-
+                        await this.getBlockHandle(blockFile, this.fs);
+
                     if (size === 0) {
-                        yield* this.fs.close(handle);
                         continue;
                     }
-                    const
-
-
-
-
-
-
-                    const chunk =
+                    const header = await readHeader(
+                        blockFile,
+                        handle,
+                        size,
+                        this.fs,
+                    );
+                    for (const entry of header) {
+                        const chunk = await readChunk(handle, entry, this.fs);

-                        const existingChunk = coValues.get(
+                        const existingChunk = coValues.get(entry.id);

                         if (existingChunk) {
                             const merged = mergeChunks(existingChunk, chunk);
-                            if (
-
+                            if (merged === "nonContigous") {
+                                console.log(
                                     "Non-contigous chunks in " +
-
+                                        entry.id +
                                         ", " +
-
+                                        blockFile,
                                     existingChunk,
                                     chunk,
                                 );
                             } else {
-                                coValues.set(
+                                coValues.set(entry.id, merged);
                             }
                         } else {
-                            coValues.set(
+                            coValues.set(entry.id, chunk);
                         }
                     }
+                }

-
+                let levelBelow = blockFilesByLevelInOrder[level - 1];
+                if (!levelBelow) {
+                    levelBelow = [];
+                    blockFilesByLevelInOrder[level - 1] = levelBelow;
                 }

-
-
-
+                const highestBlockNumberInLevelBelow = levelBelow.reduce(
+                    (acc, name) => {
+                        const num = parseInt(name.split("-")[1]!);
+                        if (num > acc) {
+                            return num;
+                        }
+                        return acc;
+                    },
+                    0,
+                );
+
+                const newBlockName = await writeBlock(
+                    coValues,
+                    level - 1,
+                    highestBlockNumberInLevelBelow + 1,
+                    this.fs,
+                );
+                levelBelow.push(newBlockName);
+
+                // delete blocks that went into this one
+                for (const blockFile of blocksInLevel) {
+                    const handle = await this.getBlockHandle(
+                        blockFile,
+                        this.fs,
+                    );
+                    await this.fs.close(handle.handle);
+                    await this.fs.removeFile(blockFile);
+                    this.blockFileHandles.delete(blockFile);
                 }
-
-
-        );
+            }
+        }

-        setTimeout(
+        setTimeout(
+            () =>
+                this.compact().catch((e) => {
+                    console.error("Error while compacting", e);
+                }),
+            5000,
+        );
     }

-    static
+    static asPeer<WH, RH, FS extends FileSystem<WH, RH>>({
         fs,
         trace,
         localNodeName = "local",
@@ -453,13 +554,15 @@ export class LSMStorage<WH, RH, FS extends FileSystem<WH, RH>> {
         fs: FS;
         trace?: boolean;
         localNodeName?: string;
-    }):
-        const [localNodeAsPeer, storageAsPeer] =
-
+    }): Peer {
+        const [localNodeAsPeer, storageAsPeer] = connectedPeers(
+            localNodeName,
+            "storage",
+            {
             peer1role: "client",
             peer2role: "server",
             trace,
-        }
+            },
         );

         new LSMStorage(fs, localNodeAsPeer.incoming, localNodeAsPeer.outgoing);
```
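
Taken together, the rewritten `compact()` and `loadCoValue()` above lay out the LSM scheme: incoming content is appended to `wal-<timestamp>-<random>.jsonl` write-ahead logs; `compact()` (scheduled 20 s after startup and re-armed 5 s after each run) folds all WAL entries into a fresh block file at level `MAX_N_LEVELS` (3); and whenever a level holds more than 2^level block files, those blocks are merged into a single block one level down, so lower levels hold fewer, larger blocks, with level 0 as the final resting place. Block filenames encode their level and sequence number, parsed as `L<level>-<number>…`; the new `getBlockHandle` additionally memoizes open read handles per block (as promises, so concurrent readers share one `openToRead`) and evicts them when compaction deletes the block. A self-contained sketch of the naming and overflow rule, with hypothetical helper names:

```ts
// Helpers mirroring the block-naming and cascade logic in compact() above
// (parseBlockName / levelsToCompact are illustrative, not cojson exports).
const MAX_N_LEVELS = 3;

// Block files look like "L<level>-<number>..."; parse both fields the way
// the diff does: parseInt(name.split("-")[0]!.slice(1)) and split("-")[1].
function parseBlockName(name: string): { level: number; num: number } {
    return {
        level: parseInt(name.split("-")[0]!.slice(1)),
        num: parseInt(name.split("-")[1]!),
    };
}

// A level overflows once it holds more than 2^level blocks; its blocks are
// then merged into one new block at (level - 1). Level 0 never overflows.
function levelsToCompact(blockFiles: string[]): number[] {
    const countByLevel: { [level: number]: number } = {};
    for (const name of blockFiles) {
        const { level } = parseBlockName(name);
        countByLevel[level] = (countByLevel[level] ?? 0) + 1;
    }
    const overflowing: number[] = [];
    for (let level = MAX_N_LEVELS; level > 0; level--) {
        if ((countByLevel[level] ?? 0) > Math.pow(2, level)) {
            overflowing.push(level);
        }
    }
    return overflowing;
}

// Nine L3 blocks exceed 2^3 = 8, so level 3 would be compacted:
const names = Array.from({ length: 9 }, (_, i) => `L3-${i + 1}`);
console.log(levelsToCompact(names)); // -> [3]
```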