cojson 0.7.23 → 0.7.26
This diff shows the changes between publicly released versions of this package as they appear in their public registry. It is provided for informational purposes only.
- package/.turbo/turbo-build.log +2 -2
- package/.turbo/turbo-test.log +264 -339
- package/CHANGELOG.md +6 -0
- package/dist/coValueCore.js +8 -1
- package/dist/coValueCore.js.map +1 -1
- package/dist/index.js +4 -4
- package/dist/index.js.map +1 -1
- package/dist/localNode.js +3 -0
- package/dist/localNode.js.map +1 -1
- package/dist/storage/FileSystem.js +48 -62
- package/dist/storage/FileSystem.js.map +1 -1
- package/dist/storage/chunksAndKnownStates.js +2 -3
- package/dist/storage/chunksAndKnownStates.js.map +1 -1
- package/dist/storage/index.js +285 -310
- package/dist/storage/index.js.map +1 -1
- package/dist/streamUtils.js +34 -32
- package/dist/streamUtils.js.map +1 -1
- package/dist/sync.js +48 -35
- package/dist/sync.js.map +1 -1
- package/dist/tests/account.test.js +2 -3
- package/dist/tests/account.test.js.map +1 -1
- package/dist/tests/sync.test.js +183 -182
- package/dist/tests/sync.test.js.map +1 -1
- package/package.json +4 -4
- package/src/coValueCore.ts +8 -1
- package/src/index.ts +5 -10
- package/src/localNode.ts +4 -0
- package/src/storage/FileSystem.ts +83 -110
- package/src/storage/chunksAndKnownStates.ts +3 -4
- package/src/storage/index.ts +391 -491
- package/src/streamUtils.ts +46 -48
- package/src/sync.ts +68 -73
- package/src/tests/account.test.ts +5 -8
- package/src/tests/sync.test.ts +731 -798
package/src/storage/index.ts
CHANGED
@@ -1,11 +1,3 @@
-import {
-    Effect,
-    Either,
-    Queue,
-    Stream,
-    SynchronizedRef,
-    Deferred,
-} from "effect";
 import { RawCoID } from "../ids.js";
 import { CoValueHeader, Transaction } from "../coValueCore.js";
 import { Signature } from "../crypto/crypto.js";
@@ -25,7 +17,6 @@ import {
 } from "./chunksAndKnownStates.js";
 import {
     BlockFilename,
-    FSErr,
     FileSystem,
     WalEntry,
     WalFilename,
@@ -35,7 +26,7 @@ import {
     writeBlock,
     writeToWal,
 } from "./FileSystem.js";
-export type {
+export type { BlockFilename, WalFilename } from "./FileSystem.js";

 const MAX_N_LEVELS = 3;

@@ -51,10 +42,10 @@ export type CoValueChunk = {
 };

 export class LSMStorage<WH, RH, FS extends FileSystem<WH, RH>> {
-    currentWal:
-    coValues:
+    currentWal: WH | undefined;
+    coValues: {
         [id: RawCoID]: CoValueChunk | undefined;
-    }
+    };
     fileCache: string[] | undefined;
     headerCache = new Map<
         BlockFilename,
@@ -62,7 +53,7 @@ export class LSMStorage<WH, RH, FS extends FileSystem<WH, RH>> {
     >();
     blockFileHandles = new Map<
         BlockFilename,
-
+        Promise<{ handle: RH; size: number }>
     >();

     constructor(
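Note: the rest of the diff applies one transformation throughout this file. The dependency on "effect" is dropped, `Effect.gen` generator bodies become plain `async` methods, `yield*` becomes `await`, `Deferred`-backed caches become memoized `Promise`s, and `Either` results give way to plain values or sentinel strings. A minimal sketch of the pattern (schematic, not the package's actual code):

    import { Effect } from "effect";

    // 0.7.23 style: an Effect value, composed and run by the caller.
    const listBlocksOld = (listFiles: Effect.Effect<string[]>) =>
        Effect.gen(function* () {
            const files = yield* listFiles;
            return files.filter((name) => name.startsWith("L"));
        });

    // 0.7.26 style: a plain async function awaiting Promises.
    const listBlocksNew = async (listFiles: () => Promise<string[]>) => {
        const files = await listFiles();
        return files.filter((name) => name.startsWith("L"));
    };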
@@ -70,585 +61,492 @@ export class LSMStorage<WH, RH, FS extends FileSystem<WH, RH>> {
         public fromLocalNode: IncomingSyncStream,
         public toLocalNode: OutgoingSyncQueue,
     ) {
-        this.coValues =
-        this.currentWal =
+        this.coValues = {};
+        this.currentWal = undefined;
+
+        let nMsg = 0;

-
-
-
+        const processMessages = async () => {
+            for await (const msg of fromLocalNode) {
+                console.log("Storage msg start", nMsg);
+                try {
+                    if (msg === "Disconnected" || msg === "PingTimeout") {
+                        throw new Error("Unexpected Disconnected message");
+                    }
                     if (msg.action === "done") {
                         return;
                     }

                     if (msg.action === "content") {
-
+                        await this.handleNewContent(msg);
                     } else {
-
+                        await this.sendNewContent(msg.id, msg, undefined);
                     }
-            })
-
-
-
+                } catch (e) {
+                    console.error(
+                        new Error(
+                            `Error reading from localNode, handling msg\n\n${JSON.stringify(
+                                msg,
+                                (k, v) =>
+                                    k === "changes" || k === "encryptedChanges"
+                                        ? v.slice(0, 20) + "..."
+                                        : v,
+                            )}`,
+                            { cause: e },
+                        ),
+                    );
+                }
+                console.log("Storage msg end", nMsg);
+                nMsg++;
+            }
+        };

-
-
+        processMessages().catch((e) =>
+            console.error("Error in processMessages in storage", e),
+        );

-
-
-
-
-
-
-                this.sendNewContentInner(coValues, id, known, asDependencyOf),
+        setTimeout(
+            () =>
+                this.compact().catch((e) => {
+                    console.error("Error while compacting", e);
+                }),
+            20000,
         );
     }

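Note: the constructor's Effect stream pipeline is replaced by a `for await` loop over `fromLocalNode`, so each message is fully handled before the next one is read, and a per-message try/catch logs failures instead of tearing down the whole stream. A minimal sketch of that consumption pattern (illustrative names):

    async function* incomingDemo(): AsyncGenerator<string> {
        yield "content";
        yield "load";
    }

    async function processMessagesSketch() {
        for await (const msg of incomingDemo()) {
            try {
                // Each message is handled (and awaited) before the next is
                // pulled, as the new constructor does with handleNewContent
                // and sendNewContent.
                console.log("handling", msg);
            } catch (e) {
                console.error("error handling msg", e);
            }
        }
    }

    processMessagesSketch().catch((e) => console.error(e));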
-
-        coValues: { [id: `co_z${string}`]: CoValueChunk | undefined },
+    async sendNewContent(
         id: RawCoID,
         known: CoValueKnownState | undefined,
         asDependencyOf: RawCoID | undefined,
-    )
-
-
-
-
-        let coValue = coValues[id];
-
-        if (!coValue) {
-            coValue = yield* this.loadCoValue(id, this.fs);
-        }
+    ) {
+        let coValue = this.coValues[id];
+
+        if (!coValue) {
+            coValue = await this.loadCoValue(id, this.fs);
+        }

-
-
+        if (!coValue) {
+            this.toLocalNode
+                .push({
                     id: id,
                     action: "known",
                     header: false,
                     sessions: {},
                     asDependencyOf,
-                })
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                            for (const change of parsedChanges) {
-                                if (
-                                    change.op === "set" &&
-                                    change.key.startsWith("co_")
-                                ) {
-                                    dependedOnAccounts.add(change.key);
-                                }
+                })
+                .catch((e) => console.error("Error while pushing known", e));
+
+            return;
+        }
+
+        if (!known?.header && coValue.header?.ruleset.type === "ownedByGroup") {
+            await this.sendNewContent(
+                coValue.header.ruleset.group,
+                undefined,
+                asDependencyOf || id,
+            );
+        } else if (!known?.header && coValue.header?.ruleset.type === "group") {
+            const dependedOnAccounts = new Set();
+            for (const session of Object.values(coValue.sessionEntries)) {
+                for (const entry of session) {
+                    for (const tx of entry.transactions) {
+                        if (tx.privacy === "trusting") {
+                            const parsedChanges = JSON.parse(tx.changes);
+                            for (const change of parsedChanges) {
+                                if (
+                                    change.op === "set" &&
+                                    change.key.startsWith("co_")
+                                ) {
+                                    dependedOnAccounts.add(change.key);
                                 }
                             }
                         }
                     }
                 }
-            for (const account of dependedOnAccounts) {
-                coValues = yield* this.sendNewContentInner(
-                    coValues,
-                    account as CoID<RawCoValue>,
-                    undefined,
-                    asDependencyOf || id,
-                );
-            }
             }
+            for (const account of dependedOnAccounts) {
+                await this.sendNewContent(
+                    account as CoID<RawCoValue>,
+                    undefined,
+                    asDependencyOf || id,
+                );
+            }
+        }

-
-
-
-            known,
-        ).map((message) => ({ ...message, asDependencyOf }));
+        const newContentMessages = contentSinceChunk(id, coValue, known).map(
+            (message) => ({ ...message, asDependencyOf }),
+        );

-
+        const ourKnown: CoValueKnownState = chunkToKnownState(id, coValue);

-
+        this.toLocalNode
+            .push({
                 action: "known",
                 ...ourKnown,
                 asDependencyOf,
-            })
-
-
-
-
-
+            })
+            .catch((e) => console.error("Error while pushing known", e));
+
+        for (const message of newContentMessages) {
+            if (Object.keys(message.new).length === 0) continue;
+            this.toLocalNode
+                .push(message)
+                .catch((e) =>
+                    console.error("Error while pushing new content", e),
+                );
+        }

-
-        });
+        this.coValues[id] = coValue;
     }

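Note: `sendNewContent` now reads and updates the instance cache (`this.coValues`) directly and recurses into itself for dependencies, where the old version threaded an immutable `coValues` map through `sendNewContentInner`. The ordering stays the same: a value's owning group, and any accounts a group references, are pushed before the value itself. A reduced sketch of that dependency-first traversal (illustrative types, not the package API):

    type Id = string;

    async function sendWithDeps(
        id: Id,
        dependenciesOf: (id: Id) => Id[],
        emit: (id: Id, asDependencyOf?: Id) => Promise<void>,
        asDependencyOf?: Id,
    ): Promise<void> {
        for (const dep of dependenciesOf(id)) {
            // Dependencies are sent first, tagged with the value that
            // needed them (asDependencyOf in the real code).
            await sendWithDeps(dep, dependenciesOf, emit, asDependencyOf ?? id);
        }
        await emit(id, asDependencyOf);
    }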
-    withWAL(
-
-
-
-
-
-
-
-
-                    .toString(36)
-                    .slice(2)}.jsonl`,
-            );
-        }
-        yield* handler(newWal);
-        return newWal;
-    }),
-        );
+    async withWAL(handler: (wal: WH) => Promise<void>) {
+        if (!this.currentWal) {
+            this.currentWal = await this.fs.createFile(
+                `wal-${Date.now()}-${Math.random()
+                    .toString(36)
+                    .slice(2)}.jsonl`,
+            );
+        }
+        await handler(this.currentWal);
     }

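Note: `withWAL` now creates the write-ahead-log file lazily on first use and reuses the handle stored in `this.currentWal`; the `SynchronizedRef` wrapper from "effect" is gone. A minimal sketch of the lazy-handle pattern, assuming a `createFile` function of this shape:

    class WalHolder<WH> {
        private currentWal: WH | undefined;

        constructor(private createFile: (name: string) => Promise<WH>) {}

        async withWAL(handler: (wal: WH) => Promise<void>): Promise<void> {
            if (!this.currentWal) {
                // One WAL file is kept open until compact() closes it; the
                // name embeds a timestamp plus a random suffix.
                this.currentWal = await this.createFile(
                    `wal-${Date.now()}-${Math.random().toString(36).slice(2)}.jsonl`,
                );
            }
            await handler(this.currentWal);
        }
    }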
-    handleNewContent(
-        newContent
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                    },
-                ],
-            ],
-        ),
-    ),
-};
-
-        if (!coValue) {
-            if (newContent.header) {
-                // console.log("Creating in WAL", newContent.id);
-                yield* this.withWAL((wal) =>
-                    writeToWal(
-                        wal,
-                        this.fs,
-                        newContent.id,
-                        newContentAsChunk,
-                    ),
-                );
-
-                return {
-                    ...coValues,
-                    [newContent.id]: newContentAsChunk,
-                };
-            } else {
-                // yield*
-                // Effect.promise(() =>
-                //     this.toLocalNode.write({
-                //         action: "known",
-                //         id: newContent.id,
-                //         header: false,
-                //         sessions: {},
-                //         isCorrection: true,
-                //     })
-                // )
-                // );
-                yield* Effect.logWarning(
-                    "Incontiguous incoming update for " + newContent.id,
-                );
-                return coValues;
-            }
-        } else {
-            const merged = mergeChunks(coValue, newContentAsChunk);
-            if (Either.isRight(merged)) {
-                yield* Effect.logWarning(
-                    "Non-contigous new content for " + newContent.id,
-                    Object.entries(coValue.sessionEntries).map(
-                        ([session, entries]) =>
-                            entries.map((entry) => ({
-                                session: session,
-                                after: entry.after,
-                                length: entry.transactions.length,
-                            })),
-                    ),
-                    Object.entries(
-                        newContentAsChunk.sessionEntries,
-                    ).map(([session, entries]) =>
-                        entries.map((entry) => ({
-                            session: session,
-                            after: entry.after,
-                            length: entry.transactions.length,
-                        })),
-                    ),
-                );
+    async handleNewContent(newContent: NewContentMessage) {
+        const coValue = this.coValues[newContent.id];
+
+        const newContentAsChunk: CoValueChunk = {
+            header: newContent.header,
+            sessionEntries: Object.fromEntries(
+                Object.entries(newContent.new).map(
+                    ([sessionID, newInSession]) => [
+                        sessionID,
+                        [
+                            {
+                                after: newInSession.after,
+                                lastSignature: newInSession.lastSignature,
+                                transactions: newInSession.newTransactions,
+                            },
+                        ],
+                    ],
+                ),
+            ),
+        };

-
-
-
-
-
-
-                // );
+        if (!coValue) {
+            if (newContent.header) {
+                // console.log("Creating in WAL", newContent.id);
+                await this.withWAL((wal) =>
+                    writeToWal(wal, this.fs, newContent.id, newContentAsChunk),
+                );

-
-
-
-
-
-
-
-
-
-
-
+                this.coValues[newContent.id] = newContentAsChunk;
+            } else {
+                console.warn(
+                    "Incontiguous incoming update for " + newContent.id,
+                );
+                return;
+            }
+        } else {
+            const merged = mergeChunks(coValue, newContentAsChunk);
+            if (merged === "nonContigous") {
+                console.warn(
+                    "Non-contigous new content for " + newContent.id,
+                    Object.entries(coValue.sessionEntries).map(
+                        ([session, entries]) =>
+                            entries.map((entry) => ({
+                                session: session,
+                                after: entry.after,
+                                length: entry.transactions.length,
+                            })),
+                    ),
+                    Object.entries(newContentAsChunk.sessionEntries).map(
+                        ([session, entries]) =>
+                            entries.map((entry) => ({
+                                session: session,
+                                after: entry.after,
+                                length: entry.transactions.length,
+                            })),
+                    ),
+                );
+            } else {
+                // console.log("Appending to WAL", newContent.id);
+                await this.withWAL((wal) =>
+                    writeToWal(wal, this.fs, newContent.id, newContentAsChunk),
+                );

-
-
-
-            }),
-        );
+                this.coValues[newContent.id] = merged;
+            }
+        }
     }

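Note: `handleNewContent` keeps its logic but swaps the `Either` returned by `mergeChunks` for a plain union: the merged chunk on success, or the sentinel string "nonContigous" when the incoming transactions do not line up with what is already buffered (the spellings "nonContigous" and "Incontiguous" are the source's own). A reduced sketch of that convention on a single run of transactions (illustrative shape; the real merge works per session in chunksAndKnownStates.ts):

    type Run = { after: number; length: number };

    function mergeRuns(a: Run, b: Run): Run | "nonContigous" {
        // b must start exactly where a ends, otherwise the caller warns
        // and drops the merge, as handleNewContent does.
        if (b.after !== a.after + a.length) return "nonContigous";
        return { after: a.after, length: a.length + b.length };
    }

    console.log(mergeRuns({ after: 0, length: 3 }, { after: 3, length: 2 }));
    // -> { after: 0, length: 5 }
    console.log(mergeRuns({ after: 0, length: 3 }, { after: 5, length: 1 }));
    // -> "nonContigous"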
-    getBlockHandle(
+    async getBlockHandle(
         blockFile: BlockFilename,
         fs: FS,
-    ):
-
-
-
-            handleAndSize = yield* Deferred.make<
-                { handle: RH; size: number },
-                FSErr
-            >();
-            this.blockFileHandles.set(blockFile, handleAndSize);
-            yield* Deferred.complete(
-                handleAndSize,
-                fs.openToRead(blockFile),
-            );
-        }
+    ): Promise<{ handle: RH; size: number }> {
+        if (!this.blockFileHandles.has(blockFile)) {
+            this.blockFileHandles.set(blockFile, fs.openToRead(blockFile));
+        }

-
-        });
+        return this.blockFileHandles.get(blockFile)!;
     }

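Note: `getBlockHandle` replaces the `Deferred`-based cache with Promise memoization. The `Promise` returned by `fs.openToRead` is stored in the map immediately, so concurrent callers asking for the same block file share one open operation instead of racing. The pattern in isolation (illustrative types):

    const handles = new Map<string, Promise<{ handle: number; size: number }>>();

    function getHandle(
        file: string,
        openToRead: (f: string) => Promise<{ handle: number; size: number }>,
    ): Promise<{ handle: number; size: number }> {
        if (!handles.has(file)) {
            // Store the in-flight Promise, not the resolved value: a second
            // caller arriving before the open completes gets the same Promise.
            handles.set(file, openToRead(file));
        }
        return handles.get(file)!;
    }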
-    loadCoValue(
-
-
-
-
-
-        this.fileCache = files;
-        const blockFiles = (
-            files.filter((name) => name.startsWith("L")) as BlockFilename[]
-        ).sort();
-
-        let result;
-
-        for (const blockFile of blockFiles) {
-            let cachedHeader:
-                | { [id: RawCoID]: { start: number; length: number } }
-                | undefined = this.headerCache.get(blockFile);
-
-            let handleAndSize = this.blockFileHandles.get(blockFile);
-            if (!handleAndSize) {
-                handleAndSize = yield* Deferred.make<
-                    { handle: RH; size: number },
-                    FSErr
-                >();
-                this.blockFileHandles.set(blockFile, handleAndSize);
-                yield* Deferred.complete(
-                    handleAndSize,
-                    fs.openToRead(blockFile),
-                );
-            }
+    async loadCoValue(id: RawCoID, fs: FS): Promise<CoValueChunk | undefined> {
+        const files = this.fileCache || (await fs.listFiles());
+        this.fileCache = files;
+        const blockFiles = (
+            files.filter((name) => name.startsWith("L")) as BlockFilename[]
+        ).sort();

-
-                blockFile,
-                fs,
-            );
+        let result;

-
+        for (const blockFile of blockFiles) {
+            let cachedHeader:
+                | { [id: RawCoID]: { start: number; length: number } }
+                | undefined = this.headerCache.get(blockFile);

-
-                cachedHeader = {};
-                const header = yield* readHeader(
-                    blockFile,
-                    handle,
-                    size,
-                    fs,
-                );
-                for (const entry of header) {
-                    cachedHeader[entry.id] = {
-                        start: entry.start,
-                        length: entry.length,
-                    };
-                }
+            const { handle, size } = await this.getBlockHandle(blockFile, fs);

-
+            // console.log("Attempting to load", id, blockFile);
+
+            if (!cachedHeader) {
+                cachedHeader = {};
+                const header = await readHeader(blockFile, handle, size, fs);
+                for (const entry of header) {
+                    cachedHeader[entry.id] = {
+                        start: entry.start,
+                        length: entry.length,
+                    };
                 }
-            const headerEntry = cachedHeader[id];

-
+                this.headerCache.set(blockFile, cachedHeader);
+            }
+            const headerEntry = cachedHeader[id];
+
+            // console.log("Header entry", id, headerEntry);

-
-
-
-
+            if (headerEntry) {
+                const nextChunk = await readChunk(handle, headerEntry, fs);
+                if (result) {
+                    const merged = mergeChunks(result, nextChunk);

-
-
-
-
-
-
-                    } else {
-                        result = merged.left;
-                    }
+                    if (merged === "nonContigous") {
+                        console.warn(
+                            "Non-contigous chunks while loading " + id,
+                            result,
+                            nextChunk,
+                        );
                     } else {
-                        result =
+                        result = merged;
                     }
+                } else {
+                    result = nextChunk;
                 }
-
-            // yield* fs.close(handle);
             }

-
-    }
+            // await fs.close(handle);
+        }
+
+        return result;
     }

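Note: `loadCoValue` scans the sorted block files, reads each block's header index once and caches it in `headerCache`, then merges the per-block chunks found for the requested id. The cached index is the map the diff shows being built; in isolation (entry shape assumed from readHeader's usage here):

    type HeaderEntry = { id: string; start: number; length: number };
    type HeaderIndex = { [id: string]: { start: number; length: number } };

    // Built once per block file, then reused for every subsequent load.
    function buildHeaderIndex(header: HeaderEntry[]): HeaderIndex {
        const index: HeaderIndex = {};
        for (const entry of header) {
            index[entry.id] = { start: entry.start, length: entry.length };
        }
        return index;
    }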
     async compact() {
-        await
-            Effect.gen(this, function* () {
-                const fileNames = yield* this.fs.listFiles();
+        const fileNames = await this.fs.listFiles();

-
-
-
-
+        const walFiles = fileNames.filter((name) =>
+            name.startsWith("wal-"),
+        ) as WalFilename[];
+        walFiles.sort();

-
+        const coValues = new Map<RawCoID, CoValueChunk>();

-
-
+        console.log("Compacting WAL files", walFiles);
+        if (walFiles.length === 0) return;

-
-
-
-
-
-
-
-
+        const oldWal = this.currentWal;
+        this.currentWal = undefined;
+
+        if (oldWal) {
+            await this.fs.close(oldWal);
+        }
+
+        for (const fileName of walFiles) {
+            const { handle, size }: { handle: RH; size: number } =
+                await this.fs.openToRead(fileName);
+            if (size === 0) {
+                await this.fs.close(handle);
+                continue;
+            }
+            const bytes = await this.fs.read(handle, 0, size);
+
+            const decoded = textDecoder.decode(bytes);
+            const lines = decoded.split("\n");
+
+            for (const line of lines) {
+                if (line.length === 0) continue;
+                const chunk = JSON.parse(line) as WalEntry;
+
+                const existingChunk = coValues.get(chunk.id);
+
+                if (existingChunk) {
+                    const merged = mergeChunks(existingChunk, chunk);
+                    if (merged === "nonContigous") {
+                        console.log(
+                            "Non-contigous chunks in " +
+                                chunk.id +
+                                ", " +
+                                fileName,
+                            existingChunk,
+                            chunk,
+                        );
+                    } else {
+                        coValues.set(chunk.id, merged);
+                    }
+                } else {
+                    coValues.set(chunk.id, chunk);
+                }
+            }
+
+            await this.fs.close(handle);
+        }

-
+        const highestBlockNumber = fileNames.reduce((acc, name) => {
+            if (name.startsWith("L" + MAX_N_LEVELS)) {
+                const num = parseInt(name.split("-")[1]!);
+                if (num > acc) {
+                    return num;
+                }
+            }
+            return acc;
+        }, 0);
+
+        console.log([...coValues.keys()], fileNames, highestBlockNumber);
+
+        await writeBlock(
+            coValues,
+            MAX_N_LEVELS,
+            highestBlockNumber + 1,
+            this.fs,
+        );
+
+        for (const walFile of walFiles) {
+            await this.fs.removeFile(walFile);
+        }
+        this.fileCache = undefined;
+
+        const fileNames2 = await this.fs.listFiles();
+
+        const blockFiles = (
+            fileNames2.filter((name) => name.startsWith("L")) as BlockFilename[]
+        ).sort();
+
+        const blockFilesByLevelInOrder: {
+            [level: number]: BlockFilename[];
+        } = {};
+
+        for (const blockFile of blockFiles) {
+            const level = parseInt(blockFile.split("-")[0]!.slice(1));
+            if (!blockFilesByLevelInOrder[level]) {
+                blockFilesByLevelInOrder[level] = [];
+            }
+            blockFilesByLevelInOrder[level]!.push(blockFile);
+        }
+
+        console.log(blockFilesByLevelInOrder);
+
+        for (let level = MAX_N_LEVELS; level > 0; level--) {
+            const nBlocksDesired = Math.pow(2, level);
+            const blocksInLevel = blockFilesByLevelInOrder[level];
+
+            if (blocksInLevel && blocksInLevel.length > nBlocksDesired) {
+                console.log("Compacting blocks in level", level, blocksInLevel);
+
+                const coValues = new Map<RawCoID, CoValueChunk>();
+
+                for (const blockFile of blocksInLevel) {
                     const { handle, size }: { handle: RH; size: number } =
-
+                        await this.getBlockHandle(blockFile, this.fs);
+
                     if (size === 0) {
-                        yield* this.fs.close(handle);
                         continue;
                     }
-                    const
-
-
-
-
-
-
-                    const chunk =
+                    const header = await readHeader(
+                        blockFile,
+                        handle,
+                        size,
+                        this.fs,
+                    );
+                    for (const entry of header) {
+                        const chunk = await readChunk(handle, entry, this.fs);

-                    const existingChunk = coValues.get(
+                        const existingChunk = coValues.get(entry.id);

                         if (existingChunk) {
                             const merged = mergeChunks(existingChunk, chunk);
-                            if (
-
+                            if (merged === "nonContigous") {
+                                console.log(
                                     "Non-contigous chunks in " +
-
+                                        entry.id +
                                         ", " +
-
+                                        blockFile,
                                     existingChunk,
                                     chunk,
                                 );
                             } else {
-                                coValues.set(
+                                coValues.set(entry.id, merged);
                             }
                         } else {
-                            coValues.set(
+                            coValues.set(entry.id, chunk);
                         }
                     }
+                }

-
+                let levelBelow = blockFilesByLevelInOrder[level - 1];
+                if (!levelBelow) {
+                    levelBelow = [];
+                    blockFilesByLevelInOrder[level - 1] = levelBelow;
                 }

-                const
-
+                const highestBlockNumberInLevelBelow = levelBelow.reduce(
+                    (acc, name) => {
                         const num = parseInt(name.split("-")[1]!);
                         if (num > acc) {
                             return num;
                         }
-
-
-
-
-                console.log(
-                    [...coValues.keys()],
-                    fileNames,
-                    highestBlockNumber,
+                        return acc;
+                    },
+                    0,
                 );

-
+                const newBlockName = await writeBlock(
                     coValues,
-
-
+                    level - 1,
+                    highestBlockNumberInLevelBelow + 1,
                     this.fs,
                 );
+                levelBelow.push(newBlockName);

-
-
-
-
-
-
-
-
-
-                        name.startsWith("L"),
-                    ) as BlockFilename[]
-                ).sort();
-
-                const blockFilesByLevelInOrder: {
-                    [level: number]: BlockFilename[];
-                } = {};
-
-                for (const blockFile of blockFiles) {
-                    const level = parseInt(blockFile.split("-")[0]!.slice(1));
-                    if (!blockFilesByLevelInOrder[level]) {
-                        blockFilesByLevelInOrder[level] = [];
-                    }
-                    blockFilesByLevelInOrder[level]!.push(blockFile);
+                // delete blocks that went into this one
+                for (const blockFile of blocksInLevel) {
+                    const handle = await this.getBlockHandle(
+                        blockFile,
+                        this.fs,
+                    );
+                    await this.fs.close(handle.handle);
+                    await this.fs.removeFile(blockFile);
+                    this.blockFileHandles.delete(blockFile);
                 }
+            }
+        }

-
-
-
-
-
-
-                    if (
-                        blocksInLevel &&
-                        blocksInLevel.length > nBlocksDesired
-                    ) {
-                        yield* Effect.log("Compacting blocks in level", level, blocksInLevel);
-
-                        const coValues = new Map<RawCoID, CoValueChunk>();
-
-                        for (const blockFile of blocksInLevel) {
-                            const {
-                                handle,
-                                size,
-                            }: { handle: RH; size: number } =
-                                yield* this.getBlockHandle(blockFile, this.fs);
-
-                            if (size === 0) {
-                                continue;
-                            }
-                            const header = yield* readHeader(
-                                blockFile,
-                                handle,
-                                size,
-                                this.fs,
-                            );
-                            for (const entry of header) {
-                                const chunk = yield* readChunk(
-                                    handle,
-                                    entry,
-                                    this.fs,
-                                );
-
-                                const existingChunk = coValues.get(entry.id);
-
-                                if (existingChunk) {
-                                    const merged = mergeChunks(
-                                        existingChunk,
-                                        chunk,
-                                    );
-                                    if (Either.isRight(merged)) {
-                                        yield* Effect.logWarning(
-                                            "Non-contigous chunks in " +
-                                                entry.id +
-                                                ", " +
-                                                blockFile,
-                                            existingChunk,
-                                            chunk,
-                                        );
-                                    } else {
-                                        coValues.set(entry.id, merged.left);
-                                    }
-                                } else {
-                                    coValues.set(entry.id, chunk);
-                                }
-                            }
-                        }
-
-                        let levelBelow = blockFilesByLevelInOrder[level - 1];
-                        if (!levelBelow) {
-                            levelBelow = [];
-                            blockFilesByLevelInOrder[level - 1] = levelBelow;
-                        }
-
-                        const highestBlockNumberInLevelBelow =
-                            levelBelow.reduce((acc, name) => {
-                                const num = parseInt(name.split("-")[1]!);
-                                if (num > acc) {
-                                    return num;
-                                }
-                                return acc;
-                            }, 0);
-
-                        const newBlockName = yield* writeBlock(
-                            coValues,
-                            level - 1,
-                            highestBlockNumberInLevelBelow + 1,
-                            this.fs,
-                        );
-                        levelBelow.push(newBlockName);
-
-                        // delete blocks that went into this one
-                        for (const blockFile of blocksInLevel) {
-                            const handle = yield* this.getBlockHandle(
-                                blockFile,
-                                this.fs,
-                            );
-                            yield* this.fs.close(handle.handle);
-                            yield* this.fs.removeFile(blockFile);
-                        }
-                    }
-                }
-            }),
+        setTimeout(
+            () =>
+                this.compact().catch((e) => {
+                    console.error("Error while compacting", e);
+                }),
+            5000,
         );
-
-        setTimeout(() => this.compact(), 5000);
     }

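Note: `compact` keeps the same LSM scheme as before. WAL entries are folded into a fresh block at level MAX_N_LEVELS (3), and then, walking levels top-down, any level holding more than 2^level blocks is merged into a single new block one level below (so level 3 tolerates 8 blocks, level 2 four, level 1 two). The new version also closes and forgets handles of merged-away blocks via `blockFileHandles.delete`. The threshold rule in isolation:

    const MAX_N_LEVELS = 3;

    // Returns the levels whose blocks should be merged one level down,
    // mirroring compact()'s `blocksInLevel.length > nBlocksDesired` check.
    function levelsToCompact(blocksPerLevel: Map<number, number>): number[] {
        const levels: number[] = [];
        for (let level = MAX_N_LEVELS; level > 0; level--) {
            if ((blocksPerLevel.get(level) ?? 0) > Math.pow(2, level)) {
                levels.push(level);
            }
        }
        return levels;
    }

    console.log(levelsToCompact(new Map([[3, 9], [2, 4]]))); // -> [3]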
-    static
+    static asPeer<WH, RH, FS extends FileSystem<WH, RH>>({
         fs,
         trace,
         localNodeName = "local",
@@ -656,13 +554,15 @@ export class LSMStorage<WH, RH, FS extends FileSystem<WH, RH>> {
         fs: FS;
         trace?: boolean;
         localNodeName?: string;
-    }):
-        const [localNodeAsPeer, storageAsPeer] =
-
+    }): Peer {
+        const [localNodeAsPeer, storageAsPeer] = connectedPeers(
+            localNodeName,
+            "storage",
+            {
                 peer1role: "client",
                 peer2role: "server",
                 trace,
-            }
+            },
         );

         new LSMStorage(fs, localNodeAsPeer.incoming, localNodeAsPeer.outgoing);