cojson 0.7.18 → 0.7.23
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +2 -2
- package/.turbo/turbo-lint.log +0 -4
- package/.turbo/turbo-test.log +272 -271
- package/CHANGELOG.md +6 -0
- package/dist/storage/FileSystem.js +7 -2
- package/dist/storage/FileSystem.js.map +1 -1
- package/dist/storage/index.js +116 -11
- package/dist/storage/index.js.map +1 -1
- package/dist/streamUtils.js +5 -3
- package/dist/streamUtils.js.map +1 -1
- package/dist/sync.js +2 -7
- package/dist/sync.js.map +1 -1
- package/dist/tests/account.test.js.map +1 -1
- package/package.json +1 -1
- package/src/storage/FileSystem.ts +12 -5
- package/src/storage/index.ts +218 -15
- package/src/streamUtils.ts +17 -11
- package/src/sync.ts +4 -7
- package/src/tests/account.test.ts +8 -6
package/src/storage/index.ts
CHANGED

@@ -1,4 +1,11 @@
-import {
+import {
+    Effect,
+    Either,
+    Queue,
+    Stream,
+    SynchronizedRef,
+    Deferred,
+} from "effect";
 import { RawCoID } from "../ids.js";
 import { CoValueHeader, Transaction } from "../coValueCore.js";
 import { Signature } from "../crypto/crypto.js";

@@ -30,6 +37,8 @@ import {
 } from "./FileSystem.js";
 export type { FSErr, BlockFilename, WalFilename } from "./FileSystem.js";

+const MAX_N_LEVELS = 3;
+
 export type CoValueChunk = {
     header?: CoValueHeader;
     sessionEntries: {

@@ -51,6 +60,10 @@ export class LSMStorage<WH, RH, FS extends FileSystem<WH, RH>> {
         BlockFilename,
         { [id: RawCoID]: { start: number; length: number } }
     >();
+    blockFileHandles = new Map<
+        BlockFilename,
+        Deferred.Deferred<{ handle: RH; size: number }, FSErr>
+    >();

     constructor(
         public fs: FS,

@@ -192,7 +205,7 @@ export class LSMStorage<WH, RH, FS extends FileSystem<WH, RH>> {
         let newWal = wal;
         if (!newWal) {
             newWal = yield* this.fs.createFile(
-                `wal-${
+                `wal-${Date.now()}-${Math.random()
                     .toString(36)
                     .slice(2)}.jsonl`,
             );
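The WAL naming change above gives each new write-ahead log a millisecond timestamp plus a random base-36 suffix, which makes collisions between concurrently created WALs unlikely. A standalone sketch of the scheme (the helper name is illustrative, not a cojson export):

// Collision-resistant WAL names: timestamp + random base-36 suffix.
const walFilename = (): string =>
    `wal-${Date.now()}-${Math.random().toString(36).slice(2)}.jsonl`;

console.log(walFilename()); // e.g. "wal-1718000000000-k3j9xq2w.jsonl"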
@@ -314,24 +327,63 @@ export class LSMStorage<WH, RH, FS extends FileSystem<WH, RH>> {
         );
     }

-
+    getBlockHandle(
+        blockFile: BlockFilename,
+        fs: FS,
+    ): Effect.Effect<{ handle: RH; size: number }, FSErr> {
+        return Effect.gen(this, function* () {
+            let handleAndSize = this.blockFileHandles.get(blockFile);
+            if (!handleAndSize) {
+                handleAndSize = yield* Deferred.make<
+                    { handle: RH; size: number },
+                    FSErr
+                >();
+                this.blockFileHandles.set(blockFile, handleAndSize);
+                yield* Deferred.complete(
+                    handleAndSize,
+                    fs.openToRead(blockFile),
+                );
+            }
+
+            return yield* Deferred.await(handleAndSize);
+        });
+    }
+
     loadCoValue(
         id: RawCoID,
         fs: FS,
     ): Effect.Effect<CoValueChunk | undefined, FSErr> {
-        // return _loadChunkFromWal(id, fs);
         return Effect.gen(this, function* () {
             const files = this.fileCache || (yield* fs.listFiles());
             this.fileCache = files;
-            const blockFiles =
-                name.startsWith("
-            )
+            const blockFiles = (
+                files.filter((name) => name.startsWith("L")) as BlockFilename[]
+            ).sort();
+
+            let result;

             for (const blockFile of blockFiles) {
                 let cachedHeader:
                     | { [id: RawCoID]: { start: number; length: number } }
                     | undefined = this.headerCache.get(blockFile);

-
+                let handleAndSize = this.blockFileHandles.get(blockFile);
+                if (!handleAndSize) {
+                    handleAndSize = yield* Deferred.make<
+                        { handle: RH; size: number },
+                        FSErr
+                    >();
+                    this.blockFileHandles.set(blockFile, handleAndSize);
+                    yield* Deferred.complete(
+                        handleAndSize,
+                        fs.openToRead(blockFile),
+                    );
+                }
+
+                const { handle, size } = yield* this.getBlockHandle(
+                    blockFile,
+                    fs,
+                );

                 // console.log("Attempting to load", id, blockFile);

@@ -356,17 +408,29 @@ export class LSMStorage<WH, RH, FS extends FileSystem<WH, RH>> {

             // console.log("Header entry", id, headerEntry);

-                let result;
                 if (headerEntry) {
-
+                    const nextChunk = yield* readChunk(handle, headerEntry, fs);
+                    if (result) {
+                        const merged = mergeChunks(result, nextChunk);
+
+                        if (Either.isRight(merged)) {
+                            yield* Effect.logWarning(
+                                "Non-contigous chunks while loading " + id,
+                                result,
+                                nextChunk,
+                            );
+                        } else {
+                            result = merged.left;
+                        }
+                    } else {
+                        result = nextChunk;
+                    }
                 }

-                yield* fs.close(handle);
-
-                return result;
+                // yield* fs.close(handle);
             }

-            return
+            return result;
         });
     }
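getBlockHandle, introduced above, caches one Deferred per block file: the first caller installs the Deferred and completes it with fs.openToRead, and every later or concurrent caller awaits the same result, so each block file is opened at most once. A minimal self-contained sketch of the pattern, mirroring the diff's Effect style but with a plain string key and a stubbed open in place of cojson's FileSystem API (openToRead and Handle here are illustrative stand-ins):

import { Deferred, Effect } from "effect";

type Handle = { handle: number; size: number };

// Stub for fs.openToRead: any effect that opens a file.
const openToRead = (file: string): Effect.Effect<Handle, Error> =>
    Effect.sync(() => {
        console.log("opening", file); // runs at most once per file
        return { handle: 42, size: 1024 };
    });

const handles = new Map<string, Deferred.Deferred<Handle, Error>>();

const getHandle = (file: string): Effect.Effect<Handle, Error> =>
    Effect.gen(function* () {
        let deferred = handles.get(file);
        if (!deferred) {
            // First caller installs the Deferred and starts the open;
            // everyone else awaits the same completion (success or failure).
            deferred = yield* Deferred.make<Handle, Error>();
            handles.set(file, deferred);
            yield* Deferred.complete(deferred, openToRead(file));
        }
        return yield* Deferred.await(deferred);
    });

// Usage: both lookups share one underlying open.
Effect.runPromise(
    Effect.zip(getHandle("L3-1"), getHandle("L3-1")),
).then(console.log);

Note that the hunk also leaves an inlined copy of the same Deferred setup in loadCoValue immediately before the getBlockHandle call, so the cache check currently runs twice per block file.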
@@ -434,11 +498,150 @@ export class LSMStorage<WH, RH, FS extends FileSystem<WH, RH>> {
             yield* this.fs.close(handle);
         }

-
+        const highestBlockNumber = fileNames.reduce((acc, name) => {
+            if (name.startsWith("L" + MAX_N_LEVELS)) {
+                const num = parseInt(name.split("-")[1]!);
+                if (num > acc) {
+                    return num;
+                }
+            }
+            return acc;
+        }, 0);
+
+        console.log(
+            [...coValues.keys()],
+            fileNames,
+            highestBlockNumber,
+        );
+
+        yield* writeBlock(
+            coValues,
+            MAX_N_LEVELS,
+            highestBlockNumber + 1,
+            this.fs,
+        );
+
         for (const walFile of walFiles) {
             yield* this.fs.removeFile(walFile);
         }
         this.fileCache = undefined;
+
+        const fileNames2 = yield* this.fs.listFiles();
+
+        const blockFiles = (
+            fileNames2.filter((name) =>
+                name.startsWith("L"),
+            ) as BlockFilename[]
+        ).sort();
+
+        const blockFilesByLevelInOrder: {
+            [level: number]: BlockFilename[];
+        } = {};
+
+        for (const blockFile of blockFiles) {
+            const level = parseInt(blockFile.split("-")[0]!.slice(1));
+            if (!blockFilesByLevelInOrder[level]) {
+                blockFilesByLevelInOrder[level] = [];
+            }
+            blockFilesByLevelInOrder[level]!.push(blockFile);
+        }
+
+        console.log(blockFilesByLevelInOrder);
+
+        for (let level = MAX_N_LEVELS; level > 0; level--) {
+            const nBlocksDesired = Math.pow(2, level);
+            const blocksInLevel = blockFilesByLevelInOrder[level];
+
+            if (
+                blocksInLevel &&
+                blocksInLevel.length > nBlocksDesired
+            ) {
+                yield* Effect.log("Compacting blocks in level", level, blocksInLevel);
+
+                const coValues = new Map<RawCoID, CoValueChunk>();
+
+                for (const blockFile of blocksInLevel) {
+                    const {
+                        handle,
+                        size,
+                    }: { handle: RH; size: number } =
+                        yield* this.getBlockHandle(blockFile, this.fs);
+
+                    if (size === 0) {
+                        continue;
+                    }
+                    const header = yield* readHeader(
+                        blockFile,
+                        handle,
+                        size,
+                        this.fs,
+                    );
+                    for (const entry of header) {
+                        const chunk = yield* readChunk(
+                            handle,
+                            entry,
+                            this.fs,
+                        );
+
+                        const existingChunk = coValues.get(entry.id);
+
+                        if (existingChunk) {
+                            const merged = mergeChunks(
+                                existingChunk,
+                                chunk,
+                            );
+                            if (Either.isRight(merged)) {
+                                yield* Effect.logWarning(
+                                    "Non-contigous chunks in " +
+                                        entry.id +
+                                        ", " +
+                                        blockFile,
+                                    existingChunk,
+                                    chunk,
+                                );
+                            } else {
+                                coValues.set(entry.id, merged.left);
+                            }
+                        } else {
+                            coValues.set(entry.id, chunk);
+                        }
+                    }
+                }
+
+                let levelBelow = blockFilesByLevelInOrder[level - 1];
+                if (!levelBelow) {
+                    levelBelow = [];
+                    blockFilesByLevelInOrder[level - 1] = levelBelow;
+                }
+
+                const highestBlockNumberInLevelBelow =
+                    levelBelow.reduce((acc, name) => {
+                        const num = parseInt(name.split("-")[1]!);
+                        if (num > acc) {
+                            return num;
+                        }
+                        return acc;
+                    }, 0);
+
+                const newBlockName = yield* writeBlock(
+                    coValues,
+                    level - 1,
+                    highestBlockNumberInLevelBelow + 1,
+                    this.fs,
+                );
+                levelBelow.push(newBlockName);
+
+                // delete blocks that went into this one
+                for (const blockFile of blocksInLevel) {
+                    const handle = yield* this.getBlockHandle(
+                        blockFile,
+                        this.fs,
+                    );
+                    yield* this.fs.close(handle.handle);
+                    yield* this.fs.removeFile(blockFile);
+                }
+            }
+        }
     }),
 );
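Taken together, the new code path treats block files as a small LSM tree: every flushed WAL becomes a fresh block at level MAX_N_LEVELS, and whenever a level accumulates more than 2^level blocks, those blocks are merged chunk-by-chunk and rewritten as one block at level - 1. A reduced sketch of just that bookkeeping, assuming block filenames of the form "L<level>-<number>" as the parsing above implies (the helper names are illustrative, not cojson exports):

// Bookkeeping behind the compaction pass, for names like "L3-7".
const MAX_N_LEVELS = 3;

function groupByLevel(blockFiles: string[]): Map<number, string[]> {
    const byLevel = new Map<number, string[]>();
    for (const file of blockFiles) {
        const level = parseInt(file.split("-")[0]!.slice(1)); // "L3-7" -> 3
        const bucket = byLevel.get(level) ?? [];
        bucket.push(file);
        byLevel.set(level, bucket);
    }
    return byLevel;
}

// A level overflows once it holds more than 2^level blocks; its blocks
// are then merged into a single new block one level down.
function overflowingLevels(byLevel: Map<number, string[]>): number[] {
    const result: number[] = [];
    for (let level = MAX_N_LEVELS; level > 0; level--) {
        if ((byLevel.get(level) ?? []).length > Math.pow(2, level)) {
            result.push(level);
        }
    }
    return result;
}

const files = Array.from({ length: 9 }, (_, i) => `L3-${i + 1}`);
console.log(overflowingLevels(groupByLevel(files))); // [3], since 9 > 2^3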
package/src/streamUtils.ts
CHANGED

@@ -47,18 +47,24 @@ export function newQueuePair(
     const queue = yield* Queue.unbounded<SyncMessage>();

     if (options.traceAs) {
-        return [
-
-
-
-
-
-
-
-
-
+        return [
+            Stream.fromQueue(queue).pipe(
+                Stream.tap((msg) =>
+                    Console.debug(
+                        options.traceAs,
+                        JSON.stringify(
+                            msg,
+                            (k, v) =>
+                                k === "changes" || k === "encryptedChanges"
+                                    ? v.slice(0, 20) + "..."
+                                    : v,
+                            2,
+                        ),
+                    ),
+                ),
             ),
-
+            queue,
+        ];
     } else {
         return [Stream.fromQueue(queue), queue];
     }
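The rewritten traceAs branch taps every message flowing off the queue and debug-logs it, using a JSON.stringify replacer that trims the bulky "changes" and "encryptedChanges" payloads to their first 20 characters. The replacer is worth seeing on its own; a standalone sketch with an invented sample message shape, plus a typeof guard that the original omits:

// Redacting replacer: trim long change payloads before logging.
const redact = (key: string, value: unknown) =>
    (key === "changes" || key === "encryptedChanges") &&
    typeof value === "string"
        ? value.slice(0, 20) + "..."
        : value;

const msg = {
    action: "content",
    id: "co_zExample", // stand-in CoValue ID
    encryptedChanges: "x".repeat(200), // stands in for a real payload
};

console.log(JSON.stringify(msg, redact, 2));
// ...
//   "encryptedChanges": "xxxxxxxxxxxxxxxxxxxx..."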
package/src/sync.ts
CHANGED

@@ -3,7 +3,7 @@ import { CoValueHeader, Transaction } from "./coValueCore.js";
 import { CoValueCore } from "./coValueCore.js";
 import { LocalNode, newLoadingState } from "./localNode.js";
 import { RawCoID, SessionID } from "./ids.js";
-import { Effect, Queue, Stream } from "effect";
+import { Data, Effect, Queue, Stream } from "effect";

 export type CoValueKnownState = {
     id: RawCoID;

@@ -56,12 +56,9 @@ export type DoneMessage = {

 export type PeerID = string;

-export class DisconnectedError extends
-
-
-        super(message);
-    }
-}
+export class DisconnectedError extends Data.TaggedError("DisconnectedError")<{
+    message: string;
+}> {}

 export class PingTimeoutError extends Error {
     readonly _tag = "PingTimeoutError";
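Moving DisconnectedError to Data.TaggedError is more than a cleanup: the generated class carries a literal _tag field, so the error is tracked in Effect's typed error channel and can be recovered selectively with Effect.catchTag. A minimal sketch of the pattern (the recovery logic is invented for illustration):

import { Data, Effect } from "effect";

class DisconnectedError extends Data.TaggedError("DisconnectedError")<{
    message: string;
}> {}

// A computation that fails with the tagged error.
const risky = Effect.fail(new DisconnectedError({ message: "peer went away" }));

// catchTag recovers from exactly this tag and nothing else.
const recovered = risky.pipe(
    Effect.catchTag("DisconnectedError", (e) =>
        Effect.succeed(`reconnecting after: ${e.message}`),
    ),
);

Effect.runPromise(recovered).then(console.log);
// "reconnecting after: peer went away"

PingTimeoutError, by contrast, still hand-rolls its _tag on a plain Error subclass, as the context lines above show.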
@@ -53,13 +53,15 @@ test("Can create account with one node, and then load it on another", async () =
|
|
|
53
53
|
map.set("foo", "bar", "private");
|
|
54
54
|
expect(map.get("foo")).toEqual("bar");
|
|
55
55
|
|
|
56
|
-
const [node1asPeer, node2asPeer] = await Effect.runPromise(
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
56
|
+
const [node1asPeer, node2asPeer] = await Effect.runPromise(
|
|
57
|
+
connectedPeers("node1", "node2", {
|
|
58
|
+
trace: true,
|
|
59
|
+
peer1role: "server",
|
|
60
|
+
peer2role: "client",
|
|
61
|
+
}),
|
|
62
|
+
);
|
|
61
63
|
|
|
62
|
-
console.log("After connected peers")
|
|
64
|
+
console.log("After connected peers");
|
|
63
65
|
|
|
64
66
|
node.syncManager.addPeer(node2asPeer);
|
|
65
67
|
|