@fireproof/core-base 0.23.0 → 0.23.1-dev-issue-1057
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/apply-head-queue.js.map +1 -1
- package/bundle-not-impl.js.map +1 -1
- package/compact-strategies.js.map +1 -1
- package/crdt-clock.js.map +1 -1
- package/crdt-helpers.js.map +1 -1
- package/crdt.js.map +1 -1
- package/database.js.map +1 -1
- package/index.js.map +1 -1
- package/indexer-helpers.js.map +1 -1
- package/indexer.js.map +1 -1
- package/ledger.js.map +1 -1
- package/package.json +10 -10
- package/version.js.map +1 -1
- package/write-queue.js.map +1 -1
- package/apply-head-queue.ts +0 -72
- package/bundle-not-impl.ts +0 -4
- package/compact-strategies.ts +0 -95
- package/crdt-clock.ts +0 -192
- package/crdt-helpers.ts +0 -408
- package/crdt.ts +0 -275
- package/database.ts +0 -200
- package/index.ts +0 -15
- package/indexer-helpers.ts +0 -263
- package/indexer.ts +0 -360
- package/ledger.ts +0 -345
- package/tsconfig.json +0 -18
- package/version.ts +0 -3
- package/write-queue.ts +0 -93
package/crdt-clock.ts
DELETED
|
@@ -1,192 +0,0 @@
|
|
|
1
|
-
import { advance } from "@web3-storage/pail/clock";
|
|
2
|
-
import { root } from "@web3-storage/pail/crdt";
|
|
3
|
-
import { Logger } from "@adviser/cement";
|
|
4
|
-
|
|
5
|
-
import { clockChangesSince, toPailFetcher } from "./crdt-helpers.js";
|
|
6
|
-
import {
|
|
7
|
-
type DocUpdate,
|
|
8
|
-
type ClockHead,
|
|
9
|
-
type DocTypes,
|
|
10
|
-
type VoidFn,
|
|
11
|
-
type UnReg,
|
|
12
|
-
type SuperThis,
|
|
13
|
-
type BaseBlockstore,
|
|
14
|
-
type CarTransaction,
|
|
15
|
-
PARAM,
|
|
16
|
-
} from "@fireproof/core-types-base";
|
|
17
|
-
import { applyHeadQueue, ApplyHeadQueue } from "./apply-head-queue.js";
|
|
18
|
-
import { ensureLogger } from "@fireproof/core-runtime";
|
|
19
|
-
import { anyBlock2FPBlock } from "@fireproof/core-blockstore";
|
|
20
|
-
|
|
21
|
-
/**
 * Tracks the CRDT clock head for a ledger and fans out change notifications.
 *
 * Head mutations are funneled through an ApplyHeadQueue so concurrent
 * applyHead() calls are serialized into int_applyHead().
 */
export class CRDTClockImpl {
  // todo: track local and remote clocks independently, merge on read
  // that way we can drop the whole remote if we need to
  // should go with making sure the local clock only references locally available blockstore on write
  readonly head: ClockHead = [];

  // zoomers: fired on every applied head change, before watchers.
  readonly zoomers = new Map<string, VoidFn>();
  // watchers: receive the filtered document updates for each change.
  readonly watchers = new Map<string, (updates: DocUpdate<DocTypes>[]) => void>();
  // emptyWatchers: fired (with no payload) whenever watchers are notified.
  readonly emptyWatchers = new Map<string, VoidFn>();

  readonly blockstore: BaseBlockstore; // ready blockstore

  readonly applyHeadQueue: ApplyHeadQueue<DocTypes>;
  // Reused across int_applyHead calls while noLoader transactions accumulate;
  // cleared only after a commitTransaction (see int_applyHead).
  transaction?: CarTransaction;

  // Lifecycle hooks are intentionally no-ops in this implementation.
  async ready(): Promise<void> {
    /* no-op */
  }

  async close() {
    /* no-op */
  }

  readonly logger: Logger;
  readonly sthis: SuperThis;
  constructor(blockstore: BaseBlockstore) {
    this.sthis = blockstore.sthis;
    this.blockstore = blockstore;
    this.logger = ensureLogger(blockstore.sthis, "CRDTClock");
    this.applyHeadQueue = applyHeadQueue(this.int_applyHead.bind(this), this.logger);
  }

  // Replace head contents in place so external references to the array stay valid.
  setHead(head: ClockHead) {
    // this.head = head;
    this.head.splice(0, this.head.length, ...head);
  }

  /**
   * Queue a head transition; processes only the first batch the queue yields
   * (the early `return` exits the for-await after one iteration).
   */
  async applyHead(newHead: ClockHead, prevHead: ClockHead, updates?: DocUpdate<DocTypes>[]): Promise<void> {
    for await (const { updates: updatesAcc, all } of this.applyHeadQueue.push({
      newHead,
      prevHead,
      updates,
    })) {
      return this.processUpdates(updatesAcc, all, prevHead);
    }
  }

  // When watchers exist and the batch was partial, recompute the exact delta
  // since prevHead instead of trusting the accumulated updates.
  async processUpdates(updatesAcc: DocUpdate<DocTypes>[], all: boolean, prevHead: ClockHead) {
    let internalUpdates = updatesAcc;
    if (this.watchers.size && !all) {
      const changes = await clockChangesSince<DocTypes>(this.blockstore, this.head, prevHead, {}, this.logger);
      internalUpdates = changes.result;
    }
    this.zoomers.forEach((fn) => fn());
    this.notifyWatchers(internalUpdates || []);
  }

  // Drops the internal genesis document before notifying; nothing fires when
  // the filtered batch is empty.
  notifyWatchers(updates: DocUpdate<DocTypes>[]) {
    updates = updates.filter((update) => update.id !== PARAM.GENESIS_CID);
    if (!updates.length) {
      return;
    }
    this.emptyWatchers.forEach((fn) => fn());
    this.watchers.forEach((fn) => fn(updates || []));
  }

  // Register a per-update listener; returns its unregister function.
  onTick(fn: (updates: DocUpdate<DocTypes>[]) => void): UnReg {
    const key = this.sthis.timeOrderedNextId().str;
    this.watchers.set(key, fn);
    return () => {
      this.watchers.delete(key);
    };
  }

  // Register a no-payload listener fired alongside watchers.
  onTock(fn: VoidFn): UnReg {
    const key = this.sthis.timeOrderedNextId().str;
    this.emptyWatchers.set(key, fn);
    return () => {
      this.emptyWatchers.delete(key);
    };
  }

  // Register a listener fired on every applied head change.
  onZoom(fn: VoidFn): UnReg {
    const key = this.sthis.timeOrderedNextId().str;
    this.zoomers.set(key, fn);
    return () => {
      this.zoomers.delete(key);
    };
  }

  /**
   * Serialized head-transition worker invoked by the queue.
   * Fast paths: no-op when newHead equals the current head; plain fast-forward
   * when the current head still equals prevHead.
   * Otherwise merges via pail advance/root inside a (possibly reused)
   * transaction. NOTE(review): when noLoader is true the transaction is kept
   * open and not committed here — presumably flushed by a later local update;
   * confirm against ApplyHeadQueue callers.
   */
  async int_applyHead(newHead: ClockHead, prevHead: ClockHead, localUpdates: boolean) {
    // if (!(this.head && prevHead && newHead)) {
    //   throw new Error("missing head");
    // }

    const noLoader = !localUpdates;

    // console.log("int_applyHead", this.applyHeadQueue.size(), this.head, newHead, prevHead, localUpdates);
    // sortClockHead sorts in place, so this.head / newHead / prevHead are reordered here.
    const ogHead = sortClockHead(this.head);
    newHead = sortClockHead(newHead);
    if (compareClockHeads(ogHead, newHead)) {
      return;
    }
    const ogPrev = sortClockHead(prevHead);
    if (compareClockHeads(ogHead, ogPrev)) {
      this.setHead(newHead);
      return;
    }

    // const noLoader = this.head.length === 1 && !updates?.length
    if (!this.blockstore) {
      throw this.logger.Error().Msg("missing blockstore").AsError();
    }
    await validateBlocks(this.logger, newHead, this.blockstore);
    if (!this.transaction) {
      this.transaction = this.blockstore.openTransaction({ noLoader, add: false });
    }
    const tblocks = this.transaction;

    const advancedHead = await advanceBlocks(this.logger, newHead, tblocks, this.head);
    const result = await root(toPailFetcher(tblocks), advancedHead);

    // Persist the merge result's new blocks into the transaction.
    const fpBlocks = await Promise.all(result.additions.map(anyBlock2FPBlock));
    for (const fp of fpBlocks) {
      tblocks.putSync(fp);
    }

    // for (const block of [
    //   ...result.additions,
    //   // ...result.removals
    // ]) {
    //   tblocks.putSync(await anyBlock2FPBlock(block));
    // }
    if (!noLoader) {
      await this.blockstore.commitTransaction(tblocks, { head: advancedHead }, { add: false, noLoader });
      this.transaction = undefined;
    }
    this.setHead(advancedHead);
  }
}
|
|
161
|
-
|
|
162
|
-
// Helper functions
|
|
163
|
-
function sortClockHead(clockHead: ClockHead) {
|
|
164
|
-
return clockHead.sort((a, b) => a.toString().localeCompare(b.toString()));
|
|
165
|
-
}
|
|
166
|
-
|
|
167
|
-
async function validateBlocks(logger: Logger, newHead: ClockHead, blockstore?: BaseBlockstore) {
|
|
168
|
-
if (!blockstore) throw logger.Error().Msg("missing blockstore");
|
|
169
|
-
newHead.map(async (cid) => {
|
|
170
|
-
const got = await blockstore.get(cid);
|
|
171
|
-
if (!got) {
|
|
172
|
-
throw logger.Error().Str("cid", cid.toString()).Msg("int_applyHead missing block").AsError();
|
|
173
|
-
}
|
|
174
|
-
});
|
|
175
|
-
}
|
|
176
|
-
|
|
177
|
-
function compareClockHeads(head1: ClockHead, head2: ClockHead) {
|
|
178
|
-
return head1.toString() === head2.toString();
|
|
179
|
-
}
|
|
180
|
-
|
|
181
|
-
async function advanceBlocks(logger: Logger, newHead: ClockHead, tblocks: CarTransaction, head: ClockHead) {
|
|
182
|
-
for (const cid of newHead) {
|
|
183
|
-
try {
|
|
184
|
-
head = await advance(toPailFetcher(tblocks), head, cid);
|
|
185
|
-
} catch (e) {
|
|
186
|
-
logger.Error().Err(e).Msg("failed to advance head");
|
|
187
|
-
// console.log('failed to advance head:', cid.toString(), e)
|
|
188
|
-
// continue;
|
|
189
|
-
}
|
|
190
|
-
}
|
|
191
|
-
return head;
|
|
192
|
-
}
|
package/crdt-helpers.ts
DELETED
|
@@ -1,408 +0,0 @@
|
|
|
1
|
-
import { asyncBlockDecode } from "@fireproof/core-runtime";
|
|
2
|
-
import { parse } from "multiformats/link";
|
|
3
|
-
import { Block } from "multiformats/block";
|
|
4
|
-
import { sha256 as hasher } from "multiformats/hashes/sha2";
|
|
5
|
-
import * as codec from "@ipld/dag-cbor";
|
|
6
|
-
import { put, get, entries } from "@web3-storage/pail/crdt";
|
|
7
|
-
import {
|
|
8
|
-
EventBlockView,
|
|
9
|
-
EventLink,
|
|
10
|
-
Operation,
|
|
11
|
-
PutOperation,
|
|
12
|
-
Result,
|
|
13
|
-
BlockFetcher as PailBlockFetcher,
|
|
14
|
-
} from "@web3-storage/pail/crdt/api";
|
|
15
|
-
import { EventFetcher, vis } from "@web3-storage/pail/clock";
|
|
16
|
-
import * as Batch from "@web3-storage/pail/crdt/batch";
|
|
17
|
-
import { BlockFetcher, TransactionMeta, AnyLink, StoreRuntime } from "@fireproof/core-types-blockstore";
|
|
18
|
-
import {
|
|
19
|
-
type EncryptedBlockstore,
|
|
20
|
-
CarTransactionImpl,
|
|
21
|
-
anyBlock2FPBlock,
|
|
22
|
-
doc2FPBlock,
|
|
23
|
-
fileBlock2FPBlock,
|
|
24
|
-
} from "@fireproof/core-blockstore";
|
|
25
|
-
import {
|
|
26
|
-
type IndexKeyType,
|
|
27
|
-
type DocUpdate,
|
|
28
|
-
type ClockHead,
|
|
29
|
-
type DocValue,
|
|
30
|
-
type CRDTMeta,
|
|
31
|
-
type ChangesOptions,
|
|
32
|
-
type DocFileMeta,
|
|
33
|
-
type DocFiles,
|
|
34
|
-
type DocSet,
|
|
35
|
-
type DocWithId,
|
|
36
|
-
type DocTypes,
|
|
37
|
-
throwFalsy,
|
|
38
|
-
CarTransaction,
|
|
39
|
-
BaseBlockstore,
|
|
40
|
-
PARAM,
|
|
41
|
-
NotFoundError,
|
|
42
|
-
} from "@fireproof/core-types-base";
|
|
43
|
-
import { Logger } from "@adviser/cement";
|
|
44
|
-
import { Link, Version } from "multiformats";
|
|
45
|
-
|
|
46
|
-
function toString<K extends IndexKeyType>(key: K, logger: Logger): string {
|
|
47
|
-
switch (typeof key) {
|
|
48
|
-
case "string":
|
|
49
|
-
case "number":
|
|
50
|
-
return key.toString();
|
|
51
|
-
default:
|
|
52
|
-
throw logger.Error().Msg("Invalid key type").AsError();
|
|
53
|
-
}
|
|
54
|
-
}
|
|
55
|
-
|
|
56
|
-
export function toPailFetcher(tblocks: BlockFetcher): PailBlockFetcher {
|
|
57
|
-
return {
|
|
58
|
-
get: async <T = unknown, C extends number = number, A extends number = number, V extends Version = 1>(
|
|
59
|
-
link: Link<T, C, A, V>,
|
|
60
|
-
) => {
|
|
61
|
-
const block = await tblocks.get(link);
|
|
62
|
-
return block
|
|
63
|
-
? ({
|
|
64
|
-
cid: block.cid,
|
|
65
|
-
bytes: block.bytes,
|
|
66
|
-
} as Block<T, C, A, V>)
|
|
67
|
-
: undefined;
|
|
68
|
-
},
|
|
69
|
-
};
|
|
70
|
-
}
|
|
71
|
-
|
|
72
|
-
export function sanitizeDocumentFields<T>(obj: T): T {
|
|
73
|
-
if (Array.isArray(obj)) {
|
|
74
|
-
return obj.map((item: unknown) => {
|
|
75
|
-
if (typeof item === "object" && item !== null) {
|
|
76
|
-
return sanitizeDocumentFields(item);
|
|
77
|
-
}
|
|
78
|
-
return item;
|
|
79
|
-
}) as T;
|
|
80
|
-
} else if (typeof obj === "object" && obj !== null) {
|
|
81
|
-
// Special case for Date objects - convert to ISO string
|
|
82
|
-
if (obj instanceof Date) {
|
|
83
|
-
return obj.toISOString() as unknown as T;
|
|
84
|
-
}
|
|
85
|
-
|
|
86
|
-
const typedObj = obj as Record<string, unknown>;
|
|
87
|
-
const result: Record<string, unknown> = {};
|
|
88
|
-
for (const key in typedObj) {
|
|
89
|
-
if (Object.hasOwnProperty.call(typedObj, key)) {
|
|
90
|
-
const value = typedObj[key];
|
|
91
|
-
if (value === null || (!Number.isNaN(value) && value !== undefined)) {
|
|
92
|
-
if (typeof value === "object" && !key.startsWith("_")) {
|
|
93
|
-
// Handle Date objects in properties
|
|
94
|
-
if (value instanceof Date) {
|
|
95
|
-
result[key] = (value as Date).toISOString();
|
|
96
|
-
} else {
|
|
97
|
-
const sanitized = sanitizeDocumentFields(value);
|
|
98
|
-
result[key] = sanitized;
|
|
99
|
-
}
|
|
100
|
-
} else {
|
|
101
|
-
result[key] = value;
|
|
102
|
-
}
|
|
103
|
-
}
|
|
104
|
-
}
|
|
105
|
-
}
|
|
106
|
-
return result as T;
|
|
107
|
-
}
|
|
108
|
-
return obj;
|
|
109
|
-
}
|
|
110
|
-
|
|
111
|
-
/**
 * Write a batch of document updates into the pail CRDT.
 *
 * Multi-update batches go through pail's Batch API; a single update uses a
 * direct put. In both cases the document bodies are written first via
 * writeDocContent, then the CRDT's new event/addition blocks are copied into
 * the car transaction.
 *
 * @param store   runtime used by writeDocContent for file encoding
 * @param tblocks transaction receiving all produced blocks
 * @param head    current clock head the updates are applied against
 * @param updates documents to put (empty input throws "Missing result")
 * @param logger  error construction
 * @returns the resulting clock head wrapped as CRDTMeta
 */
export async function applyBulkUpdateToCrdt<T extends DocTypes>(
  store: StoreRuntime,
  tblocks: CarTransaction,
  head: ClockHead,
  updates: DocUpdate<T>[],
  logger: Logger,
): Promise<CRDTMeta> {
  let result: Result | null = null;
  if (updates.length > 1) {
    // Batched path: one commit for all puts.
    const batch = await Batch.create(toPailFetcher(tblocks), head);
    for (const update of updates) {
      const link = await writeDocContent(store, tblocks, update, logger);
      await batch.put(toString(update.id, logger), link);
    }
    result = await batch.commit();
  } else if (updates.length === 1) {
    // Single-update fast path.
    const link = await writeDocContent(store, tblocks, updates[0], logger);
    result = await put(toPailFetcher(tblocks), head, toString(updates[0].id, logger), link);
  }
  if (!result) throw logger.Error().Uint64("updates.len", updates.length).Msg("Missing result").AsError();

  if (result.event) {
    // Persist the CRDT's new blocks (additions + the event itself); removals
    // are intentionally left out (see commented line).
    for (const block of [
      ...result.additions,
      // ...result.removals,
      result.event,
    ]) {
      tblocks.putSync(await anyBlock2FPBlock(block));
    }
  }
  return { head: result.head }; // satisfies CRDTMeta;
}
|
|
143
|
-
|
|
144
|
-
// this whole thing can get pulled outside of the write queue
|
|
145
|
-
async function writeDocContent<T extends DocTypes>(
|
|
146
|
-
store: StoreRuntime,
|
|
147
|
-
blocks: CarTransaction,
|
|
148
|
-
update: DocUpdate<T>,
|
|
149
|
-
logger: Logger,
|
|
150
|
-
): Promise<AnyLink> {
|
|
151
|
-
let value: Partial<DocValue<T>>;
|
|
152
|
-
if (update.del) {
|
|
153
|
-
value = { del: true };
|
|
154
|
-
} else {
|
|
155
|
-
if (!update.value) throw logger.Error().Msg("Missing value").AsError();
|
|
156
|
-
await processFiles(store, blocks, update.value, logger);
|
|
157
|
-
value = { doc: update.value as DocWithId<T> };
|
|
158
|
-
}
|
|
159
|
-
// const ref = await encode({ value, hasher, codec });
|
|
160
|
-
const block = await doc2FPBlock(value);
|
|
161
|
-
// if (ref.cid.toString() !== block.cid.toString()) {
|
|
162
|
-
// debugger
|
|
163
|
-
// }
|
|
164
|
-
blocks.putSync(block);
|
|
165
|
-
return block.cid;
|
|
166
|
-
}
|
|
167
|
-
|
|
168
|
-
async function processFiles<T extends DocTypes>(store: StoreRuntime, blocks: CarTransaction, doc: DocSet<T>, logger: Logger) {
|
|
169
|
-
if (doc._files) {
|
|
170
|
-
await processFileset(logger, store, blocks, doc._files);
|
|
171
|
-
}
|
|
172
|
-
if (doc._publicFiles) {
|
|
173
|
-
await processFileset(logger, store, blocks, doc._publicFiles /*, true*/);
|
|
174
|
-
}
|
|
175
|
-
}
|
|
176
|
-
|
|
177
|
-
/**
 * Encode every File object in a fileset, commit the file blocks in their own
 * car transaction, and rewrite `files` entries (in place) into DocFileMeta.
 * Entries that already look like DocFileMeta (cid/type/size/car present) are
 * normalized; anything else is left untouched.
 * NOTE(review): mutates the `files` argument — callers observe the rewrite.
 */
async function processFileset(
  logger: Logger,
  store: StoreRuntime,
  blocks: CarTransaction,
  files: DocFiles /*, publicFiles = false */,
) {
  const dbBlockstore = blocks.parent as unknown as EncryptedBlockstore;
  if (!dbBlockstore.loader) throw logger.Error().Msg("Missing loader, ledger name is required").AsError();
  const t = new CarTransactionImpl(dbBlockstore); // maybe this should move to encrypted-blockstore
  // Names of entries that were freshly encoded this call (get `car` attached below).
  const didPut = [];
  // let totalSize = 0
  for (const filename in files) {
    if (File === files[filename].constructor) {
      const file = files[filename] as File;

      // totalSize += file.size
      // Encode the raw File into blocks and replace the entry with its metadata.
      const { cid, blocks: fileBlocks } = await store.encodeFile(file);
      didPut.push(filename);
      for (const block of fileBlocks) {
        // console.log("processFileset", block.cid.toString())
        t.putSync(await fileBlock2FPBlock(block));
      }
      files[filename] = { cid, type: file.type, size: file.size, lastModified: file.lastModified } as DocFileMeta;
    } else {
      // Already-meta entry: keep only the known fields when fully populated.
      const { cid, type, size, car, lastModified } = files[filename] as DocFileMeta;
      if (cid && type && size && car) {
        files[filename] = { cid, type, size, car, lastModified };
      }
    }
  }

  if (didPut.length) {
    // Commit all newly-encoded file blocks and record the resulting car on
    // each fresh entry (spread order keeps an existing `car` if one was set).
    const car = await dbBlockstore.loader.commitFiles(
      t,
      { files } as unknown as TransactionMeta /* {
      public: publicFiles,
    } */,
    );
    if (car) {
      for (const name of didPut) {
        files[name] = { car, ...files[name] } as DocFileMeta;
      }
    }
  }
}
|
|
222
|
-
|
|
223
|
-
export async function getValueFromCrdt<T extends DocTypes>(
|
|
224
|
-
blocks: BaseBlockstore,
|
|
225
|
-
head: ClockHead,
|
|
226
|
-
key: string,
|
|
227
|
-
logger: Logger,
|
|
228
|
-
): Promise<DocValue<T>> {
|
|
229
|
-
if (!head.length) throw logger.Debug().Msg("Getting from an empty ledger").AsError();
|
|
230
|
-
// console.log("getValueFromCrdt-1", head, key)
|
|
231
|
-
const link = await get(toPailFetcher(blocks), head, key);
|
|
232
|
-
// console.log("getValueFromCrdt-2", key)
|
|
233
|
-
if (!link) {
|
|
234
|
-
// Use NotFoundError instead of logging an error
|
|
235
|
-
throw new NotFoundError(`Not found: ${key}`);
|
|
236
|
-
}
|
|
237
|
-
const ret = await getValueFromLink<T>(blocks, link, logger);
|
|
238
|
-
// console.log("getValueFromCrdt-3", key)
|
|
239
|
-
return ret;
|
|
240
|
-
}
|
|
241
|
-
|
|
242
|
-
export function readFiles<T extends DocTypes>(blocks: BaseBlockstore, { doc }: Partial<DocValue<T>>) {
|
|
243
|
-
if (!doc) return;
|
|
244
|
-
if (doc._files) {
|
|
245
|
-
readFileset(blocks as EncryptedBlockstore, doc._files);
|
|
246
|
-
}
|
|
247
|
-
if (doc._publicFiles) {
|
|
248
|
-
readFileset(blocks as EncryptedBlockstore, doc._publicFiles, true);
|
|
249
|
-
}
|
|
250
|
-
}
|
|
251
|
-
|
|
252
|
-
/**
 * Rewrite each fileset entry (in place) so callers can lazily materialize the
 * file: entries with a `car` reference get an async `file()` that decodes the
 * bytes on demand; public entries additionally get a w3s gateway URL.
 */
function readFileset(blocks: EncryptedBlockstore, files: DocFiles, isPublic = false) {
  for (const filename in files) {
    const fileMeta = files[filename] as DocFileMeta;
    if (fileMeta.cid) {
      if (isPublic) {
        fileMeta.url = `https://${fileMeta.cid.toString()}.ipfs.w3s.link/`;
      }
      if (fileMeta.car) {
        // Lazy accessor: decode only when the caller asks for the file.
        // NOTE: the closure captures fileMeta, so later mutations are visible.
        fileMeta.file = async () => {
          const result = await blocks.ebOpts.storeRuntime.decodeFile(
            {
              // Blocks are fetched out of the car that stored this file.
              get: async (cid: AnyLink) => {
                return await blocks.getFile(throwFalsy(fileMeta.car), cid);
              },
            },
            fileMeta.cid,
            fileMeta,
          );
          if (result.isErr()) {
            throw blocks.logger.Error().Any("error", result.Err()).Any("cid", fileMeta.cid).Msg("Error decoding file").AsError();
          }

          return result.unwrap();
        };
      }
    }
    files[filename] = fileMeta;
  }
}
|
|
281
|
-
|
|
282
|
-
async function getValueFromLink<T extends DocTypes>(blocks: BlockFetcher, link: AnyLink, logger: Logger): Promise<DocValue<T>> {
|
|
283
|
-
const block = await blocks.get(link);
|
|
284
|
-
if (!block) throw logger.Error().Str("link", link.toString()).Msg(`Missing linked block`).AsError();
|
|
285
|
-
const { value } = (await asyncBlockDecode({ bytes: block.bytes, hasher, codec })) as { value: DocValue<T> };
|
|
286
|
-
const cvalue = {
|
|
287
|
-
...value,
|
|
288
|
-
cid: link,
|
|
289
|
-
};
|
|
290
|
-
readFiles(blocks as EncryptedBlockstore, cvalue);
|
|
291
|
-
return cvalue;
|
|
292
|
-
}
|
|
293
|
-
|
|
294
|
-
class DirtyEventFetcher<T> extends EventFetcher<T> {
|
|
295
|
-
readonly logger: Logger;
|
|
296
|
-
constructor(logger: Logger, blocks: BlockFetcher) {
|
|
297
|
-
super(toPailFetcher(blocks));
|
|
298
|
-
this.logger = logger;
|
|
299
|
-
}
|
|
300
|
-
async get(link: EventLink<T>): Promise<EventBlockView<T>> {
|
|
301
|
-
try {
|
|
302
|
-
return await super.get(link);
|
|
303
|
-
} catch (e) {
|
|
304
|
-
this.logger.Error().Ref("link", link.toString()).Err(e).Msg("Missing event");
|
|
305
|
-
return { value: undefined } as unknown as EventBlockView<T>;
|
|
306
|
-
}
|
|
307
|
-
}
|
|
308
|
-
}
|
|
309
|
-
|
|
310
|
-
export async function clockChangesSince<T extends DocTypes>(
|
|
311
|
-
blocks: BlockFetcher,
|
|
312
|
-
head: ClockHead,
|
|
313
|
-
since: ClockHead,
|
|
314
|
-
opts: ChangesOptions,
|
|
315
|
-
logger: Logger,
|
|
316
|
-
): Promise<{ result: DocUpdate<T>[]; head: ClockHead }> {
|
|
317
|
-
const eventsFetcher = (
|
|
318
|
-
opts.dirty ? new DirtyEventFetcher<Operation>(logger, blocks) : new EventFetcher<Operation>(toPailFetcher(blocks))
|
|
319
|
-
) as EventFetcher<Operation>;
|
|
320
|
-
const keys = new Set<string>();
|
|
321
|
-
const updates = await gatherUpdates<T>(
|
|
322
|
-
blocks,
|
|
323
|
-
eventsFetcher,
|
|
324
|
-
head,
|
|
325
|
-
since,
|
|
326
|
-
[],
|
|
327
|
-
keys,
|
|
328
|
-
new Set<string>(),
|
|
329
|
-
opts.limit || Infinity,
|
|
330
|
-
logger,
|
|
331
|
-
);
|
|
332
|
-
return { result: updates.reverse(), head };
|
|
333
|
-
}
|
|
334
|
-
|
|
335
|
-
/**
 * Walk the clock DAG from `head` back toward `since`, collecting the newest
 * put per document key. `keys` and `didLinks` are shared mutable accumulators
 * across the recursion (dedupe by key and by visited event link).
 *
 * NOTE(review): `limit` is decremented only in the local frame — the decrement
 * is not visible to sibling iterations' recursive calls, so the limit is a
 * soft cap rather than an exact one; confirm this is intended.
 */
async function gatherUpdates<T extends DocTypes>(
  blocks: BlockFetcher,
  eventsFetcher: EventFetcher<Operation>,
  head: ClockHead,
  since: ClockHead,
  updates: DocUpdate<T>[] = [],
  keys: Set<string>,
  didLinks: Set<string>,
  limit: number,
  logger: Logger,
): Promise<DocUpdate<T>[]> {
  if (limit <= 0) return updates;
  // if (Math.random() < 0.001) console.log('gatherUpdates', head.length, since.length, updates.length)
  // Stop descending once the frontier reaches any `since` link.
  const sHead = head.map((l) => l.toString());
  for (const link of since) {
    if (sHead.includes(link.toString())) {
      return updates;
    }
  }
  for (const link of head) {
    if (didLinks.has(link.toString())) continue;
    didLinks.add(link.toString());
    const { value: event } = await eventsFetcher.get(link);
    // DirtyEventFetcher can yield { value: undefined } for missing events.
    if (!event) continue;
    const { type } = event.data;
    let ops = [] as PutOperation[];
    if (type === "batch") {
      ops = event.data.ops as PutOperation[];
    } else if (type === "put") {
      ops = [event.data] as PutOperation[];
    }
    // Reverse order so the newest op for a key wins within one event.
    for (let i = ops.length - 1; i >= 0; i--) {
      const { key, value } = ops[i];
      if (!keys.has(key)) {
        // todo option to see all updates
        // NOTE(review): the doc is fetched before the genesis check, so the
        // genesis value is loaded and then discarded — presumably harmless.
        const docValue = await getValueFromLink<T>(blocks, value, logger);
        if (key === PARAM.GENESIS_CID) {
          continue;
        }
        updates.push({ id: key, value: docValue.doc, del: docValue.del, clock: link });
        limit--;
        keys.add(key);
      }
    }
    if (event.parents) {
      // Recurse into ancestors; shared sets prevent re-visiting.
      updates = await gatherUpdates(blocks, eventsFetcher, event.parents, since, updates, keys, didLinks, limit, logger);
    }
  }
  return updates;
}
|
|
385
|
-
|
|
386
|
-
export async function* getAllEntries<T extends DocTypes>(blocks: BlockFetcher, head: ClockHead, logger: Logger) {
|
|
387
|
-
// return entries(blocks, head)
|
|
388
|
-
for await (const [key, link] of entries(toPailFetcher(blocks), head)) {
|
|
389
|
-
// console.log("getAllEntries", key, link);
|
|
390
|
-
if (key !== PARAM.GENESIS_CID) {
|
|
391
|
-
const docValue = await getValueFromLink(blocks, link, logger);
|
|
392
|
-
yield { id: key, value: docValue.doc, del: docValue.del } as DocUpdate<T>;
|
|
393
|
-
}
|
|
394
|
-
}
|
|
395
|
-
}
|
|
396
|
-
|
|
397
|
-
export async function* clockVis(blocks: BlockFetcher, head: ClockHead) {
|
|
398
|
-
for await (const line of vis(toPailFetcher(blocks), head)) {
|
|
399
|
-
yield line;
|
|
400
|
-
}
|
|
401
|
-
}
|
|
402
|
-
|
|
403
|
-
export async function getBlock(blocks: BlockFetcher, cidString: string) {
|
|
404
|
-
const block = await blocks.get(parse(cidString));
|
|
405
|
-
if (!block) throw new Error(`Missing block ${cidString}`);
|
|
406
|
-
const { cid, value } = await asyncBlockDecode({ bytes: block.bytes, codec, hasher });
|
|
407
|
-
return new Block({ cid, value, bytes: block.bytes });
|
|
408
|
-
}
|