@peerbit/document 6.0.7 → 7.0.0-55cebfe
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -2
- package/dist/benchmark/index.d.ts +2 -0
- package/dist/benchmark/index.d.ts.map +1 -0
- package/dist/benchmark/index.js +125 -0
- package/dist/benchmark/index.js.map +1 -0
- package/dist/benchmark/memory/index.d.ts +2 -0
- package/dist/benchmark/memory/index.d.ts.map +1 -0
- package/dist/benchmark/memory/index.js +122 -0
- package/dist/benchmark/memory/index.js.map +1 -0
- package/dist/benchmark/memory/insert.d.ts +2 -0
- package/dist/benchmark/memory/insert.d.ts.map +1 -0
- package/dist/benchmark/memory/insert.js +133 -0
- package/dist/benchmark/memory/insert.js.map +1 -0
- package/dist/benchmark/memory/utils.d.ts +13 -0
- package/dist/benchmark/memory/utils.d.ts.map +1 -0
- package/dist/benchmark/memory/utils.js +2 -0
- package/dist/benchmark/memory/utils.js.map +1 -0
- package/dist/benchmark/replication.d.ts +2 -0
- package/dist/benchmark/replication.d.ts.map +1 -0
- package/dist/benchmark/replication.js +172 -0
- package/dist/benchmark/replication.js.map +1 -0
- package/dist/src/borsh.d.ts +2 -0
- package/dist/src/borsh.d.ts.map +1 -0
- package/dist/src/borsh.js +16 -0
- package/dist/src/borsh.js.map +1 -0
- package/dist/src/constants.d.ts +2 -0
- package/dist/src/constants.d.ts.map +1 -0
- package/dist/src/constants.js +2 -0
- package/dist/src/constants.js.map +1 -0
- package/dist/src/index.d.ts +4 -0
- package/dist/src/index.d.ts.map +1 -0
- package/dist/src/index.js +4 -0
- package/dist/src/index.js.map +1 -0
- package/dist/src/program.d.ts +87 -0
- package/dist/src/program.d.ts.map +1 -0
- package/{lib/esm/document-store.js → dist/src/program.js} +159 -138
- package/dist/src/program.js.map +1 -0
- package/dist/src/search.d.ts +132 -0
- package/dist/src/search.d.ts.map +1 -0
- package/dist/src/search.js +845 -0
- package/dist/src/search.js.map +1 -0
- package/package.json +74 -43
- package/src/borsh.ts +19 -0
- package/src/constants.ts +1 -0
- package/src/index.ts +3 -3
- package/src/program.ts +580 -0
- package/src/search.ts +1217 -0
- package/LICENSE +0 -202
- package/lib/esm/document-index.d.ts +0 -147
- package/lib/esm/document-index.js +0 -942
- package/lib/esm/document-index.js.map +0 -1
- package/lib/esm/document-store.d.ts +0 -72
- package/lib/esm/document-store.js.map +0 -1
- package/lib/esm/index.d.ts +0 -3
- package/lib/esm/index.js +0 -4
- package/lib/esm/index.js.map +0 -1
- package/lib/esm/query.d.ts +0 -191
- package/lib/esm/query.js +0 -615
- package/lib/esm/query.js.map +0 -1
- package/lib/esm/utils.d.ts +0 -3
- package/lib/esm/utils.js +0 -12
- package/lib/esm/utils.js.map +0 -1
- package/src/document-index.ts +0 -1268
- package/src/document-store.ts +0 -547
- package/src/query.ts +0 -525
- package/src/utils.ts +0 -17
package/src/program.ts
ADDED
|
@@ -0,0 +1,580 @@
|
|
|
1
|
+
import {
|
|
2
|
+
type AbstractType,
|
|
3
|
+
BorshError,
|
|
4
|
+
field,
|
|
5
|
+
serialize,
|
|
6
|
+
variant,
|
|
7
|
+
} from "@dao-xyz/borsh";
|
|
8
|
+
import { CustomEvent } from "@libp2p/interface";
|
|
9
|
+
import { AccessError, DecryptedThing } from "@peerbit/crypto";
|
|
10
|
+
import * as documentsTypes from "@peerbit/document-interface";
|
|
11
|
+
import * as indexerTypes from "@peerbit/indexer-interface";
|
|
12
|
+
import {
|
|
13
|
+
type Change,
|
|
14
|
+
Entry,
|
|
15
|
+
EntryType,
|
|
16
|
+
type ShallowOrFullEntry,
|
|
17
|
+
type TrimOptions,
|
|
18
|
+
} from "@peerbit/log";
|
|
19
|
+
import { logger as loggerFn } from "@peerbit/logger";
|
|
20
|
+
import { Program, type ProgramEvents } from "@peerbit/program";
|
|
21
|
+
import {
|
|
22
|
+
type SharedAppendOptions,
|
|
23
|
+
SharedLog,
|
|
24
|
+
type SharedLogOptions,
|
|
25
|
+
} from "@peerbit/shared-log";
|
|
26
|
+
import { MAX_BATCH_SIZE } from "./constants.js";
|
|
27
|
+
import {
|
|
28
|
+
BORSH_ENCODING_OPERATION,
|
|
29
|
+
type CanRead,
|
|
30
|
+
type CanSearch,
|
|
31
|
+
DeleteOperation,
|
|
32
|
+
DocumentIndex,
|
|
33
|
+
Operation,
|
|
34
|
+
PutOperation,
|
|
35
|
+
type TransformOptions,
|
|
36
|
+
} from "./search.js";
|
|
37
|
+
|
|
38
|
+
// Module-level logger scoped to this package.
const logger = loggerFn({ module: "document" });
|
|
39
|
+
|
|
40
|
+
/**
 * Thrown while applying log changes when an entry's payload is not a
 * recognized operation type (neither a put nor a delete).
 */
export class OperationError extends Error {
	constructor(message?: string) {
		super(message);
	}
}
|
|
45
|
+
/**
 * Net result of applying a batch of log changes to the document index.
 */
export interface DocumentsChange<T> {
	// Documents inserted or updated by the batch.
	added: T[];
	// Documents removed by the batch.
	removed: T[];
}
|
|
49
|
+
/**
 * Custom events dispatched by the Documents program.
 */
export interface DocumentEvents<T> {
	// Fired after a batch of log changes has been applied to the index.
	change: CustomEvent<DocumentsChange<T>>;
}
|
|
52
|
+
|
|
53
|
+
// A value that may be returned directly or via a promise.
type MaybePromise<T> = Promise<T> | T;

// Payload handed to `canPerform` for a put: the decoded document plus the
// raw operation and the log entry that carries it.
type CanPerformPut<T> = {
	type: "put";
	value: T;
	operation: PutOperation;
	entry: Entry<PutOperation>;
};

// Payload handed to `canPerform` for a delete. The document itself may
// already be gone, so only the operation and its entry are provided.
type CanPerformDelete<T> = {
	type: "delete";
	operation: DeleteOperation;
	entry: Entry<DeleteOperation>;
};

export type CanPerformOperations<T> = CanPerformPut<T> | CanPerformDelete<T>;

// Access-control hook: return false (or a promise resolving to false) to
// reject the operation before it is applied.
export type CanPerform<T> = (
	properties: CanPerformOperations<T>,
) => MaybePromise<boolean>;
|
|
72
|
+
|
|
73
|
+
/**
 * Options accepted by `Documents.open`.
 */
export type SetupOptions<T, I = T> = {
	// Constructor of the stored document type; used for decoding payloads.
	type: AbstractType<T>;
	// Required when `type` is a Program: decides whether a stored program
	// may be opened locally.
	canOpen?: (program: T) => MaybePromise<boolean>;
	// Access-control hook evaluated before an operation is appended.
	canPerform?: CanPerform<T>;
	// Custom resolver extracting the primary key from a document.
	// Defaults to reading the property named by `index.idProperty` ("id").
	id?: (obj: any) => indexerTypes.IdPrimitive;
	index?: {
		// Permission hooks for querying and reading indexed documents.
		canSearch?: CanSearch;
		canRead?: CanRead<T>;
		// Property (or nested property path) holding the document id.
		idProperty?: string | string[];
	} & TransformOptions<T, I>;
	log?: {
		// Trimming behaviour forwarded to the underlying shared log.
		trim?: TrimOptions;
	};
} & SharedLogOptions<Operation>;
|
|
87
|
+
|
|
88
|
+
@variant("documents")
|
|
89
|
+
export class Documents<
|
|
90
|
+
T,
|
|
91
|
+
I extends Record<string, any> = T extends Record<string, any> ? T : any,
|
|
92
|
+
> extends Program<SetupOptions<T, I>, DocumentEvents<T> & ProgramEvents> {
|
|
93
|
+
@field({ type: SharedLog })
|
|
94
|
+
log: SharedLog<Operation>;
|
|
95
|
+
|
|
96
|
+
@field({ type: "bool" })
|
|
97
|
+
immutable: boolean; // "Can I overwrite a document?"
|
|
98
|
+
|
|
99
|
+
@field({ type: DocumentIndex })
|
|
100
|
+
private _index: DocumentIndex<T, I>;
|
|
101
|
+
|
|
102
|
+
private _clazz!: AbstractType<T>;
|
|
103
|
+
|
|
104
|
+
private _optionCanPerform?: CanPerform<T>;
|
|
105
|
+
private _manuallySynced!: Set<string>;
|
|
106
|
+
private idResolver!: (any: any) => indexerTypes.IdPrimitive;
|
|
107
|
+
|
|
108
|
+
canOpen?: (program: T, entry: Entry<Operation>) => Promise<boolean> | boolean;
|
|
109
|
+
|
|
110
|
+
constructor(properties?: {
|
|
111
|
+
id?: Uint8Array;
|
|
112
|
+
immutable?: boolean;
|
|
113
|
+
index?: DocumentIndex<T, I>;
|
|
114
|
+
}) {
|
|
115
|
+
super();
|
|
116
|
+
|
|
117
|
+
this.log = new SharedLog(properties);
|
|
118
|
+
this.immutable = properties?.immutable ?? false;
|
|
119
|
+
this._index = properties?.index || new DocumentIndex();
|
|
120
|
+
}
|
|
121
|
+
|
|
122
|
+
get index(): DocumentIndex<T, I> {
|
|
123
|
+
return this._index;
|
|
124
|
+
}
|
|
125
|
+
|
|
126
|
+
async open(options: SetupOptions<T, I>) {
|
|
127
|
+
this._clazz = options.type;
|
|
128
|
+
this.canOpen = options.canOpen;
|
|
129
|
+
|
|
130
|
+
/* eslint-disable */
|
|
131
|
+
if (Program.isPrototypeOf(this._clazz)) {
|
|
132
|
+
if (!this.canOpen) {
|
|
133
|
+
throw new Error(
|
|
134
|
+
"Document store needs to be opened with canOpen option when the document type is a Program",
|
|
135
|
+
);
|
|
136
|
+
}
|
|
137
|
+
}
|
|
138
|
+
|
|
139
|
+
this._optionCanPerform = options.canPerform;
|
|
140
|
+
this._manuallySynced = new Set();
|
|
141
|
+
const idProperty = options.index?.idProperty || "id";
|
|
142
|
+
const idResolver =
|
|
143
|
+
options.id ||
|
|
144
|
+
(typeof idProperty === "string"
|
|
145
|
+
? (obj: any) => obj[idProperty as string]
|
|
146
|
+
: (obj: any) =>
|
|
147
|
+
indexerTypes.extractFieldValue(obj, idProperty as string[]));
|
|
148
|
+
|
|
149
|
+
this.idResolver = idResolver;
|
|
150
|
+
|
|
151
|
+
await this._index.open({
|
|
152
|
+
log: this.log,
|
|
153
|
+
canRead: options?.index?.canRead,
|
|
154
|
+
canSearch: options.index?.canSearch,
|
|
155
|
+
documentType: this._clazz,
|
|
156
|
+
transform: options.index,
|
|
157
|
+
indexBy: idProperty,
|
|
158
|
+
sync: async (result: documentsTypes.Results<T>) => {
|
|
159
|
+
// here we arrive for all the results we want to persist.
|
|
160
|
+
// we we need to do here is
|
|
161
|
+
// 1. add the entry to a list of entries that we should persist through prunes
|
|
162
|
+
let heads: string[] = [];
|
|
163
|
+
for (const entry of result.results) {
|
|
164
|
+
this._manuallySynced.add(entry.context.gid);
|
|
165
|
+
heads.push(entry.context.head);
|
|
166
|
+
}
|
|
167
|
+
return this.log.log.join(heads);
|
|
168
|
+
},
|
|
169
|
+
dbType: this.constructor,
|
|
170
|
+
});
|
|
171
|
+
|
|
172
|
+
await this.log.open({
|
|
173
|
+
encoding: BORSH_ENCODING_OPERATION,
|
|
174
|
+
canReplicate: options?.canReplicate,
|
|
175
|
+
canAppend: this.canAppend.bind(this),
|
|
176
|
+
onChange: this.handleChanges.bind(this),
|
|
177
|
+
trim: options?.log?.trim,
|
|
178
|
+
replicate: options?.replicate,
|
|
179
|
+
replicas: options?.replicas,
|
|
180
|
+
sync: (entry: any) => {
|
|
181
|
+
// here we arrive when ever a insertion/pruning behaviour processes an entry
|
|
182
|
+
// returning true means that it should persist
|
|
183
|
+
return this._manuallySynced.has(entry.gid);
|
|
184
|
+
},
|
|
185
|
+
});
|
|
186
|
+
}
|
|
187
|
+
|
|
188
|
+
async recover() {
|
|
189
|
+
return this.log.recover();
|
|
190
|
+
}
|
|
191
|
+
|
|
192
|
+
private async _resolveEntry(history: Entry<Operation> | string) {
|
|
193
|
+
return typeof history === "string"
|
|
194
|
+
? (await this.log.log.get(history)) ||
|
|
195
|
+
(await Entry.fromMultihash<Operation>(this.log.log.blocks, history))
|
|
196
|
+
: history;
|
|
197
|
+
}
|
|
198
|
+
|
|
199
|
+
async canAppend(
|
|
200
|
+
entry: Entry<Operation>,
|
|
201
|
+
reference?: { document: T; operation: PutOperation },
|
|
202
|
+
): Promise<boolean> {
|
|
203
|
+
const l0 = await this._canAppend(entry as Entry<Operation>, reference);
|
|
204
|
+
if (!l0) {
|
|
205
|
+
return false;
|
|
206
|
+
}
|
|
207
|
+
|
|
208
|
+
try {
|
|
209
|
+
let operation: PutOperation | DeleteOperation = l0;
|
|
210
|
+
let document: T | undefined = reference?.document;
|
|
211
|
+
if (!document) {
|
|
212
|
+
if (l0 instanceof PutOperation) {
|
|
213
|
+
document = this._index.valueEncoding.decoder(l0.data);
|
|
214
|
+
if (!document) {
|
|
215
|
+
return false;
|
|
216
|
+
}
|
|
217
|
+
} else if (l0 instanceof DeleteOperation) {
|
|
218
|
+
// Nothing to do here by default
|
|
219
|
+
// checking if the document exists is not necessary
|
|
220
|
+
// since it might already be deleted
|
|
221
|
+
} else {
|
|
222
|
+
throw new Error("Unsupported operation");
|
|
223
|
+
}
|
|
224
|
+
}
|
|
225
|
+
|
|
226
|
+
if (this._optionCanPerform) {
|
|
227
|
+
if (
|
|
228
|
+
!(await this._optionCanPerform(
|
|
229
|
+
operation instanceof PutOperation
|
|
230
|
+
? {
|
|
231
|
+
type: "put",
|
|
232
|
+
value: document!,
|
|
233
|
+
operation,
|
|
234
|
+
entry: entry as any as Entry<PutOperation>,
|
|
235
|
+
}
|
|
236
|
+
: {
|
|
237
|
+
type: "delete",
|
|
238
|
+
operation,
|
|
239
|
+
entry: entry as any as Entry<DeleteOperation>,
|
|
240
|
+
},
|
|
241
|
+
))
|
|
242
|
+
) {
|
|
243
|
+
return false;
|
|
244
|
+
}
|
|
245
|
+
}
|
|
246
|
+
} catch (error) {
|
|
247
|
+
if (error instanceof BorshError) {
|
|
248
|
+
logger.warn("Received payload that could not be decoded, skipping");
|
|
249
|
+
return false;
|
|
250
|
+
}
|
|
251
|
+
throw error;
|
|
252
|
+
}
|
|
253
|
+
|
|
254
|
+
return true;
|
|
255
|
+
}
|
|
256
|
+
|
|
257
|
+
async _canAppend(
|
|
258
|
+
entry: Entry<Operation>,
|
|
259
|
+
reference?: { document: T; operation: PutOperation },
|
|
260
|
+
): Promise<PutOperation | DeleteOperation | false> {
|
|
261
|
+
const resolve = async (history: Entry<Operation> | string) => {
|
|
262
|
+
return typeof history === "string"
|
|
263
|
+
? this.log.log.get(history) ||
|
|
264
|
+
(await Entry.fromMultihash(this.log.log.blocks, history))
|
|
265
|
+
: history;
|
|
266
|
+
};
|
|
267
|
+
const pointsToHistory = async (history: Entry<Operation> | string) => {
|
|
268
|
+
// make sure nexts only points to this document at some point in history
|
|
269
|
+
let current = await resolve(history);
|
|
270
|
+
|
|
271
|
+
const next = entry.next[0];
|
|
272
|
+
while (
|
|
273
|
+
current?.hash &&
|
|
274
|
+
next !== current?.hash &&
|
|
275
|
+
current.next.length > 0
|
|
276
|
+
) {
|
|
277
|
+
current = await this.log.log.get(current.next[0])!;
|
|
278
|
+
}
|
|
279
|
+
if (current?.hash === next) {
|
|
280
|
+
return true; // Ok, we are pointing this new edit to some exising point in time of the old document
|
|
281
|
+
}
|
|
282
|
+
return false;
|
|
283
|
+
};
|
|
284
|
+
|
|
285
|
+
try {
|
|
286
|
+
entry.init({
|
|
287
|
+
encoding: this.log.log.encoding,
|
|
288
|
+
keychain: this.node.services.keychain,
|
|
289
|
+
});
|
|
290
|
+
const operation =
|
|
291
|
+
reference?.operation || entry._payload instanceof DecryptedThing
|
|
292
|
+
? entry.payload.getValue(entry.encoding)
|
|
293
|
+
: await entry.getPayloadValue();
|
|
294
|
+
if (operation instanceof PutOperation) {
|
|
295
|
+
// check nexts
|
|
296
|
+
const putOperation = operation as PutOperation;
|
|
297
|
+
let value =
|
|
298
|
+
reference?.document ??
|
|
299
|
+
this.index.valueEncoding.decoder(putOperation.data);
|
|
300
|
+
const keyValue = this.idResolver(value);
|
|
301
|
+
|
|
302
|
+
const key = indexerTypes.toId(keyValue);
|
|
303
|
+
|
|
304
|
+
const existingDocument = (await this.index.getDetailed(key))?.[0]
|
|
305
|
+
?.results[0];
|
|
306
|
+
if (existingDocument && existingDocument.context.head !== entry.hash) {
|
|
307
|
+
// econd condition can false if we reset the operation log, while not resetting the index. For example when doing .recover
|
|
308
|
+
if (this.immutable) {
|
|
309
|
+
//Key already exist and this instance Documents can note overrite/edit'
|
|
310
|
+
return false;
|
|
311
|
+
}
|
|
312
|
+
|
|
313
|
+
if (entry.next.length !== 1) {
|
|
314
|
+
return false;
|
|
315
|
+
}
|
|
316
|
+
let doc = await this.log.log.get(existingDocument.context.head);
|
|
317
|
+
if (!doc) {
|
|
318
|
+
logger.error("Failed to find Document from head");
|
|
319
|
+
return false;
|
|
320
|
+
}
|
|
321
|
+
const referenceHistoryCorrectly = await pointsToHistory(doc);
|
|
322
|
+
return referenceHistoryCorrectly ? putOperation : false;
|
|
323
|
+
} else {
|
|
324
|
+
if (entry.next.length !== 0) {
|
|
325
|
+
return false;
|
|
326
|
+
}
|
|
327
|
+
}
|
|
328
|
+
} else if (operation instanceof DeleteOperation) {
|
|
329
|
+
if (entry.next.length !== 1) {
|
|
330
|
+
return false;
|
|
331
|
+
}
|
|
332
|
+
const existingDocument = (
|
|
333
|
+
await this.index.getDetailed(operation.key)
|
|
334
|
+
)?.[0].results[0];
|
|
335
|
+
if (!existingDocument) {
|
|
336
|
+
// already deleted
|
|
337
|
+
return operation; // assume ok
|
|
338
|
+
}
|
|
339
|
+
let doc = await this.log.log.get(existingDocument.context.head);
|
|
340
|
+
if (!doc) {
|
|
341
|
+
logger.error("Failed to find Document from head");
|
|
342
|
+
return false;
|
|
343
|
+
}
|
|
344
|
+
if (await pointsToHistory(doc)) {
|
|
345
|
+
// references the existing document
|
|
346
|
+
return operation;
|
|
347
|
+
}
|
|
348
|
+
return false;
|
|
349
|
+
} else {
|
|
350
|
+
throw new Error("Unsupported operation");
|
|
351
|
+
}
|
|
352
|
+
|
|
353
|
+
return operation;
|
|
354
|
+
} catch (error) {
|
|
355
|
+
if (error instanceof AccessError) {
|
|
356
|
+
return false; // we cant index because we can not decrypt
|
|
357
|
+
} else if (error instanceof BorshError) {
|
|
358
|
+
logger.warn("Received payload that could not be decoded, skipping");
|
|
359
|
+
return false;
|
|
360
|
+
}
|
|
361
|
+
throw error;
|
|
362
|
+
}
|
|
363
|
+
}
|
|
364
|
+
|
|
365
|
+
public async put(
|
|
366
|
+
doc: T,
|
|
367
|
+
options?: SharedAppendOptions<Operation> & { unique?: boolean },
|
|
368
|
+
) {
|
|
369
|
+
const keyValue = this.idResolver(doc);
|
|
370
|
+
|
|
371
|
+
// type check the key
|
|
372
|
+
indexerTypes.checkId(keyValue);
|
|
373
|
+
|
|
374
|
+
const ser = serialize(doc);
|
|
375
|
+
if (ser.length > MAX_BATCH_SIZE) {
|
|
376
|
+
throw new Error(
|
|
377
|
+
`Document is too large (${
|
|
378
|
+
ser.length * 1e-6
|
|
379
|
+
}) mb). Needs to be less than ${MAX_BATCH_SIZE * 1e-6} mb`,
|
|
380
|
+
);
|
|
381
|
+
}
|
|
382
|
+
|
|
383
|
+
const existingDocument = options?.unique
|
|
384
|
+
? undefined
|
|
385
|
+
: (
|
|
386
|
+
await this._index.getDetailed(keyValue, {
|
|
387
|
+
local: true,
|
|
388
|
+
remote: { sync: true }, // only query remote if we know they exist
|
|
389
|
+
})
|
|
390
|
+
)?.[0]?.results[0];
|
|
391
|
+
|
|
392
|
+
const operation = new PutOperation({
|
|
393
|
+
data: ser,
|
|
394
|
+
});
|
|
395
|
+
const appended = await this.log.append(operation, {
|
|
396
|
+
...options,
|
|
397
|
+
meta: {
|
|
398
|
+
next: existingDocument
|
|
399
|
+
? [await this._resolveEntry(existingDocument.context.head)]
|
|
400
|
+
: [],
|
|
401
|
+
...options?.meta,
|
|
402
|
+
},
|
|
403
|
+
canAppend: (entry) => {
|
|
404
|
+
return this.canAppend(entry, { document: doc, operation });
|
|
405
|
+
},
|
|
406
|
+
onChange: (change) => {
|
|
407
|
+
return this.handleChanges(change, { document: doc, operation });
|
|
408
|
+
},
|
|
409
|
+
});
|
|
410
|
+
|
|
411
|
+
return appended;
|
|
412
|
+
}
|
|
413
|
+
|
|
414
|
+
async del(
|
|
415
|
+
id: indexerTypes.Ideable,
|
|
416
|
+
options?: SharedAppendOptions<Operation>,
|
|
417
|
+
) {
|
|
418
|
+
const key = indexerTypes.toId(id);
|
|
419
|
+
const existing = (
|
|
420
|
+
await this._index.getDetailed(key, {
|
|
421
|
+
local: true,
|
|
422
|
+
remote: { sync: true },
|
|
423
|
+
})
|
|
424
|
+
)?.[0]?.results[0];
|
|
425
|
+
|
|
426
|
+
if (!existing) {
|
|
427
|
+
throw new Error(`No entry with key '${key.primitive}' in the database`);
|
|
428
|
+
}
|
|
429
|
+
|
|
430
|
+
return this.log.append(
|
|
431
|
+
new DeleteOperation({
|
|
432
|
+
key,
|
|
433
|
+
}),
|
|
434
|
+
{
|
|
435
|
+
...options,
|
|
436
|
+
meta: {
|
|
437
|
+
next: [await this._resolveEntry(existing.context.head)],
|
|
438
|
+
type: EntryType.CUT,
|
|
439
|
+
...options?.meta,
|
|
440
|
+
},
|
|
441
|
+
}, //
|
|
442
|
+
);
|
|
443
|
+
}
|
|
444
|
+
|
|
445
|
+
async handleChanges(
|
|
446
|
+
change: Change<Operation>,
|
|
447
|
+
reference?: { document: T; operation: PutOperation },
|
|
448
|
+
): Promise<void> {
|
|
449
|
+
const isAppendOperation =
|
|
450
|
+
change?.added.length === 1 ? !!change.added[0] : false;
|
|
451
|
+
|
|
452
|
+
const removedSet = new Map<string, ShallowOrFullEntry<Operation>>();
|
|
453
|
+
for (const r of change.removed) {
|
|
454
|
+
removedSet.set(r.hash, r);
|
|
455
|
+
}
|
|
456
|
+
const sortedEntries = [
|
|
457
|
+
...change.added,
|
|
458
|
+
...((await Promise.all(
|
|
459
|
+
change.removed.map((x) =>
|
|
460
|
+
x instanceof Entry ? x : this.log.log.entryIndex.get(x.hash),
|
|
461
|
+
),
|
|
462
|
+
)) || []),
|
|
463
|
+
]; // TODO assert sorting
|
|
464
|
+
/*
|
|
465
|
+
const sortedEntries = [...change.added, ...(removed || [])]
|
|
466
|
+
.sort(this.log.log.sortFn)
|
|
467
|
+
.reverse(); // sort so we get newest to oldest */
|
|
468
|
+
|
|
469
|
+
// There might be a case where change.added and change.removed contains the same document id. Usaully because you use the "trim" option
|
|
470
|
+
// in combination with inserting the same document. To mitigate this, we loop through the changes and modify the behaviour for this
|
|
471
|
+
|
|
472
|
+
let documentsChanged: DocumentsChange<T> = {
|
|
473
|
+
added: [],
|
|
474
|
+
removed: [],
|
|
475
|
+
};
|
|
476
|
+
|
|
477
|
+
let modified: Set<string | number | bigint> = new Set();
|
|
478
|
+
for (const item of sortedEntries) {
|
|
479
|
+
if (!item) continue;
|
|
480
|
+
|
|
481
|
+
try {
|
|
482
|
+
const payload =
|
|
483
|
+
item._payload instanceof DecryptedThing
|
|
484
|
+
? item.payload.getValue(item.encoding)
|
|
485
|
+
: await item.getPayloadValue();
|
|
486
|
+
|
|
487
|
+
if (payload instanceof PutOperation && !removedSet.has(item.hash)) {
|
|
488
|
+
let value =
|
|
489
|
+
(isAppendOperation &&
|
|
490
|
+
reference?.operation === payload &&
|
|
491
|
+
reference?.document) ||
|
|
492
|
+
this.index.valueEncoding.decoder(payload.data);
|
|
493
|
+
|
|
494
|
+
// get index key from value
|
|
495
|
+
const keyObject = this.idResolver(value);
|
|
496
|
+
|
|
497
|
+
const key = indexerTypes.toId(keyObject);
|
|
498
|
+
|
|
499
|
+
// document is already updated with more recent entry
|
|
500
|
+
if (modified.has(key.primitive)) {
|
|
501
|
+
continue;
|
|
502
|
+
}
|
|
503
|
+
|
|
504
|
+
// Program specific
|
|
505
|
+
if (value instanceof Program) {
|
|
506
|
+
// if replicator, then open
|
|
507
|
+
if (
|
|
508
|
+
(await this.canOpen!(value, item)) &&
|
|
509
|
+
(await this.log.isReplicator(item)) // TODO types, throw runtime error if replicator is not provided
|
|
510
|
+
) {
|
|
511
|
+
value = (await this.node.open(value, {
|
|
512
|
+
parent: this as Program<any, any>,
|
|
513
|
+
existing: "reuse",
|
|
514
|
+
})) as any as T; // TODO types
|
|
515
|
+
}
|
|
516
|
+
}
|
|
517
|
+
documentsChanged.added.push(value);
|
|
518
|
+
await this._index.put(value, item, key);
|
|
519
|
+
modified.add(key.primitive);
|
|
520
|
+
} else if (
|
|
521
|
+
(payload instanceof DeleteOperation && !removedSet.has(item.hash)) ||
|
|
522
|
+
payload instanceof PutOperation ||
|
|
523
|
+
removedSet.has(item.hash)
|
|
524
|
+
) {
|
|
525
|
+
this._manuallySynced.delete(item.gid);
|
|
526
|
+
|
|
527
|
+
let value: T;
|
|
528
|
+
let key: indexerTypes.IdKey;
|
|
529
|
+
|
|
530
|
+
if (payload instanceof PutOperation) {
|
|
531
|
+
value = this.index.valueEncoding.decoder(payload.data);
|
|
532
|
+
key = indexerTypes.toId(this.idResolver(value));
|
|
533
|
+
// document is already updated with more recent entry
|
|
534
|
+
if (modified.has(key.primitive)) {
|
|
535
|
+
continue;
|
|
536
|
+
}
|
|
537
|
+
} else if (payload instanceof DeleteOperation) {
|
|
538
|
+
key = payload.key;
|
|
539
|
+
// document is already updated with more recent entry
|
|
540
|
+
if (modified.has(key.primitive)) {
|
|
541
|
+
continue;
|
|
542
|
+
}
|
|
543
|
+
const document = await this._index.get(key, {
|
|
544
|
+
local: true,
|
|
545
|
+
remote: false,
|
|
546
|
+
});
|
|
547
|
+
if (!document) {
|
|
548
|
+
continue;
|
|
549
|
+
}
|
|
550
|
+
value = document;
|
|
551
|
+
} else {
|
|
552
|
+
throw new Error("Unexpected");
|
|
553
|
+
}
|
|
554
|
+
|
|
555
|
+
documentsChanged.removed.push(value);
|
|
556
|
+
|
|
557
|
+
if (value instanceof Program) {
|
|
558
|
+
await value.drop(this);
|
|
559
|
+
}
|
|
560
|
+
|
|
561
|
+
// update index
|
|
562
|
+
await this._index.del(key);
|
|
563
|
+
modified.add(key.primitive);
|
|
564
|
+
} else {
|
|
565
|
+
// Unknown operation
|
|
566
|
+
throw new OperationError("Unknown operation");
|
|
567
|
+
}
|
|
568
|
+
} catch (error) {
|
|
569
|
+
if (error instanceof AccessError) {
|
|
570
|
+
continue;
|
|
571
|
+
}
|
|
572
|
+
throw error;
|
|
573
|
+
}
|
|
574
|
+
}
|
|
575
|
+
|
|
576
|
+
this.events.dispatchEvent(
|
|
577
|
+
new CustomEvent("change", { detail: documentsChanged }),
|
|
578
|
+
);
|
|
579
|
+
}
|
|
580
|
+
}
|