@peerbit/document 6.0.7 → 7.0.0-3a75d6e

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66)
  1. package/README.md +2 -2
  2. package/dist/benchmark/index.d.ts +2 -0
  3. package/dist/benchmark/index.d.ts.map +1 -0
  4. package/dist/benchmark/index.js +125 -0
  5. package/dist/benchmark/index.js.map +1 -0
  6. package/dist/benchmark/memory/index.d.ts +2 -0
  7. package/dist/benchmark/memory/index.d.ts.map +1 -0
  8. package/dist/benchmark/memory/index.js +122 -0
  9. package/dist/benchmark/memory/index.js.map +1 -0
  10. package/dist/benchmark/memory/insert.d.ts +2 -0
  11. package/dist/benchmark/memory/insert.d.ts.map +1 -0
  12. package/dist/benchmark/memory/insert.js +133 -0
  13. package/dist/benchmark/memory/insert.js.map +1 -0
  14. package/dist/benchmark/memory/utils.d.ts +13 -0
  15. package/dist/benchmark/memory/utils.d.ts.map +1 -0
  16. package/dist/benchmark/memory/utils.js +2 -0
  17. package/dist/benchmark/memory/utils.js.map +1 -0
  18. package/dist/benchmark/replication.d.ts +2 -0
  19. package/dist/benchmark/replication.d.ts.map +1 -0
  20. package/dist/benchmark/replication.js +172 -0
  21. package/dist/benchmark/replication.js.map +1 -0
  22. package/dist/src/borsh.d.ts +2 -0
  23. package/dist/src/borsh.d.ts.map +1 -0
  24. package/dist/src/borsh.js +16 -0
  25. package/dist/src/borsh.js.map +1 -0
  26. package/dist/src/constants.d.ts +2 -0
  27. package/dist/src/constants.d.ts.map +1 -0
  28. package/dist/src/constants.js +2 -0
  29. package/dist/src/constants.js.map +1 -0
  30. package/dist/src/index.d.ts +4 -0
  31. package/dist/src/index.d.ts.map +1 -0
  32. package/dist/src/index.js +4 -0
  33. package/dist/src/index.js.map +1 -0
  34. package/dist/src/program.d.ts +87 -0
  35. package/dist/src/program.d.ts.map +1 -0
  36. package/{lib/esm/document-store.js → dist/src/program.js} +159 -138
  37. package/dist/src/program.js.map +1 -0
  38. package/dist/src/search.d.ts +132 -0
  39. package/dist/src/search.d.ts.map +1 -0
  40. package/dist/src/search.js +845 -0
  41. package/dist/src/search.js.map +1 -0
  42. package/package.json +74 -43
  43. package/src/borsh.ts +19 -0
  44. package/src/constants.ts +1 -0
  45. package/src/index.ts +3 -3
  46. package/src/program.ts +580 -0
  47. package/src/search.ts +1217 -0
  48. package/LICENSE +0 -202
  49. package/lib/esm/document-index.d.ts +0 -147
  50. package/lib/esm/document-index.js +0 -942
  51. package/lib/esm/document-index.js.map +0 -1
  52. package/lib/esm/document-store.d.ts +0 -72
  53. package/lib/esm/document-store.js.map +0 -1
  54. package/lib/esm/index.d.ts +0 -3
  55. package/lib/esm/index.js +0 -4
  56. package/lib/esm/index.js.map +0 -1
  57. package/lib/esm/query.d.ts +0 -191
  58. package/lib/esm/query.js +0 -615
  59. package/lib/esm/query.js.map +0 -1
  60. package/lib/esm/utils.d.ts +0 -3
  61. package/lib/esm/utils.js +0 -12
  62. package/lib/esm/utils.js.map +0 -1
  63. package/src/document-index.ts +0 -1268
  64. package/src/document-store.ts +0 -547
  65. package/src/query.ts +0 -525
  66. package/src/utils.ts +0 -17
@@ -0,0 +1,845 @@
1
+ var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
2
+ var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
3
+ if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
4
+ else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
5
+ return c > 3 && r && Object.defineProperty(target, key, r), r;
6
+ };
7
+ var __metadata = (this && this.__metadata) || function (k, v) {
8
+ if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
9
+ };
10
+ import { field, serialize, variant } from "@dao-xyz/borsh";
11
+ import { Cache } from "@peerbit/cache";
12
+ import { PublicSignKey, sha256Base64Sync, } from "@peerbit/crypto";
13
+ import * as types from "@peerbit/document-interface";
14
+ import * as indexerTypes from "@peerbit/indexer-interface";
15
+ import { HashmapIndex } from "@peerbit/indexer-simple";
16
+ import { BORSH_ENCODING, Entry } from "@peerbit/log";
17
+ import { logger as loggerFn } from "@peerbit/logger";
18
+ import { Program } from "@peerbit/program";
19
+ import { MissingResponsesError, RPC, queryAll, } from "@peerbit/rpc";
20
+ import { SharedLog } from "@peerbit/shared-log";
21
+ import { SilentDelivery } from "@peerbit/stream-interface";
22
+ import { AbortError } from "@peerbit/time";
23
+ import { concat, fromString } from "uint8arrays";
24
+ import { copySerialization } from "./borsh.js";
25
+ import { MAX_BATCH_SIZE } from "./constants.js";
26
+ const logger = loggerFn({ module: "document-index" });
27
+ let Operation = class Operation /* <T> */ {
28
+ };
29
+ Operation = __decorate([
30
+ variant(0)
31
+ ], Operation);
32
+ export { Operation };
33
+ export const BORSH_ENCODING_OPERATION = BORSH_ENCODING(Operation);
34
+ /**
35
+ * Put a complete document at a key
36
+ */
37
+ let PutOperation = class PutOperation extends Operation /* <T> */ {
38
+ data;
39
+ /* _value?: T; */
40
+ constructor(props) {
41
+ super();
42
+ this.data = props.data;
43
+ }
44
+ };
45
+ __decorate([
46
+ field({ type: Uint8Array }),
47
+ __metadata("design:type", Uint8Array)
48
+ ], PutOperation.prototype, "data", void 0);
49
+ PutOperation = __decorate([
50
+ variant(0),
51
+ __metadata("design:paramtypes", [Object])
52
+ ], PutOperation);
53
+ export { PutOperation };
54
+ /* @variant(1)
55
+ export class PutAllOperation<T> extends Operation<T> {
56
+ @field({ type: vec(PutOperation) })
57
+ docs: PutOperation<T>[];
58
+
59
+ constructor(props?: { docs: PutOperation<T>[] }) {
60
+ super();
61
+ if (props) {
62
+ this.docs = props.docs;
63
+ }
64
+ }
65
+ }
66
+ */
67
+ /**
68
+ * Delete a document at a key
69
+ */
70
+ let DeleteOperation = class DeleteOperation extends Operation {
71
+ key;
72
+ constructor(props) {
73
+ super();
74
+ this.key = props.key;
75
+ }
76
+ };
77
+ __decorate([
78
+ field({ type: indexerTypes.IdKey }),
79
+ __metadata("design:type", indexerTypes.IdKey)
80
+ ], DeleteOperation.prototype, "key", void 0);
81
+ DeleteOperation = __decorate([
82
+ variant(2),
83
+ __metadata("design:paramtypes", [Object])
84
+ ], DeleteOperation);
85
+ export { DeleteOperation };
86
+ const introduceEntries = async (responses, type, sync, options) => {
87
+ const results = [];
88
+ for (const response of responses) {
89
+ if (!response.from) {
90
+ logger.error("Missing from for response");
91
+ }
92
+ if (response.response instanceof types.Results) {
93
+ response.response.results.forEach((r) => r.init(type));
94
+ if (typeof options?.remote !== "boolean" && options?.remote?.sync) {
95
+ await sync(response.response);
96
+ }
97
+ options?.onResponse &&
98
+ (await options.onResponse(response.response, response.from)); // TODO fix types
99
+ results.push(response);
100
+ }
101
+ else if (response.response instanceof types.NoAccess) {
102
+ logger.error("Search resulted in access error");
103
+ }
104
+ else {
105
+ throw new Error("Unsupported");
106
+ }
107
+ }
108
+ return results;
109
+ };
110
+ const dedup = (allResult, dedupBy) => {
111
+ const unique = new Set();
112
+ const dedup = [];
113
+ for (const result of allResult) {
114
+ const key = indexerTypes.toId(dedupBy(result));
115
+ const primitive = key.primitive;
116
+ if (unique.has(primitive)) {
117
+ continue;
118
+ }
119
+ unique.add(primitive);
120
+ dedup.push(result);
121
+ }
122
+ return dedup;
123
+ };
124
+ const DEFAULT_INDEX_BY = "id";
125
+ const isTransformerWithFunction = (options) => {
126
+ return options.transform != null;
127
+ };
128
+ let DocumentIndex = class DocumentIndex extends Program {
129
+ _query;
130
+ // Original document representation
131
+ documentType;
132
+ // transform options
133
+ transformer;
134
+ // The indexed document wrapped in a context
135
+ wrappedIndexedType;
136
+ // The database type, for recursive indexing
137
+ dbType;
138
+ indexedTypeIsDocumentType;
139
+ // Index key
140
+ indexBy;
141
+ indexByResolver;
142
+ index;
143
+ // Transformation, indexer
144
+ /* fields: IndexableFields<T, I>; */
145
+ _valueEncoding;
146
+ _sync;
147
+ _log;
148
+ _resolverProgramCache;
149
+ _resolverCache;
150
+ _isProgramValues;
151
+ _resultQueue;
152
+ constructor(properties) {
153
+ super();
154
+ this._query = properties?.query || new RPC();
155
+ }
156
+ get valueEncoding() {
157
+ return this._valueEncoding;
158
+ }
159
+ async open(properties) {
160
+ this._log = properties.log;
161
+ this.documentType = properties.documentType;
162
+ this.indexedTypeIsDocumentType =
163
+ !properties.transform?.type ||
164
+ properties.transform?.type === properties.documentType;
165
+ class IndexedClassWithContex {
166
+ __context;
167
+ constructor(value, context) {
168
+ Object.assign(this, value);
169
+ this.__context = context;
170
+ }
171
+ }
172
+ __decorate([
173
+ field({ type: types.Context }),
174
+ __metadata("design:type", types.Context)
175
+ ], IndexedClassWithContex.prototype, "__context", void 0);
176
+ // copy all prototype values from indexedType to IndexedClassWithContex
177
+ copySerialization((properties.transform?.type || properties.documentType), IndexedClassWithContex);
178
+ this.wrappedIndexedType = IndexedClassWithContex;
179
+ // if this.type is a class that extends Program we want to do special functionality
180
+ this._isProgramValues = this.documentType instanceof Program;
181
+ this.dbType = properties.dbType;
182
+ this._resultQueue = new Map();
183
+ this._sync = properties.sync;
184
+ const transformOptions = properties.transform;
185
+ this.transformer = transformOptions
186
+ ? isTransformerWithFunction(transformOptions)
187
+ ? (obj, context) => transformOptions.transform(obj, context)
188
+ : transformOptions.type
189
+ ? (obj, context) => new transformOptions.type(obj, context)
190
+ : (obj) => obj
191
+ : (obj) => obj; // TODO types
192
+ const maybeArr = properties.indexBy || DEFAULT_INDEX_BY;
193
+ this.indexBy = Array.isArray(maybeArr) ? maybeArr : [maybeArr];
194
+ this.indexByResolver = (obj) => indexerTypes.extractFieldValue(obj, this.indexBy);
195
+ this._valueEncoding = BORSH_ENCODING(this.documentType);
196
+ if (this._isProgramValues) {
197
+ this._resolverProgramCache = new Map();
198
+ }
199
+ this._resolverCache = new Cache({ max: 10 }); // TODO choose limit better (adaptive)
200
+ this.index =
201
+ (await (await this.node.indexer.scope(sha256Base64Sync(concat([this._log.log.id, fromString("/document-index")])))).init({
202
+ indexBy: this.indexBy,
203
+ schema: this.wrappedIndexedType,
204
+ nested: {
205
+ match: (obj) => obj instanceof this.dbType,
206
+ query: async (obj, query) => obj.index.search(query),
207
+ },
208
+ /* maxBatchSize: MAX_BATCH_SIZE */
209
+ })) || new HashmapIndex();
210
+ await this._query.open({
211
+ topic: sha256Base64Sync(concat([this._log.log.id, fromString("/document")])),
212
+ responseHandler: async (query, ctx) => {
213
+ if (!ctx.from) {
214
+ logger.info("Receieved query without from");
215
+ return;
216
+ }
217
+ if (properties.canSearch &&
218
+ (query instanceof indexerTypes.SearchRequest ||
219
+ query instanceof indexerTypes.CollectNextRequest) &&
220
+ !(await properties.canSearch(query, ctx.from))) {
221
+ return new types.NoAccess();
222
+ }
223
+ if (query instanceof indexerTypes.CloseIteratorRequest) {
224
+ this.processCloseIteratorRequest(query, ctx.from);
225
+ }
226
+ else {
227
+ const results = await this.processQuery(query, ctx.from, false, {
228
+ canRead: properties.canRead,
229
+ });
230
+ return new types.Results({
231
+ // Even if results might have length 0, respond, because then we now at least there are no matching results
232
+ results: results.results,
233
+ kept: results.kept,
234
+ });
235
+ }
236
+ },
237
+ responseType: types.AbstractSearchResult,
238
+ queryType: indexerTypes.AbstractSearchRequest,
239
+ });
240
+ }
241
+ getPending(cursorId) {
242
+ const queue = this._resultQueue.get(cursorId);
243
+ if (queue) {
244
+ return queue.queue.length + queue.keptInIndex;
245
+ }
246
+ return this.index.getPending(cursorId);
247
+ }
248
+ async close(from) {
249
+ const closed = await super.close(from);
250
+ if (closed) {
251
+ await this.index.stop?.();
252
+ }
253
+ return closed;
254
+ }
255
+ async drop(from) {
256
+ const dropped = await super.drop(from);
257
+ if (dropped) {
258
+ await this.index.drop?.();
259
+ await this.index.stop?.();
260
+ }
261
+ return dropped;
262
+ }
263
+ async get(key, options) {
264
+ return (await this.getDetailed(key instanceof indexerTypes.IdKey ? key : indexerTypes.toId(key), options))?.[0]?.results[0]?.value;
265
+ }
266
+ async put(value, entry, id) {
267
+ const idString = id.primitive;
268
+ if (this._isProgramValues) {
269
+ this._resolverProgramCache.set(idString, value);
270
+ }
271
+ else {
272
+ this._resolverCache.add(idString, value);
273
+ }
274
+ const existing = await this.index.get(id);
275
+ const context = new types.Context({
276
+ created: existing?.value.__context.created ||
277
+ entry.meta.clock.timestamp.wallTime,
278
+ modified: entry.meta.clock.timestamp.wallTime,
279
+ head: entry.hash,
280
+ gid: entry.gid,
281
+ size: entry.payloadByteLength,
282
+ });
283
+ const valueToIndex = await this.transformer(value, context);
284
+ const wrappedValueToIndex = new this.wrappedIndexedType(valueToIndex, context);
285
+ await this.index.put(wrappedValueToIndex);
286
+ }
287
+ del(key) {
288
+ if (this._isProgramValues) {
289
+ this._resolverProgramCache.delete(key.primitive);
290
+ }
291
+ else {
292
+ this._resolverCache.del(key.primitive);
293
+ }
294
+ return this.index.del(new indexerTypes.DeleteRequest({
295
+ query: [indexerTypes.getMatcher(this.indexBy, key.key)],
296
+ }));
297
+ }
298
+ async getDetailed(key, options) {
299
+ let results;
300
+ if (key instanceof Uint8Array) {
301
+ results = await this.queryDetailed(new indexerTypes.SearchRequest({
302
+ query: [
303
+ new indexerTypes.ByteMatchQuery({ key: this.indexBy, value: key }),
304
+ ],
305
+ }), options);
306
+ }
307
+ else {
308
+ const indexableKey = indexerTypes.toIdeable(key);
309
+ if (typeof indexableKey === "number" ||
310
+ typeof indexableKey === "bigint") {
311
+ results = await this.queryDetailed(new indexerTypes.SearchRequest({
312
+ query: [
313
+ new indexerTypes.IntegerCompare({
314
+ key: this.indexBy,
315
+ compare: indexerTypes.Compare.Equal,
316
+ value: indexableKey,
317
+ }),
318
+ ],
319
+ }), options);
320
+ }
321
+ else if (typeof indexableKey === "string") {
322
+ results = await this.queryDetailed(new indexerTypes.SearchRequest({
323
+ query: [
324
+ new indexerTypes.StringMatch({
325
+ key: this.indexBy,
326
+ value: indexableKey,
327
+ }),
328
+ ],
329
+ }), options);
330
+ }
331
+ else if (indexableKey instanceof Uint8Array) {
332
+ results = await this.queryDetailed(new indexerTypes.SearchRequest({
333
+ query: [
334
+ new indexerTypes.ByteMatchQuery({
335
+ key: this.indexBy,
336
+ value: indexableKey,
337
+ }),
338
+ ],
339
+ }), options);
340
+ }
341
+ }
342
+ return results;
343
+ }
344
+ getSize() {
345
+ return this.index.getSize();
346
+ }
347
+ async resolveDocument(value) {
348
+ const cached = this._resolverCache.get(value.id.primitive) ||
349
+ this._resolverProgramCache?.get(value.id.primitive);
350
+ if (cached != null) {
351
+ return { value: cached };
352
+ }
353
+ if (this.indexedTypeIsDocumentType) {
354
+ // cast value to T, i.e. convert the class but keep all properties except the __context
355
+ const obj = Object.assign(Object.create(this.documentType.prototype), value.value);
356
+ delete obj.__context;
357
+ return { value: obj };
358
+ }
359
+ const head = await this._log.log.get(value.value.__context.head);
360
+ if (!head) {
361
+ return undefined; // we could end up here if we recently pruned the document and other peers never persisted the entry
362
+ // TODO update changes in index before removing entries from log entry storage
363
+ }
364
+ const payloadValue = await head.getPayloadValue();
365
+ if (payloadValue instanceof PutOperation) {
366
+ return {
367
+ value: this.valueEncoding.decoder(payloadValue.data),
368
+ /* size: payloadValue.data.byteLength */
369
+ };
370
+ }
371
+ throw new Error("Unexpected value type when getting document: " +
372
+ payloadValue?.constructor?.name || typeof payloadValue);
373
+ }
374
+ async processQuery(query, from, isLocal, options) {
375
+ // We do special case for querying the id as we can do it faster than iterating
376
+ let prevQueued = isLocal
377
+ ? undefined
378
+ : this._resultQueue.get(query.idString);
379
+ if (prevQueued && !from.equals(prevQueued.from)) {
380
+ throw new Error("Different from in queued results");
381
+ }
382
+ let indexedResult = undefined;
383
+ if (query instanceof indexerTypes.SearchRequest) {
384
+ indexedResult = await this.index.query(query);
385
+ }
386
+ else if (query instanceof indexerTypes.CollectNextRequest) {
387
+ indexedResult =
388
+ prevQueued?.keptInIndex === 0
389
+ ? { kept: 0, results: [] }
390
+ : await this.index.next(query);
391
+ }
392
+ else {
393
+ throw new Error("Unsupported");
394
+ }
395
+ const filteredResults = [];
396
+ let resultSize = 0;
397
+ let toIterate = prevQueued
398
+ ? [...prevQueued.queue, ...indexedResult.results]
399
+ : indexedResult.results;
400
+ if (prevQueued) {
401
+ this._resultQueue.delete(query.idString);
402
+ prevQueued = undefined;
403
+ }
404
+ if (!isLocal) {
405
+ prevQueued = {
406
+ from,
407
+ queue: [],
408
+ timeout: setTimeout(() => {
409
+ this._resultQueue.delete(query.idString);
410
+ }, 6e4),
411
+ keptInIndex: indexedResult.kept,
412
+ };
413
+ this._resultQueue.set(query.idString, prevQueued);
414
+ }
415
+ for (const result of toIterate) {
416
+ if (!isLocal) {
417
+ resultSize += result.value.__context.size;
418
+ if (resultSize > MAX_BATCH_SIZE) {
419
+ prevQueued.queue.push(result);
420
+ continue;
421
+ }
422
+ }
423
+ const value = await this.resolveDocument(result);
424
+ if (!value ||
425
+ (options?.canRead && !(await options.canRead(value.value, from)))) {
426
+ continue;
427
+ }
428
+ filteredResults.push(new types.ResultWithSource({
429
+ context: result.value.__context,
430
+ value: value.value,
431
+ source: serialize(value.value),
432
+ indexed: result.value,
433
+ }));
434
+ }
435
+ const results = new types.Results({
436
+ results: filteredResults,
437
+ kept: BigInt(indexedResult.kept + (prevQueued?.queue.length || 0)),
438
+ });
439
+ if (!isLocal && results.kept === 0n) {
440
+ this.clearResultsQueue(query);
441
+ }
442
+ return results;
443
+ }
444
+ clearResultsQueue(query) {
445
+ const queue = this._resultQueue.get(query.idString);
446
+ if (queue) {
447
+ clearTimeout(queue.timeout);
448
+ this._resultQueue.delete(query.idString);
449
+ }
450
+ }
451
+ async processCloseIteratorRequest(query, publicKey) {
452
+ const queueData = this._resultQueue.get(query.idString);
453
+ if (queueData && !queueData.from.equals(publicKey)) {
454
+ logger.info("Ignoring close iterator request from different peer");
455
+ return;
456
+ }
457
+ this.clearResultsQueue(query);
458
+ return this.index.close(query);
459
+ }
460
+ /**
461
+ * Query and retrieve results with most details
462
+ * @param queryRequest
463
+ * @param options
464
+ * @returns
465
+ */
466
+ async queryDetailed(queryRequest, options) {
467
+ const local = typeof options?.local === "boolean" ? options?.local : true;
468
+ let remote = undefined;
469
+ if (typeof options?.remote === "boolean") {
470
+ if (options?.remote) {
471
+ remote = {};
472
+ }
473
+ else {
474
+ remote = undefined;
475
+ }
476
+ }
477
+ else {
478
+ remote = options?.remote || {};
479
+ }
480
+ if (remote && remote.priority == null) {
481
+ // give queries higher priority than other "normal" data activities
482
+ // without this, we might have a scenario that a peer joina network with large amount of data to be synced, but can not query anything before that is done
483
+ // this will lead to bad UX as you usually want to list/expore whats going on before doing any replication work
484
+ remote.priority = 1;
485
+ }
486
+ const promises = [];
487
+ if (!local && !remote) {
488
+ throw new Error("Expecting either 'options.remote' or 'options.local' to be true");
489
+ }
490
+ const allResults = [];
491
+ if (local) {
492
+ const results = await this.processQuery(queryRequest, this.node.identity.publicKey, true);
493
+ if (results.results.length > 0) {
494
+ options?.onResponse &&
495
+ (await options.onResponse(results, this.node.identity.publicKey));
496
+ allResults.push(results);
497
+ }
498
+ }
499
+ if (remote) {
500
+ const replicatorGroups = await this._log.getReplicatorUnion(remote.minAge);
501
+ if (replicatorGroups) {
502
+ const groupHashes = replicatorGroups.map((x) => [x]);
503
+ const fn = async () => {
504
+ const rs = [];
505
+ const responseHandler = async (results) => {
506
+ for (const r of await introduceEntries(results, this.documentType, this._sync, options)) {
507
+ rs.push(r.response);
508
+ }
509
+ };
510
+ try {
511
+ if (queryRequest instanceof indexerTypes.CloseIteratorRequest) {
512
+ // don't wait for responses
513
+ await this._query.request(queryRequest, { mode: remote.mode });
514
+ }
515
+ else {
516
+ await queryAll(this._query, groupHashes, queryRequest, responseHandler, remote);
517
+ }
518
+ }
519
+ catch (error) {
520
+ if (error instanceof MissingResponsesError) {
521
+ logger.warn("Did not reciveve responses from all shard");
522
+ if (remote?.throwOnMissing) {
523
+ throw error;
524
+ }
525
+ }
526
+ else {
527
+ throw error;
528
+ }
529
+ }
530
+ return rs;
531
+ };
532
+ promises.push(fn());
533
+ }
534
+ else {
535
+ // TODO send without direction out to the world? or just assume we can insert?
536
+ /* promises.push(
537
+ this._query
538
+ .request(queryRequest, remote)
539
+ .then((results) => introduceEntries(results, this.type, this._sync, options).then(x => x.map(y => y.response)))
540
+ ); */
541
+ /* throw new Error(
542
+ "Missing remote replicator info for performing distributed document query"
543
+ ); */
544
+ }
545
+ }
546
+ const resolved = await Promise.all(promises);
547
+ for (const r of resolved) {
548
+ if (r) {
549
+ if (r instanceof Array) {
550
+ allResults.push(...r);
551
+ }
552
+ else {
553
+ allResults.push(r);
554
+ }
555
+ }
556
+ }
557
+ return allResults;
558
+ }
559
+ /**
560
+ * Query and retrieve results
561
+ * @param queryRequest
562
+ * @param options
563
+ * @returns
564
+ */
565
+ async search(queryRequest, options) {
566
+ // Set fetch to search size, or max value (default to max u32 (4294967295))
567
+ queryRequest.fetch = queryRequest.fetch ?? 0xffffffff;
568
+ // So that the iterator is pre-fetching the right amount of entries
569
+ const iterator = this.iterate(queryRequest, options);
570
+ // So that this call will not do any remote requests
571
+ const allResults = [];
572
+ while (iterator.done() === false &&
573
+ queryRequest.fetch > allResults.length) {
574
+ // We might need to pull .next multiple time due to data message size limitations
575
+ for (const result of await iterator.next(queryRequest.fetch - allResults.length)) {
576
+ allResults.push(result);
577
+ }
578
+ }
579
+ await iterator.close();
580
+ //s Deduplicate and return values directly
581
+ return dedup(allResults, this.indexByResolver);
582
+ }
583
+ /**
584
+ * Query and retrieve documents in a iterator
585
+ * @param queryRequest
586
+ * @param options
587
+ * @returns
588
+ */
589
+ iterate(queryRequest, options) {
590
+ let fetchPromise = undefined;
591
+ const peerBufferMap = new Map();
592
+ const visited = new Set();
593
+ let done = false;
594
+ let first = false;
595
+ // TODO handle join/leave while iterating
596
+ const controller = new AbortController();
597
+ const peerBuffers = () => {
598
+ return [...peerBufferMap.values()].map((x) => x.buffer).flat();
599
+ };
600
+ const fetchFirst = async (n) => {
601
+ done = true; // Assume we are donne
602
+ queryRequest.fetch = n;
603
+ await this.queryDetailed(queryRequest, {
604
+ ...options,
605
+ onResponse: async (response, from) => {
606
+ if (!from) {
607
+ logger.error("Missing response from");
608
+ return;
609
+ }
610
+ if (response instanceof types.NoAccess) {
611
+ logger.error("Dont have access");
612
+ return;
613
+ }
614
+ else if (response instanceof types.Results) {
615
+ const results = response;
616
+ if (results.kept === 0n && results.results.length === 0) {
617
+ return;
618
+ }
619
+ if (results.kept > 0n) {
620
+ done = false; // we have more to do later!
621
+ }
622
+ const buffer = [];
623
+ for (const result of results.results) {
624
+ const indexKey = indexerTypes.toId(this.indexByResolver(result.value)).primitive;
625
+ if (visited.has(indexKey)) {
626
+ continue;
627
+ }
628
+ visited.add(indexKey);
629
+ buffer.push({
630
+ value: result.value,
631
+ context: result.context,
632
+ from,
633
+ indexed: result.indexed ||
634
+ (await this.transformer(result.value, result.context)),
635
+ });
636
+ }
637
+ peerBufferMap.set(from.hashcode(), {
638
+ buffer,
639
+ kept: Number(response.kept),
640
+ });
641
+ }
642
+ else {
643
+ throw new Error("Unsupported result type: " + response?.constructor?.name);
644
+ }
645
+ },
646
+ });
647
+ if (done) {
648
+ this.clearResultsQueue(queryRequest);
649
+ }
650
+ return done;
651
+ };
652
+ const fetchAtLeast = async (n) => {
653
+ if (done && first) {
654
+ return;
655
+ }
656
+ await fetchPromise;
657
+ if (!first) {
658
+ first = true;
659
+ fetchPromise = fetchFirst(n);
660
+ return fetchPromise;
661
+ }
662
+ const promises = [];
663
+ let resultsLeft = 0;
664
+ for (const [peer, buffer] of peerBufferMap) {
665
+ if (buffer.buffer.length < n) {
666
+ if (buffer.kept === 0) {
667
+ if (peerBufferMap.get(peer)?.buffer.length === 0) {
668
+ peerBufferMap.delete(peer); // No more results
669
+ }
670
+ continue;
671
+ }
672
+ // TODO buffer more than deleted?
673
+ // TODO batch to multiple 'to's
674
+ const collectRequest = new indexerTypes.CollectNextRequest({
675
+ id: queryRequest.id,
676
+ amount: n - buffer.buffer.length,
677
+ });
678
+ // Fetch locally?
679
+ if (peer === this.node.identity.publicKey.hashcode()) {
680
+ promises.push(this.processQuery(collectRequest, this.node.identity.publicKey, true)
681
+ .then(async (results) => {
682
+ resultsLeft += Number(results.kept);
683
+ if (results.results.length === 0) {
684
+ if (peerBufferMap.get(peer)?.buffer.length === 0) {
685
+ peerBufferMap.delete(peer); // No more results
686
+ }
687
+ }
688
+ else {
689
+ const peerBuffer = peerBufferMap.get(peer);
690
+ if (!peerBuffer) {
691
+ return;
692
+ }
693
+ peerBuffer.kept = Number(results.kept);
694
+ for (const result of results.results) {
695
+ if (visited.has(indexerTypes.toId(this.indexByResolver(result.value))
696
+ .primitive)) {
697
+ continue;
698
+ }
699
+ visited.add(indexerTypes.toId(this.indexByResolver(result.value))
700
+ .primitive);
701
+ peerBuffer.buffer.push({
702
+ value: result.value,
703
+ context: result.context,
704
+ from: this.node.identity.publicKey,
705
+ indexed: result.indexed ||
706
+ (await this.transformer(result.value, result.context)),
707
+ });
708
+ }
709
+ }
710
+ })
711
+ .catch((e) => {
712
+ logger.error("Failed to collect sorted results from self. " + e?.message);
713
+ peerBufferMap.delete(peer);
714
+ }));
715
+ }
716
+ else {
717
+ // Fetch remotely
718
+ promises.push(this._query
719
+ .request(collectRequest, {
720
+ ...options,
721
+ signal: controller.signal,
722
+ priority: 1,
723
+ mode: new SilentDelivery({ to: [peer], redundancy: 1 }),
724
+ })
725
+ .then((response) => introduceEntries(response, this.documentType, this._sync, options)
726
+ .then((responses) => {
727
+ responses.map((response) => {
728
+ resultsLeft += Number(response.response.kept);
729
+ if (!response.from) {
730
+ logger.error("Missing from for sorted query");
731
+ return;
732
+ }
733
+ if (response.response.results.length === 0) {
734
+ if (peerBufferMap.get(peer)?.buffer.length === 0) {
735
+ peerBufferMap.delete(peer); // No more results
736
+ }
737
+ }
738
+ else {
739
+ const peerBuffer = peerBufferMap.get(peer);
740
+ if (!peerBuffer) {
741
+ return;
742
+ }
743
+ peerBuffer.kept = Number(response.response.kept);
744
+ for (const result of response.response.results) {
745
+ if (visited.has(indexerTypes.toId(this.indexByResolver(result.value)).primitive)) {
746
+ continue;
747
+ }
748
+ visited.add(indexerTypes.toId(this.indexByResolver(result.value)).primitive);
749
+ peerBuffer.buffer.push({
750
+ value: result.value,
751
+ context: result.context,
752
+ from: response.from,
753
+ indexed: this.transformer(result.value, result.context),
754
+ });
755
+ }
756
+ }
757
+ });
758
+ })
759
+ .catch((e) => {
760
+ logger.error("Failed to collect sorted results from: " +
761
+ peer +
762
+ ". " +
763
+ e?.message);
764
+ peerBufferMap.delete(peer);
765
+ })));
766
+ }
767
+ }
768
+ else {
769
+ resultsLeft += peerBufferMap.get(peer)?.kept || 0;
770
+ }
771
+ }
772
+ return (fetchPromise = Promise.all(promises).then(() => {
773
+ return resultsLeft === 0; // 0 results left to fetch and 0 pending results
774
+ }));
775
+ };
776
+ const next = async (n) => {
777
+ if (n < 0) {
778
+ throw new Error("Expecting to fetch a positive amount of element");
779
+ }
780
+ if (n === 0) {
781
+ return [];
782
+ }
783
+ // TODO everything below is not very optimized
784
+ const fetchedAll = await fetchAtLeast(n);
785
+ // get n next top entries, shift and pull more results
786
+ const peerBuffersArr = peerBuffers();
787
+ const results = peerBuffersArr.sort((a, b) => indexerTypes.extractSortCompare(a.indexed, b.indexed, queryRequest.sort));
788
+ const pendingMoreResults = n < results.length;
789
+ const batch = results.splice(0, n);
790
+ for (const result of batch) {
791
+ const arr = peerBufferMap.get(result.from.hashcode());
792
+ if (!arr) {
793
+ logger.error("Unexpected empty result buffer");
794
+ continue;
795
+ }
796
+ const idx = arr.buffer.findIndex((x) => x.value === result.value);
797
+ if (idx >= 0) {
798
+ arr.buffer.splice(idx, 1);
799
+ }
800
+ }
801
+ done = fetchedAll && !pendingMoreResults;
802
+ return dedup(batch.map((x) => x.value), this.indexByResolver);
803
+ };
804
+ const close = async () => {
805
+ controller.abort(new AbortError("Iterator closed"));
806
+ const closeRequest = new indexerTypes.CloseIteratorRequest({
807
+ id: queryRequest.id,
808
+ });
809
+ const promises = [];
810
+ for (const [peer, buffer] of peerBufferMap) {
811
+ if (buffer.kept === 0) {
812
+ peerBufferMap.delete(peer);
813
+ continue;
814
+ }
815
+ // Fetch locally?
816
+ if (peer === this.node.identity.publicKey.hashcode()) {
817
+ promises.push(this.processCloseIteratorRequest(closeRequest, this.node.identity.publicKey));
818
+ }
819
+ else {
820
+ // Close remote
821
+ promises.push(this._query.send(closeRequest, {
822
+ ...options,
823
+ mode: new SilentDelivery({ to: [peer], redundancy: 1 }),
824
+ }));
825
+ }
826
+ }
827
+ await Promise.all(promises);
828
+ };
829
+ return {
830
+ close,
831
+ next,
832
+ done: () => done,
833
+ };
834
+ }
835
+ };
836
+ __decorate([
837
+ field({ type: RPC }),
838
+ __metadata("design:type", RPC)
839
+ ], DocumentIndex.prototype, "_query", void 0);
840
+ DocumentIndex = __decorate([
841
+ variant("documents_index"),
842
+ __metadata("design:paramtypes", [Object])
843
+ ], DocumentIndex);
844
+ export { DocumentIndex };
845
+ //# sourceMappingURL=search.js.map