@peerbit/document 6.0.6 → 6.0.7-218a5bb
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -2
- package/dist/benchmark/index.d.ts +2 -0
- package/dist/benchmark/index.d.ts.map +1 -0
- package/dist/benchmark/index.js +126 -0
- package/dist/benchmark/index.js.map +1 -0
- package/dist/benchmark/replication.d.ts +2 -0
- package/dist/benchmark/replication.d.ts.map +1 -0
- package/dist/benchmark/replication.js +174 -0
- package/dist/benchmark/replication.js.map +1 -0
- package/dist/src/constants.d.ts +2 -0
- package/dist/src/constants.d.ts.map +1 -0
- package/dist/src/constants.js +2 -0
- package/dist/src/constants.js.map +1 -0
- package/dist/src/index.d.ts +5 -0
- package/dist/src/index.d.ts.map +1 -0
- package/dist/src/index.js +5 -0
- package/dist/src/index.js.map +1 -0
- package/dist/src/program.d.ts +90 -0
- package/dist/src/program.d.ts.map +1 -0
- package/{lib/esm/document-store.js → dist/src/program.js} +141 -109
- package/dist/src/program.js.map +1 -0
- package/dist/src/search.d.ts +118 -0
- package/dist/src/search.d.ts.map +1 -0
- package/{lib/esm/document-index.js → dist/src/search.js} +246 -446
- package/dist/src/search.js.map +1 -0
- package/package.json +69 -43
- package/src/constants.ts +1 -0
- package/src/index.ts +4 -3
- package/src/{document-store.ts → program.ts} +216 -183
- package/src/search.ts +997 -0
- package/LICENSE +0 -202
- package/lib/esm/document-index.d.ts +0 -147
- package/lib/esm/document-index.js.map +0 -1
- package/lib/esm/document-store.d.ts +0 -72
- package/lib/esm/document-store.js.map +0 -1
- package/lib/esm/index.d.ts +0 -3
- package/lib/esm/index.js +0 -4
- package/lib/esm/index.js.map +0 -1
- package/lib/esm/query.d.ts +0 -191
- package/lib/esm/query.js +0 -615
- package/lib/esm/query.js.map +0 -1
- package/lib/esm/utils.d.ts +0 -3
- package/lib/esm/utils.js +0 -12
- package/lib/esm/utils.js.map +0 -1
- package/src/document-index.ts +0 -1268
- package/src/query.ts +0 -525
- package/src/utils.ts +0 -17
package/src/search.ts
ADDED
|
@@ -0,0 +1,997 @@
|
|
|
1
|
+
import { type AbstractType, field, serialize, variant } from "@dao-xyz/borsh";
|
|
2
|
+
import { BORSH_ENCODING, type Encoding, Entry } from "@peerbit/log";
|
|
3
|
+
import { Program } from "@peerbit/program";
|
|
4
|
+
import * as types from "@peerbit/document-interface";
|
|
5
|
+
import {
|
|
6
|
+
RPC,
|
|
7
|
+
type RPCResponse,
|
|
8
|
+
queryAll,
|
|
9
|
+
MissingResponsesError,
|
|
10
|
+
type RPCRequestAllOptions
|
|
11
|
+
} from "@peerbit/rpc";
|
|
12
|
+
import { logger as loggerFn } from "@peerbit/logger";
|
|
13
|
+
import { PublicSignKey, sha256Base64Sync } from "@peerbit/crypto";
|
|
14
|
+
import { SharedLog } from "@peerbit/shared-log";
|
|
15
|
+
import { concat, fromString } from "uint8arrays";
|
|
16
|
+
import { SilentDelivery } from "@peerbit/stream-interface";
|
|
17
|
+
import { AbortError } from "@peerbit/time";
|
|
18
|
+
import { Cache } from "@peerbit/cache";
|
|
19
|
+
import { HashmapIndexEngine } from "@peerbit/document-index-simple";
|
|
20
|
+
import { MAX_BATCH_SIZE } from "./constants.js";
|
|
21
|
+
|
|
22
|
+
// Module-scoped logger for the document index.
const logger = loggerFn({ module: "document-index" });

// A single search hit buffered client-side while iterating over
// local/remote query responses.
type BufferedResult<T> = {
	value: T;
	indexed: Record<string, any>;
	context: types.Context;
	from: PublicSignKey;
};
|
|
30
|
+
|
|
31
|
+
/**
 * Base class for all operations appended to the shared log.
 * Serialized with borsh; the @variant tag distinguishes operation kinds.
 */
@variant(0)
export class Operation /* <T> */ { }

// Borsh encoder/decoder pair used when reading/writing operations to the log.
export const BORSH_ENCODING_OPERATION = BORSH_ENCODING(Operation);
|
|
35
|
+
|
|
36
|
+
/**
 * Put a complete document at a key
 */

@variant(0)
export class PutOperation extends Operation /* <T> */ {
	// Borsh-serialized bytes of the document being stored.
	@field({ type: Uint8Array })
	data: Uint8Array;

	/* _value?: T; */

	// props is optional so borsh deserialization can construct an empty
	// instance and populate fields afterwards.
	constructor(props?: { data: Uint8Array /* value?: T */ }) {
		super();
		if (props) {
			this.data = props.data;
			/* this._value = props.value; */
		}
	}
}
|
|
55
|
+
|
|
56
|
+
/* @variant(1)
|
|
57
|
+
export class PutAllOperation<T> extends Operation<T> {
|
|
58
|
+
@field({ type: vec(PutOperation) })
|
|
59
|
+
docs: PutOperation<T>[];
|
|
60
|
+
|
|
61
|
+
constructor(props?: { docs: PutOperation<T>[] }) {
|
|
62
|
+
super();
|
|
63
|
+
if (props) {
|
|
64
|
+
this.docs = props.docs;
|
|
65
|
+
}
|
|
66
|
+
}
|
|
67
|
+
}
|
|
68
|
+
*/
|
|
69
|
+
|
|
70
|
+
/**
 * Delete a document at a key
 */
@variant(2)
export class DeleteOperation extends Operation {
	// Identifier of the document to remove from the index.
	@field({ type: types.IdKey })
	key: types.IdKey;

	constructor(props: { key: types.IdKey }) {
		super();
		this.key = props.key;
	}
}
|
|
83
|
+
|
|
84
|
+
// Options forwarded to the RPC layer when querying remote peers.
// sync: persist remote results into the local log; minAge: only ask
// replicators that have been members at least this long; throwOnMissing:
// rethrow when some shards did not respond.
export type RemoteQueryOptions<R> = RPCRequestAllOptions<R> & {
	sync?: boolean;
	minAge?: number;
	throwOnMissing?: boolean;
};
// Where a query should execute: locally, remotely, or both (defaults apply).
export type QueryOptions<R> = {
	remote?: boolean | RemoteQueryOptions<types.AbstractSearchResult<R>>;
	local?: boolean;
};
export type SearchOptions<R> = { size?: number } & QueryOptions<R>;
// Maps a document (plus its log context) to the record that gets indexed.
export type IndexableFields<T> = (
	obj: T,
	context: types.Context
) => Record<string, any> | Promise<Record<string, any>>;

// Pull-based iterator over search results; next(n) fetches up to n more.
export type ResultsIterator<T> = {
	close: () => Promise<void>;
	next: (number: number) => Promise<T[]>;
	done: () => boolean;
};

// Internal options: onResponse is invoked once per responding peer.
type QueryDetailedOptions<T> = QueryOptions<T> & {
	onResponse?: (
		response: types.AbstractSearchResult<T>,
		from: PublicSignKey
	) => void | Promise<void>;
};
|
|
111
|
+
|
|
112
|
+
const introduceEntries = async <T>(
|
|
113
|
+
responses: RPCResponse<types.AbstractSearchResult<T>>[],
|
|
114
|
+
type: AbstractType<T>,
|
|
115
|
+
sync: (result: types.Results<T>) => Promise<void>,
|
|
116
|
+
options?: QueryDetailedOptions<T>
|
|
117
|
+
): Promise<RPCResponse<types.Results<T>>[]> => {
|
|
118
|
+
const results: RPCResponse<types.Results<T>>[] = [];
|
|
119
|
+
for (const response of responses) {
|
|
120
|
+
if (!response.from) {
|
|
121
|
+
logger.error("Missing from for response");
|
|
122
|
+
}
|
|
123
|
+
|
|
124
|
+
if (response.response instanceof types.Results) {
|
|
125
|
+
response.response.results.forEach((r) => r.init(type));
|
|
126
|
+
if (typeof options?.remote !== "boolean" && options?.remote?.sync) {
|
|
127
|
+
await sync(response.response);
|
|
128
|
+
}
|
|
129
|
+
options?.onResponse &&
|
|
130
|
+
(await options.onResponse(response.response, response.from!)); // TODO fix types
|
|
131
|
+
results.push(response as RPCResponse<types.Results<T>>);
|
|
132
|
+
} else if (response.response instanceof types.NoAccess) {
|
|
133
|
+
logger.error("Search resulted in access error");
|
|
134
|
+
} else {
|
|
135
|
+
throw new Error("Unsupported");
|
|
136
|
+
}
|
|
137
|
+
}
|
|
138
|
+
return results;
|
|
139
|
+
};
|
|
140
|
+
|
|
141
|
+
const dedup = <T>(
|
|
142
|
+
allResult: T[],
|
|
143
|
+
dedupBy: (obj: any) => string | Uint8Array | number | bigint
|
|
144
|
+
) => {
|
|
145
|
+
const unique: Set<types.IdPrimitive> = new Set();
|
|
146
|
+
const dedup: T[] = [];
|
|
147
|
+
for (const result of allResult) {
|
|
148
|
+
const key = types.toIdeable(dedupBy(result));
|
|
149
|
+
if (unique.has(key)) {
|
|
150
|
+
continue;
|
|
151
|
+
}
|
|
152
|
+
unique.add(key);
|
|
153
|
+
dedup.push(result);
|
|
154
|
+
}
|
|
155
|
+
return dedup;
|
|
156
|
+
};
|
|
157
|
+
|
|
158
|
+
// Field used to identify documents when no explicit `indexBy` is provided.
const DEFAULT_INDEX_BY = "id";

/*
if (!(await this.canRead(message.sender))) {
	throw new AccessError();
} */
|
|
164
|
+
|
|
165
|
+
// Access-control hook deciding whether `from` may run a search/collect request.
export type CanSearch = (
	request: types.SearchRequest | types.CollectNextRequest,
	from: PublicSignKey
) => Promise<boolean> | boolean;

// Access-control hook deciding whether `from` may read an individual result.
export type CanRead<T> = (
	result: T,
	from: PublicSignKey
) => Promise<boolean> | boolean;

// Configuration passed to DocumentIndex.open().
export type OpenOptions<T> = {
	type: AbstractType<T>;
	dbType: AbstractType<types.IDocumentStore<T>>;
	log: SharedLog<Operation>;
	canRead?: CanRead<T>;
	canSearch?: CanSearch;
	engine?: types.IndexEngine;
	// Called to persist remotely fetched results locally (when remote.sync).
	sync: (result: types.Results<T>) => Promise<void>;
	indexBy?: string | string[];
	fields: IndexableFields<T>;
};
|
|
186
|
+
|
|
187
|
+
@variant("documents_index")
export class DocumentIndex<T> extends Program<OpenOptions<T>> {
	// RPC endpoint serving search requests from remote peers.
	@field({ type: RPC })
	_query: RPC<types.AbstractSearchRequest, types.AbstractSearchResult<T>>;

	// Pluggable index backend (defaults to HashmapIndexEngine in open()).
	engine: types.IndexEngine;

	type: AbstractType<T>;
	dbType: AbstractType<types.IDocumentStore<T>>;

	// Index key
	private indexBy: string | string[];
	private indexByArr: string[];
	private indexByResolver: (obj: any) => string | Uint8Array;

	// Transformation, indexer
	fields: IndexableFields<T>;

	// Borsh encoding for document values of `this.type`.
	private _valueEncoding: Encoding<T>;

	// Persists remotely fetched results locally (see OpenOptions.sync).
	private _sync: (result: types.Results<T>) => Promise<void>;

	private _log: SharedLog<Operation>;

	// Program-typed documents are cached in an unbounded map; plain values
	// go through the bounded LRU-style cache below.
	private _resolverProgramCache?: Map<string | number | bigint, T>;
	private _resolverCache: Cache<T>;
	private _isProgramValues: boolean;
|
|
214
|
+
constructor(properties?: {
|
|
215
|
+
query?: RPC<types.AbstractSearchRequest, types.AbstractSearchResult<T>>;
|
|
216
|
+
}) {
|
|
217
|
+
super();
|
|
218
|
+
this._query = properties?.query || new RPC();
|
|
219
|
+
}
|
|
220
|
+
|
|
221
|
+
	// Borsh encoding used to (de)serialize document values of `this.type`.
	get valueEncoding() {
		return this._valueEncoding;
	}
|
|
224
|
+
|
|
225
|
+
async open(properties: OpenOptions<T>) {
|
|
226
|
+
this._log = properties.log;
|
|
227
|
+
this.type = properties.type;
|
|
228
|
+
// if this.type is a class that extends Program we want to do special functionality
|
|
229
|
+
this._isProgramValues = this.type instanceof Program;
|
|
230
|
+
this.dbType = properties.dbType;
|
|
231
|
+
this._sync = properties.sync;
|
|
232
|
+
this.fields = properties.fields;
|
|
233
|
+
this.indexBy = properties.indexBy || DEFAULT_INDEX_BY;
|
|
234
|
+
this.indexByArr = Array.isArray(this.indexBy)
|
|
235
|
+
? this.indexBy
|
|
236
|
+
: [this.indexBy];
|
|
237
|
+
this.indexByResolver =
|
|
238
|
+
typeof this.indexBy === "string"
|
|
239
|
+
? (obj) => obj[this.indexBy as string]
|
|
240
|
+
: (obj: any) => types.extractFieldValue(obj, this.indexBy as string[]);
|
|
241
|
+
|
|
242
|
+
this._valueEncoding = BORSH_ENCODING(this.type);
|
|
243
|
+
|
|
244
|
+
if (this._isProgramValues) {
|
|
245
|
+
this._resolverProgramCache = new Map();
|
|
246
|
+
}
|
|
247
|
+
this._resolverCache = new Cache({ max: 1000 }); // TODO choose limit better (adaptive)
|
|
248
|
+
|
|
249
|
+
this.engine = properties.engine || new HashmapIndexEngine();
|
|
250
|
+
|
|
251
|
+
await this.engine.init({
|
|
252
|
+
indexBy: this.indexBy,
|
|
253
|
+
nested: {
|
|
254
|
+
match: (obj: any): obj is types.IDocumentStore<any> =>
|
|
255
|
+
obj instanceof this.dbType,
|
|
256
|
+
query: async (obj: types.IDocumentStore<any>, query) =>
|
|
257
|
+
obj.index.search(query)
|
|
258
|
+
},
|
|
259
|
+
maxBatchSize: MAX_BATCH_SIZE
|
|
260
|
+
});
|
|
261
|
+
|
|
262
|
+
await this.engine.start?.();
|
|
263
|
+
await this._query.open({
|
|
264
|
+
topic: sha256Base64Sync(
|
|
265
|
+
concat([this._log.log.id, fromString("/document")])
|
|
266
|
+
),
|
|
267
|
+
responseHandler: async (query, ctx) => {
|
|
268
|
+
if (!ctx.from) {
|
|
269
|
+
logger.info("Receieved query without from");
|
|
270
|
+
return;
|
|
271
|
+
}
|
|
272
|
+
|
|
273
|
+
if (
|
|
274
|
+
properties.canSearch &&
|
|
275
|
+
(query instanceof types.SearchRequest ||
|
|
276
|
+
query instanceof types.CollectNextRequest) &&
|
|
277
|
+
!(await properties.canSearch(
|
|
278
|
+
query as types.SearchRequest | types.CollectNextRequest,
|
|
279
|
+
ctx.from
|
|
280
|
+
))
|
|
281
|
+
) {
|
|
282
|
+
return new types.NoAccess();
|
|
283
|
+
}
|
|
284
|
+
|
|
285
|
+
if (query instanceof types.CloseIteratorRequest) {
|
|
286
|
+
this.processCloseIteratorRequest(query, ctx.from);
|
|
287
|
+
} else {
|
|
288
|
+
const results = await this.processQuery(
|
|
289
|
+
query as
|
|
290
|
+
| types.SearchRequest
|
|
291
|
+
| types.SearchRequest
|
|
292
|
+
| types.CollectNextRequest,
|
|
293
|
+
ctx.from,
|
|
294
|
+
{
|
|
295
|
+
canRead: properties.canRead
|
|
296
|
+
}
|
|
297
|
+
);
|
|
298
|
+
|
|
299
|
+
return new types.Results({
|
|
300
|
+
// Even if results might have length 0, respond, because then we now at least there are no matching results
|
|
301
|
+
results: results.results,
|
|
302
|
+
kept: results.kept
|
|
303
|
+
});
|
|
304
|
+
}
|
|
305
|
+
},
|
|
306
|
+
responseType: types.AbstractSearchResult,
|
|
307
|
+
queryType: types.AbstractSearchRequest
|
|
308
|
+
});
|
|
309
|
+
}
|
|
310
|
+
|
|
311
|
+
async close(from?: Program): Promise<boolean> {
|
|
312
|
+
const closed = await super.close(from);
|
|
313
|
+
if (closed) {
|
|
314
|
+
await this.engine.stop?.();
|
|
315
|
+
}
|
|
316
|
+
return closed;
|
|
317
|
+
}
|
|
318
|
+
|
|
319
|
+
async drop(from?: Program): Promise<boolean> {
|
|
320
|
+
const closed = await super.drop(from);
|
|
321
|
+
if (closed) {
|
|
322
|
+
await this.engine.stop?.();
|
|
323
|
+
}
|
|
324
|
+
return closed;
|
|
325
|
+
}
|
|
326
|
+
|
|
327
|
+
public async get(
|
|
328
|
+
key: types.Ideable | types.IdKey,
|
|
329
|
+
options?: QueryOptions<T>
|
|
330
|
+
): Promise<T | undefined> {
|
|
331
|
+
return (
|
|
332
|
+
await this.getDetailed(
|
|
333
|
+
key instanceof types.IdKey ? key : types.toId(key),
|
|
334
|
+
options
|
|
335
|
+
)
|
|
336
|
+
)?.[0]?.results[0]?.value;
|
|
337
|
+
}
|
|
338
|
+
|
|
339
|
+
public async put(value: T, entry: Entry<Operation>, id: types.IdKey) {
|
|
340
|
+
const idString = id.primitive;
|
|
341
|
+
if (this._isProgramValues) {
|
|
342
|
+
this._resolverProgramCache!.set(idString, value);
|
|
343
|
+
} else {
|
|
344
|
+
this._resolverCache.add(idString, value);
|
|
345
|
+
}
|
|
346
|
+
|
|
347
|
+
const context = new types.Context({
|
|
348
|
+
created:
|
|
349
|
+
(await this.engine.get(id))?.context.created ||
|
|
350
|
+
entry.meta.clock.timestamp.wallTime,
|
|
351
|
+
modified: entry.meta.clock.timestamp.wallTime,
|
|
352
|
+
head: entry.hash,
|
|
353
|
+
gid: entry.gid
|
|
354
|
+
});
|
|
355
|
+
|
|
356
|
+
const valueToIndex = await this.fields(value, context);
|
|
357
|
+
this.engine.put({
|
|
358
|
+
id,
|
|
359
|
+
indexed: valueToIndex,
|
|
360
|
+
context,
|
|
361
|
+
size: entry.payload.data.byteLength
|
|
362
|
+
/* reference:
|
|
363
|
+
valueToIndex === value || value instanceof Program
|
|
364
|
+
? { value }
|
|
365
|
+
: undefined */
|
|
366
|
+
});
|
|
367
|
+
}
|
|
368
|
+
|
|
369
|
+
public del(key: types.IdPrimitive) {
|
|
370
|
+
const keyObject = types.toId(key);
|
|
371
|
+
if (this._isProgramValues) {
|
|
372
|
+
this._resolverProgramCache!.delete(key);
|
|
373
|
+
} else {
|
|
374
|
+
this._resolverCache.del(key);
|
|
375
|
+
}
|
|
376
|
+
return this.engine.del(keyObject);
|
|
377
|
+
}
|
|
378
|
+
|
|
379
|
+
public async getDetailed(
|
|
380
|
+
key: types.IdKey | types.IdPrimitive,
|
|
381
|
+
options?: QueryOptions<T>
|
|
382
|
+
): Promise<types.Results<T>[] | undefined> {
|
|
383
|
+
let results: types.Results<T>[] | undefined;
|
|
384
|
+
if (key instanceof Uint8Array) {
|
|
385
|
+
results = await this.queryDetailed(
|
|
386
|
+
new types.SearchRequest({
|
|
387
|
+
query: [
|
|
388
|
+
new types.ByteMatchQuery({ key: this.indexByArr, value: key })
|
|
389
|
+
]
|
|
390
|
+
}),
|
|
391
|
+
options
|
|
392
|
+
);
|
|
393
|
+
} else {
|
|
394
|
+
const indexableKey = types.toIdeable(key);
|
|
395
|
+
|
|
396
|
+
if (
|
|
397
|
+
typeof indexableKey === "number" ||
|
|
398
|
+
typeof indexableKey === "bigint"
|
|
399
|
+
) {
|
|
400
|
+
results = await this.queryDetailed(
|
|
401
|
+
new types.SearchRequest({
|
|
402
|
+
query: [
|
|
403
|
+
new types.IntegerCompare({
|
|
404
|
+
key: this.indexByArr,
|
|
405
|
+
compare: types.Compare.Equal,
|
|
406
|
+
value: indexableKey
|
|
407
|
+
})
|
|
408
|
+
]
|
|
409
|
+
}),
|
|
410
|
+
options
|
|
411
|
+
);
|
|
412
|
+
} else {
|
|
413
|
+
results = await this.queryDetailed(
|
|
414
|
+
new types.SearchRequest({
|
|
415
|
+
query: [
|
|
416
|
+
new types.StringMatch({
|
|
417
|
+
key: this.indexByArr,
|
|
418
|
+
value: indexableKey
|
|
419
|
+
})
|
|
420
|
+
]
|
|
421
|
+
}),
|
|
422
|
+
options
|
|
423
|
+
);
|
|
424
|
+
}
|
|
425
|
+
}
|
|
426
|
+
|
|
427
|
+
return results;
|
|
428
|
+
}
|
|
429
|
+
|
|
430
|
+
	// Number of documents currently indexed (delegates to the engine; may be
	// sync or async depending on the engine implementation).
	getSize(): Promise<number> | number {
		return this.engine.getSize();
	}
|
|
433
|
+
|
|
434
|
+
private async resolveDocument(
|
|
435
|
+
value: types.IndexedResult
|
|
436
|
+
): Promise<{ value: T } | undefined> {
|
|
437
|
+
const cached =
|
|
438
|
+
this._resolverCache.get(value.id.primitive) ||
|
|
439
|
+
this._resolverProgramCache?.get(value.id.primitive);
|
|
440
|
+
if (cached != null) {
|
|
441
|
+
return { value: cached };
|
|
442
|
+
}
|
|
443
|
+
|
|
444
|
+
if (value.indexed instanceof this.type) {
|
|
445
|
+
return { value: value.indexed as T };
|
|
446
|
+
}
|
|
447
|
+
const head = await await this._log.log.get(value.context.head);
|
|
448
|
+
if (!head) {
|
|
449
|
+
return undefined; // we could end up here if we recently pruned the document and other peers never persisted the entry
|
|
450
|
+
// TODO update changes in index before removing entries from log entry storage
|
|
451
|
+
}
|
|
452
|
+
const payloadValue = await head.getPayloadValue();
|
|
453
|
+
if (payloadValue instanceof PutOperation) {
|
|
454
|
+
return {
|
|
455
|
+
value: this.valueEncoding.decoder(payloadValue.data)
|
|
456
|
+
/* size: payloadValue.data.byteLength */
|
|
457
|
+
};
|
|
458
|
+
}
|
|
459
|
+
|
|
460
|
+
throw new Error(
|
|
461
|
+
"Unexpected value type when getting document: " +
|
|
462
|
+
payloadValue?.constructor?.name || typeof payloadValue
|
|
463
|
+
);
|
|
464
|
+
}
|
|
465
|
+
|
|
466
|
+
async processQuery(
|
|
467
|
+
query: types.SearchRequest | types.CollectNextRequest,
|
|
468
|
+
from: PublicSignKey,
|
|
469
|
+
options?: {
|
|
470
|
+
canRead?: CanRead<T>;
|
|
471
|
+
}
|
|
472
|
+
): Promise<types.Results<T>> {
|
|
473
|
+
// We do special case for querying the id as we can do it faster than iterating
|
|
474
|
+
|
|
475
|
+
let indexedResult: types.IndexedResults | undefined = undefined;
|
|
476
|
+
if (query instanceof types.SearchRequest) {
|
|
477
|
+
indexedResult = await this.engine.query(query, from);
|
|
478
|
+
} else if (query instanceof types.CollectNextRequest) {
|
|
479
|
+
indexedResult = await this.engine.next(query, from);
|
|
480
|
+
} else {
|
|
481
|
+
throw new Error("Unsupported");
|
|
482
|
+
}
|
|
483
|
+
const filteredResults: types.ResultWithSource<T>[] = [];
|
|
484
|
+
for (const result of indexedResult.results) {
|
|
485
|
+
const value = await this.resolveDocument(result);
|
|
486
|
+
if (
|
|
487
|
+
!value ||
|
|
488
|
+
(options?.canRead && !(await options.canRead(value.value, from)))
|
|
489
|
+
) {
|
|
490
|
+
continue;
|
|
491
|
+
}
|
|
492
|
+
filteredResults.push(
|
|
493
|
+
new types.ResultWithSource({
|
|
494
|
+
context: result.context,
|
|
495
|
+
value: value.value,
|
|
496
|
+
source: serialize(value.value),
|
|
497
|
+
indexed: result.indexed
|
|
498
|
+
})
|
|
499
|
+
);
|
|
500
|
+
}
|
|
501
|
+
const results: types.Results<T> = new types.Results({
|
|
502
|
+
results: filteredResults,
|
|
503
|
+
kept: BigInt(indexedResult.kept)
|
|
504
|
+
});
|
|
505
|
+
return results;
|
|
506
|
+
}
|
|
507
|
+
|
|
508
|
+
	// Tear down an active iterator session held for `publicKey` (delegates to
	// the engine, which owns per-peer iterator state).
	async processCloseIteratorRequest(
		query: types.CloseIteratorRequest,
		publicKey: PublicSignKey
	): Promise<void> {
		return this.engine.close(query, publicKey);
	}
|
|
514
|
+
|
|
515
|
+
	/**
	 * Query and retrieve results with most details.
	 *
	 * Runs the request locally (unless options.local === false) and/or fans it
	 * out over the replicator union via RPC (unless options.remote === false).
	 * Remote results are funneled through introduceEntries (init/sync/
	 * onResponse) before being aggregated.
	 *
	 * @param queryRequest the search request to execute
	 * @param options local/remote selection plus the onResponse hook
	 * @returns one Results batch per responding source (local node included)
	 */
	public async queryDetailed(
		queryRequest: types.SearchRequest,
		options?: QueryDetailedOptions<T>
	): Promise<types.Results<T>[]> {
		// Local execution defaults to true; remote defaults to "on with default options".
		const local = typeof options?.local == "boolean" ? options?.local : true;
		let remote: RemoteQueryOptions<types.AbstractSearchResult<T>> | undefined =
			undefined;
		if (typeof options?.remote === "boolean") {
			if (options?.remote) {
				remote = {};
			} else {
				remote = undefined;
			}
		} else {
			remote = options?.remote || {};
		}
		if (remote && remote.priority == null) {
			// give queries higher priority than other "normal" data activities
			// without this, we might have a scenario where a peer joins a network with a large amount of data to be synced, but cannot query anything before that is done
			// that would lead to bad UX as you usually want to list/explore what is going on before doing any replication work
			remote.priority = 1;
		}

		const promises: Promise<types.Results<T>[] | undefined>[] = [];
		if (!local && !remote) {
			throw new Error(
				"Expecting either 'options.remote' or 'options.local' to be true"
			);
		}
		const allResults: types.Results<T>[] = [];

		if (local) {
			const results = await this.processQuery(
				queryRequest,
				this.node.identity.publicKey
			);
			if (results.results.length > 0) {
				options?.onResponse &&
					(await options.onResponse(results, this.node.identity.publicKey));
				allResults.push(results);
			}
		}

		if (remote) {
			// One "group" per replicator hash; queryAll needs a response from
			// at least one member of every group.
			const replicatorGroups = await this._log.getReplicatorUnion(
				remote.minAge
			);
			if (replicatorGroups) {
				const groupHashes: string[][] = replicatorGroups.map((x) => [x]);
				const fn = async () => {
					const rs: types.Results<T>[] = [];
					const responseHandler = async (
						results: RPCResponse<types.AbstractSearchResult<T>>[]
					) => {
						for (const r of await introduceEntries(
							results,
							this.type,
							this._sync,
							options
						)) {
							rs.push(r.response);
						}
					};
					try {
						if (queryRequest instanceof types.CloseIteratorRequest) {
							// don't wait for responses
							await this._query.request(queryRequest, { mode: remote!.mode });
						} else {
							await queryAll(
								this._query,
								groupHashes,
								queryRequest,
								responseHandler,
								remote
							);
						}
					} catch (error) {
						if (error instanceof MissingResponsesError) {
							logger.warn("Did not reciveve responses from all shard");
							if (remote?.throwOnMissing) {
								throw error;
							}
						} else {
							throw error;
						}
					}
					return rs;
				};
				promises.push(fn());
			} else {
				// TODO send without direction out to the world? or just assume we can insert?
				/* promises.push(
					this._query
						.request(queryRequest, remote)
						.then((results) => introduceEntries(results, this.type, this._sync, options).then(x => x.map(y => y.response)))
				); */
				/* throw new Error(
					"Missing remote replicator info for performing distributed document query"
				); */
			}
		}
		const resolved = await Promise.all(promises);
		for (const r of resolved) {
			if (r) {
				if (r instanceof Array) {
					allResults.push(...r);
				} else {
					allResults.push(r);
				}
			}
		}
		return allResults;
	}
|
|
634
|
+
|
|
635
|
+
/**
|
|
636
|
+
* Query and retrieve results
|
|
637
|
+
* @param queryRequest
|
|
638
|
+
* @param options
|
|
639
|
+
* @returns
|
|
640
|
+
*/
|
|
641
|
+
public async search(
|
|
642
|
+
queryRequest: types.SearchRequest,
|
|
643
|
+
options?: SearchOptions<T>
|
|
644
|
+
): Promise<T[]> {
|
|
645
|
+
// Set fetch to search size, or max value (default to max u32 (4294967295))
|
|
646
|
+
queryRequest.fetch = options?.size ?? 0xffffffff;
|
|
647
|
+
|
|
648
|
+
// So that the iterator is pre-fetching the right amount of entries
|
|
649
|
+
const iterator = this.iterate(queryRequest, options);
|
|
650
|
+
|
|
651
|
+
// So that this call will not do any remote requests
|
|
652
|
+
const allResults: T[] = [];
|
|
653
|
+
while (
|
|
654
|
+
iterator.done() === false &&
|
|
655
|
+
queryRequest.fetch > allResults.length
|
|
656
|
+
) {
|
|
657
|
+
// We might need to pull .next multiple time due to data message size limitations
|
|
658
|
+
for (const result of await iterator.next(
|
|
659
|
+
queryRequest.fetch - allResults.length
|
|
660
|
+
)) {
|
|
661
|
+
allResults.push(result);
|
|
662
|
+
}
|
|
663
|
+
}
|
|
664
|
+
|
|
665
|
+
await iterator.close();
|
|
666
|
+
|
|
667
|
+
//s Deduplicate and return values directly
|
|
668
|
+
return dedup(allResults, this.indexByResolver);
|
|
669
|
+
}
|
|
670
|
+
|
|
671
|
+
/**
|
|
672
|
+
* Query and retrieve documents in a iterator
|
|
673
|
+
* @param queryRequest
|
|
674
|
+
* @param options
|
|
675
|
+
* @returns
|
|
676
|
+
*/
|
|
677
|
+
public iterate(
|
|
678
|
+
queryRequest: types.SearchRequest,
|
|
679
|
+
options?: QueryOptions<T>
|
|
680
|
+
): ResultsIterator<T> {
|
|
681
|
+
let fetchPromise: Promise<any> | undefined = undefined;
|
|
682
|
+
const peerBufferMap: Map<
|
|
683
|
+
string,
|
|
684
|
+
{
|
|
685
|
+
kept: number;
|
|
686
|
+
buffer: BufferedResult<T>[];
|
|
687
|
+
}
|
|
688
|
+
> = new Map();
|
|
689
|
+
const visited = new Set<string | number | bigint>();
|
|
690
|
+
|
|
691
|
+
let done = false;
|
|
692
|
+
let first = false;
|
|
693
|
+
|
|
694
|
+
// TODO handle join/leave while iterating
|
|
695
|
+
const controller = new AbortController();
|
|
696
|
+
|
|
697
|
+
const peerBuffers = (): {
|
|
698
|
+
indexed: Record<string, any>;
|
|
699
|
+
value: T;
|
|
700
|
+
from: PublicSignKey;
|
|
701
|
+
context: types.Context;
|
|
702
|
+
}[] => {
|
|
703
|
+
return [...peerBufferMap.values()].map((x) => x.buffer).flat();
|
|
704
|
+
};
|
|
705
|
+
|
|
706
|
+
const fetchFirst = async (n: number): Promise<boolean> => {
|
|
707
|
+
done = true; // Assume we are donne
|
|
708
|
+
queryRequest.fetch = n;
|
|
709
|
+
await this.queryDetailed(queryRequest, {
|
|
710
|
+
...options,
|
|
711
|
+
onResponse: async (response, from) => {
|
|
712
|
+
if (!from) {
|
|
713
|
+
logger.error("Missing response from");
|
|
714
|
+
return;
|
|
715
|
+
}
|
|
716
|
+
if (response instanceof types.NoAccess) {
|
|
717
|
+
logger.error("Dont have access");
|
|
718
|
+
return;
|
|
719
|
+
} else if (response instanceof types.Results) {
|
|
720
|
+
const results = response as types.Results<T>;
|
|
721
|
+
if (results.kept === 0n && results.results.length === 0) {
|
|
722
|
+
return;
|
|
723
|
+
}
|
|
724
|
+
|
|
725
|
+
if (results.kept > 0n) {
|
|
726
|
+
done = false; // we have more to do later!
|
|
727
|
+
}
|
|
728
|
+
const buffer: BufferedResult<T>[] = [];
|
|
729
|
+
|
|
730
|
+
for (const result of results.results) {
|
|
731
|
+
const indexKey = types.toIdeable(
|
|
732
|
+
this.indexByResolver(result.value)
|
|
733
|
+
);
|
|
734
|
+
if (visited.has(indexKey)) {
|
|
735
|
+
continue;
|
|
736
|
+
}
|
|
737
|
+
visited.add(indexKey);
|
|
738
|
+
buffer.push({
|
|
739
|
+
value: result.value,
|
|
740
|
+
context: result.context,
|
|
741
|
+
from: from,
|
|
742
|
+
indexed:
|
|
743
|
+
result.indexed ||
|
|
744
|
+
(await this.fields(result.value, result.context))
|
|
745
|
+
});
|
|
746
|
+
}
|
|
747
|
+
|
|
748
|
+
peerBufferMap.set(from.hashcode(), {
|
|
749
|
+
buffer,
|
|
750
|
+
kept: Number(response.kept)
|
|
751
|
+
});
|
|
752
|
+
} else {
|
|
753
|
+
throw new Error(
|
|
754
|
+
"Unsupported result type: " + response?.constructor?.name
|
|
755
|
+
);
|
|
756
|
+
}
|
|
757
|
+
}
|
|
758
|
+
});
|
|
759
|
+
|
|
760
|
+
return done;
|
|
761
|
+
};
|
|
762
|
+
|
|
763
|
+
const fetchAtLeast = async (n: number) => {
|
|
764
|
+
if (done && first) {
|
|
765
|
+
return;
|
|
766
|
+
}
|
|
767
|
+
|
|
768
|
+
await fetchPromise;
|
|
769
|
+
|
|
770
|
+
if (!first) {
|
|
771
|
+
first = true;
|
|
772
|
+
fetchPromise = fetchFirst(n);
|
|
773
|
+
return fetchPromise;
|
|
774
|
+
}
|
|
775
|
+
|
|
776
|
+
const promises: Promise<any>[] = [];
|
|
777
|
+
let resultsLeft = 0;
|
|
778
|
+
|
|
779
|
+
for (const [peer, buffer] of peerBufferMap) {
|
|
780
|
+
if (buffer.buffer.length < n) {
|
|
781
|
+
if (buffer.kept === 0) {
|
|
782
|
+
if (peerBufferMap.get(peer)?.buffer.length === 0) {
|
|
783
|
+
peerBufferMap.delete(peer); // No more results
|
|
784
|
+
}
|
|
785
|
+
continue;
|
|
786
|
+
}
|
|
787
|
+
|
|
788
|
+
// TODO buffer more than deleted?
|
|
789
|
+
// TODO batch to multiple 'to's
|
|
790
|
+
const collectRequest = new types.CollectNextRequest({
|
|
791
|
+
id: queryRequest.id,
|
|
792
|
+
amount: n - buffer.buffer.length
|
|
793
|
+
});
|
|
794
|
+
// Fetch locally?
|
|
795
|
+
if (peer === this.node.identity.publicKey.hashcode()) {
|
|
796
|
+
promises.push(
|
|
797
|
+
this.processQuery(collectRequest, this.node.identity.publicKey)
|
|
798
|
+
.then(async (results) => {
|
|
799
|
+
resultsLeft += Number(results.kept);
|
|
800
|
+
|
|
801
|
+
if (results.results.length === 0) {
|
|
802
|
+
if (peerBufferMap.get(peer)?.buffer.length === 0) {
|
|
803
|
+
peerBufferMap.delete(peer); // No more results
|
|
804
|
+
}
|
|
805
|
+
} else {
|
|
806
|
+
const peerBuffer = peerBufferMap.get(peer);
|
|
807
|
+
if (!peerBuffer) {
|
|
808
|
+
return;
|
|
809
|
+
}
|
|
810
|
+
peerBuffer.kept = Number(results.kept);
|
|
811
|
+
|
|
812
|
+
for (const result of results.results) {
|
|
813
|
+
if (
|
|
814
|
+
visited.has(
|
|
815
|
+
types.toIdeable(this.indexByResolver(result.value))
|
|
816
|
+
)
|
|
817
|
+
) {
|
|
818
|
+
continue;
|
|
819
|
+
}
|
|
820
|
+
visited.add(
|
|
821
|
+
types.toIdeable(this.indexByResolver(result.value))
|
|
822
|
+
);
|
|
823
|
+
peerBuffer.buffer.push({
|
|
824
|
+
value: result.value,
|
|
825
|
+
context: result.context,
|
|
826
|
+
from: this.node.identity.publicKey,
|
|
827
|
+
indexed:
|
|
828
|
+
result.indexed ||
|
|
829
|
+
(await this.fields(result.value, result.context))
|
|
830
|
+
});
|
|
831
|
+
}
|
|
832
|
+
}
|
|
833
|
+
})
|
|
834
|
+
.catch((e) => {
|
|
835
|
+
logger.error(
|
|
836
|
+
"Failed to collect sorted results from self. " + e?.message
|
|
837
|
+
);
|
|
838
|
+
peerBufferMap.delete(peer);
|
|
839
|
+
})
|
|
840
|
+
);
|
|
841
|
+
} else {
|
|
842
|
+
// Fetch remotely
|
|
843
|
+
promises.push(
|
|
844
|
+
this._query
|
|
845
|
+
.request(collectRequest, {
|
|
846
|
+
...options,
|
|
847
|
+
signal: controller.signal,
|
|
848
|
+
priority: 1,
|
|
849
|
+
mode: new SilentDelivery({ to: [peer], redundancy: 1 })
|
|
850
|
+
})
|
|
851
|
+
.then((response) =>
|
|
852
|
+
introduceEntries(response, this.type, this._sync, options)
|
|
853
|
+
.then((responses) => {
|
|
854
|
+
responses.map((response) => {
|
|
855
|
+
resultsLeft += Number(response.response.kept);
|
|
856
|
+
if (!response.from) {
|
|
857
|
+
logger.error("Missing from for sorted query");
|
|
858
|
+
return;
|
|
859
|
+
}
|
|
860
|
+
|
|
861
|
+
if (response.response.results.length === 0) {
|
|
862
|
+
if (peerBufferMap.get(peer)?.buffer.length === 0) {
|
|
863
|
+
peerBufferMap.delete(peer); // No more results
|
|
864
|
+
}
|
|
865
|
+
} else {
|
|
866
|
+
const peerBuffer = peerBufferMap.get(peer);
|
|
867
|
+
if (!peerBuffer) {
|
|
868
|
+
return;
|
|
869
|
+
}
|
|
870
|
+
peerBuffer.kept = Number(response.response.kept);
|
|
871
|
+
for (const result of response.response.results) {
|
|
872
|
+
if (
|
|
873
|
+
visited.has(
|
|
874
|
+
types.toIdeable(
|
|
875
|
+
this.indexByResolver(result.value)
|
|
876
|
+
)
|
|
877
|
+
)
|
|
878
|
+
) {
|
|
879
|
+
continue;
|
|
880
|
+
}
|
|
881
|
+
visited.add(
|
|
882
|
+
types.toIdeable(
|
|
883
|
+
this.indexByResolver(result.value)
|
|
884
|
+
)
|
|
885
|
+
);
|
|
886
|
+
peerBuffer.buffer.push({
|
|
887
|
+
value: result.value,
|
|
888
|
+
context: result.context,
|
|
889
|
+
from: response.from!,
|
|
890
|
+
indexed: this.fields(result.value, result.context)
|
|
891
|
+
});
|
|
892
|
+
}
|
|
893
|
+
}
|
|
894
|
+
});
|
|
895
|
+
})
|
|
896
|
+
.catch((e) => {
|
|
897
|
+
logger.error(
|
|
898
|
+
"Failed to collect sorted results from: " +
|
|
899
|
+
peer +
|
|
900
|
+
". " +
|
|
901
|
+
e?.message
|
|
902
|
+
);
|
|
903
|
+
peerBufferMap.delete(peer);
|
|
904
|
+
})
|
|
905
|
+
)
|
|
906
|
+
);
|
|
907
|
+
}
|
|
908
|
+
} else {
|
|
909
|
+
resultsLeft += peerBufferMap.get(peer)?.kept || 0;
|
|
910
|
+
}
|
|
911
|
+
}
|
|
912
|
+
return (fetchPromise = Promise.all(promises).then(() => {
|
|
913
|
+
return resultsLeft === 0; // 0 results left to fetch and 0 pending results
|
|
914
|
+
}));
|
|
915
|
+
};
|
|
916
|
+
|
|
917
|
+
const next = async (n: number) => {
|
|
918
|
+
if (n < 0) {
|
|
919
|
+
throw new Error("Expecting to fetch a positive amount of element");
|
|
920
|
+
}
|
|
921
|
+
|
|
922
|
+
if (n === 0) {
|
|
923
|
+
return [];
|
|
924
|
+
}
|
|
925
|
+
|
|
926
|
+
// TODO everything below is not very optimized
|
|
927
|
+
const fetchedAll = await fetchAtLeast(n);
|
|
928
|
+
|
|
929
|
+
// get n next top entries, shift and pull more results
|
|
930
|
+
const results = await types.resolvedSort(
|
|
931
|
+
peerBuffers(),
|
|
932
|
+
queryRequest.sort
|
|
933
|
+
);
|
|
934
|
+
|
|
935
|
+
const pendingMoreResults = n < results.length;
|
|
936
|
+
|
|
937
|
+
const batch = results.splice(0, n);
|
|
938
|
+
|
|
939
|
+
for (const result of batch) {
|
|
940
|
+
const arr = peerBufferMap.get(result.from.hashcode());
|
|
941
|
+
if (!arr) {
|
|
942
|
+
logger.error("Unexpected empty result buffer");
|
|
943
|
+
continue;
|
|
944
|
+
}
|
|
945
|
+
const idx = arr.buffer.findIndex((x) => x.value == result.value);
|
|
946
|
+
if (idx >= 0) {
|
|
947
|
+
arr.buffer.splice(idx, 1);
|
|
948
|
+
}
|
|
949
|
+
}
|
|
950
|
+
|
|
951
|
+
done = fetchedAll && !pendingMoreResults;
|
|
952
|
+
return dedup(
|
|
953
|
+
batch.map((x) => x.value),
|
|
954
|
+
this.indexByResolver
|
|
955
|
+
);
|
|
956
|
+
};
|
|
957
|
+
|
|
958
|
+
const close = async () => {
|
|
959
|
+
controller.abort(new AbortError("Iterator closed"));
|
|
960
|
+
|
|
961
|
+
const closeRequest = new types.CloseIteratorRequest({
|
|
962
|
+
id: queryRequest.id
|
|
963
|
+
});
|
|
964
|
+
const promises: Promise<any>[] = [];
|
|
965
|
+
for (const [peer, buffer] of peerBufferMap) {
|
|
966
|
+
if (buffer.kept === 0) {
|
|
967
|
+
peerBufferMap.delete(peer);
|
|
968
|
+
continue;
|
|
969
|
+
}
|
|
970
|
+
// Fetch locally?
|
|
971
|
+
if (peer === this.node.identity.publicKey.hashcode()) {
|
|
972
|
+
promises.push(
|
|
973
|
+
this.processCloseIteratorRequest(
|
|
974
|
+
closeRequest,
|
|
975
|
+
this.node.identity.publicKey
|
|
976
|
+
)
|
|
977
|
+
);
|
|
978
|
+
} else {
|
|
979
|
+
// Close remote
|
|
980
|
+
promises.push(
|
|
981
|
+
this._query.send(closeRequest, {
|
|
982
|
+
...options,
|
|
983
|
+
mode: new SilentDelivery({ to: [peer], redundancy: 1 })
|
|
984
|
+
})
|
|
985
|
+
);
|
|
986
|
+
}
|
|
987
|
+
}
|
|
988
|
+
await Promise.all(promises);
|
|
989
|
+
};
|
|
990
|
+
|
|
991
|
+
return {
|
|
992
|
+
close,
|
|
993
|
+
next,
|
|
994
|
+
done: () => done
|
|
995
|
+
};
|
|
996
|
+
}
|
|
997
|
+
}
|