@peerbit/shared-log 9.2.13 → 10.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/benchmark/get-samples.js +190 -64
- package/dist/benchmark/get-samples.js.map +1 -1
- package/dist/benchmark/index.js +16 -38
- package/dist/benchmark/index.js.map +1 -1
- package/dist/benchmark/memory/child.js.map +1 -1
- package/dist/benchmark/partial-sync.d.ts +3 -0
- package/dist/benchmark/partial-sync.d.ts.map +1 -0
- package/dist/benchmark/partial-sync.js +121 -0
- package/dist/benchmark/partial-sync.js.map +1 -0
- package/dist/benchmark/replication-prune.js.map +1 -1
- package/dist/benchmark/replication.js.map +1 -1
- package/dist/benchmark/to-rebalance.d.ts +2 -0
- package/dist/benchmark/to-rebalance.d.ts.map +1 -0
- package/dist/benchmark/to-rebalance.js +117 -0
- package/dist/benchmark/to-rebalance.js.map +1 -0
- package/dist/benchmark/utils.d.ts +24 -0
- package/dist/benchmark/utils.d.ts.map +1 -0
- package/dist/benchmark/utils.js +47 -0
- package/dist/benchmark/utils.js.map +1 -0
- package/dist/src/debounce.d.ts +2 -2
- package/dist/src/debounce.d.ts.map +1 -1
- package/dist/src/debounce.js +17 -47
- package/dist/src/debounce.js.map +1 -1
- package/dist/src/exchange-heads.d.ts +1 -13
- package/dist/src/exchange-heads.d.ts.map +1 -1
- package/dist/src/exchange-heads.js +0 -32
- package/dist/src/exchange-heads.js.map +1 -1
- package/dist/src/index.d.ts +119 -60
- package/dist/src/index.d.ts.map +1 -1
- package/dist/src/index.js +1116 -762
- package/dist/src/index.js.map +1 -1
- package/dist/src/integers.d.ts +22 -0
- package/dist/src/integers.d.ts.map +1 -0
- package/dist/src/integers.js +76 -0
- package/dist/src/integers.js.map +1 -0
- package/dist/src/pid.d.ts.map +1 -1
- package/dist/src/pid.js +22 -22
- package/dist/src/pid.js.map +1 -1
- package/dist/src/ranges.d.ts +168 -38
- package/dist/src/ranges.d.ts.map +1 -1
- package/dist/src/ranges.js +869 -272
- package/dist/src/ranges.js.map +1 -1
- package/dist/src/replication-domain-hash.d.ts +2 -3
- package/dist/src/replication-domain-hash.d.ts.map +1 -1
- package/dist/src/replication-domain-hash.js +40 -15
- package/dist/src/replication-domain-hash.js.map +1 -1
- package/dist/src/replication-domain-time.d.ts +5 -5
- package/dist/src/replication-domain-time.d.ts.map +1 -1
- package/dist/src/replication-domain-time.js +2 -0
- package/dist/src/replication-domain-time.js.map +1 -1
- package/dist/src/replication-domain.d.ts +17 -19
- package/dist/src/replication-domain.d.ts.map +1 -1
- package/dist/src/replication-domain.js +2 -6
- package/dist/src/replication-domain.js.map +1 -1
- package/dist/src/replication.d.ts +6 -6
- package/dist/src/replication.d.ts.map +1 -1
- package/dist/src/replication.js +4 -4
- package/dist/src/replication.js.map +1 -1
- package/dist/src/role.d.ts +3 -6
- package/dist/src/role.d.ts.map +1 -1
- package/dist/src/role.js +4 -5
- package/dist/src/role.js.map +1 -1
- package/dist/src/sync/index.d.ts +40 -0
- package/dist/src/sync/index.d.ts.map +1 -0
- package/dist/src/sync/index.js +2 -0
- package/dist/src/sync/index.js.map +1 -0
- package/dist/src/sync/rateless-iblt.d.ts +124 -0
- package/dist/src/sync/rateless-iblt.d.ts.map +1 -0
- package/dist/src/sync/rateless-iblt.js +495 -0
- package/dist/src/sync/rateless-iblt.js.map +1 -0
- package/dist/src/sync/simple.d.ts +69 -0
- package/dist/src/sync/simple.d.ts.map +1 -0
- package/dist/src/sync/simple.js +338 -0
- package/dist/src/sync/simple.js.map +1 -0
- package/dist/src/sync/wasm-init.browser.d.ts +1 -0
- package/dist/src/sync/wasm-init.browser.d.ts.map +1 -0
- package/dist/src/sync/wasm-init.browser.js +3 -0
- package/dist/src/sync/wasm-init.browser.js.map +1 -0
- package/dist/src/sync/wasm-init.d.ts +2 -0
- package/dist/src/sync/wasm-init.d.ts.map +1 -0
- package/dist/src/sync/wasm-init.js +13 -0
- package/dist/src/sync/wasm-init.js.map +1 -0
- package/dist/src/utils.d.ts +3 -3
- package/dist/src/utils.d.ts.map +1 -1
- package/dist/src/utils.js +2 -2
- package/dist/src/utils.js.map +1 -1
- package/package.json +10 -6
- package/src/debounce.ts +16 -51
- package/src/exchange-heads.ts +1 -23
- package/src/index.ts +1532 -1038
- package/src/integers.ts +102 -0
- package/src/pid.ts +23 -22
- package/src/ranges.ts +1204 -413
- package/src/replication-domain-hash.ts +43 -18
- package/src/replication-domain-time.ts +9 -9
- package/src/replication-domain.ts +21 -31
- package/src/replication.ts +10 -9
- package/src/role.ts +4 -6
- package/src/sync/index.ts +51 -0
- package/src/sync/rateless-iblt.ts +617 -0
- package/src/sync/simple.ts +403 -0
- package/src/sync/wasm-init.browser.ts +1 -0
- package/src/sync/wasm-init.ts +14 -0
- package/src/utils.ts +10 -4
|
@@ -0,0 +1,403 @@
|
|
|
1
|
+
import { field, variant, vec } from "@dao-xyz/borsh";
|
|
2
|
+
import { Cache } from "@peerbit/cache";
|
|
3
|
+
import type { PublicSignKey } from "@peerbit/crypto";
|
|
4
|
+
import {
|
|
5
|
+
Compare,
|
|
6
|
+
type Index,
|
|
7
|
+
IntegerCompare,
|
|
8
|
+
Or,
|
|
9
|
+
} from "@peerbit/indexer-interface";
|
|
10
|
+
import { Entry, Log } from "@peerbit/log";
|
|
11
|
+
import type { RPC, RequestContext } from "@peerbit/rpc";
|
|
12
|
+
import { SilentDelivery } from "@peerbit/stream-interface";
|
|
13
|
+
import type { SyncableKey, Syncronizer } from ".";
|
|
14
|
+
import {
|
|
15
|
+
EntryWithRefs,
|
|
16
|
+
createExchangeHeadsMessages,
|
|
17
|
+
} from "../exchange-heads.js";
|
|
18
|
+
import { TransportMessage } from "../message.js";
|
|
19
|
+
import type { EntryReplicated } from "../ranges.js";
|
|
20
|
+
|
|
21
|
+
/**
 * Announces entry hashes the sender holds and believes the receiver may be
 * missing; the receiver queues them and later replies with the subset it
 * actually wants (see SimpleSyncronizer.onMessage / queueSync).
 *
 * Borsh variant [0, 1] — decorator and field layout define the wire format;
 * do not reorder.
 */
@variant([0, 1])
export class RequestMaybeSync extends TransportMessage {
	/** Hashes of entries offered to the receiver. */
	@field({ type: vec("string") })
	hashes: string[];

	constructor(props: { hashes: string[] }) {
		super();
		this.hashes = props.hashes;
	}
}
|
|
31
|
+
|
|
32
|
+
/**
 * Reply to RequestMaybeSync listing the entry hashes the sender wants;
 * the receiver streams the corresponding entries back via
 * createExchangeHeadsMessages (see SimpleSyncronizer.onMessage).
 *
 * Borsh variant [0, 2] — decorator and field layout define the wire format;
 * do not reorder.
 */
@variant([0, 2])
export class ResponseMaybeSync extends TransportMessage {
	/** Hashes of entries being requested. */
	@field({ type: vec("string") })
	hashes: string[];

	constructor(props: { hashes: string[] }) {
		super();
		this.hashes = props.hashes;
	}
}
|
|
42
|
+
|
|
43
|
+
/**
 * Like ResponseMaybeSync but identifies entries by u64 coordinates instead of
 * hashes; the receiver resolves coordinates to hashes via
 * getHashesFromSymbols and streams the entries back
 * (see SimpleSyncronizer.onMessage).
 *
 * Borsh variant [0, 5] — decorator and field layout define the wire format;
 * do not reorder.
 */
@variant([0, 5])
export class RequestMaybeSyncCoordinate extends TransportMessage {
	/** Coordinates (u64) of the entries being requested. */
	@field({ type: vec("u64") })
	coordinates: bigint[];

	constructor(props: { coordinates: bigint[] }) {
		super();
		this.coordinates = props.coordinates;
	}
}
|
|
53
|
+
|
|
54
|
+
/**
 * Resolves a list of symbols (entry coordinates) to the set of distinct entry
 * hashes, using `coordinateToHash` as a lookaside cache in front of the entry
 * index.
 *
 * Uncached symbols are collected into equality queries and flushed against the
 * index in batches; every entry that comes back also warms the cache for all
 * of its coordinates so later symbols can skip the index entirely.
 *
 * @param symbols - coordinates to resolve
 * @param entryIndex - index over replicated entries, queried on "coordinates"
 * @param coordinateToHash - cache mapping coordinate -> entry hash
 * @returns set of distinct entry hashes matching the given symbols
 */
const getHashesFromSymbols = async (
	symbols: bigint[],
	entryIndex: Index<EntryReplicated<any>, any>,
	coordinateToHash: Cache<string>,
) => {
	let queries: IntegerCompare[] = [];
	let batchSize = 1; // TODO arg — at 1, every uncached symbol costs its own index round-trip
	let results = new Set<string>();
	// Flush accumulated queries once the batch is full, or unconditionally
	// when `end` is set (final drain).
	const handleBatch = async (end = false) => {
		if (queries.length >= batchSize || (end && queries.length > 0)) {
			const entries = await entryIndex
				.iterate(
					// Multiple queries are OR-ed together into a single request.
					{ query: queries.length > 1 ? new Or(queries) : queries },
					// Only hash + coordinates are needed, not full entries.
					{ shape: { hash: true, coordinates: true } },
				)
				.all();
			queries = [];

			for (const entry of entries) {
				results.add(entry.value.hash);
				// Warm the cache for every coordinate of the matched entry.
				for (const coordinate of entry.value.coordinates) {
					coordinateToHash.add(coordinate, entry.value.hash);
				}
			}
		}
	};
	for (let i = 0; i < symbols.length; i++) {
		// Cache hit: no index query needed for this symbol.
		const fromCache = coordinateToHash.get(symbols[i]);
		if (fromCache) {
			results.add(fromCache);
			continue;
		}
		const matchQuery = new IntegerCompare({
			key: "coordinates",
			compare: Compare.Equal,
			value: symbols[i],
		});

		queries.push(matchQuery);
		await handleBatch();
	}
	// Drain whatever remains below the batch threshold.
	await handleBatch(true);

	return results;
};
|
|
99
|
+
|
|
100
|
+
/**
 * "Simple" synchronizer: remote peers announce entry hashes (or u64
 * coordinates) we may be missing; announcements are queued, and a periodic
 * loop requests each missing entry from ONE remote at a time so the same
 * entry is not delivered by many peers simultaneously.
 *
 * Bookkeeping invariants (kept in sync by queueSync / clearSyncProcess /
 * clearSyncProcessPublicKey):
 * - syncInFlightQueue: key -> peers we could still ask for it
 * - syncInFlightQueueInverted: peer hashcode -> keys that peer is queued for
 * - syncInFlight: peer hashcode -> keys we have actually asked that peer for
 */
export class SimpleSyncronizer<R extends "u32" | "u64">
	implements Syncronizer<R>
{
	// map of hash to public keys that we can ask for entries
	syncInFlightQueue: Map<SyncableKey, PublicSignKey[]>;
	// reverse index of syncInFlightQueue: peer hashcode -> queued keys,
	// used for cleanup on disconnect / entry arrival
	syncInFlightQueueInverted: Map<string, Set<SyncableKey>>;

	// map of hash to public keys that we have asked for entries
	syncInFlight!: Map<string, Map<SyncableKey, { timestamp: number }>>;

	rpc: RPC<TransportMessage, TransportMessage>;
	log: Log<any>;
	entryIndex: Index<EntryReplicated<R>, any>;
	coordinateToHash: Cache<string>;

	// timer handle for the periodic sync/deduplication loop (see open())
	syncMoreInterval?: ReturnType<typeof setTimeout>;

	closed!: boolean;

	constructor(properties: {
		rpc: RPC<TransportMessage, TransportMessage>;
		entryIndex: Index<EntryReplicated<R>, any>;
		log: Log<any>;
		coordinateToHash: Cache<string>;
	}) {
		this.syncInFlightQueue = new Map();
		this.syncInFlightQueueInverted = new Map();
		this.syncInFlight = new Map();
		this.rpc = properties.rpc;
		this.log = properties.log;
		this.entryIndex = properties.entryIndex;
		this.coordinateToHash = properties.coordinateToHash;
	}

	/**
	 * Offers the given entries to `targets` by broadcasting their hashes;
	 * interested receivers reply with a sync request (handled in onMessage).
	 */
	onMaybeMissingEntries(properties: {
		entries: Map<string, EntryReplicated<R>>;
		targets: string[];
	}): Promise<void> {
		return this.rpc.send(
			new RequestMaybeSync({ hashes: [...properties.entries.keys()] }),
			{
				priority: 1,
				mode: new SilentDelivery({ to: properties.targets, redundancy: 1 }),
			},
		);
	}

	/**
	 * Dispatches the three sync message types.
	 * @returns true if the message was consumed, false otherwise
	 */
	async onMessage(
		msg: TransportMessage,
		context: RequestContext,
	): Promise<boolean> {
		const from = context.from!;
		if (msg instanceof RequestMaybeSync) {
			// Peer offered entries; queue them so we only ask one peer at a time.
			await this.queueSync(msg.hashes, from);
			return true;
		} else if (msg instanceof ResponseMaybeSync) {
			// TODO perhaps send less messages to more receivers for performance reasons?
			// TODO wait for previous send to target before trying to send more?

			// Peer asked for these hashes; stream the entries back.
			for await (const message of createExchangeHeadsMessages(
				this.log,
				msg.hashes,
			)) {
				await this.rpc.send(message, {
					mode: new SilentDelivery({ to: [context.from!], redundancy: 1 }),
				});
			}
			return true;
		} else if (msg instanceof RequestMaybeSyncCoordinate) {
			// Peer asked by coordinate; resolve coordinates to hashes first.
			const hashes = await getHashesFromSymbols(
				msg.coordinates,
				this.entryIndex,
				this.coordinateToHash,
			);
			for await (const message of createExchangeHeadsMessages(
				this.log,
				hashes,
			)) {
				await this.rpc.send(message, {
					mode: new SilentDelivery({ to: [context.from!], redundancy: 1 }),
					priority: 1,
				});
			}

			return true;
		} else {
			return false; // no message was consumed
		}
	}

	/**
	 * Marks entries as received from `from`, clearing them from the
	 * in-flight request bookkeeping.
	 */
	onReceivedEntries(properties: {
		entries: EntryWithRefs<any>[];
		from: PublicSignKey;
	}): Promise<void> | void {
		for (const entry of properties.entries) {
			const set = this.syncInFlight.get(properties.from.hashcode());
			if (set) {
				set.delete(entry.entry.hash);
				if (set?.size === 0) {
					this.syncInFlight.delete(properties.from.hashcode());
				}
			}
		}
	}

	/**
	 * Queues keys (hashes or coordinates) offered by `from`. A key seen for
	 * the first time (and not already present locally, unless skipCheck) is
	 * requested from `from` immediately; subsequent offers only record `from`
	 * as a fallback source for the retry loop in open().
	 */
	async queueSync(
		keys: string[] | bigint[],
		from: PublicSignKey,
		options?: { skipCheck?: boolean },
	) {
		const requestHashes: SyncableKey[] = [];

		for (const coordinateOrHash of keys) {
			const inFlight = this.syncInFlightQueue.get(coordinateOrHash);
			if (inFlight) {
				// Already queued: remember `from` as an alternative source.
				if (!inFlight.find((x) => x.hashcode() === from.hashcode())) {
					inFlight.push(from);
					let inverted = this.syncInFlightQueueInverted.get(from.hashcode());
					if (!inverted) {
						inverted = new Set();
						this.syncInFlightQueueInverted.set(from.hashcode(), inverted);
					}
					inverted.add(coordinateOrHash);
				}
			} else if (
				options?.skipCheck ||
				!(await this.checkHasCoordinateOrHash(coordinateOrHash))
			) {
				this.syncInFlightQueue.set(coordinateOrHash, []);
				requestHashes.push(coordinateOrHash); // request immediately (first time we have seen this hash)
			}
		}

		requestHashes.length > 0 &&
			(await this.requestSync(requestHashes as string[] | bigint[], [
				from!.hashcode(),
			]));
	}

	/**
	 * Sends a request for `hashes` to `to`, recording each (peer, key) pair
	 * in syncInFlight with the current timestamp so stale requests can be
	 * expired by the loop in open(). Chooses the coordinate- or hash-based
	 * message by inspecting the first element's type (the array is assumed
	 * homogeneous).
	 */
	private async requestSync(
		hashes: string[] | bigint[],
		to: Set<string> | string[],
	) {
		if (hashes.length === 0) {
			return;
		}

		const now = +new Date();
		for (const node of to) {
			let map = this.syncInFlight.get(node);
			if (!map) {
				map = new Map();
				this.syncInFlight.set(node, map);
			}
			for (const hash of hashes) {
				map.set(hash, { timestamp: now });
			}
		}

		const isBigInt = typeof hashes[0] === "bigint";

		await this.rpc.send(
			isBigInt
				? new RequestMaybeSyncCoordinate({ coordinates: hashes as bigint[] })
				: new ResponseMaybeSync({ hashes: hashes as string[] }),
			{
				mode: new SilentDelivery({ to, redundancy: 1 }),
				priority: 1,
			},
		);
	}

	// True if the entry is already present locally: coordinates are checked
	// against the entry index, hashes against the log.
	private async checkHasCoordinateOrHash(key: string | bigint) {
		return typeof key === "bigint"
			? (await this.entryIndex.count({ query: { coordinates: key } })) > 0
			: this.log.has(key);
	}

	/**
	 * Starts the periodic retry loop. Each pass re-requests still-missing
	 * queued keys from the next fallback peer, and expires stale in-flight
	 * requests; it then reschedules itself every 3s until close().
	 */
	async open() {
		this.closed = false;
		const requestSyncLoop = async () => {
			/**
			 * This method fetches entries that we potentially want.
			 * In a case in which we become replicator of a segment,
			 * multiple remote peers might want to send us entries
			 * This method makes sure that we only request one entry from the remotes at a time
			 * so we don't get flooded with the same entry
			 */

			const requestHashes: SyncableKey[] = [];
			const from: Set<string> = new Set();
			for (const [key, value] of this.syncInFlightQueue) {
				if (this.closed) {
					return;
				}

				const has = await this.checkHasCoordinateOrHash(key);

				if (!has) {
					// TODO test that this if statement actually does anything meaningful
					if (value.length > 0) {
						// Retry with the next fallback peer for this key.
						requestHashes.push(key);
						const publicKeyHash = value.shift()!.hashcode();
						from.add(publicKeyHash);
						const invertedSet =
							this.syncInFlightQueueInverted.get(publicKeyHash);
						if (invertedSet) {
							if (invertedSet.delete(key)) {
								if (invertedSet.size === 0) {
									this.syncInFlightQueueInverted.delete(publicKeyHash);
								}
							}
						}
					}
					if (value.length === 0) {
						this.syncInFlightQueue.delete(key); // no-one more to ask for this entry
					}
				} else {
					// Entry arrived by other means; drop it from the queue.
					this.syncInFlightQueue.delete(key);
				}
			}

			// NOTE(review): name says 10s but 2e4 ms is 20s — reconcile name/value.
			const nowMin10s = +new Date() - 2e4;
			for (const [key, map] of this.syncInFlight) {
				// cleanup "old" missing syncs
				for (const [hash, { timestamp }] of map) {
					if (timestamp < nowMin10s) {
						map.delete(hash);
					}
				}
				if (map.size === 0) {
					this.syncInFlight.delete(key);
				}
			}
			// NOTE(review): a rejection from requestSync propagates unhandled here
			// (.finally does not swallow it) — consider adding a .catch.
			this.requestSync(requestHashes as string[] | bigint[], from).finally(
				() => {
					if (this.closed) {
						return;
					}
					this.syncMoreInterval = setTimeout(requestSyncLoop, 3e3);
				},
			);
		};

		requestSyncLoop();
	}

	/** Stops the retry loop and drops all queued/in-flight state. */
	async close() {
		this.closed = true;
		this.syncInFlightQueue.clear();
		this.syncInFlightQueueInverted.clear();
		this.syncInFlight.clear();
		clearTimeout(this.syncMoreInterval);
	}

	/** Entry landed in the log — no need to keep requesting it. */
	onEntryAdded(entry: Entry<any>): void {
		return this.clearSyncProcess(entry.hash);
	}

	/** Entry removed — stop tracking any pending requests for it. */
	onEntryRemoved(hash: string): void {
		return this.clearSyncProcess(hash);
	}

	// Removes `hash` from the queue and from every peer's inverted set.
	private clearSyncProcess(hash: string) {
		const inflight = this.syncInFlightQueue.get(hash);
		if (inflight) {
			for (const key of inflight) {
				const map = this.syncInFlightQueueInverted.get(key.hashcode());
				if (map) {
					map.delete(hash);
					if (map.size === 0) {
						this.syncInFlightQueueInverted.delete(key.hashcode());
					}
				}
			}

			this.syncInFlightQueue.delete(hash);
		}
	}

	onPeerDisconnected(key: PublicSignKey): Promise<void> | void {
		return this.clearSyncProcessPublicKey(key);
	}

	// Removes a disconnected peer from all bookkeeping; keys it was the last
	// remaining source for are dropped from the queue entirely.
	private clearSyncProcessPublicKey(publicKey: PublicSignKey) {
		this.syncInFlight.delete(publicKey.hashcode());
		const map = this.syncInFlightQueueInverted.get(publicKey.hashcode());
		if (map) {
			for (const hash of map) {
				const arr = this.syncInFlightQueue.get(hash);
				if (arr) {
					const filtered = arr.filter((x) => !x.equals(publicKey));
					if (filtered.length > 0) {
						this.syncInFlightQueue.set(hash, filtered);
					} else {
						this.syncInFlightQueue.delete(hash);
					}
				}
			}
			this.syncInFlightQueueInverted.delete(publicKey.hashcode());
		}
	}

	/** Number of keys still waiting to be fetched. */
	get pending() {
		return this.syncInFlightQueue.size;
	}
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// nothing to do since 'fetch' works as expected in the browser
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
// Override globalThis.fetch to intercept .wasm requests
|
|
2
|
+
import { readFile } from "fs/promises";
|
|
3
|
+
|
|
4
|
+
const defaultFetch = globalThis.fetch.bind(globalThis);
|
|
5
|
+
(globalThis.fetch as any) = async (url: any, options: any) => {
|
|
6
|
+
// If you have multiple wasm files, you might use some logic to handle them.
|
|
7
|
+
// Here, we assume any request ending in `.wasm` is local on disk at the same path.
|
|
8
|
+
if (url.toString().endsWith(".wasm")) {
|
|
9
|
+
// Return a NodeResponse that looks enough like a fetch Response
|
|
10
|
+
return readFile(url);
|
|
11
|
+
}
|
|
12
|
+
|
|
13
|
+
return defaultFetch(url, options);
|
|
14
|
+
};
|
package/src/utils.ts
CHANGED
|
@@ -1,9 +1,13 @@
|
|
|
1
1
|
import { Entry, ShallowEntry } from "@peerbit/log";
|
|
2
2
|
import type { EntryWithRefs } from "./exchange-heads.js";
|
|
3
|
-
import { EntryReplicated } from "./ranges.js";
|
|
3
|
+
import { type EntryReplicated, isEntryReplicated } from "./ranges.js";
|
|
4
4
|
|
|
5
5
|
export const groupByGid = async <
|
|
6
|
-
T extends
|
|
6
|
+
T extends
|
|
7
|
+
| ShallowEntry
|
|
8
|
+
| Entry<any>
|
|
9
|
+
| EntryWithRefs<any>
|
|
10
|
+
| EntryReplicated<any>,
|
|
7
11
|
>(
|
|
8
12
|
entries: T[],
|
|
9
13
|
): Promise<Map<string, T[]>> => {
|
|
@@ -14,7 +18,7 @@ export const groupByGid = async <
|
|
|
14
18
|
? (await head.getMeta()).gid
|
|
15
19
|
: head instanceof ShallowEntry
|
|
16
20
|
? head.meta.gid
|
|
17
|
-
: head
|
|
21
|
+
: isEntryReplicated(head)
|
|
18
22
|
? head.gid
|
|
19
23
|
: (await head.entry.getMeta()).gid;
|
|
20
24
|
let value = groupByGid.get(gid);
|
|
@@ -27,7 +31,9 @@ export const groupByGid = async <
|
|
|
27
31
|
return groupByGid;
|
|
28
32
|
};
|
|
29
33
|
|
|
30
|
-
export const groupByGidSync = async <
|
|
34
|
+
export const groupByGidSync = async <
|
|
35
|
+
T extends ShallowEntry | EntryReplicated<any>,
|
|
36
|
+
>(
|
|
31
37
|
entries: T[],
|
|
32
38
|
): Promise<Map<string, T[]>> => {
|
|
33
39
|
const groupByGid: Map<string, T[]> = new Map();
|