@peerbit/shared-log 10.0.6 → 10.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/benchmark/get-samples.js +1 -1
- package/dist/benchmark/get-samples.js.map +1 -1
- package/dist/benchmark/utils.js +1 -1
- package/dist/benchmark/utils.js.map +1 -1
- package/dist/src/index.d.ts +15 -10
- package/dist/src/index.d.ts.map +1 -1
- package/dist/src/index.js +138 -65
- package/dist/src/index.js.map +1 -1
- package/dist/src/ranges.d.ts +95 -11
- package/dist/src/ranges.d.ts.map +1 -1
- package/dist/src/ranges.js +437 -83
- package/dist/src/ranges.js.map +1 -1
- package/dist/src/replication-domain-hash.d.ts +2 -2
- package/dist/src/replication-domain-hash.d.ts.map +1 -1
- package/dist/src/replication-domain-hash.js +2 -17
- package/dist/src/replication-domain-hash.js.map +1 -1
- package/dist/src/replication-domain-time.d.ts +7 -2
- package/dist/src/replication-domain-time.d.ts.map +1 -1
- package/dist/src/replication-domain-time.js +7 -12
- package/dist/src/replication-domain-time.js.map +1 -1
- package/dist/src/replication-domain.d.ts +3 -20
- package/dist/src/replication-domain.d.ts.map +1 -1
- package/dist/src/replication-domain.js +0 -33
- package/dist/src/replication-domain.js.map +1 -1
- package/package.json +4 -4
- package/src/index.ts +205 -107
- package/src/ranges.ts +669 -127
- package/src/replication-domain-hash.ts +16 -29
- package/src/replication-domain-time.ts +46 -40
- package/src/replication-domain.ts +7 -59
package/dist/src/ranges.js
CHANGED
|
@@ -9,13 +9,13 @@ var __metadata = (this && this.__metadata) || function (k, v) {
|
|
|
9
9
|
};
|
|
10
10
|
import { deserialize, field, serialize, variant, vec } from "@dao-xyz/borsh";
|
|
11
11
|
import { PublicSignKey, equals, randomBytes, sha256Base64Sync, toBase64, } from "@peerbit/crypto";
|
|
12
|
-
import { And, BoolQuery, ByteMatchQuery, Compare, IntegerCompare, Not, Or, Sort, SortDirection, StringMatch,
|
|
12
|
+
import { And, BoolQuery, ByteMatchQuery, Compare, IntegerCompare, Not, Or, Sort, SortDirection, StringMatch, iteratorInSeries,
|
|
13
13
|
/* iteratorInSeries, */
|
|
14
14
|
} from "@peerbit/indexer-interface";
|
|
15
15
|
import { id } from "@peerbit/indexer-interface";
|
|
16
16
|
import { Meta, ShallowMeta } from "@peerbit/log";
|
|
17
|
+
import { debounceAccumulator } from "./debounce.js";
|
|
17
18
|
import { MAX_U32, MAX_U64, } from "./integers.js";
|
|
18
|
-
import {} from "./replication-domain.js";
|
|
19
19
|
export var ReplicationIntent;
|
|
20
20
|
(function (ReplicationIntent) {
|
|
21
21
|
ReplicationIntent[ReplicationIntent["NonStrict"] = 0] = "NonStrict";
|
|
@@ -193,7 +193,7 @@ let ReplicationRangeMessageU32 = class ReplicationRangeMessageU32 extends Replic
|
|
|
193
193
|
id: this.id,
|
|
194
194
|
publicKeyHash: key.hashcode(),
|
|
195
195
|
offset: this.offset,
|
|
196
|
-
|
|
196
|
+
width: this.factor,
|
|
197
197
|
timestamp: this.timestamp,
|
|
198
198
|
mode: this.mode,
|
|
199
199
|
});
|
|
@@ -250,7 +250,7 @@ let ReplicationRangeMessageU64 = class ReplicationRangeMessageU64 extends Replic
|
|
|
250
250
|
id: this.id,
|
|
251
251
|
publicKeyHash: key.hashcode(),
|
|
252
252
|
offset: this.offset,
|
|
253
|
-
|
|
253
|
+
width: this.factor,
|
|
254
254
|
timestamp: this.timestamp,
|
|
255
255
|
mode: this.mode,
|
|
256
256
|
});
|
|
@@ -349,6 +349,185 @@ __decorate([
|
|
|
349
349
|
field({ type: "u8" }),
|
|
350
350
|
__metadata("design:type", Number)
|
|
351
351
|
], HashableSegmentU64.prototype, "mode", void 0);
|
|
352
|
+
/**
 * Convert a GeneralRange<N> into one or two `[bigint, bigint]` segments.
 * - If it’s not wrapped, there’s one segment: [start1, end1).
 * - If it’s wrapped, there’s two: [start1, end1) and [start2, end2).
 *
 * We always do the conversion to bigints internally.
 */
export function toSegmentsBigInt(range) {
    // Normalize each bound to bigint, whether the range stores u32 bounds
    // (plain numbers) or u64 bounds (bigints).
    const asBigInt = (v) => (typeof v === "number" ? BigInt(v) : v);
    const firstStart = asBigInt(range.start1);
    const firstEnd = asBigInt(range.end1);
    const secondStart = asBigInt(range.start2);
    const secondEnd = asBigInt(range.end2);
    const segments = [[firstStart, firstEnd]];
    // Only emit the second segment when it carries information: skip it when
    // it is empty (start2 === end2) or shares its start with the first one.
    if (secondStart !== firstStart && secondStart !== secondEnd) {
        segments.push([secondStart, secondEnd]);
    }
    return segments;
}
|
|
372
|
+
/**
 * Build an array of new GeneralRange<N> objects from leftover `[bigint, bigint]` segments.
 * We split them in pairs, each range can hold up to two segments:
 *
 * - [seg1Start, seg1End)
 * - [seg2Start, seg2End) (if available)
 *
 * We convert bigints back to the correct numeric type, if needed.
 */
function buildRangesFromBigIntSegments(segments, templateRange) {
    // Process segments in ascending start order (sorts the caller's array in place).
    segments.sort((a, b) => (a[0] < b[0] ? -1 : a[0] > b[0] ? 1 : 0));
    const proto = Object.getPrototypeOf(templateRange);
    const result = [];
    // Consume the sorted segments two at a time; each output range holds up to
    // two segments.
    for (let i = 0; i < segments.length; i += 2) {
        const seg1 = segments[i];
        const seg2 = i + 1 < segments.length ? segments[i + 1] : null;
        // Convert back to the original numeric type (number for u32, bigint for u64).
        const [s1, e1] = toOriginalType(seg1, templateRange);
        // With no second segment, collapse it onto the first (empty second arc).
        const [s2, e2] = seg2 ? toOriginalType(seg2, templateRange) : [s1, e1];
        // Clone the template's prototype and metadata, overriding only the bounds.
        const clone = Object.assign(Object.create(proto), {
            ...templateRange,
            start1: s1,
            end1: e1,
            start2: s2,
            end2: e2,
        });
        result.push(clone);
    }
    return result;
}
|
|
412
|
+
/**
 * Subtract one bigint segment [bStart, bEnd) from [aStart, aEnd).
 * Returns 0..2 leftover segments in bigint form.
 */
function subtractBigIntSegment(aStart, aEnd, bStart, bEnd) {
    // Disjoint: A survives untouched.
    if (bEnd <= aStart || bStart >= aEnd) {
        return [[aStart, aEnd]];
    }
    // B swallows A entirely: nothing left.
    if (bStart <= aStart && bEnd >= aEnd) {
        return [];
    }
    // Partial overlap: keep the pieces of A on either side of B.
    const leftovers = [];
    if (aStart < bStart) {
        leftovers.push([aStart, bStart]);
    }
    if (aEnd > bEnd) {
        leftovers.push([bEnd, aEnd]);
    }
    return leftovers;
}
|
|
436
|
+
/**
 * Helper: convert `[bigint, bigint]` to `[number, number]` if N is "u32",
 * or keep as `[bigint, bigint]` if N is "u64".
 */
function toOriginalType(segment, templateRange) {
    const [start, end] = segment;
    // u32 templates store numbers, so narrow back; u64 stays as bigint.
    return isU32Range(templateRange) ? [Number(start), Number(end)] : [start, end];
}
|
|
451
|
+
/**
 * Merge any adjacent or overlapping `[bigint, bigint]` segments.
 * E.g. [10,20) and [20,25) => [10,25)
 */
export function mergeBigIntSegments(segments) {
    // Nothing to merge with fewer than two segments.
    if (segments.length < 2) {
        return segments;
    }
    // Sort by start (sorts the caller's array in place, as before).
    segments.sort((a, b) => (a[0] < b[0] ? -1 : a[0] > b[0] ? 1 : 0));
    const merged = [];
    let active = segments[0];
    for (const candidate of segments.slice(1)) {
        if (active[1] >= candidate[0]) {
            // Overlapping or touching: extend the active segment.
            const end = active[1] > candidate[1] ? active[1] : candidate[1];
            active = [active[0], end];
        }
        else {
            // Gap: commit the active segment and start a new one.
            merged.push(active);
            active = candidate;
        }
    }
    merged.push(active);
    return merged;
}
|
|
477
|
+
/**
 * Figure out if a given range is "u32" or "u64".
 * You might also store this in the object itself if you prefer.
 */
function isU32Range(range) {
    // Heuristic: u32 ranges store their bounds as plain numbers, u64 ranges
    // as bigints, so the runtime type of start1 distinguishes them.
    return typeof range.start1 === "number";
}
|
|
488
|
+
/**
 * Compute the symmetric difference of two ranges: the parts of `rangeA` not
 * covered by `rangeB` and the parts of `rangeB` not covered by `rangeA`.
 * All arithmetic is done on bigint segments; results are rebuilt as ranges of
 * each input's own type.
 */
export function symmetricDifferenceRanges(rangeA, rangeB) {
    const segmentsA = toSegmentsBigInt(rangeA);
    const segmentsB = toSegmentsBigInt(rangeB);
    // Subtract every segment of `other` from each segment of `own`, keeping
    // whatever survives.
    const difference = (own, other) => {
        const out = [];
        for (const [start, end] of own) {
            let remaining = [[start, end]];
            for (const [oStart, oEnd] of other) {
                const next = [];
                for (const [rStart, rEnd] of remaining) {
                    next.push(...subtractBigIntSegment(rStart, rEnd, oStart, oEnd));
                }
                remaining = next;
            }
            out.push(...remaining);
        }
        return out;
    };
    // Drop zero-length or inverted leftovers.
    const onlyInA = difference(segmentsA, segmentsB).filter(([start, end]) => start < end);
    const onlyInB = difference(segmentsB, segmentsA).filter(([start, end]) => start < end);
    // Merge touching leftovers, then rebuild concrete range objects.
    return {
        rangesFromA: buildRangesFromBigIntSegments(mergeBigIntSegments(onlyInA), rangeA),
        rangesFromB: buildRangesFromBigIntSegments(mergeBigIntSegments(onlyInB), rangeB),
    };
}
|
|
352
531
|
export class ReplicationRangeIndexableU32 {
|
|
353
532
|
id;
|
|
354
533
|
hash;
|
|
@@ -364,12 +543,12 @@ export class ReplicationRangeIndexableU32 {
|
|
|
364
543
|
this.hash =
|
|
365
544
|
properties.publicKeyHash ||
|
|
366
545
|
properties.publicKey.hashcode();
|
|
367
|
-
this.transform({
|
|
546
|
+
this.transform({ width: properties.width, offset: properties.offset });
|
|
368
547
|
this.mode = properties.mode ?? ReplicationIntent.NonStrict;
|
|
369
548
|
this.timestamp = properties.timestamp || BigInt(0);
|
|
370
549
|
}
|
|
371
550
|
transform(properties) {
|
|
372
|
-
const ranges = getSegmentsFromOffsetAndRange(properties.offset, properties.
|
|
551
|
+
const ranges = getSegmentsFromOffsetAndRange(properties.offset, properties.width, 0, MAX_U32);
|
|
373
552
|
this.start1 = Math.round(ranges[0][0]);
|
|
374
553
|
this.end1 = Math.round(ranges[0][1]);
|
|
375
554
|
this.start2 = Math.round(ranges[1][0]);
|
|
@@ -390,6 +569,10 @@ export class ReplicationRangeIndexableU32 {
|
|
|
390
569
|
get idString() {
|
|
391
570
|
return toBase64(this.id);
|
|
392
571
|
}
|
|
572
|
+
get rangeHash() {
|
|
573
|
+
const ser = serialize(this);
|
|
574
|
+
return sha256Base64Sync(ser);
|
|
575
|
+
}
|
|
393
576
|
contains(point) {
|
|
394
577
|
return ((point >= this.start1 && point < this.end1) ||
|
|
395
578
|
(point >= this.start2 && point < this.end2));
|
|
@@ -436,7 +619,8 @@ export class ReplicationRangeIndexableU32 {
|
|
|
436
619
|
return false;
|
|
437
620
|
}
|
|
438
621
|
equalRange(other) {
|
|
439
|
-
return (this.
|
|
622
|
+
return (this.hash === other.hash &&
|
|
623
|
+
this.start1 === other.start1 &&
|
|
440
624
|
this.end1 === other.end1 &&
|
|
441
625
|
this.start2 === other.start2 &&
|
|
442
626
|
this.end2 === other.end2);
|
|
@@ -495,7 +679,7 @@ __decorate([
|
|
|
495
679
|
], ReplicationRangeIndexableU32.prototype, "mode", void 0);
|
|
496
680
|
export class ReplicationRangeIndexableU64 {
|
|
497
681
|
id;
|
|
498
|
-
hash;
|
|
682
|
+
hash; // publickey hash
|
|
499
683
|
timestamp;
|
|
500
684
|
start1;
|
|
501
685
|
end1;
|
|
@@ -508,12 +692,12 @@ export class ReplicationRangeIndexableU64 {
|
|
|
508
692
|
this.hash =
|
|
509
693
|
properties.publicKeyHash ||
|
|
510
694
|
properties.publicKey.hashcode();
|
|
511
|
-
this.transform({
|
|
695
|
+
this.transform({ width: properties.width, offset: properties.offset });
|
|
512
696
|
this.mode = properties.mode ?? ReplicationIntent.NonStrict;
|
|
513
697
|
this.timestamp = properties.timestamp || BigInt(0);
|
|
514
698
|
}
|
|
515
699
|
transform(properties) {
|
|
516
|
-
const ranges = getSegmentsFromOffsetAndRange(BigInt(properties.offset), BigInt(properties.
|
|
700
|
+
const ranges = getSegmentsFromOffsetAndRange(BigInt(properties.offset), BigInt(properties.width), 0n, MAX_U64);
|
|
517
701
|
this.start1 = ranges[0][0];
|
|
518
702
|
this.end1 = ranges[0][1];
|
|
519
703
|
this.start2 = ranges[1][0];
|
|
@@ -538,6 +722,10 @@ export class ReplicationRangeIndexableU64 {
|
|
|
538
722
|
return ((point >= this.start1 && point < this.end1) ||
|
|
539
723
|
(point >= this.start2 && point < this.end2));
|
|
540
724
|
}
|
|
725
|
+
get rangeHash() {
|
|
726
|
+
const ser = serialize(this);
|
|
727
|
+
return sha256Base64Sync(ser);
|
|
728
|
+
}
|
|
541
729
|
overlaps(other, checkOther = true) {
|
|
542
730
|
if (this.contains(other.start1) ||
|
|
543
731
|
this.contains(other.start2) ||
|
|
@@ -580,7 +768,8 @@ export class ReplicationRangeIndexableU64 {
|
|
|
580
768
|
return false;
|
|
581
769
|
}
|
|
582
770
|
equalRange(other) {
|
|
583
|
-
return (this.
|
|
771
|
+
return (this.hash === other.hash &&
|
|
772
|
+
this.start1 === other.start1 &&
|
|
584
773
|
this.end1 === other.end1 &&
|
|
585
774
|
this.start2 === other.start2 &&
|
|
586
775
|
this.end2 === other.end2);
|
|
@@ -649,18 +838,17 @@ export const mergeRanges = (segments, numbers) => {
|
|
|
649
838
|
if (!sameHash) {
|
|
650
839
|
throw new Error("Segments have different publicKeyHash");
|
|
651
840
|
}
|
|
652
|
-
// only allow merging segments with length 1 (trivial)
|
|
653
|
-
const sameLength = segments.every((x) => x.width === 1 || x.width === 1n);
|
|
654
|
-
if (!sameLength) {
|
|
655
|
-
throw new Error("Segments have different length, only merging of segments length 1 is supported");
|
|
656
|
-
}
|
|
657
841
|
const sorted = segments.sort((a, b) => Number(a.start1 - b.start1));
|
|
658
842
|
let calculateLargeGap = () => {
|
|
659
843
|
let last = sorted[sorted.length - 1];
|
|
660
844
|
let largestArc = numbers.zero;
|
|
661
845
|
let largestArcIndex = -1;
|
|
846
|
+
let mode = ReplicationIntent.NonStrict;
|
|
662
847
|
for (let i = 0; i < sorted.length; i++) {
|
|
663
848
|
const current = sorted[i];
|
|
849
|
+
if (current.mode === ReplicationIntent.Strict) {
|
|
850
|
+
mode = ReplicationIntent.Strict;
|
|
851
|
+
}
|
|
664
852
|
if (current.start1 !== last.start1) {
|
|
665
853
|
let arc = numbers.zero;
|
|
666
854
|
if (current.start1 < last.end2) {
|
|
@@ -677,19 +865,28 @@ export const mergeRanges = (segments, numbers) => {
|
|
|
677
865
|
}
|
|
678
866
|
last = current;
|
|
679
867
|
}
|
|
680
|
-
return [largestArc, largestArcIndex];
|
|
868
|
+
return [largestArc, largestArcIndex, mode];
|
|
681
869
|
};
|
|
682
|
-
const [largestArc, largestArcIndex] = calculateLargeGap();
|
|
870
|
+
const [largestArc, largestArcIndex, mode] = calculateLargeGap();
|
|
683
871
|
let totalLengthFinal = numbers.maxValue - largestArc;
|
|
872
|
+
const proto = segments[0].constructor;
|
|
684
873
|
if (largestArcIndex === -1) {
|
|
874
|
+
if (mode !== segments[0].mode) {
|
|
875
|
+
return new proto({
|
|
876
|
+
width: segments[0].width,
|
|
877
|
+
offset: segments[0].start1,
|
|
878
|
+
publicKeyHash: segments[0].hash,
|
|
879
|
+
mode,
|
|
880
|
+
});
|
|
881
|
+
}
|
|
685
882
|
return segments[0]; // all ranges are the same
|
|
686
883
|
}
|
|
687
884
|
// use segments[0] constructor to create a new object
|
|
688
|
-
const proto = segments[0].constructor;
|
|
689
885
|
return new proto({
|
|
690
|
-
|
|
886
|
+
width: totalLengthFinal,
|
|
691
887
|
offset: segments[largestArcIndex].start1,
|
|
692
888
|
publicKeyHash: segments[0].hash,
|
|
889
|
+
mode,
|
|
693
890
|
});
|
|
694
891
|
};
|
|
695
892
|
const createContainingPointQuery = (points, options) => {
|
|
@@ -815,7 +1012,7 @@ export const appromixateCoverage = async (properties) => {
|
|
|
815
1012
|
}
|
|
816
1013
|
return hits / properties.samples;
|
|
817
1014
|
};
|
|
818
|
-
const getClosest = (direction, rects, point,
|
|
1015
|
+
const getClosest = (direction, rects, point, includeStrict, numbers, options) => {
|
|
819
1016
|
const createQueries = (p, equality) => {
|
|
820
1017
|
let queries;
|
|
821
1018
|
if (direction === "below") {
|
|
@@ -825,11 +1022,6 @@ const getClosest = (direction, rects, point, roleAgeLimit, matured, now, include
|
|
|
825
1022
|
compare: equality ? Compare.LessOrEqual : Compare.Less,
|
|
826
1023
|
value: p,
|
|
827
1024
|
}),
|
|
828
|
-
new IntegerCompare({
|
|
829
|
-
key: "timestamp",
|
|
830
|
-
compare: matured ? Compare.LessOrEqual : Compare.GreaterOrEqual,
|
|
831
|
-
value: BigInt(now - roleAgeLimit),
|
|
832
|
-
}),
|
|
833
1025
|
];
|
|
834
1026
|
}
|
|
835
1027
|
else {
|
|
@@ -839,13 +1031,17 @@ const getClosest = (direction, rects, point, roleAgeLimit, matured, now, include
|
|
|
839
1031
|
compare: equality ? Compare.GreaterOrEqual : Compare.Greater,
|
|
840
1032
|
value: p,
|
|
841
1033
|
}),
|
|
842
|
-
new IntegerCompare({
|
|
843
|
-
key: "timestamp",
|
|
844
|
-
compare: matured ? Compare.LessOrEqual : Compare.GreaterOrEqual,
|
|
845
|
-
value: BigInt(now - roleAgeLimit),
|
|
846
|
-
}),
|
|
847
1034
|
];
|
|
848
1035
|
}
|
|
1036
|
+
if (options?.time) {
|
|
1037
|
+
queries.push(new IntegerCompare({
|
|
1038
|
+
key: "timestamp",
|
|
1039
|
+
compare: options?.time?.matured
|
|
1040
|
+
? Compare.LessOrEqual
|
|
1041
|
+
: Compare.GreaterOrEqual,
|
|
1042
|
+
value: BigInt(options.time.now - options.time.roleAgeLimit),
|
|
1043
|
+
}));
|
|
1044
|
+
}
|
|
849
1045
|
queries.push(new IntegerCompare({ key: "width", compare: Compare.Greater, value: 0 }));
|
|
850
1046
|
if (!includeStrict) {
|
|
851
1047
|
queries.push(new IntegerCompare({
|
|
@@ -854,6 +1050,9 @@ const getClosest = (direction, rects, point, roleAgeLimit, matured, now, include
|
|
|
854
1050
|
value: ReplicationIntent.NonStrict,
|
|
855
1051
|
}));
|
|
856
1052
|
}
|
|
1053
|
+
if (options?.hash) {
|
|
1054
|
+
queries.push(new StringMatch({ key: "hash", value: options.hash }));
|
|
1055
|
+
}
|
|
857
1056
|
return queries;
|
|
858
1057
|
};
|
|
859
1058
|
const sortByOldest = new Sort({ key: "timestamp", direction: "asc" });
|
|
@@ -936,7 +1135,7 @@ export const getCoveringRangeQuery = (range) => {
|
|
|
936
1135
|
]),
|
|
937
1136
|
];
|
|
938
1137
|
};
|
|
939
|
-
export const
|
|
1138
|
+
export const countCoveringRangesSameOwner = async (rects, range) => {
|
|
940
1139
|
return ((await rects.count({
|
|
941
1140
|
query: [
|
|
942
1141
|
...getCoveringRangeQuery(range),
|
|
@@ -952,6 +1151,22 @@ export const iHaveCoveringRange = async (rects, range) => {
|
|
|
952
1151
|
],
|
|
953
1152
|
})) > 0);
|
|
954
1153
|
};
|
|
1154
|
+
/**
 * Iterate ranges owned by the same peer (`range.hash`) that cover `range`,
 * excluding `range` itself — we are looking for other ranges, not trying to
 * update an existing one.
 */
export const getCoveringRangesSameOwner = (rects, range) => {
    const sameOwner = new StringMatch({
        key: "hash",
        value: range.hash,
    });
    // assume that we are looking for other ranges, not want to update an existing one
    const notSelf = new Not(new ByteMatchQuery({
        key: "id",
        value: range.id,
    }));
    return rects.iterate({
        query: [...getCoveringRangeQuery(range), sameOwner, notSelf],
    });
};
|
|
955
1170
|
// TODO
|
|
956
1171
|
export function getDistance(from, to, direction, end) {
|
|
957
1172
|
const abs = (value) => value < 0 ? -value : value;
|
|
@@ -1055,26 +1270,44 @@ const joinIterator = (iterators, point, direction, numbers) => {
|
|
|
1055
1270
|
},
|
|
1056
1271
|
};
|
|
1057
1272
|
};
|
|
1058
|
-
const
|
|
1059
|
-
const closestBelow = getClosest("below", peers, point,
|
|
1060
|
-
const closestAbove = getClosest("above", peers, point,
|
|
1061
|
-
|
|
1062
|
-
|
|
1063
|
-
|
|
1064
|
-
|
|
1065
|
-
|
|
1066
|
-
|
|
1067
|
-
|
|
1068
|
-
|
|
1069
|
-
|
|
1070
|
-
|
|
1071
|
-
|
|
1072
|
-
|
|
1073
|
-
|
|
1074
|
-
|
|
1075
|
-
|
|
1076
|
-
|
|
1077
|
-
|
|
1273
|
+
/**
 * Iterator over ranges relevant to `point`: ranges containing the point are
 * yielded first, followed by the nearest neighbours below and above,
 * interleaved by distance to the point.
 */
const getClosestAroundOrContaining = (peers, point, includeStrictBelow, includeStrictAbove, numbers, options) => {
    const below = getClosest("below", peers, point, includeStrictBelow, numbers, options);
    const above = getClosest("above", peers, point, includeStrictAbove, numbers, options);
    const containing = iterateRangesContainingPoint(peers, point, options);
    const around = joinIterator([below, above], point, "closest", numbers);
    return iteratorInSeries(containing, around);
};
|
|
1279
|
+
/**
 * Find the closest neighbouring ranges owned by the same peer (`range.hash`):
 * the single closest range below `range.start1` and the single closest one
 * above `range.end2`. Either side is `undefined` when the best hit is the
 * queried range itself (or, for `above`, duplicates the `below` hit).
 */
export const getAdjecentSameOwner = async (peers, range, numbers) => {
    const closestBelowIterator = getClosest("below", peers, range.start1, true, numbers, {
        hash: range.hash,
    });
    const closestBelow = await closestBelowIterator.next(1);
    // NOTE(review): close() is not awaited here (nor below) — presumably
    // intentional fire-and-forget cleanup, but confirm it cannot reject.
    closestBelowIterator.close();
    const closestAboveIterator = getClosest("above", peers, range.end2, true, numbers, {
        hash: range.hash,
    });
    const closestAbove = await closestAboveIterator.next(1);
    closestAboveIterator.close();
    return {
        // Drop the neighbour when it is the queried range itself.
        below: range.idString === closestBelow[0]?.value.idString
            ? undefined
            : closestBelow[0]?.value,
        // Drop `above` when it duplicates the `below` hit.
        // NOTE(review): the second clause compares against closestBelow, not
        // closestAbove — looks like a copy-paste slip; confirm the intent
        // before changing it.
        above: closestBelow[0]?.id.primitive === closestAbove[0]?.id.primitive ||
            range.idString === closestBelow[0]?.value.idString
            ? undefined
            : closestAbove[0]?.value,
    };
};
|
|
1300
|
+
/**
 * Collect every range that is a candidate for merging with `range`: its
 * adjacent ranges (same owner) plus any same-owner ranges covering it.
 */
export const getAllMergeCandiates = async (peers, range, numbers) => {
    const adjacent = await getAdjecentSameOwner(peers, range, numbers);
    const covering = await getCoveringRangesSameOwner(peers, range).all();
    const candidates = [];
    if (adjacent.below) {
        candidates.push(adjacent.below);
    }
    if (adjacent.above) {
        candidates.push(adjacent.above);
    }
    for (const hit of covering) {
        candidates.push(hit.value);
    }
    return candidates;
};
|
|
1079
1312
|
export const isMatured = (segment, now, minAge) => {
|
|
1080
1313
|
return now - Number(segment.timestamp) >= minAge;
|
|
@@ -1145,13 +1378,8 @@ const collectNodesAroundPoint = async <R extends "u32" | "u64">(
|
|
|
1145
1378
|
};
|
|
1146
1379
|
*/
|
|
1147
1380
|
const collectClosestAround = async (roleAge, peers, collector, point, now, numbers, done = () => true) => {
|
|
1148
|
-
const closestBelow = getClosest("below", peers, point,
|
|
1149
|
-
const closestAbove = getClosest("above", peers, point,
|
|
1150
|
-
/* const containingIterator = iterateRangesContainingPoint<undefined, R>(
|
|
1151
|
-
peers,
|
|
1152
|
-
point,
|
|
1153
|
-
);
|
|
1154
|
-
*/
|
|
1381
|
+
const closestBelow = getClosest("below", peers, point, false, numbers);
|
|
1382
|
+
const closestAbove = getClosest("above", peers, point, false, numbers);
|
|
1155
1383
|
const aroundIterator = joinIterator([/* containingIterator, */ closestBelow, closestAbove], point, "closest", numbers);
|
|
1156
1384
|
let visited = new Set();
|
|
1157
1385
|
while (aroundIterator.done() !== true && done() !== true) {
|
|
@@ -1243,7 +1471,13 @@ export const minimumWidthToCover = async (minReplicas /* , replicatorCount: numb
|
|
|
1243
1471
|
export const getCoverSet = async (properties) => {
|
|
1244
1472
|
const { peers, start, widthToCoverScaled, roleAge } = properties;
|
|
1245
1473
|
const now = Date.now();
|
|
1246
|
-
const { startNode, startLocation, endLocation } = await getStartAndEnd(peers, start, widthToCoverScaled,
|
|
1474
|
+
const { startNode, startLocation, endLocation } = await getStartAndEnd(peers, start, widthToCoverScaled, properties.numbers, {
|
|
1475
|
+
time: {
|
|
1476
|
+
roleAgeLimit: roleAge,
|
|
1477
|
+
now,
|
|
1478
|
+
matured: true,
|
|
1479
|
+
},
|
|
1480
|
+
});
|
|
1247
1481
|
let ret = new Set();
|
|
1248
1482
|
// if start node (assume is self) and not mature, ask all known remotes if limited
|
|
1249
1483
|
// TODO consider a more robust stragety here in a scenario where there are many nodes, lets say
|
|
@@ -1288,7 +1522,13 @@ export const getCoverSet = async (properties) => {
|
|
|
1288
1522
|
};
|
|
1289
1523
|
const resolveNextAbove = async (nextLocation, roleAge) => {
|
|
1290
1524
|
// if not get closest from above
|
|
1291
|
-
let next = await fetchOne(getClosest("above", peers, nextLocation,
|
|
1525
|
+
let next = await fetchOne(getClosest("above", peers, nextLocation, true, properties.numbers, {
|
|
1526
|
+
time: {
|
|
1527
|
+
matured: true,
|
|
1528
|
+
roleAgeLimit: roleAge,
|
|
1529
|
+
now,
|
|
1530
|
+
},
|
|
1531
|
+
}));
|
|
1292
1532
|
return next;
|
|
1293
1533
|
};
|
|
1294
1534
|
const resolveNext = async (nextLocation, roleAge) => {
|
|
@@ -1410,33 +1650,146 @@ export const matchEntriesInRangeQuery = (range) => {
|
|
|
1410
1650
|
];
|
|
1411
1651
|
return new Or(ors);
|
|
1412
1652
|
};
|
|
1413
|
-
export const
|
|
1653
|
+
/**
 * Debounced accumulator for replication-range changes. Changes are keyed by
 * `range.idString`; when two changes target the same range, the one with the
 * newest `range.timestamp` wins. On flush, the surviving changes are handed
 * to `fn` as an array.
 */
export const debounceAggregationChanges = (fn, delay) => {
    const makeAccumulator = () => {
        const byId = new Map();
        return {
            add: (change) => {
                const key = change.range.idString;
                const existing = byId.get(key);
                // Keep the newer of the two changes for the same range.
                if (!existing || existing.range.timestamp < change.range.timestamp) {
                    byId.set(key, change);
                }
            },
            delete: (key) => {
                byId.delete(key);
            },
            size: () => byId.size,
            value: byId,
        };
    };
    return debounceAccumulator((result) => fn([...result.values()]), makeAccumulator, delay);
};
|
|
1678
|
+
/**
 * Normalize a batch of replication changes so that removals cancel out
 * against newer additions for the same owner.
 *
 * Accepts either a flat array of changes or an array of change arrays
 * (which is flattened). Changes are grouped by `range.hash` (the owner);
 * within a group, a "removed"/"replaced" change that was previously seen in
 * `rebalanceHistory` is diffed (via symmetricDifferenceRanges) against every
 * later un-matured "added" change, so only the genuinely removed portions are
 * emitted. Mutates `rebalanceHistory` (add/del) as changes are processed.
 */
export const mergeReplicationChanges = (changesOrChangesArr, rebalanceHistory) => {
    // Accept either a flat array of changes or an array of arrays.
    let first = changesOrChangesArr[0];
    let changes;
    if (!Array.isArray(first)) {
        changes = changesOrChangesArr;
    }
    else {
        changes = changesOrChangesArr.flat();
    }
    // group by hash so we can cancel out changes
    const grouped = new Map();
    for (const change of changes) {
        const prev = grouped.get(change.range.hash);
        if (prev) {
            prev.push(change);
        }
        else {
            grouped.set(change.range.hash, [change]);
        }
    }
    let all = [];
    for (const [_k, v] of grouped) {
        if (v.length > 1) {
            // sort by timestamp so newest is last
            v.sort((a, b) => a.range.timestamp < b.range.timestamp
                ? -1
                : a.range.timestamp > b.range.timestamp
                    ? 1
                    : 0);
            let results = [];
            let consumed = new Set();
            // NOTE: v grows while iterating (diff leftovers are pushed back),
            // but the inner loop bound `vStart` is captured before pushing, so
            // only the original later entries are diffed against.
            for (let i = 0; i < v.length; i++) {
                // if segment is removed and we have previously processed it
                // then go over each overlapping added segment add remove the removal,
                // equivalent is that this would represent (1 - 1 + 1) = 1
                if (v[i].type === "removed" || v[i].type === "replaced") {
                    if (rebalanceHistory.has(v[i].range.rangeHash)) {
                        let vStart = v.length;
                        for (let j = i + 1; j < vStart; j++) {
                            const newer = v[j];
                            if (newer.type === "added" && !newer.matured) {
                                const { rangesFromA: updatedRemoved, rangesFromB: updatedNewer, } = symmetricDifferenceRanges(v[i].range, newer.range);
                                // Emit only the parts of the removal not re-added.
                                for (const diff of updatedRemoved) {
                                    results.push({
                                        range: diff,
                                        type: "removed",
                                        timestamp: v[i].timestamp,
                                    });
                                }
                                // Re-queue the surviving parts of the addition.
                                for (const diff of updatedNewer) {
                                    v.push({
                                        range: diff,
                                        type: "added",
                                        timestamp: newer.timestamp,
                                    });
                                }
                                consumed.add(j);
                            }
                        }
                        rebalanceHistory.del(v[i].range.rangeHash);
                    }
                    else {
                        results.push(v[i]);
                    }
                }
                else if (v[i].type === "added") {
                    // TODO should the below clause be used?
                    // after testing it seems that certain changes are not propagating as expected using this
                    /* if (rebalanceHistory.has(v[i].range.rangeHash)) {
                    continue;
                    } */
                    rebalanceHistory.add(v[i].range.rangeHash);
                    // Skip additions already folded into a removal diff above.
                    if (!consumed.has(i)) {
                        results.push(v[i]);
                    }
                }
                else {
                    results.push(v[i]);
                }
            }
            all.push(...results);
        }
        else {
            // Single change for this owner: record it and pass it through.
            rebalanceHistory.add(v[0].range.rangeHash);
            all.push(v[0]);
        }
    }
    return all;
};
|
|
1767
|
+
export const toRebalance = (changeOrChanges, index, rebalanceHistory) => {
|
|
1768
|
+
const change = mergeReplicationChanges(changeOrChanges, rebalanceHistory);
|
|
1414
1769
|
const assignedRangesQuery = (changes) => {
|
|
1415
1770
|
let ors = [];
|
|
1771
|
+
let onlyStrict = true;
|
|
1416
1772
|
for (const change of changes) {
|
|
1417
1773
|
const matchRange = matchEntriesInRangeQuery(change.range);
|
|
1418
|
-
|
|
1419
|
-
|
|
1420
|
-
|
|
1421
|
-
ors.push(prevMatchRange);
|
|
1422
|
-
ors.push(matchRange);
|
|
1423
|
-
}
|
|
1424
|
-
else {
|
|
1425
|
-
ors.push(matchRange);
|
|
1774
|
+
ors.push(matchRange);
|
|
1775
|
+
if (change.range.mode === ReplicationIntent.NonStrict) {
|
|
1776
|
+
onlyStrict = false;
|
|
1426
1777
|
}
|
|
1427
1778
|
}
|
|
1428
1779
|
// entry is assigned to a range boundary, meaning it is due to be inspected
|
|
1429
|
-
|
|
1430
|
-
|
|
1431
|
-
|
|
1432
|
-
|
|
1780
|
+
if (!onlyStrict || changes.length === 0) {
|
|
1781
|
+
ors.push(new BoolQuery({
|
|
1782
|
+
key: "assignedToRangeBoundary",
|
|
1783
|
+
value: true,
|
|
1784
|
+
}));
|
|
1785
|
+
}
|
|
1433
1786
|
// entry is not sufficiently replicated, and we are to still keep it
|
|
1434
1787
|
return new Or(ors);
|
|
1435
1788
|
};
|
|
1436
1789
|
return {
|
|
1437
1790
|
[Symbol.asyncIterator]: async function* () {
|
|
1438
1791
|
const iterator = index.iterate({
|
|
1439
|
-
query: assignedRangesQuery(
|
|
1792
|
+
query: assignedRangesQuery(change),
|
|
1440
1793
|
});
|
|
1441
1794
|
while (iterator.done() !== true) {
|
|
1442
1795
|
const entries = await iterator.all(); // TODO choose right batch sizes here for optimal memory usage / speed
|
|
@@ -1451,7 +1804,7 @@ export const toRebalance = (changes, index) => {
|
|
|
1451
1804
|
},
|
|
1452
1805
|
};
|
|
1453
1806
|
};
|
|
1454
|
-
export const fetchOneFromPublicKey = async (publicKey, index,
|
|
1807
|
+
export const fetchOneFromPublicKey = async (publicKey, index, numbers, options) => {
|
|
1455
1808
|
let iterator = index.iterate({
|
|
1456
1809
|
query: [new StringMatch({ key: "hash", value: publicKey.hashcode() })],
|
|
1457
1810
|
}, options);
|
|
@@ -1459,8 +1812,9 @@ export const fetchOneFromPublicKey = async (publicKey, index, roleAge, now, numb
|
|
|
1459
1812
|
await iterator.close();
|
|
1460
1813
|
let node = result[0]?.value;
|
|
1461
1814
|
if (node) {
|
|
1462
|
-
if (
|
|
1463
|
-
|
|
1815
|
+
if (options?.time &&
|
|
1816
|
+
!isMatured(node, options.time.now, options.time.roleAgeLimit)) {
|
|
1817
|
+
const matured = await fetchOne(getClosestAroundOrContaining(index, node.start1, false, false, numbers, options));
|
|
1464
1818
|
if (matured) {
|
|
1465
1819
|
node = matured;
|
|
1466
1820
|
}
|
|
@@ -1468,17 +1822,17 @@ export const fetchOneFromPublicKey = async (publicKey, index, roleAge, now, numb
|
|
|
1468
1822
|
}
|
|
1469
1823
|
return node;
|
|
1470
1824
|
};
|
|
1471
|
-
export const getStartAndEnd = async (peers, start, widthToCoverScaled,
|
|
1825
|
+
export const getStartAndEnd = async (peers, start, widthToCoverScaled, numbers, options) => {
|
|
1472
1826
|
// find a good starting point
|
|
1473
1827
|
let startNode = undefined;
|
|
1474
1828
|
let startLocation = undefined;
|
|
1475
1829
|
const nodeFromPoint = async (point = numbers.random()) => {
|
|
1476
1830
|
startLocation = point;
|
|
1477
|
-
startNode = await fetchOneClosest(peers, startLocation,
|
|
1831
|
+
startNode = await fetchOneClosest(peers, startLocation, false, true, numbers, options);
|
|
1478
1832
|
};
|
|
1479
1833
|
if (start instanceof PublicSignKey) {
|
|
1480
1834
|
// start at our node (local first)
|
|
1481
|
-
startNode = await fetchOneFromPublicKey(start, peers,
|
|
1835
|
+
startNode = await fetchOneFromPublicKey(start, peers, numbers, options);
|
|
1482
1836
|
if (!startNode) {
|
|
1483
1837
|
// fetch randomly
|
|
1484
1838
|
await nodeFromPoint();
|
|
@@ -1529,7 +1883,7 @@ export const getStartAndEnd = async (peers, start, widthToCoverScaled, roleAge,
|
|
|
1529
1883
|
endLocation,
|
|
1530
1884
|
};
|
|
1531
1885
|
};
|
|
1532
|
-
export const fetchOneClosest = (peers, point,
|
|
1533
|
-
return fetchOne(
|
|
1886
|
+
/**
 * Resolve the single best range around (or containing) `point`, using the
 * combined containing/closest-below/closest-above iterator.
 */
export const fetchOneClosest = (peers, point, includeStrictBelow, includeStrictAbove, numbers, options) => {
    const iterator = getClosestAroundOrContaining(peers, point, includeStrictBelow, includeStrictAbove, numbers, options);
    return fetchOne(iterator);
};
|
|
1535
1889
|
//# sourceMappingURL=ranges.js.map
|