@noy-db/core 0.2.0 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/dist/index.cjs +951 -14
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +597 -6
- package/dist/index.d.ts +597 -6
- package/dist/index.js +942 -14
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
package/dist/index.cjs
CHANGED
|
@@ -21,10 +21,12 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
|
|
|
21
21
|
var index_exports = {};
|
|
22
22
|
__export(index_exports, {
|
|
23
23
|
Collection: () => Collection,
|
|
24
|
+
CollectionIndexes: () => CollectionIndexes,
|
|
24
25
|
Compartment: () => Compartment,
|
|
25
26
|
ConflictError: () => ConflictError,
|
|
26
27
|
DecryptionError: () => DecryptionError,
|
|
27
28
|
InvalidKeyError: () => InvalidKeyError,
|
|
29
|
+
Lru: () => Lru,
|
|
28
30
|
NOYDB_BACKUP_VERSION: () => NOYDB_BACKUP_VERSION,
|
|
29
31
|
NOYDB_FORMAT_VERSION: () => NOYDB_FORMAT_VERSION,
|
|
30
32
|
NOYDB_KEYRING_VERSION: () => NOYDB_KEYRING_VERSION,
|
|
@@ -35,6 +37,7 @@ __export(index_exports, {
|
|
|
35
37
|
Noydb: () => Noydb,
|
|
36
38
|
NoydbError: () => NoydbError,
|
|
37
39
|
PermissionDeniedError: () => PermissionDeniedError,
|
|
40
|
+
Query: () => Query,
|
|
38
41
|
ReadOnlyError: () => ReadOnlyError,
|
|
39
42
|
SyncEngine: () => SyncEngine,
|
|
40
43
|
TamperedError: () => TamperedError,
|
|
@@ -44,9 +47,15 @@ __export(index_exports, {
|
|
|
44
47
|
diff: () => diff,
|
|
45
48
|
enrollBiometric: () => enrollBiometric,
|
|
46
49
|
estimateEntropy: () => estimateEntropy,
|
|
50
|
+
estimateRecordBytes: () => estimateRecordBytes,
|
|
51
|
+
evaluateClause: () => evaluateClause,
|
|
52
|
+
evaluateFieldClause: () => evaluateFieldClause,
|
|
53
|
+
executePlan: () => executePlan,
|
|
47
54
|
formatDiff: () => formatDiff,
|
|
48
55
|
isBiometricAvailable: () => isBiometricAvailable,
|
|
49
56
|
loadBiometric: () => loadBiometric,
|
|
57
|
+
parseBytes: () => parseBytes,
|
|
58
|
+
readPath: () => readPath,
|
|
50
59
|
removeBiometric: () => removeBiometric,
|
|
51
60
|
saveBiometric: () => saveBiometric,
|
|
52
61
|
unlockBiometric: () => unlockBiometric,
|
|
@@ -639,7 +648,627 @@ function formatDiff(changes) {
|
|
|
639
648
|
}).join("\n");
|
|
640
649
|
}
|
|
641
650
|
|
|
651
|
+
// src/query/predicate.ts
/**
 * Read a (possibly dot-separated) property path from a record.
 * Returns undefined when the record itself or any intermediate step
 * along the path is null/undefined.
 */
function readPath(record, path) {
  if (record == null) return undefined;
  // Fast path: plain property name, no traversal needed.
  if (path.indexOf(".") === -1) return record[path];
  let node = record;
  for (const part of path.split(".")) {
    if (node == null) return undefined;
    node = node[part];
  }
  return node;
}
|
|
665
|
+
/**
 * Evaluate one field clause against a record.
 *
 * Relational operators (<, <=, >, >=, between) only match when both sides
 * are mutually comparable (see isComparable); mismatched types never match.
 * "in" requires the clause value to be an array; "contains" works on both
 * string and array actuals; unknown operators match nothing.
 */
function evaluateFieldClause(record, clause) {
  const { op, value } = clause;
  const actual = readPath(record, clause.field);
  if (op === "==") return actual === value;
  if (op === "!=") return actual !== value;
  if (op === "<" || op === "<=" || op === ">" || op === ">=") {
    if (!isComparable(actual, value)) return false;
    if (op === "<") return actual < value;
    if (op === "<=") return actual <= value;
    if (op === ">") return actual > value;
    return actual >= value;
  }
  if (op === "in") {
    return Array.isArray(value) && value.includes(actual);
  }
  if (op === "contains") {
    if (typeof actual === "string") return typeof value === "string" && actual.includes(value);
    return Array.isArray(actual) ? actual.includes(value) : false;
  }
  if (op === "startsWith") {
    return typeof actual === "string" && typeof value === "string" && actual.startsWith(value);
  }
  if (op === "between") {
    if (!Array.isArray(value) || value.length !== 2) return false;
    const [lo, hi] = value;
    if (!isComparable(actual, lo) || !isComparable(actual, hi)) return false;
    return actual >= lo && actual <= hi;
  }
  // Unknown operator (the TS source keeps an exhaustiveness check here): no match.
  return false;
}
|
|
702
|
+
/**
 * True when the two values can be meaningfully ordered with < / <= / > / >=:
 * both numbers, both strings, or both Dates. Mixed types are not comparable.
 */
function isComparable(a, b) {
  const kind = typeof a;
  if (kind === "number" || kind === "string") return kind === typeof b;
  return a instanceof Date && b instanceof Date;
}
|
|
708
|
+
/**
 * Evaluate any clause — field, filter, or group — against a record.
 * Groups short-circuit: "and" stops at the first failing child, "or" at
 * the first passing one. Unknown clause types yield undefined, matching
 * the original switch with no default.
 */
function evaluateClause(record, clause) {
  if (clause.type === "field") return evaluateFieldClause(record, clause);
  if (clause.type === "filter") return clause.fn(record);
  if (clause.type === "group") {
    const children = clause.clauses;
    if (clause.op === "and") return children.every((child) => evaluateClause(record, child));
    return children.some((child) => evaluateClause(record, child));
  }
}
|
|
728
|
+
|
|
729
|
+
// src/query/builder.ts
// Shared default plan for a fresh Query. Queries are immutable, so one
// frozen-by-convention instance can back every new builder.
var EMPTY_PLAN = {
  clauses: [],
  orderBy: [],
  limit: void 0,
  offset: 0
};
/**
 * Immutable query builder. Every builder method returns a NEW Query with
 * an extended plan; the receiver is never mutated, so partial queries can
 * be shared and forked safely.
 */
var Query = class _Query {
  source;
  plan;
  constructor(source, plan = EMPTY_PLAN) {
    this.source = source;
    this.plan = plan;
  }
  // Internal: derive a new Query whose plan is this.plan overlaid with `patch`.
  #extend(patch) {
    return new _Query(this.source, { ...this.plan, ...patch });
  }
  // Internal: build a sub-query via `builder` and attach its clauses as one group.
  #group(op, builder) {
    const sub = builder(new _Query(this.source));
    const group = { type: "group", op, clauses: sub.plan.clauses };
    return this.#extend({ clauses: this.plan.clauses.concat(group) });
  }
  /** Add a field comparison. Multiple where() calls are AND-combined. */
  where(field, op, value) {
    const clause = { type: "field", field, op, value };
    return this.#extend({ clauses: this.plan.clauses.concat(clause) });
  }
  /**
   * Logical OR group. Pass a callback that builds a sub-query.
   * Each clause inside the callback is OR-combined; the group itself
   * joins the parent plan with AND.
   */
  or(builder) {
    return this.#group("or", builder);
  }
  /**
   * Logical AND group. Same shape as `or()` but every clause inside the group
   * must match. Useful for explicit grouping inside a larger OR.
   */
  and(builder) {
    return this.#group("and", builder);
  }
  /** Escape hatch: add an arbitrary predicate function. Not serializable. */
  filter(fn) {
    const clause = { type: "filter", fn };
    return this.#extend({ clauses: this.plan.clauses.concat(clause) });
  }
  /** Sort by a field. Subsequent calls are tie-breakers. */
  orderBy(field, direction = "asc") {
    return this.#extend({ orderBy: this.plan.orderBy.concat({ field, direction }) });
  }
  /** Cap the result size. */
  limit(n) {
    return this.#extend({ limit: n });
  }
  /** Skip the first N matching records (after ordering). */
  offset(n) {
    return this.#extend({ offset: n });
  }
  /** Execute the plan and return the matching records. */
  toArray() {
    return executePlanWithSource(this.source, this.plan);
  }
  /** Return the first matching record, or null. */
  first() {
    const rows = executePlanWithSource(this.source, { ...this.plan, limit: 1 });
    return rows[0] ?? null;
  }
  /** Return the number of matching records (after where/filter, before limit). */
  count() {
    const { candidates, remainingClauses } = candidateRecords(this.source, this.plan.clauses);
    return remainingClauses.length === 0
      ? candidates.length
      : filterRecords(candidates, remainingClauses).length;
  }
  /**
   * Re-run the query whenever the source notifies of changes.
   * Returns an unsubscribe function. The callback receives the latest result.
   * Throws if the source does not support subscriptions.
   */
  subscribe(cb) {
    if (!this.source.subscribe) {
      throw new Error("Query source does not support subscriptions. Pass a source with a subscribe() method.");
    }
    // Fire once immediately with the current result, then on every change.
    cb(this.toArray());
    return this.source.subscribe(() => cb(this.toArray()));
  }
  /**
   * Return the plan as a JSON-friendly object. FilterClause entries are
   * stripped (their `fn` cannot be serialized) and replaced with
   * { type: 'filter', fn: '[function]' } so devtools can still see them.
   */
  toPlan() {
    return serializePlan(this.plan);
  }
};
|
|
846
|
+
/**
 * Run a plan against a query source: narrow candidates via indexes when
 * possible, filter with any remaining clauses, then apply ordering,
 * offset, and limit — in that order.
 */
function executePlanWithSource(source, plan) {
  const { candidates, remainingClauses } = candidateRecords(source, plan.clauses);
  // Copy when no filtering happens so callers never alias internal arrays.
  let rows = remainingClauses.length === 0
    ? [...candidates]
    : filterRecords(candidates, remainingClauses);
  if (plan.orderBy.length > 0) rows = sortRecords(rows, plan.orderBy);
  if (plan.offset > 0) rows = rows.slice(plan.offset);
  if (plan.limit !== undefined) rows = rows.slice(0, plan.limit);
  return rows;
}
|
|
860
|
+
/**
 * Choose the cheapest candidate set for a clause list.
 *
 * When the source exposes indexes plus id lookup, the first "==" or "in"
 * field clause covered by an index is resolved through the index; that
 * clause is dropped from the remaining list and only the matching records
 * are materialized. Otherwise (no indexes, no lookupById, or no usable
 * clause) the full snapshot is returned with every clause intact.
 */
function candidateRecords(source, clauses) {
  const indexes = source.getIndexes?.();
  if (!indexes || !source.lookupById || clauses.length === 0) {
    return { candidates: source.snapshot(), remainingClauses: clauses };
  }
  const lookupById = (id) => source.lookupById?.(id);
  for (let i = 0; i < clauses.length; i++) {
    const clause = clauses[i];
    if (clause.type !== "field" || !indexes.has(clause.field)) continue;
    let ids = null;
    if (clause.op === "==") {
      ids = indexes.lookupEqual(clause.field, clause.value);
    } else if (clause.op === "in" && Array.isArray(clause.value)) {
      ids = indexes.lookupIn(clause.field, clause.value);
    }
    if (ids === null) continue;
    // Index hit: everything except the satisfied clause still needs checking.
    const remaining = clauses.filter((_, j) => j !== i);
    return {
      candidates: materializeIds(ids, lookupById),
      remainingClauses: remaining
    };
  }
  return { candidates: source.snapshot(), remainingClauses: clauses };
}
|
|
889
|
+
/**
 * Resolve a collection of ids into records via the supplied lookup,
 * silently skipping any id that resolves to undefined.
 */
function materializeIds(ids, lookupById) {
  const records = [];
  for (const id of ids) {
    const found = lookupById(id);
    if (found !== undefined) records.push(found);
  }
  return records;
}
|
|
897
|
+
/**
 * Run a plan against a plain in-memory array (no index acceleration):
 * filter by every clause, then order, then offset, then limit.
 */
function executePlan(records, plan) {
  let rows = filterRecords(records, plan.clauses);
  if (plan.orderBy.length > 0) rows = sortRecords(rows, plan.orderBy);
  if (plan.offset > 0) rows = rows.slice(plan.offset);
  if (plan.limit !== undefined) rows = rows.slice(0, plan.limit);
  return rows;
}
|
|
910
|
+
/**
 * Keep only the records that satisfy EVERY clause (AND semantics).
 * An empty clause list returns a shallow copy so callers never alias
 * the input array.
 */
function filterRecords(records, clauses) {
  if (clauses.length === 0) return [...records];
  const kept = [];
  for (const record of records) {
    // .every short-circuits on the first failing clause, like the original loop.
    if (clauses.every((clause) => evaluateClause(record, clause))) {
      kept.push(record);
    }
  }
  return kept;
}
|
|
925
|
+
/**
 * Return a sorted COPY of the records. Earlier orderBy entries take
 * precedence; later entries only break ties. The input array is untouched.
 */
function sortRecords(records, orderBy) {
  const copy = [...records];
  copy.sort((left, right) => {
    for (const { field, direction } of orderBy) {
      const outcome = compareValues(readField(left, field), readField(right, field));
      if (outcome !== 0) return direction === "asc" ? outcome : -outcome;
    }
    return 0;
  });
  return copy;
}
|
|
936
|
+
/**
 * Read a (possibly dot-separated) field path from a record for ordering.
 * Same traversal rules as readPath(): any null/undefined along the way
 * yields undefined.
 */
function readField(record, field) {
  if (record == null) return undefined;
  if (field.indexOf(".") === -1) return record[field];
  let node = record;
  for (const part of field.split(".")) {
    if (node == null) return undefined;
    node = node[part];
  }
  return node;
}
|
|
949
|
+
/**
 * Three-way comparison for sortRecords. null/undefined sort LAST (they
 * compare greater than any present value, and equal to each other).
 * Numbers, strings, and Dates compare within their own type; any other
 * combination compares equal (stable no-op).
 */
function compareValues(a, b) {
  const aMissing = a === undefined || a === null;
  const bMissing = b === undefined || b === null;
  if (aMissing) return bMissing ? 0 : 1;
  if (bMissing) return -1;
  if (typeof a === "number" && typeof b === "number") return a - b;
  if (typeof a === "string" && typeof b === "string") {
    if (a < b) return -1;
    if (a > b) return 1;
    return 0;
  }
  if (a instanceof Date && b instanceof Date) return a.getTime() - b.getTime();
  return 0;
}
|
|
957
|
+
function serializePlan(plan) {
|
|
958
|
+
return {
|
|
959
|
+
clauses: plan.clauses.map(serializeClause),
|
|
960
|
+
orderBy: plan.orderBy,
|
|
961
|
+
limit: plan.limit,
|
|
962
|
+
offset: plan.offset
|
|
963
|
+
};
|
|
964
|
+
}
|
|
965
|
+
/**
 * JSON-friendly form of a single clause. Filter predicates cannot be
 * serialized, so their `fn` is replaced with the string "[function]";
 * groups recurse over their children; field clauses pass through as-is
 * (same object reference).
 */
function serializeClause(clause) {
  switch (clause.type) {
    case "filter":
      return { type: "filter", fn: "[function]" };
    case "group":
      return {
        type: "group",
        op: clause.op,
        clauses: clause.clauses.map(serializeClause)
      };
    default:
      return clause;
  }
}
|
|
978
|
+
|
|
979
|
+
// src/query/indexes.ts
/**
 * In-memory secondary indexes for a collection.
 *
 * Each declared field maps to `{ field, buckets }`, where `buckets` is a
 * Map from a stringified value key (see stringifyKey) to the Set of record
 * ids holding that value. Records whose indexed value is null/undefined
 * are not placed in any bucket (see addToIndex), so lookups never match
 * them. Dotted field paths are supported via readPath.
 */
var CollectionIndexes = class {
  // field name -> { field, buckets: Map<stringKey, Set<recordId>> }
  indexes = /* @__PURE__ */ new Map();
  /**
   * Declare an index. Subsequent record additions are tracked under it.
   * Calling this twice for the same field is a no-op (idempotent).
   */
  declare(field) {
    if (this.indexes.has(field)) return;
    this.indexes.set(field, { field, buckets: /* @__PURE__ */ new Map() });
  }
  /** True if the given field has a declared index. */
  has(field) {
    return this.indexes.has(field);
  }
  /** All declared field names, in declaration order. */
  fields() {
    return [...this.indexes.keys()];
  }
  /**
   * Build all declared indexes from a snapshot of records.
   * Called once per hydration. O(N × indexes.size).
   *
   * Each index is cleared first, so build() is safe to call repeatedly.
   * `records` is iterated once per index — assumes it is a re-iterable
   * array of { id, record } pairs, not a one-shot iterator.
   */
  build(records) {
    for (const idx of this.indexes.values()) {
      idx.buckets.clear();
      for (const { id, record } of records) {
        addToIndex(idx, id, record);
      }
    }
  }
  /**
   * Insert or update a single record across all indexes.
   * Called by `Collection.put()` after the encrypted write succeeds.
   *
   * If `previousRecord` is provided, the record is removed from any old
   * buckets first — this is the update path. Pass `null` for fresh adds.
   * (A strict null check: an `undefined` previousRecord still runs
   * remove(), which is a harmless no-op since readPath on undefined
   * yields undefined.)
   */
  upsert(id, newRecord, previousRecord) {
    if (this.indexes.size === 0) return;
    if (previousRecord !== null) {
      this.remove(id, previousRecord);
    }
    for (const idx of this.indexes.values()) {
      addToIndex(idx, id, newRecord);
    }
  }
  /**
   * Remove a record from all indexes. Called by `Collection.delete()`
   * (and as the first half of `upsert` for the update path).
   *
   * Requires the record's CURRENT field values — removal locates the
   * bucket by re-deriving the value key from the record.
   */
  remove(id, record) {
    if (this.indexes.size === 0) return;
    for (const idx of this.indexes.values()) {
      removeFromIndex(idx, id, record);
    }
  }
  /** Drop all index data. Called when the collection is invalidated. */
  clear() {
    for (const idx of this.indexes.values()) {
      idx.buckets.clear();
    }
  }
  /**
   * Equality lookup: return the set of record ids whose `field` matches
   * the given value. Returns `null` if no index covers the field — the
   * caller should fall back to a linear scan.
   *
   * The returned Set is a reference to the index's internal storage —
   * callers must NOT mutate it. (A covered-but-empty lookup returns the
   * shared EMPTY_SET sentinel, which must likewise stay untouched.)
   */
  lookupEqual(field, value) {
    const idx = this.indexes.get(field);
    if (!idx) return null;
    const key = stringifyKey(value);
    return idx.buckets.get(key) ?? EMPTY_SET;
  }
  /**
   * Set lookup: return the union of record ids whose `field` matches any
   * of the given values. Returns `null` if no index covers the field.
   * Unlike lookupEqual, the result is a fresh Set the caller may keep.
   */
  lookupIn(field, values) {
    const idx = this.indexes.get(field);
    if (!idx) return null;
    const out = /* @__PURE__ */ new Set();
    for (const value of values) {
      const key = stringifyKey(value);
      const bucket = idx.buckets.get(key);
      if (bucket) {
        for (const id of bucket) out.add(id);
      }
    }
    return out;
  }
};
|
|
1074
|
+
// Shared immutable "no matches" result for lookupEqual. Never mutated.
var EMPTY_SET = /* @__PURE__ */ new Set();
/**
 * Collapse an index value into a string bucket key. NUL-delimited
 * sentinels keep null/undefined and non-primitive values from colliding
 * with real string values; Dates key by their ISO timestamp.
 */
function stringifyKey(value) {
  if (value == null) return "\0NULL\0";
  const kind = typeof value;
  if (kind === "string") return value;
  if (kind === "number" || kind === "boolean") return String(value);
  return value instanceof Date ? value.toISOString() : "\0OBJECT\0";
}
|
|
1082
|
+
/**
 * Add one record id to the bucket for its indexed value, creating the
 * bucket on first use. Records with a null/undefined value for the
 * indexed field are deliberately left out of the index.
 */
function addToIndex(idx, id, record) {
  const value = readPath(record, idx.field);
  if (value == null) return;
  const key = stringifyKey(value);
  let bucket = idx.buckets.get(key);
  if (bucket === undefined) {
    bucket = new Set();
    idx.buckets.set(key, bucket);
  }
  bucket.add(id);
}
|
|
1093
|
+
/**
 * Remove one record id from its value bucket. Empty buckets are deleted
 * so stale keys don't accumulate in the index. No-op when the record's
 * field value is null/undefined (such records were never indexed).
 */
function removeFromIndex(idx, id, record) {
  const value = readPath(record, idx.field);
  if (value == null) return;
  const key = stringifyKey(value);
  const bucket = idx.buckets.get(key);
  if (bucket === undefined) return;
  bucket.delete(id);
  if (bucket.size === 0) idx.buckets.delete(key);
}
|
|
1102
|
+
|
|
1103
|
+
// src/cache/lru.ts
/**
 * LRU cache bounded by record count and/or byte budget.
 *
 * Recency is tracked with a Map's insertion order: the first key is the
 * least-recently-used entry, and promotion works by delete + re-insert.
 * Hit/miss/eviction counters accumulate until resetStats().
 */
var Lru = class {
  entries = /* @__PURE__ */ new Map();
  maxRecords;
  maxBytes;
  currentBytes = 0;
  hits = 0;
  misses = 0;
  evictions = 0;
  constructor(options) {
    if (options.maxRecords === void 0 && options.maxBytes === void 0) {
      throw new Error("Lru: must specify maxRecords, maxBytes, or both");
    }
    this.maxRecords = options.maxRecords;
    this.maxBytes = options.maxBytes;
  }
  /**
   * Look up a key. Hits promote the entry to most-recently-used; misses
   * return undefined. Both update the running stats counters.
   */
  get(key) {
    const hit = this.entries.get(key);
    if (hit === undefined) {
      this.misses += 1;
      return undefined;
    }
    // Delete + re-set moves the key to the MRU end of the Map's order.
    this.entries.delete(key);
    this.entries.set(key, hit);
    this.hits += 1;
    return hit.value;
  }
  /**
   * Insert or update a key. Updates reclaim the old entry's byte
   * accounting and promote to MRU. After insertion, eviction runs to
   * maintain both budgets.
   */
  set(key, value, size) {
    const prior = this.entries.get(key);
    if (prior !== undefined) {
      this.currentBytes -= prior.size;
      this.entries.delete(key);
    }
    this.entries.set(key, { value, size });
    this.currentBytes += size;
    this.evictUntilUnderBudget();
  }
  /**
   * Remove a key without affecting hit/miss stats. Used by `Collection.delete()`.
   * Returns true if the key was present.
   */
  remove(key) {
    const prior = this.entries.get(key);
    if (prior === undefined) return false;
    this.currentBytes -= prior.size;
    return this.entries.delete(key);
  }
  /** True if the cache currently holds an entry for the given key. */
  has(key) {
    return this.entries.has(key);
  }
  /**
   * Drop every entry. Stats counters survive — call `resetStats()` if you
   * want a clean slate. Used by `Collection.invalidate()` on key rotation.
   */
  clear() {
    this.entries.clear();
    this.currentBytes = 0;
  }
  /** Reset hit/miss/eviction counters to zero. Does NOT touch entries. */
  resetStats() {
    this.hits = 0;
    this.misses = 0;
    this.evictions = 0;
  }
  /** Snapshot of current cache statistics. Cheap — no copying. */
  stats() {
    const { hits, misses, evictions, currentBytes } = this;
    return {
      hits,
      misses,
      evictions,
      size: this.entries.size,
      bytes: currentBytes
    };
  }
  /**
   * Iterate over all currently-cached values. Order is least-recently-used
   * first. Used by tests and devtools — production callers should use
   * `Collection.scan()` instead.
   */
  *values() {
    for (const { value } of this.entries.values()) yield value;
  }
  /**
   * Walk the cache from the LRU end and drop entries until both budgets
   * are satisfied. Called after every `set()`. Single pass — entries are
   * never re-promoted during eviction.
   */
  evictUntilUnderBudget() {
    while (this.overBudget()) {
      const oldest = this.entries.keys().next();
      if (oldest.done) return;
      const victim = this.entries.get(oldest.value);
      if (victim !== undefined) this.currentBytes -= victim.size;
      this.entries.delete(oldest.value);
      this.evictions += 1;
    }
  }
  /** True when either the record count or the byte budget is exceeded. */
  overBudget() {
    const { maxRecords, maxBytes } = this;
    if (maxRecords !== undefined && this.entries.size > maxRecords) return true;
    return maxBytes !== undefined && this.currentBytes > maxBytes;
  }
};
|
|
1218
|
+
|
|
1219
|
+
// src/cache/policy.ts
/** Multipliers for the supported byte-budget suffixes (binary units). */
var UNITS = {
  "": 1,
  "B": 1,
  "KB": 1024,
  "MB": 1024 * 1024,
  "GB": 1024 * 1024 * 1024
  // 'TB' deliberately not supported — if you need it, you're not using NOYDB.
};
/**
 * Parse a byte budget. Accepts a positive finite number (floored) or a
 * string like "1024", "50KB", "1.5 mb" — optional whitespace before the
 * unit, unit case-insensitive. Always returns a positive integer byte
 * count; throws an Error on any other input.
 */
function parseBytes(input) {
  if (typeof input === "number") {
    if (!Number.isFinite(input) || input <= 0) {
      throw new Error(`parseBytes: numeric input must be a positive finite number, got ${String(input)}`);
    }
    return Math.floor(input);
  }
  const trimmed = input.trim();
  if (trimmed === "") {
    throw new Error("parseBytes: empty string is not a valid byte budget");
  }
  const match = /^([0-9]+(?:\.[0-9]+)?)\s*([A-Za-z]*)$/.exec(trimmed);
  if (!match) {
    throw new Error(`parseBytes: invalid byte budget "${input}". Expected format: "1024", "50KB", "50MB", "1GB"`);
  }
  const unit = (match[2] ?? "").toUpperCase();
  if (!(unit in UNITS)) {
    throw new Error(`parseBytes: unknown unit "${match[2]}" in "${input}". Supported: B, KB, MB, GB`);
  }
  const bytes = Math.floor(parseFloat(match[1]) * UNITS[unit]);
  if (bytes <= 0) {
    // A fractional value like "0.0001KB" can floor to 0 — reject it.
    throw new Error(`parseBytes: byte budget must be > 0, got ${bytes} from "${input}"`);
  }
  return bytes;
}
|
|
1254
|
+
/**
 * Rough size estimate for cache accounting: the length of the record's
 * JSON string (UTF-16 code units — an approximation of bytes, not exact).
 * Records that cannot be stringified (cycles, BigInt, bare undefined)
 * estimate as 0.
 */
function estimateRecordBytes(record) {
  try {
    const json = JSON.stringify(record);
    return json.length;
  } catch {
    return 0;
  }
}
|
|
1261
|
+
|
|
642
1262
|
// src/collection.ts
// Adapter names we have already warned about — the warning fires at most
// once per adapter name for the lifetime of the process.
var fallbackWarned = /* @__PURE__ */ new Set();
/**
 * Warn (once per adapter name) that an adapter lacks listPage() and the
 * slower synthetic fallback is in use. Suppressed when NODE_ENV === "test".
 */
function warnOnceFallback(adapterName) {
  if (fallbackWarned.has(adapterName)) return;
  fallbackWarned.add(adapterName);
  const silenced = typeof process !== "undefined" && process.env["NODE_ENV"] === "test";
  if (silenced) return;
  console.warn(
    `[noy-db] Adapter "${adapterName}" does not implement listPage(); Collection.scan()/listPage() are using a synthetic fallback (slower). Add a listPage method to opt into the streaming fast path.`
  );
}
|
|
643
1272
|
var Collection = class {
|
|
644
1273
|
adapter;
|
|
645
1274
|
compartment;
|
|
@@ -650,9 +1279,41 @@ var Collection = class {
|
|
|
650
1279
|
getDEK;
|
|
651
1280
|
onDirty;
|
|
652
1281
|
historyConfig;
|
|
653
|
-
// In-memory cache of decrypted records
|
|
1282
|
+
// In-memory cache of decrypted records (eager mode only). Lazy mode
|
|
1283
|
+
// uses `lru` instead. Both fields exist so a single Collection instance
|
|
1284
|
+
// doesn't need a runtime branch on every cache access.
|
|
654
1285
|
cache = /* @__PURE__ */ new Map();
|
|
655
1286
|
hydrated = false;
|
|
1287
|
+
/**
|
|
1288
|
+
* Lazy mode flag. `true` when constructed with `prefetch: false`.
|
|
1289
|
+
* In lazy mode the cache is bounded by an LRU and `list()`/`query()`
|
|
1290
|
+
* throw — callers must use `scan()` or per-id `get()` instead.
|
|
1291
|
+
*/
|
|
1292
|
+
lazy;
|
|
1293
|
+
/**
|
|
1294
|
+
* LRU cache for lazy mode. Only allocated when `prefetch: false` is set.
|
|
1295
|
+
* Stores `{ record, version }` entries the same shape as `this.cache`.
|
|
1296
|
+
* Tree-shaking note: importing Collection without setting `prefetch:false`
|
|
1297
|
+
* still pulls in the Lru class today; future bundle-size work could
|
|
1298
|
+
* lazy-import the cache module.
|
|
1299
|
+
*/
|
|
1300
|
+
lru;
|
|
1301
|
+
/**
|
|
1302
|
+
* In-memory secondary indexes for the query DSL.
|
|
1303
|
+
*
|
|
1304
|
+
* Built during `ensureHydrated()` and maintained on every put/delete.
|
|
1305
|
+
* The query executor consults these for `==` and `in` operators on
|
|
1306
|
+
* indexed fields, falling back to a linear scan for unindexed fields
|
|
1307
|
+
* or unsupported operators.
|
|
1308
|
+
*
|
|
1309
|
+
* v0.3 ships in-memory only — persistence as encrypted blobs is a
|
|
1310
|
+
* follow-up. See `query/indexes.ts` for the design rationale.
|
|
1311
|
+
*
|
|
1312
|
+
* Indexes are INCOMPATIBLE with lazy mode in v0.3 — the constructor
|
|
1313
|
+
* rejects the combination because evicted records would silently
|
|
1314
|
+
* disappear from the index without notification.
|
|
1315
|
+
*/
|
|
1316
|
+
indexes = new CollectionIndexes();
|
|
656
1317
|
constructor(opts) {
|
|
657
1318
|
this.adapter = opts.adapter;
|
|
658
1319
|
this.compartment = opts.compartment;
|
|
@@ -663,9 +1324,43 @@ var Collection = class {
|
|
|
663
1324
|
this.getDEK = opts.getDEK;
|
|
664
1325
|
this.onDirty = opts.onDirty;
|
|
665
1326
|
this.historyConfig = opts.historyConfig ?? { enabled: true };
|
|
1327
|
+
this.lazy = opts.prefetch === false;
|
|
1328
|
+
if (this.lazy) {
|
|
1329
|
+
if (opts.indexes && opts.indexes.length > 0) {
|
|
1330
|
+
throw new Error(
|
|
1331
|
+
`Collection "${this.name}": secondary indexes are not supported in lazy mode (prefetch: false). Either remove the indexes option or use prefetch: true. Index + lazy support is tracked as a v0.4 follow-up.`
|
|
1332
|
+
);
|
|
1333
|
+
}
|
|
1334
|
+
if (!opts.cache || opts.cache.maxRecords === void 0 && opts.cache.maxBytes === void 0) {
|
|
1335
|
+
throw new Error(
|
|
1336
|
+
`Collection "${this.name}": lazy mode (prefetch: false) requires a cache option with maxRecords and/or maxBytes. An unbounded lazy cache defeats the purpose.`
|
|
1337
|
+
);
|
|
1338
|
+
}
|
|
1339
|
+
const lruOptions = {};
|
|
1340
|
+
if (opts.cache.maxRecords !== void 0) lruOptions.maxRecords = opts.cache.maxRecords;
|
|
1341
|
+
if (opts.cache.maxBytes !== void 0) lruOptions.maxBytes = parseBytes(opts.cache.maxBytes);
|
|
1342
|
+
this.lru = new Lru(lruOptions);
|
|
1343
|
+
this.hydrated = true;
|
|
1344
|
+
} else {
|
|
1345
|
+
this.lru = null;
|
|
1346
|
+
if (opts.indexes) {
|
|
1347
|
+
for (const def of opts.indexes) {
|
|
1348
|
+
this.indexes.declare(def);
|
|
1349
|
+
}
|
|
1350
|
+
}
|
|
1351
|
+
}
|
|
666
1352
|
}
|
|
667
1353
|
/** Get a single record by ID. Returns null if not found. */
|
|
668
1354
|
async get(id) {
|
|
1355
|
+
if (this.lazy && this.lru) {
|
|
1356
|
+
const cached = this.lru.get(id);
|
|
1357
|
+
if (cached) return cached.record;
|
|
1358
|
+
const envelope = await this.adapter.get(this.compartment, this.name, id);
|
|
1359
|
+
if (!envelope) return null;
|
|
1360
|
+
const record = await this.decryptRecord(envelope);
|
|
1361
|
+
this.lru.set(id, { record, version: envelope._v }, estimateRecordBytes(record));
|
|
1362
|
+
return record;
|
|
1363
|
+
}
|
|
669
1364
|
await this.ensureHydrated();
|
|
670
1365
|
const entry = this.cache.get(id);
|
|
671
1366
|
return entry ? entry.record : null;
|
|
@@ -675,8 +1370,20 @@ var Collection = class {
|
|
|
675
1370
|
if (!hasWritePermission(this.keyring, this.name)) {
|
|
676
1371
|
throw new ReadOnlyError();
|
|
677
1372
|
}
|
|
678
|
-
|
|
679
|
-
|
|
1373
|
+
let existing;
|
|
1374
|
+
if (this.lazy && this.lru) {
|
|
1375
|
+
existing = this.lru.get(id);
|
|
1376
|
+
if (!existing) {
|
|
1377
|
+
const previousEnvelope = await this.adapter.get(this.compartment, this.name, id);
|
|
1378
|
+
if (previousEnvelope) {
|
|
1379
|
+
const previousRecord = await this.decryptRecord(previousEnvelope);
|
|
1380
|
+
existing = { record: previousRecord, version: previousEnvelope._v };
|
|
1381
|
+
}
|
|
1382
|
+
}
|
|
1383
|
+
} else {
|
|
1384
|
+
await this.ensureHydrated();
|
|
1385
|
+
existing = this.cache.get(id);
|
|
1386
|
+
}
|
|
680
1387
|
const version = existing ? existing.version + 1 : 1;
|
|
681
1388
|
if (existing && this.historyConfig.enabled !== false) {
|
|
682
1389
|
const historyEnvelope = await this.encryptRecord(existing.record, existing.version);
|
|
@@ -695,7 +1402,12 @@ var Collection = class {
|
|
|
695
1402
|
}
|
|
696
1403
|
const envelope = await this.encryptRecord(record, version);
|
|
697
1404
|
await this.adapter.put(this.compartment, this.name, id, envelope);
|
|
698
|
-
this.
|
|
1405
|
+
if (this.lazy && this.lru) {
|
|
1406
|
+
this.lru.set(id, { record, version }, estimateRecordBytes(record));
|
|
1407
|
+
} else {
|
|
1408
|
+
this.cache.set(id, { record, version });
|
|
1409
|
+
this.indexes.upsert(id, record, existing ? existing.record : null);
|
|
1410
|
+
}
|
|
699
1411
|
await this.onDirty?.(this.name, id, "put", version);
|
|
700
1412
|
this.emitter.emit("change", {
|
|
701
1413
|
compartment: this.compartment,
|
|
@@ -709,13 +1421,32 @@ var Collection = class {
|
|
|
709
1421
|
if (!hasWritePermission(this.keyring, this.name)) {
|
|
710
1422
|
throw new ReadOnlyError();
|
|
711
1423
|
}
|
|
712
|
-
|
|
1424
|
+
let existing;
|
|
1425
|
+
if (this.lazy && this.lru) {
|
|
1426
|
+
existing = this.lru.get(id);
|
|
1427
|
+
if (!existing && this.historyConfig.enabled !== false) {
|
|
1428
|
+
const previousEnvelope = await this.adapter.get(this.compartment, this.name, id);
|
|
1429
|
+
if (previousEnvelope) {
|
|
1430
|
+
const previousRecord = await this.decryptRecord(previousEnvelope);
|
|
1431
|
+
existing = { record: previousRecord, version: previousEnvelope._v };
|
|
1432
|
+
}
|
|
1433
|
+
}
|
|
1434
|
+
} else {
|
|
1435
|
+
existing = this.cache.get(id);
|
|
1436
|
+
}
|
|
713
1437
|
if (existing && this.historyConfig.enabled !== false) {
|
|
714
1438
|
const historyEnvelope = await this.encryptRecord(existing.record, existing.version);
|
|
715
1439
|
await saveHistory(this.adapter, this.compartment, this.name, id, historyEnvelope);
|
|
716
1440
|
}
|
|
717
1441
|
await this.adapter.delete(this.compartment, this.name, id);
|
|
718
|
-
this.
|
|
1442
|
+
if (this.lazy && this.lru) {
|
|
1443
|
+
this.lru.remove(id);
|
|
1444
|
+
} else {
|
|
1445
|
+
this.cache.delete(id);
|
|
1446
|
+
if (existing) {
|
|
1447
|
+
this.indexes.remove(id, existing.record);
|
|
1448
|
+
}
|
|
1449
|
+
}
|
|
719
1450
|
await this.onDirty?.(this.name, id, "delete", existing?.version ?? 0);
|
|
720
1451
|
this.emitter.emit("change", {
|
|
721
1452
|
compartment: this.compartment,
|
|
@@ -724,14 +1455,70 @@ var Collection = class {
|
|
|
724
1455
|
action: "delete"
|
|
725
1456
|
});
|
|
726
1457
|
}
|
|
727
|
-
/**
|
|
1458
|
+
/**
|
|
1459
|
+
* List all records in the collection.
|
|
1460
|
+
*
|
|
1461
|
+
* Throws in lazy mode — bulk listing defeats the purpose of lazy
|
|
1462
|
+
* hydration. Use `scan()` to iterate over the full collection
|
|
1463
|
+
* page-by-page without holding more than `pageSize` records in memory.
|
|
1464
|
+
*/
|
|
728
1465
|
async list() {
|
|
1466
|
+
if (this.lazy) {
|
|
1467
|
+
throw new Error(
|
|
1468
|
+
`Collection "${this.name}": list() is not available in lazy mode (prefetch: false). Use collection.scan({ pageSize }) to iterate over the full collection.`
|
|
1469
|
+
);
|
|
1470
|
+
}
|
|
729
1471
|
await this.ensureHydrated();
|
|
730
1472
|
return [...this.cache.values()].map((e) => e.record);
|
|
731
1473
|
}
|
|
732
|
-
/** Filter records by a predicate. */
|
|
733
1474
|
query(predicate) {
|
|
734
|
-
|
|
1475
|
+
if (this.lazy) {
|
|
1476
|
+
throw new Error(
|
|
1477
|
+
`Collection "${this.name}": query() is not available in lazy mode (prefetch: false). Use collection.scan({ pageSize }) and filter the streamed records with a regular for-await loop. Streaming queries land in v0.4.`
|
|
1478
|
+
);
|
|
1479
|
+
}
|
|
1480
|
+
if (predicate !== void 0) {
|
|
1481
|
+
return [...this.cache.values()].map((e) => e.record).filter(predicate);
|
|
1482
|
+
}
|
|
1483
|
+
const source = {
|
|
1484
|
+
snapshot: () => [...this.cache.values()].map((e) => e.record),
|
|
1485
|
+
subscribe: (cb) => {
|
|
1486
|
+
const handler = (event) => {
|
|
1487
|
+
if (event.compartment === this.compartment && event.collection === this.name) {
|
|
1488
|
+
cb();
|
|
1489
|
+
}
|
|
1490
|
+
};
|
|
1491
|
+
this.emitter.on("change", handler);
|
|
1492
|
+
return () => this.emitter.off("change", handler);
|
|
1493
|
+
},
|
|
1494
|
+
// Index-aware fast path for `==` and `in` operators on indexed
|
|
1495
|
+
// fields. The Query builder consults these when present and falls
|
|
1496
|
+
// back to a linear scan otherwise.
|
|
1497
|
+
getIndexes: () => this.getIndexes(),
|
|
1498
|
+
lookupById: (id) => this.cache.get(id)?.record
|
|
1499
|
+
};
|
|
1500
|
+
return new Query(source);
|
|
1501
|
+
}
|
|
1502
|
+
/**
|
|
1503
|
+
* Cache statistics — useful for devtools, monitoring, and verifying
|
|
1504
|
+
* that LRU eviction is happening as expected in lazy mode.
|
|
1505
|
+
*
|
|
1506
|
+
* In eager mode, returns size only (no hits/misses are tracked because
|
|
1507
|
+
* every read is a cache hit by construction). In lazy mode, returns
|
|
1508
|
+
* the full LRU stats: `{ hits, misses, evictions, size, bytes }`.
|
|
1509
|
+
*/
|
|
1510
|
+
cacheStats() {
|
|
1511
|
+
if (this.lazy && this.lru) {
|
|
1512
|
+
return { ...this.lru.stats(), lazy: true };
|
|
1513
|
+
}
|
|
1514
|
+
return {
|
|
1515
|
+
hits: 0,
|
|
1516
|
+
misses: 0,
|
|
1517
|
+
evictions: 0,
|
|
1518
|
+
size: this.cache.size,
|
|
1519
|
+
bytes: 0,
|
|
1520
|
+
lazy: false
|
|
1521
|
+
};
|
|
735
1522
|
}
|
|
736
1523
|
// ─── History Methods ────────────────────────────────────────────
|
|
737
1524
|
/** Get version history for a record, newest first. */
|
|
@@ -822,11 +1609,105 @@ var Collection = class {
|
|
|
822
1609
|
return clearHistory(this.adapter, this.compartment, this.name, id);
|
|
823
1610
|
}
|
|
824
1611
|
// ─── Core Methods ─────────────────────────────────────────────
|
|
825
|
-
/**
|
|
1612
|
+
/**
|
|
1613
|
+
* Count records in the collection.
|
|
1614
|
+
*
|
|
1615
|
+
* In eager mode this returns the in-memory cache size (instant). In
|
|
1616
|
+
* lazy mode it asks the adapter via `list()` to enumerate ids — slower
|
|
1617
|
+
* but still correct, and avoids loading any record bodies into memory.
|
|
1618
|
+
*/
|
|
826
1619
|
async count() {
|
|
1620
|
+
if (this.lazy) {
|
|
1621
|
+
const ids = await this.adapter.list(this.compartment, this.name);
|
|
1622
|
+
return ids.length;
|
|
1623
|
+
}
|
|
827
1624
|
await this.ensureHydrated();
|
|
828
1625
|
return this.cache.size;
|
|
829
1626
|
}
|
|
1627
|
+
// ─── Pagination & Streaming ───────────────────────────────────
|
|
1628
|
+
/**
|
|
1629
|
+
* Fetch a single page of records via the adapter's optional `listPage`
|
|
1630
|
+
* extension. Returns the decrypted records for this page plus an opaque
|
|
1631
|
+
* cursor for the next page.
|
|
1632
|
+
*
|
|
1633
|
+
* Pass `cursor: undefined` (or omit it) to start from the beginning.
|
|
1634
|
+
* The final page returns `nextCursor: null`.
|
|
1635
|
+
*
|
|
1636
|
+
* If the adapter does NOT implement `listPage`, this falls back to a
|
|
1637
|
+
* synthetic implementation: it loads all ids via `list()`, sorts them,
|
|
1638
|
+
* and slices a window. The first call emits a one-time console.warn so
|
|
1639
|
+
* developers can spot adapters that should opt into the fast path.
|
|
1640
|
+
*/
|
|
1641
|
+
async listPage(opts = {}) {
|
|
1642
|
+
const limit = opts.limit ?? 100;
|
|
1643
|
+
if (this.adapter.listPage) {
|
|
1644
|
+
const result = await this.adapter.listPage(this.compartment, this.name, opts.cursor, limit);
|
|
1645
|
+
const decrypted = [];
|
|
1646
|
+
for (const { record, version, id } of await this.decryptPage(result.items)) {
|
|
1647
|
+
if (!this.lazy && !this.cache.has(id)) {
|
|
1648
|
+
this.cache.set(id, { record, version });
|
|
1649
|
+
}
|
|
1650
|
+
decrypted.push(record);
|
|
1651
|
+
}
|
|
1652
|
+
return { items: decrypted, nextCursor: result.nextCursor };
|
|
1653
|
+
}
|
|
1654
|
+
warnOnceFallback(this.adapter.name ?? "unknown");
|
|
1655
|
+
const ids = (await this.adapter.list(this.compartment, this.name)).slice().sort();
|
|
1656
|
+
const start = opts.cursor ? parseInt(opts.cursor, 10) : 0;
|
|
1657
|
+
const end = Math.min(start + limit, ids.length);
|
|
1658
|
+
const items = [];
|
|
1659
|
+
for (let i = start; i < end; i++) {
|
|
1660
|
+
const id = ids[i];
|
|
1661
|
+
const envelope = await this.adapter.get(this.compartment, this.name, id);
|
|
1662
|
+
if (envelope) {
|
|
1663
|
+
const record = await this.decryptRecord(envelope);
|
|
1664
|
+
items.push(record);
|
|
1665
|
+
if (!this.lazy && !this.cache.has(id)) {
|
|
1666
|
+
this.cache.set(id, { record, version: envelope._v });
|
|
1667
|
+
}
|
|
1668
|
+
}
|
|
1669
|
+
}
|
|
1670
|
+
return {
|
|
1671
|
+
items,
|
|
1672
|
+
nextCursor: end < ids.length ? String(end) : null
|
|
1673
|
+
};
|
|
1674
|
+
}
|
|
1675
|
+
/**
|
|
1676
|
+
* Stream every record in the collection page-by-page, yielding decrypted
|
|
1677
|
+
* records as an `AsyncIterable<T>`. The whole point: process collections
|
|
1678
|
+
* larger than RAM without ever holding more than `pageSize` records
|
|
1679
|
+
* decrypted at once.
|
|
1680
|
+
*
|
|
1681
|
+
* @example
|
|
1682
|
+
* ```ts
|
|
1683
|
+
* for await (const record of invoices.scan({ pageSize: 500 })) {
|
|
1684
|
+
* await processOne(record)
|
|
1685
|
+
* }
|
|
1686
|
+
* ```
|
|
1687
|
+
*
|
|
1688
|
+
* Uses `adapter.listPage` when available; otherwise falls back to the
|
|
1689
|
+
* synthetic pagination path with the same one-time warning.
|
|
1690
|
+
*/
|
|
1691
|
+
async *scan(opts = {}) {
|
|
1692
|
+
const pageSize = opts.pageSize ?? 100;
|
|
1693
|
+
let page = await this.listPage({ limit: pageSize });
|
|
1694
|
+
while (true) {
|
|
1695
|
+
for (const item of page.items) {
|
|
1696
|
+
yield item;
|
|
1697
|
+
}
|
|
1698
|
+
if (page.nextCursor === null) return;
|
|
1699
|
+
page = await this.listPage({ cursor: page.nextCursor, limit: pageSize });
|
|
1700
|
+
}
|
|
1701
|
+
}
|
|
1702
|
+
/** Decrypt a page of envelopes returned by `adapter.listPage`. */
|
|
1703
|
+
async decryptPage(items) {
|
|
1704
|
+
const out = [];
|
|
1705
|
+
for (const { id, envelope } of items) {
|
|
1706
|
+
const record = await this.decryptRecord(envelope);
|
|
1707
|
+
out.push({ id, record, version: envelope._v });
|
|
1708
|
+
}
|
|
1709
|
+
return out;
|
|
1710
|
+
}
|
|
830
1711
|
// ─── Internal ──────────────────────────────────────────────────
|
|
831
1712
|
/** Load all records from adapter into memory cache. */
|
|
832
1713
|
async ensureHydrated() {
|
|
@@ -840,6 +1721,7 @@ var Collection = class {
|
|
|
840
1721
|
}
|
|
841
1722
|
}
|
|
842
1723
|
this.hydrated = true;
|
|
1724
|
+
this.rebuildIndexes();
|
|
843
1725
|
}
|
|
844
1726
|
/** Hydrate from a pre-loaded snapshot (used by Compartment). */
|
|
845
1727
|
async hydrateFromSnapshot(records) {
|
|
@@ -848,6 +1730,34 @@ var Collection = class {
|
|
|
848
1730
|
this.cache.set(id, { record, version: envelope._v });
|
|
849
1731
|
}
|
|
850
1732
|
this.hydrated = true;
|
|
1733
|
+
this.rebuildIndexes();
|
|
1734
|
+
}
|
|
1735
|
+
/**
|
|
1736
|
+
* Rebuild secondary indexes from the current in-memory cache.
|
|
1737
|
+
*
|
|
1738
|
+
* Called after any bulk hydration. Incremental put/delete updates
|
|
1739
|
+
* are handled by `indexes.upsert()` / `indexes.remove()` directly,
|
|
1740
|
+
* so this only fires for full reloads.
|
|
1741
|
+
*
|
|
1742
|
+
* Synchronous and O(N × indexes.size); for the v0.3 target scale of
|
|
1743
|
+
* 1K–50K records this completes in single-digit milliseconds.
|
|
1744
|
+
*/
|
|
1745
|
+
rebuildIndexes() {
|
|
1746
|
+
if (this.indexes.fields().length === 0) return;
|
|
1747
|
+
const snapshot = [];
|
|
1748
|
+
for (const [id, entry] of this.cache) {
|
|
1749
|
+
snapshot.push({ id, record: entry.record });
|
|
1750
|
+
}
|
|
1751
|
+
this.indexes.build(snapshot);
|
|
1752
|
+
}
|
|
1753
|
+
/**
|
|
1754
|
+
* Get the in-memory index store. Used by `Query` to short-circuit
|
|
1755
|
+
* `==` and `in` lookups when an index covers the where clause.
|
|
1756
|
+
*
|
|
1757
|
+
* Returns `null` if no indexes are declared on this collection.
|
|
1758
|
+
*/
|
|
1759
|
+
getIndexes() {
|
|
1760
|
+
return this.indexes.fields().length > 0 ? this.indexes : null;
|
|
851
1761
|
}
|
|
852
1762
|
/** Get all records as encrypted envelopes (for dump). */
|
|
853
1763
|
async dumpEnvelopes() {
|
|
@@ -919,11 +1829,25 @@ var Compartment = class {
|
|
|
919
1829
|
return getDEKFn(collectionName);
|
|
920
1830
|
};
|
|
921
1831
|
}
|
|
922
|
-
/**
|
|
923
|
-
|
|
1832
|
+
/**
|
|
1833
|
+
* Open a typed collection within this compartment.
|
|
1834
|
+
*
|
|
1835
|
+
* - `options.indexes` declares secondary indexes for the query DSL.
|
|
1836
|
+
* Indexes are computed in memory after decryption; adapters never
|
|
1837
|
+
* see plaintext index data.
|
|
1838
|
+
* - `options.prefetch` (default `true`) controls hydration. Eager mode
|
|
1839
|
+
* loads everything on first access; lazy mode (`prefetch: false`)
|
|
1840
|
+
* loads records on demand and bounds memory via the LRU cache.
|
|
1841
|
+
* - `options.cache` configures the LRU bounds. Required in lazy mode.
|
|
1842
|
+
* Accepts `{ maxRecords, maxBytes: '50MB' | 1024 }`.
|
|
1843
|
+
*
|
|
1844
|
+
* Lazy mode + indexes is rejected at construction time — see the
|
|
1845
|
+
* Collection constructor for the rationale.
|
|
1846
|
+
*/
|
|
1847
|
+
collection(collectionName, options) {
|
|
924
1848
|
let coll = this.collectionCache.get(collectionName);
|
|
925
1849
|
if (!coll) {
|
|
926
|
-
|
|
1850
|
+
const collOpts = {
|
|
927
1851
|
adapter: this.adapter,
|
|
928
1852
|
compartment: this.name,
|
|
929
1853
|
name: collectionName,
|
|
@@ -933,7 +1857,11 @@ var Compartment = class {
|
|
|
933
1857
|
getDEK: this.getDEK,
|
|
934
1858
|
onDirty: this.onDirty,
|
|
935
1859
|
historyConfig: this.historyConfig
|
|
936
|
-
}
|
|
1860
|
+
};
|
|
1861
|
+
if (options?.indexes !== void 0) collOpts.indexes = options.indexes;
|
|
1862
|
+
if (options?.prefetch !== void 0) collOpts.prefetch = options.prefetch;
|
|
1863
|
+
if (options?.cache !== void 0) collOpts.cache = options.cache;
|
|
1864
|
+
coll = new Collection(collOpts);
|
|
937
1865
|
this.collectionCache.set(collectionName, coll);
|
|
938
1866
|
}
|
|
939
1867
|
return coll;
|
|
@@ -1634,10 +2562,12 @@ function estimateEntropy(passphrase) {
|
|
|
1634
2562
|
// Annotate the CommonJS export names for ESM import in node:
|
|
1635
2563
|
0 && (module.exports = {
|
|
1636
2564
|
Collection,
|
|
2565
|
+
CollectionIndexes,
|
|
1637
2566
|
Compartment,
|
|
1638
2567
|
ConflictError,
|
|
1639
2568
|
DecryptionError,
|
|
1640
2569
|
InvalidKeyError,
|
|
2570
|
+
Lru,
|
|
1641
2571
|
NOYDB_BACKUP_VERSION,
|
|
1642
2572
|
NOYDB_FORMAT_VERSION,
|
|
1643
2573
|
NOYDB_KEYRING_VERSION,
|
|
@@ -1648,6 +2578,7 @@ function estimateEntropy(passphrase) {
|
|
|
1648
2578
|
Noydb,
|
|
1649
2579
|
NoydbError,
|
|
1650
2580
|
PermissionDeniedError,
|
|
2581
|
+
Query,
|
|
1651
2582
|
ReadOnlyError,
|
|
1652
2583
|
SyncEngine,
|
|
1653
2584
|
TamperedError,
|
|
@@ -1657,9 +2588,15 @@ function estimateEntropy(passphrase) {
|
|
|
1657
2588
|
diff,
|
|
1658
2589
|
enrollBiometric,
|
|
1659
2590
|
estimateEntropy,
|
|
2591
|
+
estimateRecordBytes,
|
|
2592
|
+
evaluateClause,
|
|
2593
|
+
evaluateFieldClause,
|
|
2594
|
+
executePlan,
|
|
1660
2595
|
formatDiff,
|
|
1661
2596
|
isBiometricAvailable,
|
|
1662
2597
|
loadBiometric,
|
|
2598
|
+
parseBytes,
|
|
2599
|
+
readPath,
|
|
1663
2600
|
removeBiometric,
|
|
1664
2601
|
saveBiometric,
|
|
1665
2602
|
unlockBiometric,
|