@noy-db/hub 0.1.0-pre.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +197 -0
- package/dist/aggregate/index.cjs +476 -0
- package/dist/aggregate/index.cjs.map +1 -0
- package/dist/aggregate/index.d.cts +38 -0
- package/dist/aggregate/index.d.ts +38 -0
- package/dist/aggregate/index.js +53 -0
- package/dist/aggregate/index.js.map +1 -0
- package/dist/blobs/index.cjs +1480 -0
- package/dist/blobs/index.cjs.map +1 -0
- package/dist/blobs/index.d.cts +45 -0
- package/dist/blobs/index.d.ts +45 -0
- package/dist/blobs/index.js +48 -0
- package/dist/blobs/index.js.map +1 -0
- package/dist/bundle/index.cjs +436 -0
- package/dist/bundle/index.cjs.map +1 -0
- package/dist/bundle/index.d.cts +7 -0
- package/dist/bundle/index.d.ts +7 -0
- package/dist/bundle/index.js +40 -0
- package/dist/bundle/index.js.map +1 -0
- package/dist/chunk-2QR2PQTT.js +217 -0
- package/dist/chunk-2QR2PQTT.js.map +1 -0
- package/dist/chunk-4OWFYIDQ.js +79 -0
- package/dist/chunk-4OWFYIDQ.js.map +1 -0
- package/dist/chunk-5AATM2M2.js +90 -0
- package/dist/chunk-5AATM2M2.js.map +1 -0
- package/dist/chunk-ACLDOTNQ.js +543 -0
- package/dist/chunk-ACLDOTNQ.js.map +1 -0
- package/dist/chunk-BTDCBVJW.js +160 -0
- package/dist/chunk-BTDCBVJW.js.map +1 -0
- package/dist/chunk-CIMZBAZB.js +72 -0
- package/dist/chunk-CIMZBAZB.js.map +1 -0
- package/dist/chunk-E445ICYI.js +365 -0
- package/dist/chunk-E445ICYI.js.map +1 -0
- package/dist/chunk-EXQRC2L4.js +722 -0
- package/dist/chunk-EXQRC2L4.js.map +1 -0
- package/dist/chunk-FZU343FL.js +32 -0
- package/dist/chunk-FZU343FL.js.map +1 -0
- package/dist/chunk-GJILMRPO.js +354 -0
- package/dist/chunk-GJILMRPO.js.map +1 -0
- package/dist/chunk-GOUT6DND.js +1285 -0
- package/dist/chunk-GOUT6DND.js.map +1 -0
- package/dist/chunk-J66GRPNH.js +111 -0
- package/dist/chunk-J66GRPNH.js.map +1 -0
- package/dist/chunk-M2F2JAWB.js +464 -0
- package/dist/chunk-M2F2JAWB.js.map +1 -0
- package/dist/chunk-M5INGEFC.js +84 -0
- package/dist/chunk-M5INGEFC.js.map +1 -0
- package/dist/chunk-M62XNWRA.js +72 -0
- package/dist/chunk-M62XNWRA.js.map +1 -0
- package/dist/chunk-MR4424N3.js +275 -0
- package/dist/chunk-MR4424N3.js.map +1 -0
- package/dist/chunk-NPC4LFV5.js +132 -0
- package/dist/chunk-NPC4LFV5.js.map +1 -0
- package/dist/chunk-NXFEYLVG.js +311 -0
- package/dist/chunk-NXFEYLVG.js.map +1 -0
- package/dist/chunk-R36SIKES.js +79 -0
- package/dist/chunk-R36SIKES.js.map +1 -0
- package/dist/chunk-TDR6T5CJ.js +381 -0
- package/dist/chunk-TDR6T5CJ.js.map +1 -0
- package/dist/chunk-UF3BUNQZ.js +1 -0
- package/dist/chunk-UF3BUNQZ.js.map +1 -0
- package/dist/chunk-UQFSPSWG.js +1109 -0
- package/dist/chunk-UQFSPSWG.js.map +1 -0
- package/dist/chunk-USKYUS74.js +793 -0
- package/dist/chunk-USKYUS74.js.map +1 -0
- package/dist/chunk-XCL3WP6J.js +121 -0
- package/dist/chunk-XCL3WP6J.js.map +1 -0
- package/dist/chunk-XHFOENR2.js +680 -0
- package/dist/chunk-XHFOENR2.js.map +1 -0
- package/dist/chunk-ZFKD4QMV.js +430 -0
- package/dist/chunk-ZFKD4QMV.js.map +1 -0
- package/dist/chunk-ZLMV3TUA.js +490 -0
- package/dist/chunk-ZLMV3TUA.js.map +1 -0
- package/dist/chunk-ZRG4V3F5.js +17 -0
- package/dist/chunk-ZRG4V3F5.js.map +1 -0
- package/dist/consent/index.cjs +204 -0
- package/dist/consent/index.cjs.map +1 -0
- package/dist/consent/index.d.cts +24 -0
- package/dist/consent/index.d.ts +24 -0
- package/dist/consent/index.js +23 -0
- package/dist/consent/index.js.map +1 -0
- package/dist/crdt/index.cjs +152 -0
- package/dist/crdt/index.cjs.map +1 -0
- package/dist/crdt/index.d.cts +30 -0
- package/dist/crdt/index.d.ts +30 -0
- package/dist/crdt/index.js +24 -0
- package/dist/crdt/index.js.map +1 -0
- package/dist/crypto-IVKU7YTT.js +44 -0
- package/dist/crypto-IVKU7YTT.js.map +1 -0
- package/dist/delegation-XDJCBTI2.js +16 -0
- package/dist/delegation-XDJCBTI2.js.map +1 -0
- package/dist/dev-unlock-CeXic1xC.d.cts +263 -0
- package/dist/dev-unlock-KrKkcqD3.d.ts +263 -0
- package/dist/hash-9KO1BGxh.d.cts +63 -0
- package/dist/hash-ChfJjRjQ.d.ts +63 -0
- package/dist/history/index.cjs +1215 -0
- package/dist/history/index.cjs.map +1 -0
- package/dist/history/index.d.cts +62 -0
- package/dist/history/index.d.ts +62 -0
- package/dist/history/index.js +79 -0
- package/dist/history/index.js.map +1 -0
- package/dist/i18n/index.cjs +746 -0
- package/dist/i18n/index.cjs.map +1 -0
- package/dist/i18n/index.d.cts +38 -0
- package/dist/i18n/index.d.ts +38 -0
- package/dist/i18n/index.js +55 -0
- package/dist/i18n/index.js.map +1 -0
- package/dist/index-BRHBCmLt.d.ts +1940 -0
- package/dist/index-C8kQtmOk.d.ts +380 -0
- package/dist/index-DN-J-5wT.d.cts +1940 -0
- package/dist/index-DhjMjz7L.d.cts +380 -0
- package/dist/index.cjs +14756 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +269 -0
- package/dist/index.d.ts +269 -0
- package/dist/index.js +6085 -0
- package/dist/index.js.map +1 -0
- package/dist/indexing/index.cjs +736 -0
- package/dist/indexing/index.cjs.map +1 -0
- package/dist/indexing/index.d.cts +36 -0
- package/dist/indexing/index.d.ts +36 -0
- package/dist/indexing/index.js +77 -0
- package/dist/indexing/index.js.map +1 -0
- package/dist/lazy-builder-BwEoBQZ9.d.ts +304 -0
- package/dist/lazy-builder-CZVLKh0Z.d.cts +304 -0
- package/dist/ledger-2NX4L7PN.js +33 -0
- package/dist/ledger-2NX4L7PN.js.map +1 -0
- package/dist/mime-magic-CBBSOkjm.d.cts +50 -0
- package/dist/mime-magic-CBBSOkjm.d.ts +50 -0
- package/dist/periods/index.cjs +1035 -0
- package/dist/periods/index.cjs.map +1 -0
- package/dist/periods/index.d.cts +21 -0
- package/dist/periods/index.d.ts +21 -0
- package/dist/periods/index.js +25 -0
- package/dist/periods/index.js.map +1 -0
- package/dist/predicate-SBHmi6D0.d.cts +161 -0
- package/dist/predicate-SBHmi6D0.d.ts +161 -0
- package/dist/query/index.cjs +1957 -0
- package/dist/query/index.cjs.map +1 -0
- package/dist/query/index.d.cts +3 -0
- package/dist/query/index.d.ts +3 -0
- package/dist/query/index.js +62 -0
- package/dist/query/index.js.map +1 -0
- package/dist/session/index.cjs +487 -0
- package/dist/session/index.cjs.map +1 -0
- package/dist/session/index.d.cts +45 -0
- package/dist/session/index.d.ts +45 -0
- package/dist/session/index.js +44 -0
- package/dist/session/index.js.map +1 -0
- package/dist/shadow/index.cjs +133 -0
- package/dist/shadow/index.cjs.map +1 -0
- package/dist/shadow/index.d.cts +16 -0
- package/dist/shadow/index.d.ts +16 -0
- package/dist/shadow/index.js +20 -0
- package/dist/shadow/index.js.map +1 -0
- package/dist/store/index.cjs +1069 -0
- package/dist/store/index.cjs.map +1 -0
- package/dist/store/index.d.cts +491 -0
- package/dist/store/index.d.ts +491 -0
- package/dist/store/index.js +34 -0
- package/dist/store/index.js.map +1 -0
- package/dist/strategy-BSxFXGzb.d.cts +110 -0
- package/dist/strategy-BSxFXGzb.d.ts +110 -0
- package/dist/strategy-D-SrOLCl.d.cts +548 -0
- package/dist/strategy-D-SrOLCl.d.ts +548 -0
- package/dist/sync/index.cjs +1062 -0
- package/dist/sync/index.cjs.map +1 -0
- package/dist/sync/index.d.cts +42 -0
- package/dist/sync/index.d.ts +42 -0
- package/dist/sync/index.js +28 -0
- package/dist/sync/index.js.map +1 -0
- package/dist/team/index.cjs +1233 -0
- package/dist/team/index.cjs.map +1 -0
- package/dist/team/index.d.cts +117 -0
- package/dist/team/index.d.ts +117 -0
- package/dist/team/index.js +39 -0
- package/dist/team/index.js.map +1 -0
- package/dist/tx/index.cjs +212 -0
- package/dist/tx/index.cjs.map +1 -0
- package/dist/tx/index.d.cts +20 -0
- package/dist/tx/index.d.ts +20 -0
- package/dist/tx/index.js +20 -0
- package/dist/tx/index.js.map +1 -0
- package/dist/types-BZpCZB8N.d.ts +7526 -0
- package/dist/types-Bfs0qr5F.d.cts +7526 -0
- package/dist/ulid-COREQ2RQ.js +9 -0
- package/dist/ulid-COREQ2RQ.js.map +1 -0
- package/dist/util/index.cjs +230 -0
- package/dist/util/index.cjs.map +1 -0
- package/dist/util/index.d.cts +77 -0
- package/dist/util/index.d.ts +77 -0
- package/dist/util/index.js +190 -0
- package/dist/util/index.js.map +1 -0
- package/package.json +244 -0
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/query/predicate.ts"],"sourcesContent":["/**\n * Operator implementations for the query DSL.\n *\n * All predicates run client-side, AFTER decryption — they never see ciphertext.\n * This file is dependency-free and tree-shakeable.\n */\n\n/** Comparison operators supported by the where() builder. */\nexport type Operator =\n | '=='\n | '!='\n | '<'\n | '<='\n | '>'\n | '>='\n | 'in'\n | 'contains'\n | 'startsWith'\n | 'between'\n\n/**\n * A single field comparison clause inside a query plan.\n * Plans are JSON-serializable, so this type uses primitives only.\n */\nexport interface FieldClause {\n readonly type: 'field'\n readonly field: string\n readonly op: Operator\n readonly value: unknown\n}\n\n/**\n * A user-supplied predicate function escape hatch. Not serializable.\n *\n * The predicate accepts `unknown` at the type level so the surrounding\n * Clause type can stay non-parametric — this keeps Collection<T> covariant\n * in T at the public API surface. Builder methods cast user predicates\n * (typed `(record: T) => boolean`) into this shape on the way in.\n */\nexport interface FilterClause {\n readonly type: 'filter'\n readonly fn: (record: unknown) => boolean\n}\n\n/** A logical group of clauses combined by AND or OR. 
*/\nexport interface GroupClause {\n readonly type: 'group'\n readonly op: 'and' | 'or'\n readonly clauses: readonly Clause[]\n}\n\nexport type Clause = FieldClause | FilterClause | GroupClause\n\n/**\n * Read a possibly nested field path like \"address.city\" from a record.\n * Returns undefined if any segment is missing.\n */\nexport function readPath(record: unknown, path: string): unknown {\n if (record === null || record === undefined) return undefined\n if (!path.includes('.')) {\n return (record as Record<string, unknown>)[path]\n }\n const segments = path.split('.')\n let cursor: unknown = record\n for (const segment of segments) {\n if (cursor === null || cursor === undefined) return undefined\n cursor = (cursor as Record<string, unknown>)[segment]\n }\n return cursor\n}\n\n/**\n * Evaluate a single field clause against a record.\n * Returns false on type mismatches rather than throwing — query results\n * exclude non-matching records by definition.\n */\nexport function evaluateFieldClause(record: unknown, clause: FieldClause): boolean {\n const actual = readPath(record, clause.field)\n const { op, value } = clause\n\n switch (op) {\n case '==':\n return actual === value\n case '!=':\n return actual !== value\n case '<':\n return isComparable(actual, value) && (actual as number) < (value as number)\n case '<=':\n return isComparable(actual, value) && (actual as number) <= (value as number)\n case '>':\n return isComparable(actual, value) && (actual as number) > (value as number)\n case '>=':\n return isComparable(actual, value) && (actual as number) >= (value as number)\n case 'in':\n return Array.isArray(value) && value.includes(actual)\n case 'contains':\n if (typeof actual === 'string') return typeof value === 'string' && actual.includes(value)\n if (Array.isArray(actual)) return actual.includes(value)\n return false\n case 'startsWith':\n return typeof actual === 'string' && typeof value === 'string' && actual.startsWith(value)\n case 'between': {\n 
if (!Array.isArray(value) || value.length !== 2) return false\n const [lo, hi] = value\n if (!isComparable(actual, lo) || !isComparable(actual, hi)) return false\n return (actual as number) >= (lo as number) && (actual as number) <= (hi as number)\n }\n default: {\n // Exhaustiveness — TS will error if a new operator is added without a case.\n const _exhaustive: never = op\n void _exhaustive\n return false\n }\n }\n}\n\n/**\n * Two values are \"comparable\" if they share an order-defined runtime type.\n * Strings compare lexicographically; numbers and Dates numerically; otherwise false.\n */\nfunction isComparable(a: unknown, b: unknown): boolean {\n if (typeof a === 'number' && typeof b === 'number') return true\n if (typeof a === 'string' && typeof b === 'string') return true\n if (a instanceof Date && b instanceof Date) return true\n return false\n}\n\n/**\n * Evaluate any clause (field / filter / group) against a record.\n * The recursion depth is bounded by the user's query expression — no risk of\n * blowing the stack on a 50K-record collection.\n */\nexport function evaluateClause(record: unknown, clause: Clause): boolean {\n switch (clause.type) {\n case 'field':\n return evaluateFieldClause(record, clause)\n case 'filter':\n return clause.fn(record)\n case 'group':\n if (clause.op === 'and') {\n for (const child of clause.clauses) {\n if (!evaluateClause(record, child)) return false\n }\n return true\n } else {\n for (const child of clause.clauses) {\n if (evaluateClause(record, child)) return true\n }\n return false\n }\n 
}\n}\n"],"mappings":";AAyDO,SAAS,SAAS,QAAiB,MAAuB;AAC/D,MAAI,WAAW,QAAQ,WAAW,OAAW,QAAO;AACpD,MAAI,CAAC,KAAK,SAAS,GAAG,GAAG;AACvB,WAAQ,OAAmC,IAAI;AAAA,EACjD;AACA,QAAM,WAAW,KAAK,MAAM,GAAG;AAC/B,MAAI,SAAkB;AACtB,aAAW,WAAW,UAAU;AAC9B,QAAI,WAAW,QAAQ,WAAW,OAAW,QAAO;AACpD,aAAU,OAAmC,OAAO;AAAA,EACtD;AACA,SAAO;AACT;AAOO,SAAS,oBAAoB,QAAiB,QAA8B;AACjF,QAAM,SAAS,SAAS,QAAQ,OAAO,KAAK;AAC5C,QAAM,EAAE,IAAI,MAAM,IAAI;AAEtB,UAAQ,IAAI;AAAA,IACV,KAAK;AACH,aAAO,WAAW;AAAA,IACpB,KAAK;AACH,aAAO,WAAW;AAAA,IACpB,KAAK;AACH,aAAO,aAAa,QAAQ,KAAK,KAAM,SAAqB;AAAA,IAC9D,KAAK;AACH,aAAO,aAAa,QAAQ,KAAK,KAAM,UAAsB;AAAA,IAC/D,KAAK;AACH,aAAO,aAAa,QAAQ,KAAK,KAAM,SAAqB;AAAA,IAC9D,KAAK;AACH,aAAO,aAAa,QAAQ,KAAK,KAAM,UAAsB;AAAA,IAC/D,KAAK;AACH,aAAO,MAAM,QAAQ,KAAK,KAAK,MAAM,SAAS,MAAM;AAAA,IACtD,KAAK;AACH,UAAI,OAAO,WAAW,SAAU,QAAO,OAAO,UAAU,YAAY,OAAO,SAAS,KAAK;AACzF,UAAI,MAAM,QAAQ,MAAM,EAAG,QAAO,OAAO,SAAS,KAAK;AACvD,aAAO;AAAA,IACT,KAAK;AACH,aAAO,OAAO,WAAW,YAAY,OAAO,UAAU,YAAY,OAAO,WAAW,KAAK;AAAA,IAC3F,KAAK,WAAW;AACd,UAAI,CAAC,MAAM,QAAQ,KAAK,KAAK,MAAM,WAAW,EAAG,QAAO;AACxD,YAAM,CAAC,IAAI,EAAE,IAAI;AACjB,UAAI,CAAC,aAAa,QAAQ,EAAE,KAAK,CAAC,aAAa,QAAQ,EAAE,EAAG,QAAO;AACnE,aAAQ,UAAsB,MAAkB,UAAsB;AAAA,IACxE;AAAA,IACA,SAAS;AAEP,YAAM,cAAqB;AAC3B,WAAK;AACL,aAAO;AAAA,IACT;AAAA,EACF;AACF;AAMA,SAAS,aAAa,GAAY,GAAqB;AACrD,MAAI,OAAO,MAAM,YAAY,OAAO,MAAM,SAAU,QAAO;AAC3D,MAAI,OAAO,MAAM,YAAY,OAAO,MAAM,SAAU,QAAO;AAC3D,MAAI,aAAa,QAAQ,aAAa,KAAM,QAAO;AACnD,SAAO;AACT;AAOO,SAAS,eAAe,QAAiB,QAAyB;AACvE,UAAQ,OAAO,MAAM;AAAA,IACnB,KAAK;AACH,aAAO,oBAAoB,QAAQ,MAAM;AAAA,IAC3C,KAAK;AACH,aAAO,OAAO,GAAG,MAAM;AAAA,IACzB,KAAK;AACH,UAAI,OAAO,OAAO,OAAO;AACvB,mBAAW,SAAS,OAAO,SAAS;AAClC,cAAI,CAAC,eAAe,QAAQ,KAAK,EAAG,QAAO;AAAA,QAC7C;AACA,eAAO;AAAA,MACT,OAAO;AACL,mBAAW,SAAS,OAAO,SAAS;AAClC,cAAI,eAAe,QAAQ,KAAK,EAAG,QAAO;AAAA,QAC5C;AACA,eAAO;AAAA,MACT;AAAA,EACJ;AACF;","names":[]}
|
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
import {
|
|
2
|
+
generateULID
|
|
3
|
+
} from "./chunk-FZU343FL.js";
|
|
4
|
+
import {
|
|
5
|
+
decrypt,
|
|
6
|
+
encrypt
|
|
7
|
+
} from "./chunk-MR4424N3.js";
|
|
8
|
+
|
|
9
|
+
// src/consent/consent.ts
|
|
10
|
+
var CONSENT_AUDIT_COLLECTION = "_consent_audit";
|
|
11
|
+
// Append one consent-audit record to the reserved audit collection.
// A fresh ULID is both the record id and the storage key, so entries
// sort in insertion order. The envelope is encrypted iff `encrypted`.
async function writeConsentEntry(adapter, vault, encrypted, entry, getDEK) {
  const recordId = generateULID();
  // Caller-supplied fields win on key collision, matching the original spread.
  const stamped = Object.assign(
    { id: recordId, timestamp: new Date().toISOString() },
    entry
  );
  const sealed = await buildEnvelope(stamped, encrypted, getDEK);
  await adapter.put(vault, CONSENT_AUDIT_COLLECTION, recordId, sealed);
}
|
|
21
|
+
/**
 * Load, decrypt and filter every consent-audit entry in the vault.
 *
 * Entries are processed in ascending id order (ULIDs sort by creation
 * time, so this is insertion order). Missing envelopes are skipped.
 *
 * Fix: the original called `ids.sort()` directly, mutating the array
 * returned by `adapter.list` — that array belongs to the adapter's
 * result, not to us, so we sort a copy instead.
 */
async function loadConsentEntries(adapter, vault, encrypted, getDEK, filter = {}) {
  const ids = await adapter.list(vault, CONSENT_AUDIT_COLLECTION);
  const entries = [];
  // Copy before sorting: Array#sort mutates in place.
  for (const id of [...ids].sort()) {
    const envelope = await adapter.get(vault, CONSENT_AUDIT_COLLECTION, id);
    if (!envelope) continue;
    const entry = await decryptEntry(envelope, encrypted, getDEK);
    if (!matchesFilter(entry, filter)) continue;
    entries.push(entry);
  }
  return entries;
}
|
|
33
|
+
// Wrap a consent entry in a storage envelope. In plaintext mode the JSON
// rides in `_data` with an empty `_iv`; in encrypted mode both come from
// AES-GCM encryption under the consent-audit collection's DEK.
async function buildEnvelope(entry, encrypted, getDEK) {
  const payload = JSON.stringify(entry);
  let iv = "";
  let data = payload;
  if (encrypted) {
    const dek = await getDEK(CONSENT_AUDIT_COLLECTION);
    ({ iv, data } = await encrypt(payload, dek));
  }
  return {
    _noydb: 1,
    _v: 1,
    _ts: entry.timestamp,
    _iv: iv,
    _data: data
  };
}
|
|
54
|
+
// Recover a consent entry from its envelope: decrypt when the vault is
// encrypted, otherwise parse `_data` straight from the envelope.
async function decryptEntry(envelope, encrypted, getDEK) {
  if (!encrypted) {
    return JSON.parse(envelope._data);
  }
  const dek = await getDEK(CONSENT_AUDIT_COLLECTION);
  const json = await decrypt(envelope._iv, envelope._data, dek);
  return JSON.parse(json);
}
|
|
58
|
+
// Check an audit entry against an (all-optional) filter. Timestamp bounds
// are inclusive ISO-string comparisons; the remaining fields are exact
// matches that only apply when the filter sets a truthy value.
function matchesFilter(entry, f) {
  const inRange =
    (!f.since || entry.timestamp >= f.since) &&
    (!f.until || entry.timestamp <= f.until);
  if (!inRange) return false;
  return ["collection", "actor", "purpose"].every(
    (key) => !f[key] || entry[key] === f[key]
  );
}
|
|
66
|
+
|
|
67
|
+
export {
|
|
68
|
+
CONSENT_AUDIT_COLLECTION,
|
|
69
|
+
writeConsentEntry,
|
|
70
|
+
loadConsentEntries
|
|
71
|
+
};
|
|
72
|
+
//# sourceMappingURL=chunk-M62XNWRA.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/consent/consent.ts"],"sourcesContent":["/**\n * Consent boundaries — per-access audit log.\n *\n * ```ts\n * const audit = await vault.withConsent(\n * { purpose: 'quarterly-review', consentHash: '7f3a...' },\n * async () => {\n * const invoices = await vault.collection<Invoice>('invoices').list()\n * return invoices\n * },\n * )\n *\n * const log = await vault.consentAudit({ since: '2026-01-01T00:00:00Z' })\n * // → entries: { actor, purpose, consentHash, ts, op, collection, id }\n * ```\n *\n * ## Contract\n *\n * Every `get` / `put` / `delete` that happens inside a `withConsent`\n * callback writes one entry to the reserved `_consent_audit`\n * collection. Entries are encrypted with the vault's consent-audit\n * DEK (separate from per-user-collection DEKs so access-log queries\n * don't require unwrapping individual collection keys). Outside a\n * `withConsent` scope, no entries are written — consent is\n * opt-in by design (GDPR Art. 7: *demonstrable*, *specific*\n * consent).\n *\n * ## Why store the hash, not the consent text?\n *\n * The `consentHash` is the sha256 of whatever consent receipt the\n * actor presented (a signed GDPR banner click, a HIPAA authorisation\n * PDF, an API-level `X-Consent-Hash` header). Storing only the hash:\n *\n * 1. Keeps the audit log small and indexable.\n * 2. Preserves zero-knowledge at the adapter — adapters see\n * ciphertext envelopes of `{ actor, purpose, consentHash, ts,\n * op, collection, id }`, never the consent record itself.\n * 3. Lets the regulator verify a presented consent doc matches\n * the logged hash at audit time without the system ever\n * possessing the doc.\n *\n * ## Concurrency\n *\n * The consent context lives on the {@link Vault} instance. 
Two\n * concurrent `withConsent` calls on the same Vault would stomp each\n * other — documented limitation; adopters needing per-flight scope\n * should use separate Vault instances or an AsyncLocalStorage shim.\n *\n * @module\n */\nimport type { EncryptedEnvelope, NoydbStore } from '../types.js'\nimport { encrypt, decrypt } from '../crypto.js'\nimport { generateULID } from '../bundle/ulid.js'\n\n/** Reserved collection for consent-audit entries. */\nexport const CONSENT_AUDIT_COLLECTION = '_consent_audit'\n\n/**\n * The consent scope active for a block of work. Set via\n * `vault.withConsent()`; observed by the collection's access hooks.\n */\nexport interface ConsentContext {\n /**\n * What this access is for. Used by the audit query (`consentAudit\n * ({ purpose })`) and carried in the stored entry. Free-form; the\n * regulator or compliance tooling decides the vocabulary.\n */\n readonly purpose: string\n /**\n * Hex-encoded sha256 of whatever consent artefact the actor\n * presented. Stored as-is in each entry.\n */\n readonly consentHash: string\n}\n\n/** Access operation recorded in an audit entry. */\nexport type ConsentOp = 'get' | 'put' | 'delete'\n\n/** One consent-audit record, as decrypted for the caller. */\nexport interface ConsentAuditEntry {\n /** ULID — stable insertion-order key. */\n readonly id: string\n readonly timestamp: string\n readonly actor: string\n readonly purpose: string\n readonly consentHash: string\n readonly op: ConsentOp\n readonly collection: string\n readonly recordId: string\n}\n\n/** Filter passed to `vault.consentAudit()`. */\nexport interface ConsentAuditFilter {\n /** Only entries at or after this ISO timestamp. */\n readonly since?: string\n /** Only entries at or before this ISO timestamp. */\n readonly until?: string\n /** Match entries targeting this collection. */\n readonly collection?: string\n /** Match entries written by this actor. */\n readonly actor?: string\n /** Match entries with this purpose. 
*/\n readonly purpose?: string\n}\n\n/**\n * Write one audit entry. Called by Vault's onAccess hook when a\n * consent context is active.\n */\nexport async function writeConsentEntry(\n adapter: NoydbStore,\n vault: string,\n encrypted: boolean,\n entry: Omit<ConsentAuditEntry, 'id' | 'timestamp'>,\n getDEK: (collection: string) => Promise<CryptoKey>,\n): Promise<void> {\n const id = generateULID()\n const full: ConsentAuditEntry = {\n id,\n timestamp: new Date().toISOString(),\n ...entry,\n }\n const envelope = await buildEnvelope(full, encrypted, getDEK)\n await adapter.put(vault, CONSENT_AUDIT_COLLECTION, id, envelope)\n}\n\n/** Load + decrypt + filter all entries. */\nexport async function loadConsentEntries(\n adapter: NoydbStore,\n vault: string,\n encrypted: boolean,\n getDEK: (collection: string) => Promise<CryptoKey>,\n filter: ConsentAuditFilter = {},\n): Promise<ConsentAuditEntry[]> {\n const ids = await adapter.list(vault, CONSENT_AUDIT_COLLECTION)\n const entries: ConsentAuditEntry[] = []\n\n for (const id of ids.sort()) {\n const envelope = await adapter.get(vault, CONSENT_AUDIT_COLLECTION, id)\n if (!envelope) continue\n const entry = await decryptEntry(envelope, encrypted, getDEK)\n if (!matchesFilter(entry, filter)) continue\n entries.push(entry)\n }\n return entries\n}\n\n// ── internals ──────────────────────────────────────────────────────\n\nasync function buildEnvelope(\n entry: ConsentAuditEntry,\n encrypted: boolean,\n getDEK: (collection: string) => Promise<CryptoKey>,\n): Promise<EncryptedEnvelope> {\n const json = JSON.stringify(entry)\n if (!encrypted) {\n return {\n _noydb: 1,\n _v: 1,\n _ts: entry.timestamp,\n _iv: '',\n _data: json,\n }\n }\n const dek = await getDEK(CONSENT_AUDIT_COLLECTION)\n const { iv, data } = await encrypt(json, dek)\n return {\n _noydb: 1,\n _v: 1,\n _ts: entry.timestamp,\n _iv: iv,\n _data: data,\n }\n}\n\nasync function decryptEntry(\n envelope: EncryptedEnvelope,\n encrypted: boolean,\n getDEK: (collection: 
string) => Promise<CryptoKey>,\n): Promise<ConsentAuditEntry> {\n const json = encrypted\n ? await decrypt(envelope._iv, envelope._data, await getDEK(CONSENT_AUDIT_COLLECTION))\n : envelope._data\n return JSON.parse(json) as ConsentAuditEntry\n}\n\nfunction matchesFilter(entry: ConsentAuditEntry, f: ConsentAuditFilter): boolean {\n if (f.since && entry.timestamp < f.since) return false\n if (f.until && entry.timestamp > f.until) return false\n if (f.collection && entry.collection !== f.collection) return false\n if (f.actor && entry.actor !== f.actor) return false\n if (f.purpose && entry.purpose !== f.purpose) return false\n return true\n}\n"],"mappings":";;;;;;;;;AAuDO,IAAM,2BAA2B;AAsDxC,eAAsB,kBACpB,SACA,OACA,WACA,OACA,QACe;AACf,QAAM,KAAK,aAAa;AACxB,QAAM,OAA0B;AAAA,IAC9B;AAAA,IACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,IAClC,GAAG;AAAA,EACL;AACA,QAAM,WAAW,MAAM,cAAc,MAAM,WAAW,MAAM;AAC5D,QAAM,QAAQ,IAAI,OAAO,0BAA0B,IAAI,QAAQ;AACjE;AAGA,eAAsB,mBACpB,SACA,OACA,WACA,QACA,SAA6B,CAAC,GACA;AAC9B,QAAM,MAAM,MAAM,QAAQ,KAAK,OAAO,wBAAwB;AAC9D,QAAM,UAA+B,CAAC;AAEtC,aAAW,MAAM,IAAI,KAAK,GAAG;AAC3B,UAAM,WAAW,MAAM,QAAQ,IAAI,OAAO,0BAA0B,EAAE;AACtE,QAAI,CAAC,SAAU;AACf,UAAM,QAAQ,MAAM,aAAa,UAAU,WAAW,MAAM;AAC5D,QAAI,CAAC,cAAc,OAAO,MAAM,EAAG;AACnC,YAAQ,KAAK,KAAK;AAAA,EACpB;AACA,SAAO;AACT;AAIA,eAAe,cACb,OACA,WACA,QAC4B;AAC5B,QAAM,OAAO,KAAK,UAAU,KAAK;AACjC,MAAI,CAAC,WAAW;AACd,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,IAAI;AAAA,MACJ,KAAK,MAAM;AAAA,MACX,KAAK;AAAA,MACL,OAAO;AAAA,IACT;AAAA,EACF;AACA,QAAM,MAAM,MAAM,OAAO,wBAAwB;AACjD,QAAM,EAAE,IAAI,KAAK,IAAI,MAAM,QAAQ,MAAM,GAAG;AAC5C,SAAO;AAAA,IACL,QAAQ;AAAA,IACR,IAAI;AAAA,IACJ,KAAK,MAAM;AAAA,IACX,KAAK;AAAA,IACL,OAAO;AAAA,EACT;AACF;AAEA,eAAe,aACb,UACA,WACA,QAC4B;AAC5B,QAAM,OAAO,YACT,MAAM,QAAQ,SAAS,KAAK,SAAS,OAAO,MAAM,OAAO,wBAAwB,CAAC,IAClF,SAAS;AACb,SAAO,KAAK,MAAM,IAAI;AACxB;AAEA,SAAS,cAAc,OAA0B,GAAgC;AAC/E,MAAI,EAAE,SAAS,MAAM,YAAY,EAAE,MAAO,QAAO;AACjD,MAAI,EAAE,SAAS,MAAM,YAAY,EAAE,MAAO,QAAO;AACjD,MAAI,EAAE,cAAc,MAAM,eAAe,EAAE,WAAY,QAAO;AAC9D,MAAI,EAAE,SAAS,MAAM,UAA
U,EAAE,MAAO,QAAO;AAC/C,MAAI,EAAE,WAAW,MAAM,YAAY,EAAE,QAAS,QAAO;AACrD,SAAO;AACT;","names":[]}
|
|
@@ -0,0 +1,275 @@
|
|
|
1
|
+
import {
|
|
2
|
+
DecryptionError,
|
|
3
|
+
InvalidKeyError,
|
|
4
|
+
TamperedError
|
|
5
|
+
} from "./chunk-ACLDOTNQ.js";
|
|
6
|
+
|
|
7
|
+
// src/crypto.ts
|
|
8
|
+
// PBKDF2 work factor: 600,000 iterations of SHA-256 for passphrase
// stretching. NOTE(review): matches current OWASP guidance for
// PBKDF2-HMAC-SHA256 — confirm this is the intended target.
var PBKDF2_ITERATIONS = 6e5;
// Random salt size (bytes) for passphrase key derivation.
var SALT_BYTES = 32;
// AES-GCM nonce size: 96 bits, the standard GCM IV length.
var IV_BYTES = 12;
// AES key length in bits, shared by the KEK and all DEKs.
var KEY_BITS = 256;
// WebCrypto SubtleCrypto handle (available in browsers and modern Node).
var subtle = globalThis.crypto.subtle;
|
|
13
|
+
// Derive the passphrase-based key-encryption key (KEK).
// PBKDF2-SHA-256 stretches the UTF-8 passphrase with the given salt into
// an AES-KW key whose only permitted uses are wrapping/unwrapping DEKs.
async function deriveKey(passphrase, salt) {
  const keyMaterial = await subtle.importKey(
    "raw",
    new TextEncoder().encode(passphrase),
    "PBKDF2",
    false,
    ["deriveKey"]
  );
  return subtle.deriveKey(
    {
      name: "PBKDF2",
      salt,
      iterations: PBKDF2_ITERATIONS,
      hash: "SHA-256"
    },
    keyMaterial,
    { name: "AES-KW", length: KEY_BITS },
    // non-extractable: the KEK never leaves the WebCrypto boundary
    false,
    ["wrapKey", "unwrapKey"]
  );
}
|
|
34
|
+
async function generateDEK() {
|
|
35
|
+
return subtle.generateKey(
|
|
36
|
+
{ name: "AES-GCM", length: KEY_BITS },
|
|
37
|
+
true,
|
|
38
|
+
// extractable — needed for AES-KW wrapping
|
|
39
|
+
["encrypt", "decrypt"]
|
|
40
|
+
);
|
|
41
|
+
}
|
|
42
|
+
async function wrapKey(dek, kek) {
|
|
43
|
+
const wrapped = await subtle.wrapKey("raw", dek, kek, "AES-KW");
|
|
44
|
+
return bufferToBase64(wrapped);
|
|
45
|
+
}
|
|
46
|
+
// Unwrap a base64 AES-KW-wrapped DEK back into a usable AES-GCM key.
// Any WebCrypto failure here (wrong KEK, corrupted blob) is collapsed
// into InvalidKeyError so callers get one stable "bad key" signal
// without leaking the underlying error detail.
async function unwrapKey(wrappedBase64, kek) {
  try {
    return await subtle.unwrapKey(
      "raw",
      base64ToBuffer(wrappedBase64),
      kek,
      "AES-KW",
      { name: "AES-GCM", length: KEY_BITS },
      // extractable, mirroring generateDEK (needed for re-wrapping)
      true,
      ["encrypt", "decrypt"]
    );
  } catch {
    throw new InvalidKeyError();
  }
}
|
|
61
|
+
async function encrypt(plaintext, dek) {
|
|
62
|
+
const iv = generateIV();
|
|
63
|
+
const encoded = new TextEncoder().encode(plaintext);
|
|
64
|
+
const ciphertext = await subtle.encrypt(
|
|
65
|
+
{ name: "AES-GCM", iv },
|
|
66
|
+
dek,
|
|
67
|
+
encoded
|
|
68
|
+
);
|
|
69
|
+
return {
|
|
70
|
+
iv: bufferToBase64(iv),
|
|
71
|
+
data: bufferToBase64(ciphertext)
|
|
72
|
+
};
|
|
73
|
+
}
|
|
74
|
+
// Decrypt a base64 AES-GCM envelope back into a UTF-8 string.
// Throws TamperedError when GCM authentication fails (WebCrypto surfaces
// this as an OperationError), DecryptionError for anything else.
async function decrypt(ivBase64, dataBase64, dek) {
  const iv = base64ToBuffer(ivBase64);
  const ciphertext = base64ToBuffer(dataBase64);
  try {
    const plaintext = await subtle.decrypt(
      { name: "AES-GCM", iv },
      dek,
      ciphertext
    );
    return new TextDecoder().decode(plaintext);
  } catch (err) {
    // auth-tag mismatch => ciphertext (or IV) was modified
    if (err instanceof Error && err.name === "OperationError") {
      throw new TamperedError();
    }
    throw new DecryptionError(
      err instanceof Error ? err.message : "Decryption failed"
    );
  }
}
|
|
93
|
+
async function encryptBytes(data, dek) {
|
|
94
|
+
const iv = generateIV();
|
|
95
|
+
const ciphertext = await subtle.encrypt(
|
|
96
|
+
{ name: "AES-GCM", iv },
|
|
97
|
+
dek,
|
|
98
|
+
data
|
|
99
|
+
);
|
|
100
|
+
return {
|
|
101
|
+
iv: bufferToBase64(iv),
|
|
102
|
+
data: bufferToBase64(ciphertext)
|
|
103
|
+
};
|
|
104
|
+
}
|
|
105
|
+
// Binary counterpart of decrypt(): AES-GCM decryption returning the raw
// plaintext bytes as a Uint8Array. Error mapping mirrors decrypt():
// OperationError (auth failure) => TamperedError, else DecryptionError.
async function decryptBytes(ivBase64, dataBase64, dek) {
  const iv = base64ToBuffer(ivBase64);
  const ciphertext = base64ToBuffer(dataBase64);
  try {
    const plaintext = await subtle.decrypt(
      { name: "AES-GCM", iv },
      dek,
      ciphertext
    );
    return new Uint8Array(plaintext);
  } catch (err) {
    if (err instanceof Error && err.name === "OperationError") {
      throw new TamperedError();
    }
    throw new DecryptionError(
      err instanceof Error ? err.message : "Decryption failed"
    );
  }
}
|
|
124
|
+
async function sha256Hex(data) {
|
|
125
|
+
const hash = await subtle.digest("SHA-256", data);
|
|
126
|
+
return Array.from(new Uint8Array(hash)).map((b) => b.toString(16).padStart(2, "0")).join("");
|
|
127
|
+
}
|
|
128
|
+
async function hmacSha256Hex(key, data) {
|
|
129
|
+
const rawKey = await subtle.exportKey("raw", key);
|
|
130
|
+
const hmacKey = await subtle.importKey(
|
|
131
|
+
"raw",
|
|
132
|
+
rawKey,
|
|
133
|
+
{ name: "HMAC", hash: "SHA-256" },
|
|
134
|
+
false,
|
|
135
|
+
["sign"]
|
|
136
|
+
);
|
|
137
|
+
const sig = await subtle.sign("HMAC", hmacKey, data);
|
|
138
|
+
return Array.from(new Uint8Array(sig)).map((b) => b.toString(16).padStart(2, "0")).join("");
|
|
139
|
+
}
|
|
140
|
+
async function encryptBytesWithAAD(data, dek, aad) {
|
|
141
|
+
const iv = generateIV();
|
|
142
|
+
const ciphertext = await subtle.encrypt(
|
|
143
|
+
{
|
|
144
|
+
name: "AES-GCM",
|
|
145
|
+
iv,
|
|
146
|
+
additionalData: aad
|
|
147
|
+
},
|
|
148
|
+
dek,
|
|
149
|
+
data
|
|
150
|
+
);
|
|
151
|
+
return {
|
|
152
|
+
iv: bufferToBase64(iv),
|
|
153
|
+
data: bufferToBase64(ciphertext)
|
|
154
|
+
};
|
|
155
|
+
}
|
|
156
|
+
// Decrypt an AES-GCM envelope whose auth tag also covers `aad`.
// The caller must supply the exact AAD used at encryption time; a
// mismatch fails authentication and surfaces as TamperedError.
async function decryptBytesWithAAD(ivBase64, dataBase64, dek, aad) {
  const iv = base64ToBuffer(ivBase64);
  const ciphertext = base64ToBuffer(dataBase64);
  try {
    const plaintext = await subtle.decrypt(
      {
        name: "AES-GCM",
        iv,
        additionalData: aad
      },
      dek,
      ciphertext
    );
    return new Uint8Array(plaintext);
  } catch (err) {
    // auth failure (wrong AAD, modified ciphertext) => OperationError
    if (err instanceof Error && err.name === "OperationError") {
      throw new TamperedError();
    }
    throw new DecryptionError(
      err instanceof Error ? err.message : "Decryption failed"
    );
  }
}
|
|
179
|
+
// Derive a collection-scoped AES-GCM subkey from the DEK via HKDF-SHA-256.
// The fixed salt "noydb-presence" domain-separates this derivation and the
// collection name is the HKDF info, so each collection gets a distinct,
// non-extractable key. Requires `dek` to be extractable.
// NOTE(review): what the "presence" subkey actually protects is not visible
// from this chunk — confirm against the callers.
async function derivePresenceKey(dek, collectionName) {
  const rawDek = await subtle.exportKey("raw", dek);
  const hkdfKey = await subtle.importKey(
    "raw",
    rawDek,
    "HKDF",
    false,
    ["deriveBits"]
  );
  const salt = new TextEncoder().encode("noydb-presence");
  const info = new TextEncoder().encode(collectionName);
  const bits = await subtle.deriveBits(
    { name: "HKDF", hash: "SHA-256", salt, info },
    hkdfKey,
    KEY_BITS
  );
  return subtle.importKey(
    "raw",
    bits,
    { name: "AES-GCM", length: KEY_BITS },
    false,
    ["encrypt", "decrypt"]
  );
}
|
|
203
|
+
/**
 * Derive a deterministic 12-byte AES-GCM IV from { DEK, context, plaintext }
 * using HKDF-SHA256. Identical inputs always yield the same IV, which is
 * what makes `encryptDeterministic` produce stable ciphertexts for blind
 * equality comparison.
 */
async function deriveDeterministicIV(dek, context, plaintext) {
  const encoder = new TextEncoder();
  const rawDek = await subtle.exportKey("raw", dek);
  const hkdfKey = await subtle.importKey("raw", rawDek, "HKDF", false, [
    "deriveBits"
  ]);
  const bits = await subtle.deriveBits(
    {
      name: "HKDF",
      hash: "SHA-256",
      salt: encoder.encode("noydb-deterministic-v1"),
      // NUL separator keeps context/plaintext boundaries unambiguous.
      info: encoder.encode(`${context}\0${plaintext}`)
    },
    hkdfKey,
    IV_BYTES * 8
  );
  return new Uint8Array(bits);
}
|
|
215
|
+
/**
 * Deterministic AES-256-GCM encryption: the IV is HKDF-derived from the
 * { dek, context, plaintext } triple, so the same inputs always produce the
 * same `{ iv, data }` pair. This enables equality search on encrypted
 * fields at the cost of revealing value-equality to an observer.
 */
async function encryptDeterministic(plaintext, dek, context) {
  const iv = await deriveDeterministicIV(dek, context, plaintext);
  const encodedPlaintext = new TextEncoder().encode(plaintext);
  const ciphertext = await subtle.encrypt(
    { name: "AES-GCM", iv },
    dek,
    encodedPlaintext
  );
  return {
    iv: bufferToBase64(iv),
    data: bufferToBase64(ciphertext)
  };
}
|
|
228
|
+
/**
 * Counterpart to `encryptDeterministic`. The IV is stored next to the
 * ciphertext exactly like the randomized path, so decryption simply
 * delegates to `decrypt` (GCM auth-tag verification included).
 */
async function decryptDeterministic(ivBase64, dataBase64, dek) {
  return await decrypt(ivBase64, dataBase64, dek);
}
|
|
231
|
+
/**
 * Produce a fresh random 12-byte IV for AES-GCM. This must be the only
 * source of IVs for the randomized encrypt paths — reusing an IV under
 * the same key breaks GCM's authentication guarantee.
 */
function generateIV() {
  const iv = new Uint8Array(IV_BYTES);
  globalThis.crypto.getRandomValues(iv);
  return iv;
}
|
|
234
|
+
/** Produce a fresh random 32-byte salt for PBKDF2 key derivation. */
function generateSalt() {
  const salt = new Uint8Array(SALT_BYTES);
  globalThis.crypto.getRandomValues(salt);
  return salt;
}
|
|
237
|
+
/**
 * Encode an ArrayBuffer or Uint8Array as a base64 string.
 * Builds the latin-1 binary string byte-by-byte (avoiding spread into
 * `String.fromCharCode`, which can overflow the stack on large inputs)
 * and hands it to `btoa`.
 */
function bufferToBase64(buffer) {
  const bytes = buffer instanceof Uint8Array ? buffer : new Uint8Array(buffer);
  const chars = [];
  for (const byte of bytes) {
    chars.push(String.fromCharCode(byte));
  }
  return btoa(chars.join(""));
}
|
|
245
|
+
/**
 * Decode a base64 string into a Uint8Array.
 * `atob` yields a latin-1 binary string (every char is a single code
 * unit in 0–255), so mapping `charCodeAt(0)` per character is exact.
 */
function base64ToBuffer(base64) {
  const binary = atob(base64);
  return Uint8Array.from(binary, (ch) => ch.charCodeAt(0));
}
|
|
253
|
+
|
|
254
|
+
export {
|
|
255
|
+
deriveKey,
|
|
256
|
+
generateDEK,
|
|
257
|
+
wrapKey,
|
|
258
|
+
unwrapKey,
|
|
259
|
+
encrypt,
|
|
260
|
+
decrypt,
|
|
261
|
+
encryptBytes,
|
|
262
|
+
decryptBytes,
|
|
263
|
+
sha256Hex,
|
|
264
|
+
hmacSha256Hex,
|
|
265
|
+
encryptBytesWithAAD,
|
|
266
|
+
decryptBytesWithAAD,
|
|
267
|
+
derivePresenceKey,
|
|
268
|
+
encryptDeterministic,
|
|
269
|
+
decryptDeterministic,
|
|
270
|
+
generateIV,
|
|
271
|
+
generateSalt,
|
|
272
|
+
bufferToBase64,
|
|
273
|
+
base64ToBuffer
|
|
274
|
+
};
|
|
275
|
+
//# sourceMappingURL=chunk-MR4424N3.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/crypto.ts"],"sourcesContent":["/**\n * Cryptographic primitives — thin wrappers around the Web Crypto API.\n *\n * ## Design principle\n *\n * **Zero npm crypto dependencies.** Every operation uses `globalThis.crypto.subtle`,\n * which is available natively in Node.js ≥ 18, all modern browsers, and\n * Deno/Bun. This avoids supply-chain risk from third-party crypto packages and\n * ensures the library stays auditable.\n *\n * ## Algorithms\n *\n * | Use case | Algorithm | Parameters |\n * |----------|-----------|------------|\n * | Key derivation | PBKDF2-SHA256 | 600,000 iterations, 32-byte salt |\n * | Record encryption | AES-256-GCM | 12-byte random IV per operation |\n * | DEK wrapping | AES-KW (RFC 3394) | 256-bit KEK |\n * | Binary encrypt | AES-256-GCM | same as record encryption |\n * | Integrity | HMAC-SHA256 | for presence channels |\n * | Content hash | SHA-256 | for ledger and bundle integrity |\n *\n * ## Key lifecycle\n *\n * ```\n * passphrase + salt\n * └─► deriveKey() → KEK (CryptoKey, extractable: false)\n * └─► wrapKey() → wrapped DEK bytes [stored in keyring]\n * └─► unwrapKey() → DEK (CryptoKey) [memory only during session]\n * └─► encrypt() / decrypt() → ciphertext / plaintext\n * ```\n *\n * IVs are generated fresh by {@link generateIV} on every encrypt call.\n * Reusing an IV with the same key would break GCM's authentication guarantee —\n * this function should be the only place IVs are produced.\n *\n * @module\n */\n\nimport { DecryptionError, InvalidKeyError, TamperedError } from './errors.js'\n\nconst PBKDF2_ITERATIONS = 600_000\nconst SALT_BYTES = 32\nconst IV_BYTES = 12\nconst KEY_BITS = 256\n\nconst subtle = globalThis.crypto.subtle\n\n// ─── Key Derivation ────────────────────────────────────────────────────\n\n/** Derive a KEK from a passphrase and salt using PBKDF2-SHA256. 
*/\nexport async function deriveKey(\n passphrase: string,\n salt: Uint8Array,\n): Promise<CryptoKey> {\n const keyMaterial = await subtle.importKey(\n 'raw',\n new TextEncoder().encode(passphrase),\n 'PBKDF2',\n false,\n ['deriveKey'],\n )\n\n return subtle.deriveKey(\n {\n name: 'PBKDF2',\n salt: salt as BufferSource,\n iterations: PBKDF2_ITERATIONS,\n hash: 'SHA-256',\n },\n keyMaterial,\n { name: 'AES-KW', length: KEY_BITS },\n false,\n ['wrapKey', 'unwrapKey'],\n )\n}\n\n// ─── DEK Generation ────────────────────────────────────────────────────\n\n/** Generate a random AES-256-GCM data encryption key. */\nexport async function generateDEK(): Promise<CryptoKey> {\n return subtle.generateKey(\n { name: 'AES-GCM', length: KEY_BITS },\n true, // extractable — needed for AES-KW wrapping\n ['encrypt', 'decrypt'],\n )\n}\n\n// ─── Key Wrapping ──────────────────────────────────────────────────────\n\n/** Wrap (encrypt) a DEK with a KEK using AES-KW. Returns base64 string. */\nexport async function wrapKey(dek: CryptoKey, kek: CryptoKey): Promise<string> {\n const wrapped = await subtle.wrapKey('raw', dek, kek, 'AES-KW')\n return bufferToBase64(wrapped)\n}\n\n/** Unwrap (decrypt) a DEK from base64 string using a KEK. */\nexport async function unwrapKey(\n wrappedBase64: string,\n kek: CryptoKey,\n): Promise<CryptoKey> {\n try {\n return await subtle.unwrapKey(\n 'raw',\n base64ToBuffer(wrappedBase64) as BufferSource,\n kek,\n 'AES-KW',\n { name: 'AES-GCM', length: KEY_BITS },\n true,\n ['encrypt', 'decrypt'],\n )\n } catch {\n throw new InvalidKeyError()\n }\n}\n\n// ─── Encrypt / Decrypt ─────────────────────────────────────────────────\n\nexport interface EncryptResult {\n iv: string // base64\n data: string // base64\n}\n\n/** Encrypt plaintext JSON string with AES-256-GCM. Fresh IV per call. 
*/\nexport async function encrypt(\n plaintext: string,\n dek: CryptoKey,\n): Promise<EncryptResult> {\n const iv = generateIV()\n const encoded = new TextEncoder().encode(plaintext)\n\n const ciphertext = await subtle.encrypt(\n { name: 'AES-GCM', iv: iv as BufferSource },\n dek,\n encoded,\n )\n\n return {\n iv: bufferToBase64(iv),\n data: bufferToBase64(ciphertext),\n }\n}\n\n/** Decrypt AES-256-GCM ciphertext. Throws on wrong key or tampered data. */\nexport async function decrypt(\n ivBase64: string,\n dataBase64: string,\n dek: CryptoKey,\n): Promise<string> {\n const iv = base64ToBuffer(ivBase64)\n const ciphertext = base64ToBuffer(dataBase64)\n\n try {\n const plaintext = await subtle.decrypt(\n { name: 'AES-GCM', iv: iv as BufferSource },\n dek,\n ciphertext as BufferSource,\n )\n return new TextDecoder().decode(plaintext)\n } catch (err) {\n if (err instanceof Error && err.name === 'OperationError') {\n throw new TamperedError()\n }\n throw new DecryptionError(\n err instanceof Error ? err.message : 'Decryption failed',\n )\n }\n}\n\n// ─── Binary Encrypt / Decrypt ────────\n\n/**\n * Encrypt raw bytes with AES-256-GCM using a fresh random IV.\n * Used by the attachment store so binary blobs avoid double base64 encoding\n * (the existing `encrypt()` function calls `TextEncoder` on a string — here\n * we pass the `Uint8Array` directly to `subtle.encrypt`).\n */\nexport async function encryptBytes(\n data: Uint8Array,\n dek: CryptoKey,\n): Promise<EncryptResult> {\n const iv = generateIV()\n const ciphertext = await subtle.encrypt(\n { name: 'AES-GCM', iv: iv as BufferSource },\n dek,\n data as unknown as BufferSource,\n )\n return {\n iv: bufferToBase64(iv),\n data: bufferToBase64(ciphertext),\n }\n}\n\n/**\n * Decrypt AES-256-GCM ciphertext back to raw bytes.\n * Counterpart to `encryptBytes`. 
Throws `TamperedError` on auth-tag failure.\n */\nexport async function decryptBytes(\n ivBase64: string,\n dataBase64: string,\n dek: CryptoKey,\n): Promise<Uint8Array> {\n const iv = base64ToBuffer(ivBase64)\n const ciphertext = base64ToBuffer(dataBase64)\n try {\n const plaintext = await subtle.decrypt(\n { name: 'AES-GCM', iv: iv as BufferSource },\n dek,\n ciphertext as BufferSource,\n )\n return new Uint8Array(plaintext)\n } catch (err) {\n if (err instanceof Error && err.name === 'OperationError') {\n throw new TamperedError()\n }\n throw new DecryptionError(\n err instanceof Error ? err.message : 'Decryption failed',\n )\n }\n}\n\n/**\n * SHA-256 hex digest of raw bytes. Used to derive content-addressed\n * eTags for blob deduplication. Computed on plaintext bytes\n * before compression and encryption so the eTag identifies content, not\n * ciphertext, and survives re-encryption (key rotation, re-upload).\n */\nexport async function sha256Hex(data: Uint8Array): Promise<string> {\n const hash = await subtle.digest('SHA-256', data as unknown as BufferSource)\n return Array.from(new Uint8Array(hash))\n .map((b) => b.toString(16).padStart(2, '0'))\n .join('')\n}\n\n// ─── HMAC-SHA-256 ─────────────────────────────\n\n/**\n * Compute HMAC-SHA-256(key, data) and return hex string.\n *\n * Used to derive content-addressed eTags that are opaque to the store:\n * ```\n * eTag = hmacSha256Hex(blobDEK, plaintext)\n * ```\n *\n * Unlike a plain SHA-256, the HMAC is keyed by the vault-shared `_blob` DEK,\n * so an attacker with store access cannot pre-compute eTags for known files.\n * Deduplication still works within a vault (same key + same content = same eTag).\n */\nexport async function hmacSha256Hex(key: CryptoKey, data: Uint8Array): Promise<string> {\n // Export AES-GCM DEK raw bytes → import as HMAC key\n const rawKey = await subtle.exportKey('raw', key)\n const hmacKey = await subtle.importKey(\n 'raw',\n rawKey,\n { name: 'HMAC', hash: 'SHA-256' },\n false,\n 
['sign'],\n )\n const sig = await subtle.sign('HMAC', hmacKey, data as unknown as BufferSource)\n return Array.from(new Uint8Array(sig))\n .map((b) => b.toString(16).padStart(2, '0'))\n .join('')\n}\n\n// ─── AAD-aware Binary Encrypt / Decrypt ──\n\n/**\n * Encrypt raw bytes with AES-256-GCM using Additional Authenticated Data.\n *\n * The AAD binds each chunk to its parent blob and position, preventing\n * chunk reorder, substitution, and truncation attacks:\n * ```\n * AAD = UTF-8(\"{eTag}:{chunkIndex}:{chunkCount}\")\n * ```\n *\n * The AAD is NOT stored — the reader reconstructs it from `BlobObject`\n * metadata and passes it to `decryptBytesWithAAD`.\n */\nexport async function encryptBytesWithAAD(\n data: Uint8Array,\n dek: CryptoKey,\n aad: Uint8Array,\n): Promise<EncryptResult> {\n const iv = generateIV()\n const ciphertext = await subtle.encrypt(\n {\n name: 'AES-GCM',\n iv: iv as BufferSource,\n additionalData: aad as BufferSource,\n },\n dek,\n data as unknown as BufferSource,\n )\n return {\n iv: bufferToBase64(iv),\n data: bufferToBase64(ciphertext),\n }\n}\n\n/**\n * Decrypt AES-256-GCM ciphertext with AAD verification.\n *\n * If the AAD does not match the one used at encryption time (e.g. because\n * a chunk was reordered or substituted from another blob), the GCM auth\n * tag fails and this throws `TamperedError`.\n */\nexport async function decryptBytesWithAAD(\n ivBase64: string,\n dataBase64: string,\n dek: CryptoKey,\n aad: Uint8Array,\n): Promise<Uint8Array> {\n const iv = base64ToBuffer(ivBase64)\n const ciphertext = base64ToBuffer(dataBase64)\n try {\n const plaintext = await subtle.decrypt(\n {\n name: 'AES-GCM',\n iv: iv as BufferSource,\n additionalData: aad as BufferSource,\n },\n dek,\n ciphertext as BufferSource,\n )\n return new Uint8Array(plaintext)\n } catch (err) {\n if (err instanceof Error && err.name === 'OperationError') {\n throw new TamperedError()\n }\n throw new DecryptionError(\n err instanceof Error ? 
err.message : 'Decryption failed',\n )\n }\n}\n\n// ─── Presence Key Derivation ──────────────────────────────\n\n/**\n * Derive an AES-256-GCM presence key from a collection DEK using HKDF-SHA256.\n *\n * The presence key is domain-separated from the data DEK by the fixed salt\n * `'noydb-presence'` and the `info` = collection name. This means:\n * - The adapter never sees the presence key.\n * - Presence payloads rotate automatically when the collection DEK is rotated.\n * - Revoked users cannot derive the new presence key after a DEK rotation.\n *\n * @param dek The collection's AES-256-GCM DEK (extractable).\n * @param collectionName Used as the HKDF `info` parameter for domain separation.\n * @returns A non-extractable AES-256-GCM key suitable for presence payload encryption.\n */\nexport async function derivePresenceKey(dek: CryptoKey, collectionName: string): Promise<CryptoKey> {\n // Step 1: export DEK raw bytes\n const rawDek = await subtle.exportKey('raw', dek)\n\n // Step 2: import as HKDF key material\n const hkdfKey = await subtle.importKey(\n 'raw',\n rawDek,\n 'HKDF',\n false,\n ['deriveBits'],\n )\n\n // Step 3: derive 256 bits with salt='noydb-presence' and info=collectionName\n const salt = new TextEncoder().encode('noydb-presence')\n const info = new TextEncoder().encode(collectionName)\n const bits = await subtle.deriveBits(\n { name: 'HKDF', hash: 'SHA-256', salt, info },\n hkdfKey,\n KEY_BITS,\n )\n\n // Step 4: import derived bits as AES-GCM key\n return subtle.importKey(\n 'raw',\n bits,\n { name: 'AES-GCM', length: KEY_BITS },\n false,\n ['encrypt', 'decrypt'],\n )\n}\n\n// ─── Deterministic Encryption ────────────────────────────\n\n/**\n * Derive a deterministic 12-byte IV from `{ DEK, context, plaintext }`\n * via HKDF-SHA256. 
Given the same three inputs, the IV is identical, so\n * `encryptDeterministic` produces the same ciphertext on every call —\n * which is precisely what enables blind equality search on encrypted\n * fields.\n *\n * **The side channel this opens.** Two records whose field value is the\n * same produce the same ciphertext. An observer with store access can\n * therefore tell which records share a value — not *what* the value is,\n * but the equivalence class. This is the well-known trade-off of\n * deterministic encryption and is why the feature is strictly opt-in\n * per field, guarded by `acknowledgeDeterministicRisk: true` at\n * collection creation.\n *\n * The context string MUST include the collection name and field name,\n * so:\n * - The same plaintext in two different fields encrypts differently\n * (no cross-field equality leak).\n * - The same plaintext in two different collections (different DEKs)\n * encrypts differently by virtue of the key, even before HKDF\n * domain separation kicks in.\n */\nasync function deriveDeterministicIV(\n dek: CryptoKey,\n context: string,\n plaintext: string,\n): Promise<Uint8Array> {\n const rawDek = await subtle.exportKey('raw', dek)\n const hkdfKey = await subtle.importKey('raw', rawDek, 'HKDF', false, ['deriveBits'])\n const salt = new TextEncoder().encode('noydb-deterministic-v1')\n const info = new TextEncoder().encode(`${context}\\x00${plaintext}`)\n const bits = await subtle.deriveBits(\n { name: 'HKDF', hash: 'SHA-256', salt, info },\n hkdfKey,\n IV_BYTES * 8,\n )\n return new Uint8Array(bits)\n}\n\n/**\n * Encrypt a plaintext string with AES-256-GCM and a deterministic,\n * HKDF-derived IV.\n *\n * The same `{ dek, context, plaintext }` triple always produces the\n * same `{ iv, data }` — call this twice and you can string-compare the\n * ciphertexts to check equality of the inputs without decrypting them.\n *\n * @param context Domain-separation string — by convention\n * `'<collection>/<field>'`. 
Different contexts encrypt\n * the same plaintext to different ciphertexts, so\n * `email` in collection `users` does not collide with\n * `email` in collection `customers`.\n */\nexport async function encryptDeterministic(\n plaintext: string,\n dek: CryptoKey,\n context: string,\n): Promise<EncryptResult> {\n const iv = await deriveDeterministicIV(dek, context, plaintext)\n const encoded = new TextEncoder().encode(plaintext)\n const ciphertext = await subtle.encrypt(\n { name: 'AES-GCM', iv: iv as BufferSource },\n dek,\n encoded,\n )\n return {\n iv: bufferToBase64(iv),\n data: bufferToBase64(ciphertext),\n }\n}\n\n/**\n * Counterpart to {@link encryptDeterministic}. The IV is stored\n * alongside the ciphertext (exactly like the randomized path), so\n * decrypt uses the stored IV and verifies the GCM auth tag — a tampered\n * ciphertext throws `TamperedError` just like randomized AES-GCM.\n */\nexport async function decryptDeterministic(\n ivBase64: string,\n dataBase64: string,\n dek: CryptoKey,\n): Promise<string> {\n return decrypt(ivBase64, dataBase64, dek)\n}\n\n// ─── Random Generation ─────────────────────────────────────────────────\n\n/** Generate a random 12-byte IV for AES-GCM. */\nexport function generateIV(): Uint8Array {\n return globalThis.crypto.getRandomValues(new Uint8Array(IV_BYTES))\n}\n\n/** Generate a random 32-byte salt for PBKDF2. */\nexport function generateSalt(): Uint8Array {\n return globalThis.crypto.getRandomValues(new Uint8Array(SALT_BYTES))\n}\n\n// ─── Base64 Helpers ────────────────────────────────────────────────────\n\nexport function bufferToBase64(buffer: ArrayBuffer | Uint8Array): string {\n const bytes = buffer instanceof Uint8Array ? 
buffer : new Uint8Array(buffer)\n let binary = ''\n for (let i = 0; i < bytes.length; i++) {\n binary += String.fromCharCode(bytes[i]!)\n }\n return btoa(binary)\n}\n\nexport function base64ToBuffer(base64: string): Uint8Array<ArrayBuffer> {\n const binary = atob(base64)\n const bytes = new Uint8Array(binary.length)\n for (let i = 0; i < binary.length; i++) {\n bytes[i] = binary.charCodeAt(i)\n }\n return bytes\n}\n"],"mappings":";;;;;;;AAwCA,IAAM,oBAAoB;AAC1B,IAAM,aAAa;AACnB,IAAM,WAAW;AACjB,IAAM,WAAW;AAEjB,IAAM,SAAS,WAAW,OAAO;AAKjC,eAAsB,UACpB,YACA,MACoB;AACpB,QAAM,cAAc,MAAM,OAAO;AAAA,IAC/B;AAAA,IACA,IAAI,YAAY,EAAE,OAAO,UAAU;AAAA,IACnC;AAAA,IACA;AAAA,IACA,CAAC,WAAW;AAAA,EACd;AAEA,SAAO,OAAO;AAAA,IACZ;AAAA,MACE,MAAM;AAAA,MACN;AAAA,MACA,YAAY;AAAA,MACZ,MAAM;AAAA,IACR;AAAA,IACA;AAAA,IACA,EAAE,MAAM,UAAU,QAAQ,SAAS;AAAA,IACnC;AAAA,IACA,CAAC,WAAW,WAAW;AAAA,EACzB;AACF;AAKA,eAAsB,cAAkC;AACtD,SAAO,OAAO;AAAA,IACZ,EAAE,MAAM,WAAW,QAAQ,SAAS;AAAA,IACpC;AAAA;AAAA,IACA,CAAC,WAAW,SAAS;AAAA,EACvB;AACF;AAKA,eAAsB,QAAQ,KAAgB,KAAiC;AAC7E,QAAM,UAAU,MAAM,OAAO,QAAQ,OAAO,KAAK,KAAK,QAAQ;AAC9D,SAAO,eAAe,OAAO;AAC/B;AAGA,eAAsB,UACpB,eACA,KACoB;AACpB,MAAI;AACF,WAAO,MAAM,OAAO;AAAA,MAClB;AAAA,MACA,eAAe,aAAa;AAAA,MAC5B;AAAA,MACA;AAAA,MACA,EAAE,MAAM,WAAW,QAAQ,SAAS;AAAA,MACpC;AAAA,MACA,CAAC,WAAW,SAAS;AAAA,IACvB;AAAA,EACF,QAAQ;AACN,UAAM,IAAI,gBAAgB;AAAA,EAC5B;AACF;AAUA,eAAsB,QACpB,WACA,KACwB;AACxB,QAAM,KAAK,WAAW;AACtB,QAAM,UAAU,IAAI,YAAY,EAAE,OAAO,SAAS;AAElD,QAAM,aAAa,MAAM,OAAO;AAAA,IAC9B,EAAE,MAAM,WAAW,GAAuB;AAAA,IAC1C;AAAA,IACA;AAAA,EACF;AAEA,SAAO;AAAA,IACL,IAAI,eAAe,EAAE;AAAA,IACrB,MAAM,eAAe,UAAU;AAAA,EACjC;AACF;AAGA,eAAsB,QACpB,UACA,YACA,KACiB;AACjB,QAAM,KAAK,eAAe,QAAQ;AAClC,QAAM,aAAa,eAAe,UAAU;AAE5C,MAAI;AACF,UAAM,YAAY,MAAM,OAAO;AAAA,MAC7B,EAAE,MAAM,WAAW,GAAuB;AAAA,MAC1C;AAAA,MACA;AAAA,IACF;AACA,WAAO,IAAI,YAAY,EAAE,OAAO,SAAS;AAAA,EAC3C,SAAS,KAAK;AACZ,QAAI,eAAe,SAAS,IAAI,SAAS,kBAAkB;AACzD,YAAM,IAAI,cAAc;AAAA,IAC1B;AACA,UAAM,IAAI;AAAA,MACR,eAAe,QAAQ,IAAI,UAAU;AAAA,IACvC;AAAA,EACF;AACF;AAUA,eAAsB,aACp
B,MACA,KACwB;AACxB,QAAM,KAAK,WAAW;AACtB,QAAM,aAAa,MAAM,OAAO;AAAA,IAC9B,EAAE,MAAM,WAAW,GAAuB;AAAA,IAC1C;AAAA,IACA;AAAA,EACF;AACA,SAAO;AAAA,IACL,IAAI,eAAe,EAAE;AAAA,IACrB,MAAM,eAAe,UAAU;AAAA,EACjC;AACF;AAMA,eAAsB,aACpB,UACA,YACA,KACqB;AACrB,QAAM,KAAK,eAAe,QAAQ;AAClC,QAAM,aAAa,eAAe,UAAU;AAC5C,MAAI;AACF,UAAM,YAAY,MAAM,OAAO;AAAA,MAC7B,EAAE,MAAM,WAAW,GAAuB;AAAA,MAC1C;AAAA,MACA;AAAA,IACF;AACA,WAAO,IAAI,WAAW,SAAS;AAAA,EACjC,SAAS,KAAK;AACZ,QAAI,eAAe,SAAS,IAAI,SAAS,kBAAkB;AACzD,YAAM,IAAI,cAAc;AAAA,IAC1B;AACA,UAAM,IAAI;AAAA,MACR,eAAe,QAAQ,IAAI,UAAU;AAAA,IACvC;AAAA,EACF;AACF;AAQA,eAAsB,UAAU,MAAmC;AACjE,QAAM,OAAO,MAAM,OAAO,OAAO,WAAW,IAA+B;AAC3E,SAAO,MAAM,KAAK,IAAI,WAAW,IAAI,CAAC,EACnC,IAAI,CAAC,MAAM,EAAE,SAAS,EAAE,EAAE,SAAS,GAAG,GAAG,CAAC,EAC1C,KAAK,EAAE;AACZ;AAgBA,eAAsB,cAAc,KAAgB,MAAmC;AAErF,QAAM,SAAS,MAAM,OAAO,UAAU,OAAO,GAAG;AAChD,QAAM,UAAU,MAAM,OAAO;AAAA,IAC3B;AAAA,IACA;AAAA,IACA,EAAE,MAAM,QAAQ,MAAM,UAAU;AAAA,IAChC;AAAA,IACA,CAAC,MAAM;AAAA,EACT;AACA,QAAM,MAAM,MAAM,OAAO,KAAK,QAAQ,SAAS,IAA+B;AAC9E,SAAO,MAAM,KAAK,IAAI,WAAW,GAAG,CAAC,EAClC,IAAI,CAAC,MAAM,EAAE,SAAS,EAAE,EAAE,SAAS,GAAG,GAAG,CAAC,EAC1C,KAAK,EAAE;AACZ;AAgBA,eAAsB,oBACpB,MACA,KACA,KACwB;AACxB,QAAM,KAAK,WAAW;AACtB,QAAM,aAAa,MAAM,OAAO;AAAA,IAC9B;AAAA,MACE,MAAM;AAAA,MACN;AAAA,MACA,gBAAgB;AAAA,IAClB;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACA,SAAO;AAAA,IACL,IAAI,eAAe,EAAE;AAAA,IACrB,MAAM,eAAe,UAAU;AAAA,EACjC;AACF;AASA,eAAsB,oBACpB,UACA,YACA,KACA,KACqB;AACrB,QAAM,KAAK,eAAe,QAAQ;AAClC,QAAM,aAAa,eAAe,UAAU;AAC5C,MAAI;AACF,UAAM,YAAY,MAAM,OAAO;AAAA,MAC7B;AAAA,QACE,MAAM;AAAA,QACN;AAAA,QACA,gBAAgB;AAAA,MAClB;AAAA,MACA;AAAA,MACA;AAAA,IACF;AACA,WAAO,IAAI,WAAW,SAAS;AAAA,EACjC,SAAS,KAAK;AACZ,QAAI,eAAe,SAAS,IAAI,SAAS,kBAAkB;AACzD,YAAM,IAAI,cAAc;AAAA,IAC1B;AACA,UAAM,IAAI;AAAA,MACR,eAAe,QAAQ,IAAI,UAAU;AAAA,IACvC;AAAA,EACF;AACF;AAiBA,eAAsB,kBAAkB,KAAgB,gBAA4C;AAElG,QAAM,SAAS,MAAM,OAAO,UAAU,OAAO,GAAG;AAGhD,QAAM,UAAU,MAAM,OAAO;AAAA,IAC3B;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,CAAC,YAAY;AAAA,EACf;AAGA,QAAM,OAAO,IAAI,YAAY,EAAE,OAAO,gBAAgB;AACtD,QAA
M,OAAO,IAAI,YAAY,EAAE,OAAO,cAAc;AACpD,QAAM,OAAO,MAAM,OAAO;AAAA,IACxB,EAAE,MAAM,QAAQ,MAAM,WAAW,MAAM,KAAK;AAAA,IAC5C;AAAA,IACA;AAAA,EACF;AAGA,SAAO,OAAO;AAAA,IACZ;AAAA,IACA;AAAA,IACA,EAAE,MAAM,WAAW,QAAQ,SAAS;AAAA,IACpC;AAAA,IACA,CAAC,WAAW,SAAS;AAAA,EACvB;AACF;AA2BA,eAAe,sBACb,KACA,SACA,WACqB;AACrB,QAAM,SAAS,MAAM,OAAO,UAAU,OAAO,GAAG;AAChD,QAAM,UAAU,MAAM,OAAO,UAAU,OAAO,QAAQ,QAAQ,OAAO,CAAC,YAAY,CAAC;AACnF,QAAM,OAAO,IAAI,YAAY,EAAE,OAAO,wBAAwB;AAC9D,QAAM,OAAO,IAAI,YAAY,EAAE,OAAO,GAAG,OAAO,KAAO,SAAS,EAAE;AAClE,QAAM,OAAO,MAAM,OAAO;AAAA,IACxB,EAAE,MAAM,QAAQ,MAAM,WAAW,MAAM,KAAK;AAAA,IAC5C;AAAA,IACA,WAAW;AAAA,EACb;AACA,SAAO,IAAI,WAAW,IAAI;AAC5B;AAgBA,eAAsB,qBACpB,WACA,KACA,SACwB;AACxB,QAAM,KAAK,MAAM,sBAAsB,KAAK,SAAS,SAAS;AAC9D,QAAM,UAAU,IAAI,YAAY,EAAE,OAAO,SAAS;AAClD,QAAM,aAAa,MAAM,OAAO;AAAA,IAC9B,EAAE,MAAM,WAAW,GAAuB;AAAA,IAC1C;AAAA,IACA;AAAA,EACF;AACA,SAAO;AAAA,IACL,IAAI,eAAe,EAAE;AAAA,IACrB,MAAM,eAAe,UAAU;AAAA,EACjC;AACF;AAQA,eAAsB,qBACpB,UACA,YACA,KACiB;AACjB,SAAO,QAAQ,UAAU,YAAY,GAAG;AAC1C;AAKO,SAAS,aAAyB;AACvC,SAAO,WAAW,OAAO,gBAAgB,IAAI,WAAW,QAAQ,CAAC;AACnE;AAGO,SAAS,eAA2B;AACzC,SAAO,WAAW,OAAO,gBAAgB,IAAI,WAAW,UAAU,CAAC;AACrE;AAIO,SAAS,eAAe,QAA0C;AACvE,QAAM,QAAQ,kBAAkB,aAAa,SAAS,IAAI,WAAW,MAAM;AAC3E,MAAI,SAAS;AACb,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,cAAU,OAAO,aAAa,MAAM,CAAC,CAAE;AAAA,EACzC;AACA,SAAO,KAAK,MAAM;AACpB;AAEO,SAAS,eAAe,QAAyC;AACtE,QAAM,SAAS,KAAK,MAAM;AAC1B,QAAM,QAAQ,IAAI,WAAW,OAAO,MAAM;AAC1C,WAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,UAAM,CAAC,IAAI,OAAO,WAAW,CAAC;AAAA,EAChC;AACA,SAAO;AACT;","names":[]}
|
|
@@ -0,0 +1,132 @@
|
|
|
1
|
+
import {
|
|
2
|
+
readPath
|
|
3
|
+
} from "./chunk-M5INGEFC.js";
|
|
4
|
+
|
|
5
|
+
// src/indexing/eager-indexes.ts
|
|
6
|
+
var CollectionIndexes = class {
  // field name → { field, buckets: Map<stringifiedValue, Set<recordId>> }
  indexes = /* @__PURE__ */ new Map();
  /**
   * Register an index on `field`. Records added afterwards are tracked
   * under it; redeclaring the same field is a no-op.
   */
  declare(field) {
    if (!this.indexes.has(field)) {
      this.indexes.set(field, { field, buckets: /* @__PURE__ */ new Map() });
    }
  }
  /** Whether `field` has a declared index. */
  has(field) {
    return this.indexes.has(field);
  }
  /** Declared field names, in declaration order. */
  fields() {
    return Array.from(this.indexes.keys());
  }
  /**
   * Rebuild every declared index from a full record snapshot.
   * Runs once per hydration; cost is O(records × indexes).
   */
  build(records) {
    for (const idx of this.indexes.values()) {
      idx.buckets.clear();
      for (const entry of records) {
        addToIndex(idx, entry.id, entry.record);
      }
    }
  }
  /**
   * Insert or update one record across all indexes. For the update path,
   * pass the prior record so its stale bucket entries are cleared first;
   * pass null for a brand-new record.
   */
  upsert(id, newRecord, previousRecord) {
    if (this.indexes.size === 0) return;
    if (previousRecord !== null) {
      this.remove(id, previousRecord);
    }
    for (const idx of this.indexes.values()) {
      addToIndex(idx, id, newRecord);
    }
  }
  /**
   * Remove one record from every index — the delete path, and the first
   * half of `upsert` for updates.
   */
  remove(id, record) {
    if (this.indexes.size === 0) return;
    for (const idx of this.indexes.values()) {
      removeFromIndex(idx, id, record);
    }
  }
  /** Discard all bucket data while keeping the declarations themselves. */
  clear() {
    for (const idx of this.indexes.values()) {
      idx.buckets.clear();
    }
  }
  /**
   * Equality lookup. Returns null when `field` has no index (the caller
   * falls back to a linear scan); otherwise the set of matching ids.
   * The returned Set is the index's internal storage — treat as read-only.
   */
  lookupEqual(field, value) {
    const idx = this.indexes.get(field);
    if (!idx) return null;
    return idx.buckets.get(stringifyKey(value)) ?? EMPTY_SET;
  }
  /**
   * Membership lookup: the union of ids whose `field` matches any of
   * `values`. Returns null when `field` has no index.
   */
  lookupIn(field, values) {
    const idx = this.indexes.get(field);
    if (!idx) return null;
    const result = /* @__PURE__ */ new Set();
    for (const value of values) {
      const bucket = idx.buckets.get(stringifyKey(value));
      if (bucket) {
        for (const id of bucket) {
          result.add(id);
        }
      }
    }
    return result;
  }
};
|
|
100
|
+
// Shared empty result returned by lookupEqual on a bucket miss.
// Callers must treat it as read-only — it is a single shared instance.
var EMPTY_SET = /* @__PURE__ */ new Set();
|
|
101
|
+
/**
 * Map an indexed field value to its bucket key string.
 * null/undefined collapse to a NUL-framed sentinel that cannot collide
 * with a real string value; Dates use their ISO form; any other object
 * collapses to a single opaque sentinel.
 */
function stringifyKey(value) {
  if (value === null || value === undefined) return "\0NULL\0";
  switch (typeof value) {
    case "string":
      return value;
    case "number":
    case "boolean":
      return String(value);
    default:
      return value instanceof Date ? value.toISOString() : "\0OBJECT\0";
  }
}
|
|
108
|
+
/**
 * Insert `id` into the bucket for `record`'s value at `idx.field`,
 * creating the bucket on first use. Records whose indexed value is
 * null/undefined are never indexed — queries on such values fall back
 * to a linear scan.
 */
function addToIndex(idx, id, record) {
  const value = readPath(record, idx.field);
  if (value == null) return;
  const key = stringifyKey(value);
  const existing = idx.buckets.get(key);
  if (existing) {
    existing.add(id);
  } else {
    idx.buckets.set(key, new Set([id]));
  }
}
|
|
119
|
+
/**
 * Remove `id` from the bucket for `record`'s value at `idx.field`.
 * No-op when the value is null/undefined (such records were never
 * indexed) or the bucket does not exist.
 */
function removeFromIndex(idx, id, record) {
  const value = readPath(record, idx.field);
  if (value == null) return;
  const key = stringifyKey(value);
  const bucket = idx.buckets.get(key);
  if (!bucket) return;
  bucket.delete(id);
  // Drop empty buckets so the map does not accumulate dead keys.
  if (bucket.size === 0) {
    idx.buckets.delete(key);
  }
}
|
|
128
|
+
|
|
129
|
+
export {
|
|
130
|
+
CollectionIndexes
|
|
131
|
+
};
|
|
132
|
+
//# sourceMappingURL=chunk-NPC4LFV5.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/indexing/eager-indexes.ts"],"sourcesContent":["/**\n * Secondary indexes for the query DSL.\n *\n * ships **in-memory hash indexes**:\n * - Built during `Collection.ensureHydrated()` from the decrypted cache\n * - Maintained incrementally on `put` and `delete`\n * - Consulted by the query executor for `==` and `in` operators on\n * indexed fields, falling back to a linear scan otherwise\n * - Live entirely in memory — no adapter writes for the index itself\n *\n * Persistent encrypted index blobs (the spec's \"store as a separate\n * AES-256-GCM blob\" note) are deferred to a follow-up issue. The reasons\n * are documented in the PR body — short version: at the target\n * scale of 1K–50K records, building the index during hydrate is free,\n * so persistence buys nothing measurable.\n */\n\nimport { readPath } from '../query/predicate.js'\n\n/**\n * Index declaration accepted by `Collection`'s constructor.\n *\n * Accepts:\n * - `string` — a single-field hash index (`'clientId'`)\n * - `{ fields: [...] }` or `readonly string[]` — a composite index\n * over an ordered field tuple. Only lazy-mode\n * collections consume composite declarations today; eager mode\n * silently treats a composite as equivalent to declaring each\n * component field as its own single-field index.\n *\n * Additive variants (unique constraints, partial indexes) will land as\n * further union members without breaking existing declarations.\n */\nexport type IndexDef = string | { readonly fields: readonly string[] } | readonly string[]\n\n/**\n * Internal representation of a built hash index.\n *\n * Maps stringified field values to the set of record ids whose value\n * for that field matches. 
Stringification keeps the index simple and\n * works uniformly for primitives (`'open'`, `'42'`, `'true'`).\n *\n * Records whose indexed field is `undefined` or `null` are NOT inserted\n * — `query().where('field', '==', undefined)` falls back to a linear\n * scan, which is the conservative behavior.\n */\nexport interface HashIndex {\n readonly field: string\n readonly buckets: Map<string, Set<string>>\n}\n\n/**\n * Container for all indexes on a single collection.\n *\n * Methods are pure with respect to the in-memory `buckets` Map — they\n * never touch the adapter or the keyring. The Collection class owns\n * lifecycle (build on hydrate, maintain on put/delete).\n */\nexport class CollectionIndexes {\n private readonly indexes = new Map<string, HashIndex>()\n\n /**\n * Declare an index. Subsequent record additions are tracked under it.\n * Calling this twice for the same field is a no-op (idempotent).\n */\n declare(field: string): void {\n if (this.indexes.has(field)) return\n this.indexes.set(field, { field, buckets: new Map() })\n }\n\n /** True if the given field has a declared index. */\n has(field: string): boolean {\n return this.indexes.has(field)\n }\n\n /** All declared field names, in declaration order. */\n fields(): string[] {\n return [...this.indexes.keys()]\n }\n\n /**\n * Build all declared indexes from a snapshot of records.\n * Called once per hydration. O(N × indexes.size).\n */\n build<T>(records: ReadonlyArray<{ id: string; record: T }>): void {\n for (const idx of this.indexes.values()) {\n idx.buckets.clear()\n for (const { id, record } of records) {\n addToIndex(idx, id, record)\n }\n }\n }\n\n /**\n * Insert or update a single record across all indexes.\n * Called by `Collection.put()` after the encrypted write succeeds.\n *\n * If `previousRecord` is provided, the record is removed from any old\n * buckets first — this is the update path. 
Pass `null` for fresh adds.\n */\n upsert<T>(id: string, newRecord: T, previousRecord: T | null): void {\n if (this.indexes.size === 0) return\n if (previousRecord !== null) {\n this.remove(id, previousRecord)\n }\n for (const idx of this.indexes.values()) {\n addToIndex(idx, id, newRecord)\n }\n }\n\n /**\n * Remove a record from all indexes. Called by `Collection.delete()`\n * (and as the first half of `upsert` for the update path).\n */\n remove<T>(id: string, record: T): void {\n if (this.indexes.size === 0) return\n for (const idx of this.indexes.values()) {\n removeFromIndex(idx, id, record)\n }\n }\n\n /** Drop all index data. Called when the collection is invalidated. */\n clear(): void {\n for (const idx of this.indexes.values()) {\n idx.buckets.clear()\n }\n }\n\n /**\n * Equality lookup: return the set of record ids whose `field` matches\n * the given value. Returns `null` if no index covers the field — the\n * caller should fall back to a linear scan.\n *\n * The returned Set is a reference to the index's internal storage —\n * callers must NOT mutate it.\n */\n lookupEqual(field: string, value: unknown): ReadonlySet<string> | null {\n const idx = this.indexes.get(field)\n if (!idx) return null\n const key = stringifyKey(value)\n return idx.buckets.get(key) ?? EMPTY_SET\n }\n\n /**\n * Set lookup: return the union of record ids whose `field` matches any\n * of the given values. 
Returns `null` if no index covers the field.\n */\n lookupIn(field: string, values: readonly unknown[]): ReadonlySet<string> | null {\n const idx = this.indexes.get(field)\n if (!idx) return null\n const out = new Set<string>()\n for (const value of values) {\n const key = stringifyKey(value)\n const bucket = idx.buckets.get(key)\n if (bucket) {\n for (const id of bucket) out.add(id)\n }\n }\n return out\n }\n}\n\nconst EMPTY_SET: ReadonlySet<string> = new Set()\n\n/**\n * Stringify a value into a stable bucket key.\n *\n * `null`/`undefined` produce a sentinel that records will never match\n * (so we never index nullish values — `where('x', '==', null)` falls back\n * to a linear scan). Numbers, booleans, strings, and Date objects are\n * coerced via `String()`. Objects produce a sentinel that no real record\n * will match — querying with object values is a code smell.\n */\nfunction stringifyKey(value: unknown): string {\n if (value === null || value === undefined) return '\\0NULL\\0'\n if (typeof value === 'string') return value\n if (typeof value === 'number' || typeof value === 'boolean') return String(value)\n if (value instanceof Date) return value.toISOString()\n return '\\0OBJECT\\0'\n}\n\nfunction addToIndex<T>(idx: HashIndex, id: string, record: T): void {\n const value = readPath(record, idx.field)\n if (value === null || value === undefined) return\n const key = stringifyKey(value)\n let bucket = idx.buckets.get(key)\n if (!bucket) {\n bucket = new Set()\n idx.buckets.set(key, bucket)\n }\n bucket.add(id)\n}\n\nfunction removeFromIndex<T>(idx: HashIndex, id: string, record: T): void {\n const value = readPath(record, idx.field)\n if (value === null || value === undefined) return\n const key = stringifyKey(value)\n const bucket = idx.buckets.get(key)\n if (!bucket) return\n bucket.delete(id)\n // Clean up empty buckets so the Map doesn't accumulate dead keys.\n if (bucket.size === 0) 
idx.buckets.delete(key)\n}\n"],"mappings":";;;;;AA0DO,IAAM,oBAAN,MAAwB;AAAA,EACZ,UAAU,oBAAI,IAAuB;AAAA;AAAA;AAAA;AAAA;AAAA,EAMtD,QAAQ,OAAqB;AAC3B,QAAI,KAAK,QAAQ,IAAI,KAAK,EAAG;AAC7B,SAAK,QAAQ,IAAI,OAAO,EAAE,OAAO,SAAS,oBAAI,IAAI,EAAE,CAAC;AAAA,EACvD;AAAA;AAAA,EAGA,IAAI,OAAwB;AAC1B,WAAO,KAAK,QAAQ,IAAI,KAAK;AAAA,EAC/B;AAAA;AAAA,EAGA,SAAmB;AACjB,WAAO,CAAC,GAAG,KAAK,QAAQ,KAAK,CAAC;AAAA,EAChC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAS,SAAyD;AAChE,eAAW,OAAO,KAAK,QAAQ,OAAO,GAAG;AACvC,UAAI,QAAQ,MAAM;AAClB,iBAAW,EAAE,IAAI,OAAO,KAAK,SAAS;AACpC,mBAAW,KAAK,IAAI,MAAM;AAAA,MAC5B;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,OAAU,IAAY,WAAc,gBAAgC;AAClE,QAAI,KAAK,QAAQ,SAAS,EAAG;AAC7B,QAAI,mBAAmB,MAAM;AAC3B,WAAK,OAAO,IAAI,cAAc;AAAA,IAChC;AACA,eAAW,OAAO,KAAK,QAAQ,OAAO,GAAG;AACvC,iBAAW,KAAK,IAAI,SAAS;AAAA,IAC/B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAAU,IAAY,QAAiB;AACrC,QAAI,KAAK,QAAQ,SAAS,EAAG;AAC7B,eAAW,OAAO,KAAK,QAAQ,OAAO,GAAG;AACvC,sBAAgB,KAAK,IAAI,MAAM;AAAA,IACjC;AAAA,EACF;AAAA;AAAA,EAGA,QAAc;AACZ,eAAW,OAAO,KAAK,QAAQ,OAAO,GAAG;AACvC,UAAI,QAAQ,MAAM;AAAA,IACpB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,YAAY,OAAe,OAA4C;AACrE,UAAM,MAAM,KAAK,QAAQ,IAAI,KAAK;AAClC,QAAI,CAAC,IAAK,QAAO;AACjB,UAAM,MAAM,aAAa,KAAK;AAC9B,WAAO,IAAI,QAAQ,IAAI,GAAG,KAAK;AAAA,EACjC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,SAAS,OAAe,QAAwD;AAC9E,UAAM,MAAM,KAAK,QAAQ,IAAI,KAAK;AAClC,QAAI,CAAC,IAAK,QAAO;AACjB,UAAM,MAAM,oBAAI,IAAY;AAC5B,eAAW,SAAS,QAAQ;AAC1B,YAAM,MAAM,aAAa,KAAK;AAC9B,YAAM,SAAS,IAAI,QAAQ,IAAI,GAAG;AAClC,UAAI,QAAQ;AACV,mBAAW,MAAM,OAAQ,KAAI,IAAI,EAAE;AAAA,MACrC;AAAA,IACF;AACA,WAAO;AAAA,EACT;AACF;AAEA,IAAM,YAAiC,oBAAI,IAAI;AAW/C,SAAS,aAAa,OAAwB;AAC5C,MAAI,UAAU,QAAQ,UAAU,OAAW,QAAO;AAClD,MAAI,OAAO,UAAU,SAAU,QAAO;AACtC,MAAI,OAAO,UAAU,YAAY,OAAO,UAAU,UAAW,QAAO,OAAO,KAAK;AAChF,MAAI,iBAAiB,KAAM,QAAO,MAAM,YAAY;AACpD,SAAO;AACT;AAEA,SAAS,WAAc,KAAgB,IAAY,QAAiB;AAClE,QAAM,QAAQ,SAAS,QAAQ,IAAI,KAAK;AACxC,MAAI,UAAU,QAAQ,UAAU,OAAW;AAC3C,QAAM,MAAM,aAAa,KAAK;AAC9B,MAAI,SAAS,IAAI,QAAQ,IAAI,GAAG;AAChC,MAAI,CA
AC,QAAQ;AACX,aAAS,oBAAI,IAAI;AACjB,QAAI,QAAQ,IAAI,KAAK,MAAM;AAAA,EAC7B;AACA,SAAO,IAAI,EAAE;AACf;AAEA,SAAS,gBAAmB,KAAgB,IAAY,QAAiB;AACvE,QAAM,QAAQ,SAAS,QAAQ,IAAI,KAAK;AACxC,MAAI,UAAU,QAAQ,UAAU,OAAW;AAC3C,QAAM,MAAM,aAAa,KAAK;AAC9B,QAAM,SAAS,IAAI,QAAQ,IAAI,GAAG;AAClC,MAAI,CAAC,OAAQ;AACb,SAAO,OAAO,EAAE;AAEhB,MAAI,OAAO,SAAS,EAAG,KAAI,QAAQ,OAAO,GAAG;AAC/C;","names":[]}
|