@noy-db/hub 0.1.0-pre.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +197 -0
- package/dist/aggregate/index.cjs +476 -0
- package/dist/aggregate/index.cjs.map +1 -0
- package/dist/aggregate/index.d.cts +38 -0
- package/dist/aggregate/index.d.ts +38 -0
- package/dist/aggregate/index.js +53 -0
- package/dist/aggregate/index.js.map +1 -0
- package/dist/blobs/index.cjs +1480 -0
- package/dist/blobs/index.cjs.map +1 -0
- package/dist/blobs/index.d.cts +45 -0
- package/dist/blobs/index.d.ts +45 -0
- package/dist/blobs/index.js +48 -0
- package/dist/blobs/index.js.map +1 -0
- package/dist/bundle/index.cjs +436 -0
- package/dist/bundle/index.cjs.map +1 -0
- package/dist/bundle/index.d.cts +7 -0
- package/dist/bundle/index.d.ts +7 -0
- package/dist/bundle/index.js +40 -0
- package/dist/bundle/index.js.map +1 -0
- package/dist/chunk-2QR2PQTT.js +217 -0
- package/dist/chunk-2QR2PQTT.js.map +1 -0
- package/dist/chunk-4OWFYIDQ.js +79 -0
- package/dist/chunk-4OWFYIDQ.js.map +1 -0
- package/dist/chunk-5AATM2M2.js +90 -0
- package/dist/chunk-5AATM2M2.js.map +1 -0
- package/dist/chunk-ACLDOTNQ.js +543 -0
- package/dist/chunk-ACLDOTNQ.js.map +1 -0
- package/dist/chunk-BTDCBVJW.js +160 -0
- package/dist/chunk-BTDCBVJW.js.map +1 -0
- package/dist/chunk-CIMZBAZB.js +72 -0
- package/dist/chunk-CIMZBAZB.js.map +1 -0
- package/dist/chunk-E445ICYI.js +365 -0
- package/dist/chunk-E445ICYI.js.map +1 -0
- package/dist/chunk-EXQRC2L4.js +722 -0
- package/dist/chunk-EXQRC2L4.js.map +1 -0
- package/dist/chunk-FZU343FL.js +32 -0
- package/dist/chunk-FZU343FL.js.map +1 -0
- package/dist/chunk-GJILMRPO.js +354 -0
- package/dist/chunk-GJILMRPO.js.map +1 -0
- package/dist/chunk-GOUT6DND.js +1285 -0
- package/dist/chunk-GOUT6DND.js.map +1 -0
- package/dist/chunk-J66GRPNH.js +111 -0
- package/dist/chunk-J66GRPNH.js.map +1 -0
- package/dist/chunk-M2F2JAWB.js +464 -0
- package/dist/chunk-M2F2JAWB.js.map +1 -0
- package/dist/chunk-M5INGEFC.js +84 -0
- package/dist/chunk-M5INGEFC.js.map +1 -0
- package/dist/chunk-M62XNWRA.js +72 -0
- package/dist/chunk-M62XNWRA.js.map +1 -0
- package/dist/chunk-MR4424N3.js +275 -0
- package/dist/chunk-MR4424N3.js.map +1 -0
- package/dist/chunk-NPC4LFV5.js +132 -0
- package/dist/chunk-NPC4LFV5.js.map +1 -0
- package/dist/chunk-NXFEYLVG.js +311 -0
- package/dist/chunk-NXFEYLVG.js.map +1 -0
- package/dist/chunk-R36SIKES.js +79 -0
- package/dist/chunk-R36SIKES.js.map +1 -0
- package/dist/chunk-TDR6T5CJ.js +381 -0
- package/dist/chunk-TDR6T5CJ.js.map +1 -0
- package/dist/chunk-UF3BUNQZ.js +1 -0
- package/dist/chunk-UF3BUNQZ.js.map +1 -0
- package/dist/chunk-UQFSPSWG.js +1109 -0
- package/dist/chunk-UQFSPSWG.js.map +1 -0
- package/dist/chunk-USKYUS74.js +793 -0
- package/dist/chunk-USKYUS74.js.map +1 -0
- package/dist/chunk-XCL3WP6J.js +121 -0
- package/dist/chunk-XCL3WP6J.js.map +1 -0
- package/dist/chunk-XHFOENR2.js +680 -0
- package/dist/chunk-XHFOENR2.js.map +1 -0
- package/dist/chunk-ZFKD4QMV.js +430 -0
- package/dist/chunk-ZFKD4QMV.js.map +1 -0
- package/dist/chunk-ZLMV3TUA.js +490 -0
- package/dist/chunk-ZLMV3TUA.js.map +1 -0
- package/dist/chunk-ZRG4V3F5.js +17 -0
- package/dist/chunk-ZRG4V3F5.js.map +1 -0
- package/dist/consent/index.cjs +204 -0
- package/dist/consent/index.cjs.map +1 -0
- package/dist/consent/index.d.cts +24 -0
- package/dist/consent/index.d.ts +24 -0
- package/dist/consent/index.js +23 -0
- package/dist/consent/index.js.map +1 -0
- package/dist/crdt/index.cjs +152 -0
- package/dist/crdt/index.cjs.map +1 -0
- package/dist/crdt/index.d.cts +30 -0
- package/dist/crdt/index.d.ts +30 -0
- package/dist/crdt/index.js +24 -0
- package/dist/crdt/index.js.map +1 -0
- package/dist/crypto-IVKU7YTT.js +44 -0
- package/dist/crypto-IVKU7YTT.js.map +1 -0
- package/dist/delegation-XDJCBTI2.js +16 -0
- package/dist/delegation-XDJCBTI2.js.map +1 -0
- package/dist/dev-unlock-CeXic1xC.d.cts +263 -0
- package/dist/dev-unlock-KrKkcqD3.d.ts +263 -0
- package/dist/hash-9KO1BGxh.d.cts +63 -0
- package/dist/hash-ChfJjRjQ.d.ts +63 -0
- package/dist/history/index.cjs +1215 -0
- package/dist/history/index.cjs.map +1 -0
- package/dist/history/index.d.cts +62 -0
- package/dist/history/index.d.ts +62 -0
- package/dist/history/index.js +79 -0
- package/dist/history/index.js.map +1 -0
- package/dist/i18n/index.cjs +746 -0
- package/dist/i18n/index.cjs.map +1 -0
- package/dist/i18n/index.d.cts +38 -0
- package/dist/i18n/index.d.ts +38 -0
- package/dist/i18n/index.js +55 -0
- package/dist/i18n/index.js.map +1 -0
- package/dist/index-BRHBCmLt.d.ts +1940 -0
- package/dist/index-C8kQtmOk.d.ts +380 -0
- package/dist/index-DN-J-5wT.d.cts +1940 -0
- package/dist/index-DhjMjz7L.d.cts +380 -0
- package/dist/index.cjs +14756 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +269 -0
- package/dist/index.d.ts +269 -0
- package/dist/index.js +6085 -0
- package/dist/index.js.map +1 -0
- package/dist/indexing/index.cjs +736 -0
- package/dist/indexing/index.cjs.map +1 -0
- package/dist/indexing/index.d.cts +36 -0
- package/dist/indexing/index.d.ts +36 -0
- package/dist/indexing/index.js +77 -0
- package/dist/indexing/index.js.map +1 -0
- package/dist/lazy-builder-BwEoBQZ9.d.ts +304 -0
- package/dist/lazy-builder-CZVLKh0Z.d.cts +304 -0
- package/dist/ledger-2NX4L7PN.js +33 -0
- package/dist/ledger-2NX4L7PN.js.map +1 -0
- package/dist/mime-magic-CBBSOkjm.d.cts +50 -0
- package/dist/mime-magic-CBBSOkjm.d.ts +50 -0
- package/dist/periods/index.cjs +1035 -0
- package/dist/periods/index.cjs.map +1 -0
- package/dist/periods/index.d.cts +21 -0
- package/dist/periods/index.d.ts +21 -0
- package/dist/periods/index.js +25 -0
- package/dist/periods/index.js.map +1 -0
- package/dist/predicate-SBHmi6D0.d.cts +161 -0
- package/dist/predicate-SBHmi6D0.d.ts +161 -0
- package/dist/query/index.cjs +1957 -0
- package/dist/query/index.cjs.map +1 -0
- package/dist/query/index.d.cts +3 -0
- package/dist/query/index.d.ts +3 -0
- package/dist/query/index.js +62 -0
- package/dist/query/index.js.map +1 -0
- package/dist/session/index.cjs +487 -0
- package/dist/session/index.cjs.map +1 -0
- package/dist/session/index.d.cts +45 -0
- package/dist/session/index.d.ts +45 -0
- package/dist/session/index.js +44 -0
- package/dist/session/index.js.map +1 -0
- package/dist/shadow/index.cjs +133 -0
- package/dist/shadow/index.cjs.map +1 -0
- package/dist/shadow/index.d.cts +16 -0
- package/dist/shadow/index.d.ts +16 -0
- package/dist/shadow/index.js +20 -0
- package/dist/shadow/index.js.map +1 -0
- package/dist/store/index.cjs +1069 -0
- package/dist/store/index.cjs.map +1 -0
- package/dist/store/index.d.cts +491 -0
- package/dist/store/index.d.ts +491 -0
- package/dist/store/index.js +34 -0
- package/dist/store/index.js.map +1 -0
- package/dist/strategy-BSxFXGzb.d.cts +110 -0
- package/dist/strategy-BSxFXGzb.d.ts +110 -0
- package/dist/strategy-D-SrOLCl.d.cts +548 -0
- package/dist/strategy-D-SrOLCl.d.ts +548 -0
- package/dist/sync/index.cjs +1062 -0
- package/dist/sync/index.cjs.map +1 -0
- package/dist/sync/index.d.cts +42 -0
- package/dist/sync/index.d.ts +42 -0
- package/dist/sync/index.js +28 -0
- package/dist/sync/index.js.map +1 -0
- package/dist/team/index.cjs +1233 -0
- package/dist/team/index.cjs.map +1 -0
- package/dist/team/index.d.cts +117 -0
- package/dist/team/index.d.ts +117 -0
- package/dist/team/index.js +39 -0
- package/dist/team/index.js.map +1 -0
- package/dist/tx/index.cjs +212 -0
- package/dist/tx/index.cjs.map +1 -0
- package/dist/tx/index.d.cts +20 -0
- package/dist/tx/index.d.ts +20 -0
- package/dist/tx/index.js +20 -0
- package/dist/tx/index.js.map +1 -0
- package/dist/types-BZpCZB8N.d.ts +7526 -0
- package/dist/types-Bfs0qr5F.d.cts +7526 -0
- package/dist/ulid-COREQ2RQ.js +9 -0
- package/dist/ulid-COREQ2RQ.js.map +1 -0
- package/dist/util/index.cjs +230 -0
- package/dist/util/index.cjs.map +1 -0
- package/dist/util/index.d.cts +77 -0
- package/dist/util/index.d.ts +77 -0
- package/dist/util/index.js +190 -0
- package/dist/util/index.js.map +1 -0
- package/package.json +244 -0
|
@@ -0,0 +1,1215 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __defProp = Object.defineProperty;
|
|
3
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
4
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
5
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
6
|
+
var __export = (target, all) => {
|
|
7
|
+
for (var name in all)
|
|
8
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
9
|
+
};
|
|
10
|
+
var __copyProps = (to, from, except, desc) => {
|
|
11
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
12
|
+
for (let key of __getOwnPropNames(from))
|
|
13
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
14
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
15
|
+
}
|
|
16
|
+
return to;
|
|
17
|
+
};
|
|
18
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
19
|
+
|
|
20
|
+
// src/history/index.ts
// Public surface of the history subpackage: per-record version history,
// structural diffing, and the hash-chained ledger store.
var history_exports = {};
__export(history_exports, {
  CollectionInstant: () => CollectionInstant,
  LEDGER_COLLECTION: () => LEDGER_COLLECTION,
  LEDGER_DELTAS_COLLECTION: () => LEDGER_DELTAS_COLLECTION,
  LedgerStore: () => LedgerStore,
  VaultInstant: () => VaultInstant,
  applyPatch: () => applyPatch,
  canonicalJson: () => canonicalJson,
  clearHistory: () => clearHistory,
  computePatch: () => computePatch,
  diff: () => diff,
  envelopePayloadHash: () => envelopePayloadHash,
  formatDiff: () => formatDiff,
  getHistory: () => getHistory,
  getVersionEnvelope: () => getVersionEnvelope,
  hashEntry: () => hashEntry,
  paddedIndex: () => paddedIndex,
  parseIndex: () => parseIndex,
  pruneHistory: () => pruneHistory,
  saveHistory: () => saveHistory,
  sha256Hex: () => sha256Hex,
  withHistory: () => withHistory
});
module.exports = __toCommonJS(history_exports);
|
|
46
|
+
|
|
47
|
+
// src/history/history.ts
// Reserved collection holding version envelopes.
var HISTORY_COLLECTION = "_history";
// Width of the zero-padded version component of a history id.
var VERSION_PAD = 10;
/**
 * Build the storage id for one saved version:
 * `<collection>:<recordId>:<zero-padded version>`. Zero-padding makes a
 * lexicographic sort of ids equal to version order.
 */
function historyId(collection, recordId, version) {
  const paddedVersion = String(version).padStart(VERSION_PAD, "0");
  return [collection, recordId, paddedVersion].join(":");
}
/**
 * True when `id` belongs to `collection` (and, if given, to `recordId`).
 * Matching is by id prefix, relying on the historyId layout above.
 */
function matchesPrefix(id, collection, recordId) {
  const prefix = recordId ? `${collection}:${recordId}:` : `${collection}:`;
  return id.startsWith(prefix);
}
|
|
59
|
+
// Persist one version envelope into the reserved history collection.
// The id embeds collection, record id, and zero-padded version (via
// historyId) so a lexicographic sort of ids equals version order.
async function saveHistory(adapter, vault, collection, recordId, envelope) {
  const id = historyId(collection, recordId, envelope._v);
  await adapter.put(vault, HISTORY_COLLECTION, id, envelope);
}
|
|
63
|
+
/**
 * Load history envelopes for a collection (optionally one record),
 * newest first (ids sort lexicographically by zero-padded version, then
 * are reversed). Optional `options.from`/`options.to` bound `_ts`, and
 * `options.limit` caps the result count.
 * NOTE: falsy bounds (e.g. 0) are treated as unset, matching the
 * original truthiness checks.
 */
async function getHistory(adapter, vault, collection, recordId, options) {
  const allIds = await adapter.list(vault, HISTORY_COLLECTION);
  const orderedIds = allIds
    .filter((id) => matchesPrefix(id, collection, recordId))
    .sort()
    .reverse();
  const results = [];
  for (const id of orderedIds) {
    const envelope = await adapter.get(vault, HISTORY_COLLECTION, id);
    if (!envelope) continue;
    if (options?.from && envelope._ts < options.from) continue;
    if (options?.to && envelope._ts > options.to) continue;
    results.push(envelope);
    if (options?.limit && results.length >= options.limit) break;
  }
  return results;
}
|
|
77
|
+
// Fetch a single version envelope by exact (collection, recordId, version)
// key. Returns whatever the adapter returns for a missing id — presumably
// undefined/null; adapter-dependent, not visible here.
async function getVersionEnvelope(adapter, vault, collection, recordId, version) {
  const id = historyId(collection, recordId, version);
  return adapter.get(vault, HISTORY_COLLECTION, id);
}
|
|
81
|
+
/**
 * Delete old history envelopes for a collection (optionally one record).
 *
 * Two independent criteria, combined as a union:
 *  - `options.keepVersions`: keep only the N newest ids (ids sort
 *    oldest-first, so everything before the last N is pruned);
 *  - `options.beforeDate`: additionally prune envelopes whose `_ts` is
 *    earlier than the given timestamp.
 *
 * Returns the number of envelopes deleted.
 *
 * Fix: track doomed ids in a Set from the start instead of an array with
 * a per-iteration `includes()` scan plus a final Set dedupe pass — the
 * original was O(n²) over the matching ids. Behavior (including deletion
 * order) is unchanged: Set preserves insertion order.
 */
async function pruneHistory(adapter, vault, collection, recordId, options) {
  const allIds = await adapter.list(vault, HISTORY_COLLECTION);
  const matchingIds = allIds.filter((id) => recordId ? matchesPrefix(id, collection, recordId) : matchesPrefix(id, collection)).sort();
  const toDelete = new Set();
  if (options.keepVersions !== void 0 && matchingIds.length > options.keepVersions) {
    // Oldest ids come first after the sort; drop all but the newest N.
    for (const id of matchingIds.slice(0, matchingIds.length - options.keepVersions)) {
      toDelete.add(id);
    }
  }
  if (options.beforeDate) {
    for (const id of matchingIds) {
      if (toDelete.has(id)) continue;
      const envelope = await adapter.get(vault, HISTORY_COLLECTION, id);
      if (envelope && envelope._ts < options.beforeDate) {
        toDelete.add(id);
      }
    }
  }
  for (const id of toDelete) {
    await adapter.delete(vault, HISTORY_COLLECTION, id);
  }
  return toDelete.size;
}
|
|
106
|
+
/**
 * Delete history envelopes wholesale. Scope narrows with the arguments:
 * (collection, recordId) clears one record's history, (collection) clears
 * a whole collection, and no arguments clears everything. Returns the
 * number of ids deleted.
 */
async function clearHistory(adapter, vault, collection, recordId) {
  const allIds = await adapter.list(vault, HISTORY_COLLECTION);
  // matchesPrefix already handles an absent recordId, so a single filter
  // covers both scoped cases.
  const victims = collection
    ? allIds.filter((id) => matchesPrefix(id, collection, recordId))
    : allIds;
  for (const id of victims) {
    await adapter.delete(vault, HISTORY_COLLECTION, id);
  }
  return victims.length;
}
|
|
121
|
+
|
|
122
|
+
// src/history/diff.ts
/**
 * Structural diff between two JSON-ish values. Returns a flat list of
 * { path, type, from?, to? } changes where `type` is "added", "removed",
 * or "changed". Objects recurse key-by-key (dotted paths), arrays recurse
 * index-by-index (`[i]` paths); the root is labeled "(root)".
 * `null` and `undefined` are both treated as "absent".
 */
function diff(oldObj, newObj, basePath = "") {
  const here = basePath || "(root)";
  if (oldObj === newObj) return [];
  if (oldObj == null && newObj != null) {
    return [{ path: here, type: "added", to: newObj }];
  }
  if (oldObj != null && newObj == null) {
    return [{ path: here, type: "removed", from: oldObj }];
  }
  // Differing types, or matching primitive types with unequal values,
  // collapse to a single "changed" entry.
  if (typeof oldObj !== typeof newObj || typeof oldObj !== "object") {
    return [{ path: here, type: "changed", from: oldObj, to: newObj }];
  }
  if (Array.isArray(oldObj) && Array.isArray(newObj)) {
    const changes = [];
    const longest = Math.max(oldObj.length, newObj.length);
    for (let i = 0; i < longest; i++) {
      const p = basePath ? `${basePath}[${i}]` : `[${i}]`;
      if (i >= oldObj.length) {
        changes.push({ path: p, type: "added", to: newObj[i] });
      } else if (i >= newObj.length) {
        changes.push({ path: p, type: "removed", from: oldObj[i] });
      } else {
        changes.push(...diff(oldObj[i], newObj[i], p));
      }
    }
    return changes;
  }
  // Record case: old keys first, then new-only keys (Set insertion order).
  const changes = [];
  const everyKey = new Set([...Object.keys(oldObj), ...Object.keys(newObj)]);
  for (const key of everyKey) {
    const p = basePath ? `${basePath}.${key}` : key;
    if (!(key in oldObj)) {
      changes.push({ path: p, type: "added", to: newObj[key] });
    } else if (!(key in newObj)) {
      changes.push({ path: p, type: "removed", from: oldObj[key] });
    } else {
      changes.push(...diff(oldObj[key], newObj[key], p));
    }
  }
  return changes;
}
|
|
167
|
+
/**
 * Render a change list (from `diff`) as human-readable lines:
 * `+` added, `-` removed, `~` changed (old → new). Returns the literal
 * string "(no changes)" for an empty list.
 */
function formatDiff(changes) {
  if (changes.length === 0) return "(no changes)";
  const renderLine = (c) => {
    if (c.type === "added") return `+ ${c.path}: ${JSON.stringify(c.to)}`;
    if (c.type === "removed") return `- ${c.path}: ${JSON.stringify(c.from)}`;
    if (c.type === "changed") {
      return `~ ${c.path}: ${JSON.stringify(c.from)} \u2192 ${JSON.stringify(c.to)}`;
    }
    // Unknown types fall through to undefined, as in the original switch.
    return void 0;
  };
  return changes.map(renderLine).join("\n");
}
|
|
180
|
+
|
|
181
|
+
// src/types.ts
// Format version constant for noy-db data — presumably bumped on breaking
// storage-format changes; TODO confirm against the writer/reader code.
var NOYDB_FORMAT_VERSION = 1;
|
|
183
|
+
|
|
184
|
+
// src/errors.ts
/**
 * Base class for all noy-db errors. Callers should branch on `code`
 * (stable across library versions) rather than on message text.
 */
var NoydbError = class extends Error {
  /** Machine-readable error code. Stable across library versions. */
  code;
  constructor(code, message) {
    super(message);
    this.name = "NoydbError";
    this.code = code;
  }
};
/** Decryption failed for a reason other than a detected integrity failure. */
var DecryptionError = class extends NoydbError {
  constructor(message = "Decryption failed") {
    super("DECRYPTION_FAILED", message);
    this.name = "DecryptionError";
  }
};
// Thrown when decryption fails its integrity check (see decrypt(): an
// AES-GCM OperationError is mapped to this error).
var TamperedError = class extends NoydbError {
  constructor(message = "Data integrity check failed \u2014 record may have been tampered with") {
    super("TAMPERED", message);
    this.name = "TamperedError";
  }
};
// Thrown when a mutating operation is attempted on a vault view anchored
// at a past instant — such time-machine views are read-only.
var ReadOnlyAtInstantError = class extends NoydbError {
  constructor(operation, timestamp) {
    super(
      "READ_ONLY_AT_INSTANT",
      `Cannot ${operation}() on a vault view anchored at ${timestamp} \u2014 time-machine views are read-only`
    );
    this.name = "ReadOnlyAtInstantError";
  }
};
// Version conflict: the stored version did not match the expected one.
var ConflictError = class extends NoydbError {
  /** The actual stored version at the time of conflict. */
  version;
  constructor(version, message = "Version conflict") {
    super("CONFLICT", message);
    this.name = "ConflictError";
    this.version = version;
  }
};
// Thrown when LedgerStore.append exhausts its optimistic-CAS retry budget.
var LedgerContentionError = class extends NoydbError {
  // Number of append attempts made before giving up.
  attempts;
  constructor(attempts) {
    super(
      "LEDGER_CONTENTION",
      `LedgerStore.append: failed to claim a chain slot after ${attempts} optimistic-CAS retries`
    );
    this.name = "LedgerContentionError";
    this.attempts = attempts;
  }
};
|
|
235
|
+
|
|
236
|
+
// src/crypto.ts
|
|
237
|
+
var IV_BYTES = 12;
|
|
238
|
+
var subtle = globalThis.crypto.subtle;
|
|
239
|
+
async function encrypt(plaintext, dek) {
|
|
240
|
+
const iv = generateIV();
|
|
241
|
+
const encoded = new TextEncoder().encode(plaintext);
|
|
242
|
+
const ciphertext = await subtle.encrypt(
|
|
243
|
+
{ name: "AES-GCM", iv },
|
|
244
|
+
dek,
|
|
245
|
+
encoded
|
|
246
|
+
);
|
|
247
|
+
return {
|
|
248
|
+
iv: bufferToBase64(iv),
|
|
249
|
+
data: bufferToBase64(ciphertext)
|
|
250
|
+
};
|
|
251
|
+
}
|
|
252
|
+
async function decrypt(ivBase64, dataBase64, dek) {
|
|
253
|
+
const iv = base64ToBuffer(ivBase64);
|
|
254
|
+
const ciphertext = base64ToBuffer(dataBase64);
|
|
255
|
+
try {
|
|
256
|
+
const plaintext = await subtle.decrypt(
|
|
257
|
+
{ name: "AES-GCM", iv },
|
|
258
|
+
dek,
|
|
259
|
+
ciphertext
|
|
260
|
+
);
|
|
261
|
+
return new TextDecoder().decode(plaintext);
|
|
262
|
+
} catch (err) {
|
|
263
|
+
if (err instanceof Error && err.name === "OperationError") {
|
|
264
|
+
throw new TamperedError();
|
|
265
|
+
}
|
|
266
|
+
throw new DecryptionError(
|
|
267
|
+
err instanceof Error ? err.message : "Decryption failed"
|
|
268
|
+
);
|
|
269
|
+
}
|
|
270
|
+
}
|
|
271
|
+
function generateIV() {
|
|
272
|
+
return globalThis.crypto.getRandomValues(new Uint8Array(IV_BYTES));
|
|
273
|
+
}
|
|
274
|
+
function bufferToBase64(buffer) {
|
|
275
|
+
const bytes = buffer instanceof Uint8Array ? buffer : new Uint8Array(buffer);
|
|
276
|
+
let binary = "";
|
|
277
|
+
for (let i = 0; i < bytes.length; i++) {
|
|
278
|
+
binary += String.fromCharCode(bytes[i]);
|
|
279
|
+
}
|
|
280
|
+
return btoa(binary);
|
|
281
|
+
}
|
|
282
|
+
function base64ToBuffer(base64) {
|
|
283
|
+
const binary = atob(base64);
|
|
284
|
+
const bytes = new Uint8Array(binary.length);
|
|
285
|
+
for (let i = 0; i < binary.length; i++) {
|
|
286
|
+
bytes[i] = binary.charCodeAt(i);
|
|
287
|
+
}
|
|
288
|
+
return bytes;
|
|
289
|
+
}
|
|
290
|
+
|
|
291
|
+
// src/history/ledger/entry.ts
|
|
292
|
+
function canonicalJson(value) {
|
|
293
|
+
if (value === null) return "null";
|
|
294
|
+
if (typeof value === "boolean") return value ? "true" : "false";
|
|
295
|
+
if (typeof value === "number") {
|
|
296
|
+
if (!Number.isFinite(value)) {
|
|
297
|
+
throw new Error(
|
|
298
|
+
`canonicalJson: refusing to encode non-finite number ${String(value)}`
|
|
299
|
+
);
|
|
300
|
+
}
|
|
301
|
+
return JSON.stringify(value);
|
|
302
|
+
}
|
|
303
|
+
if (typeof value === "string") return JSON.stringify(value);
|
|
304
|
+
if (typeof value === "bigint") {
|
|
305
|
+
throw new Error("canonicalJson: BigInt is not JSON-serializable");
|
|
306
|
+
}
|
|
307
|
+
if (typeof value === "undefined" || typeof value === "function") {
|
|
308
|
+
throw new Error(
|
|
309
|
+
`canonicalJson: refusing to encode ${typeof value} \u2014 include all fields explicitly`
|
|
310
|
+
);
|
|
311
|
+
}
|
|
312
|
+
if (Array.isArray(value)) {
|
|
313
|
+
return "[" + value.map((v) => canonicalJson(v)).join(",") + "]";
|
|
314
|
+
}
|
|
315
|
+
if (typeof value === "object") {
|
|
316
|
+
const obj = value;
|
|
317
|
+
const keys = Object.keys(obj).sort();
|
|
318
|
+
const parts = [];
|
|
319
|
+
for (const key of keys) {
|
|
320
|
+
parts.push(JSON.stringify(key) + ":" + canonicalJson(obj[key]));
|
|
321
|
+
}
|
|
322
|
+
return "{" + parts.join(",") + "}";
|
|
323
|
+
}
|
|
324
|
+
throw new Error(`canonicalJson: unexpected value type: ${typeof value}`);
|
|
325
|
+
}
|
|
326
|
+
async function sha256Hex(input) {
|
|
327
|
+
const bytes = new TextEncoder().encode(input);
|
|
328
|
+
const digest = await globalThis.crypto.subtle.digest("SHA-256", bytes);
|
|
329
|
+
return bytesToHex(new Uint8Array(digest));
|
|
330
|
+
}
|
|
331
|
+
async function hashEntry(entry) {
|
|
332
|
+
return sha256Hex(canonicalJson(entry));
|
|
333
|
+
}
|
|
334
|
+
function bytesToHex(bytes) {
|
|
335
|
+
const hex = new Array(bytes.length);
|
|
336
|
+
for (let i = 0; i < bytes.length; i++) {
|
|
337
|
+
hex[i] = (bytes[i] ?? 0).toString(16).padStart(2, "0");
|
|
338
|
+
}
|
|
339
|
+
return hex.join("");
|
|
340
|
+
}
|
|
341
|
+
function paddedIndex(index) {
|
|
342
|
+
return String(index).padStart(10, "0");
|
|
343
|
+
}
|
|
344
|
+
function parseIndex(key) {
|
|
345
|
+
return Number.parseInt(key, 10);
|
|
346
|
+
}
|
|
347
|
+
|
|
348
|
+
// src/history/ledger/patch.ts
/**
 * Compute a JSON-Patch-style op list (add/remove/replace) that transforms
 * `prev` into `next`. Delegates to the recursive worker diff2, which
 * appends ops into the accumulator.
 */
function computePatch(prev, next) {
  const operations = [];
  diff2(prev, next, "", operations);
  return operations;
}
|
|
354
|
+
/**
 * Recursive worker for computePatch. Appends ops to `out` that transform
 * `prev` into `next` at `path` (JSON-Pointer-style, segments escaped via
 * escapePathSegment).
 *
 * Granularity: objects are diffed key-by-key; arrays are treated as
 * atomic — any array difference emits a single whole-array replace.
 * Emission order: removed/changed keys in prev-key order first, then
 * added keys in next-key order.
 */
function diff2(prev, next, path, out) {
  if (prev === next) return;
  // Any transition to or from null is a whole-value replacement.
  if (prev === null || next === null) {
    out.push({ op: "replace", path, value: next });
    return;
  }
  const prevIsArray = Array.isArray(prev);
  const nextIsArray = Array.isArray(next);
  const prevIsObject = typeof prev === "object" && !prevIsArray;
  const nextIsObject = typeof next === "object" && !nextIsArray;
  // Kind mismatch (array vs object vs primitive): replace wholesale.
  if (prevIsArray !== nextIsArray || prevIsObject !== nextIsObject) {
    out.push({ op: "replace", path, value: next });
    return;
  }
  if (prevIsArray && nextIsArray) {
    // Arrays are compared deeply but replaced atomically when unequal.
    if (!arrayDeepEqual(prev, next)) {
      out.push({ op: "replace", path, value: next });
    }
    return;
  }
  if (prevIsObject && nextIsObject) {
    const prevObj = prev;
    const nextObj = next;
    const prevKeys = Object.keys(prevObj);
    const nextKeys = Object.keys(nextObj);
    // Pass 1: keys in prev — removed, or recursively diffed if shared.
    for (const key of prevKeys) {
      const childPath = path + "/" + escapePathSegment(key);
      if (!(key in nextObj)) {
        out.push({ op: "remove", path: childPath });
      } else {
        diff2(prevObj[key], nextObj[key], childPath, out);
      }
    }
    // Pass 2: keys only in next — additions.
    for (const key of nextKeys) {
      if (!(key in prevObj)) {
        out.push({
          op: "add",
          path: path + "/" + escapePathSegment(key),
          value: nextObj[key]
        });
      }
    }
    return;
  }
  // Both primitives (non-null, non-object) and not strictly equal.
  out.push({ op: "replace", path, value: next });
}
|
|
400
|
+
/** Element-wise deep comparison of two arrays (helper for deepEqual). */
function arrayDeepEqual(a, b) {
  if (a.length !== b.length) return false;
  for (let i = a.length - 1; i >= 0; i--) {
    if (!deepEqual(a[i], b[i])) return false;
  }
  return true;
}
/**
 * Structural equality for JSON-ish values: primitives by ===, arrays
 * element-wise, plain objects key-by-key. No cycle detection — inputs
 * are expected to be acyclic JSON documents.
 */
function deepEqual(a, b) {
  if (a === b) return true;
  if (a === null || b === null) return false;
  if (typeof a !== typeof b || typeof a !== "object") return false;
  const aIsArray = Array.isArray(a);
  if (aIsArray !== Array.isArray(b)) return false;
  if (aIsArray) return arrayDeepEqual(a, b);
  const keysOfA = Object.keys(a);
  if (keysOfA.length !== Object.keys(b).length) return false;
  for (const key of keysOfA) {
    if (!(key in b)) return false;
    if (!deepEqual(a[key], b[key])) return false;
  }
  return true;
}
|
|
427
|
+
/**
 * Apply a patch (ordered list of add/remove/replace ops) to `base`,
 * returning the resulting document. `base` is cloned up front, so the
 * caller's value is never mutated.
 */
function applyPatch(base, patch) {
  return patch.reduce((doc, op) => applyOp(doc, op), clone(base));
}
|
|
434
|
+
/**
 * Apply a single op to `doc`. An empty path addresses the whole document:
 * remove yields null, add/replace substitute a clone of the op's value.
 * Non-empty paths are parsed into segments and applied recursively.
 */
function applyOp(doc, op) {
  if (op.path === "") {
    return op.op === "remove" ? null : clone(op.value);
  }
  return walkAndApply(doc, parsePath(op.path), op);
}
|
|
442
|
+
/**
 * Recursively descend `segments` into `doc` and apply `op` at the final
 * segment. Copy-on-write: every container on the path is shallow-copied,
 * the input is never mutated.
 * @throws when a segment does not resolve (missing key, bad index, or a
 *         primitive encountered mid-path).
 */
function walkAndApply(doc, segments, op) {
  if (segments.length === 0) {
    throw new Error("walkAndApply: empty segments (internal error)");
  }
  const [head, ...rest] = segments;
  if (head === void 0) throw new Error("walkAndApply: undefined segment");
  // Last segment: hand off to the terminal handler.
  if (rest.length === 0) {
    return applyAtTerminal(doc, head, op);
  }
  if (Array.isArray(doc)) {
    const idx = parseArrayIndex(head, doc.length);
    const updated = doc.slice();
    updated[idx] = walkAndApply(doc[idx], rest, op);
    return updated;
  }
  if (doc !== null && typeof doc === "object") {
    if (!(head in doc)) {
      throw new Error(`applyPatch: path segment "${head}" not found in object`);
    }
    return { ...doc, [head]: walkAndApply(doc[head], rest, op) };
  }
  throw new Error(
    `applyPatch: cannot step into ${typeof doc} at segment "${head}"`
  );
}
|
|
471
|
+
/**
 * Apply `op` at the final path segment of `doc`, returning a new
 * container (copy-on-write — `doc` itself is never mutated).
 *
 * NOTE(review): if `doc` is an array and `op.op` matches none of
 * remove/add/replace, control falls off the end of the array branch and
 * into the object branch (arrays are `typeof "object"`), eventually
 * reaching the final throw. Unreachable for well-formed ops, but worth
 * knowing before restructuring.
 */
function applyAtTerminal(doc, segment, op) {
  if (Array.isArray(doc)) {
    // "-" is the JSON-Patch append marker; otherwise the index may be one
    // past the end (insert-at-end), hence `doc.length + 1` as the bound.
    const idx = segment === "-" ? doc.length : parseArrayIndex(segment, doc.length + 1);
    const next = doc.slice();
    if (op.op === "remove") {
      next.splice(idx, 1);
      return next;
    }
    if (op.op === "add") {
      // Insert (not overwrite) a cloned value at idx.
      next.splice(idx, 0, clone(op.value));
      return next;
    }
    if (op.op === "replace") {
      // Replace must target an existing element, unlike add.
      if (idx >= doc.length) {
        throw new Error(
          `applyPatch: replace at out-of-bounds array index ${idx}`
        );
      }
      next[idx] = clone(op.value);
      return next;
    }
  }
  if (doc !== null && typeof doc === "object") {
    const obj = doc;
    if (op.op === "remove") {
      if (!(segment in obj)) {
        throw new Error(
          `applyPatch: remove on missing key "${segment}"`
        );
      }
      const next = { ...obj };
      delete next[segment];
      return next;
    }
    if (op.op === "add") {
      return { ...obj, [segment]: clone(op.value) };
    }
    if (op.op === "replace") {
      // Unlike "add", "replace" requires the key to already exist.
      if (!(segment in obj)) {
        throw new Error(
          `applyPatch: replace on missing key "${segment}"`
        );
      }
      return { ...obj, [segment]: clone(op.value) };
    }
  }
  throw new Error(
    `applyPatch: cannot apply ${op.op} at terminal segment "${segment}"`
  );
}
|
|
521
|
+
/** Escape one JSON-Pointer segment (RFC 6901): "~" -> "~0", "/" -> "~1". */
function escapePathSegment(segment) {
  return segment.replaceAll("~", "~0").replaceAll("/", "~1");
}
/** Inverse of escapePathSegment. Order matters: "~1" before "~0". */
function unescapePathSegment(segment) {
  return segment.replaceAll("~1", "/").replaceAll("~0", "~");
}
/**
 * Split a "/a/b"-style pointer into unescaped segments.
 * @throws when the path does not start with "/" (the empty whole-document
 *         path is handled by applyOp before this is called).
 */
function parsePath(path) {
  if (!path.startsWith("/")) {
    throw new Error(`applyPatch: path must start with '/', got "${path}"`);
  }
  const rawSegments = path.slice(1).split("/");
  return rawSegments.map((raw) => unescapePathSegment(raw));
}
|
|
533
|
+
/**
 * Parse and bounds-check an array index segment. `max` is inclusive —
 * callers pass the array length when an insertion point one past the end
 * is legal.
 * @throws on non-numeric segments or out-of-range indices.
 */
function parseArrayIndex(segment, max) {
  const digitsOnly = /^\d+$/.test(segment);
  if (!digitsOnly) {
    throw new Error(
      `applyPatch: array index must be a non-negative integer, got "${segment}"`
    );
  }
  const idx = Number.parseInt(segment, 10);
  const inRange = idx >= 0 && idx <= max;
  if (!inRange) {
    throw new Error(
      `applyPatch: array index ${idx} out of range [0, ${max}]`
    );
  }
  return idx;
}
|
|
547
|
+
/**
 * Deep-copy a JSON-compatible value; primitives (including null and
 * undefined) pass through untouched. Uses a JSON round-trip, which
 * matches the patch pipeline's contract of plain acyclic JSON documents.
 */
function clone(value) {
  const isPrimitive = value === null || value === void 0 || typeof value !== "object";
  if (isPrimitive) return value;
  return JSON.parse(JSON.stringify(value));
}
|
|
552
|
+
|
|
553
|
+
// src/history/ledger/constants.ts
// Reserved internal collection names used by the ledger (the leading
// underscore matches the "_history" convention used elsewhere here).
var LEDGER_COLLECTION = "_ledger";
var LEDGER_DELTAS_COLLECTION = "_ledger_deltas";
|
|
556
|
+
|
|
557
|
+
// src/history/ledger/hash.ts
/**
 * SHA-256 hex hash of an envelope's payload (`_data`); a missing
 * envelope hashes to the empty string.
 */
async function envelopePayloadHash(envelope) {
  return envelope ? sha256Hex(envelope._data) : "";
}
|
|
562
|
+
|
|
563
|
+
// src/history/ledger/store.ts
// Retry budget for claiming a ledger chain slot — presumably consumed by
// LedgerStore.append's optimistic-CAS loop (the loop itself is not fully
// visible here); exhausting it raises LedgerContentionError.
var MAX_APPEND_ATTEMPTS = 8;
|
|
565
|
+
var LedgerStore = class {
|
|
566
|
+
adapter;
|
|
567
|
+
vault;
|
|
568
|
+
encrypted;
|
|
569
|
+
getDEK;
|
|
570
|
+
actor;
|
|
571
|
+
/**
|
|
572
|
+
* In-memory cache of the chain head — the most recently appended
|
|
573
|
+
* entry along with its precomputed hash. Without this, every
|
|
574
|
+
* `append()` would re-load every prior entry to recompute the
|
|
575
|
+
* prevHash, making N puts O(N²) — a 1K-record stress test goes from
|
|
576
|
+
* < 100ms to a multi-second timeout.
|
|
577
|
+
*
|
|
578
|
+
* The cache is populated on first read (`append`, `head`, `verify`)
|
|
579
|
+
* and updated in-place on every successful `append`. Single-writer
|
|
580
|
+
* usage (the assumption) keeps it consistent. A second
|
|
581
|
+
* LedgerStore instance writing to the same vault would not
|
|
582
|
+
* see the first instance's appends in its cached state — that's the
|
|
583
|
+
* concurrency caveat documented at the class level.
|
|
584
|
+
*
|
|
585
|
+
* Sentinel `undefined` means "not yet loaded"; an explicit `null`
|
|
586
|
+
* value means "loaded and confirmed empty" — distinguishing these
|
|
587
|
+
* matters because an empty ledger is a valid state (genesis prevHash
|
|
588
|
+
* is the empty string), and we don't want to re-scan the adapter
|
|
589
|
+
* just because the chain is freshly initialized.
|
|
590
|
+
*/
|
|
591
|
+
headCache = void 0;
|
|
592
|
+
constructor(opts) {
|
|
593
|
+
this.adapter = opts.adapter;
|
|
594
|
+
this.vault = opts.vault;
|
|
595
|
+
this.encrypted = opts.encrypted;
|
|
596
|
+
this.getDEK = opts.getDEK;
|
|
597
|
+
this.actor = opts.actor;
|
|
598
|
+
}
|
|
599
|
+
/**
|
|
600
|
+
* Lazily load (or return cached) the current chain head. The cache
|
|
601
|
+
* sentinel is `undefined` until first access; after the first call,
|
|
602
|
+
* the cache holds either a `{ entry, hash }` for non-empty ledgers
|
|
603
|
+
* or `null` for empty ones.
|
|
604
|
+
*/
|
|
605
|
+
async getCachedHead() {
|
|
606
|
+
if (this.headCache !== void 0) return this.headCache;
|
|
607
|
+
const entries = await this.loadAllEntries();
|
|
608
|
+
const last = entries[entries.length - 1];
|
|
609
|
+
if (!last) {
|
|
610
|
+
this.headCache = null;
|
|
611
|
+
return null;
|
|
612
|
+
}
|
|
613
|
+
this.headCache = { entry: last, hash: await hashEntry(last) };
|
|
614
|
+
return this.headCache;
|
|
615
|
+
}
|
|
616
|
+
/**
|
|
617
|
+
* Append a new entry to the ledger. Returns the full entry that was
|
|
618
|
+
* written (with its assigned index and computed prevHash) so the
|
|
619
|
+
* caller can use the hash for downstream purposes (e.g., embedding
|
|
620
|
+
* in a verifiable backup).
|
|
621
|
+
*
|
|
622
|
+
* This is the **only** way to add entries. Direct adapter writes to
|
|
623
|
+
* `_ledger/` would bypass the chain math and would be caught by the
|
|
624
|
+
* next `verify()` call as a divergence.
|
|
625
|
+
*
|
|
626
|
+
* ## Multi-writer correctness
|
|
627
|
+
*
|
|
628
|
+
* Append is implemented as an optimistic-CAS retry loop. On every
|
|
629
|
+
* attempt:
|
|
630
|
+
*
|
|
631
|
+
* 1. Read fresh head (cache invalidated on retry).
|
|
632
|
+
* 2. Compute `nextIndex = head.index + 1`, `prevHash = hash(head)`.
|
|
633
|
+
* 3. Encrypt delta payload IN MEMORY (no adapter write yet) so we
|
|
634
|
+
* can compute `deltaHash` before claiming the chain slot.
|
|
635
|
+
* 4. Build + encrypt the entry envelope.
|
|
636
|
+
* 5. `adapter.put(_ledger, paddedIndex, envelope, expectedVersion: 0)`
|
|
637
|
+
* — the `expectedVersion: 0` asserts "this slot must not exist."
|
|
638
|
+
* Stores with `casAtomic: true` honor the CAS check; under
|
|
639
|
+
* contention the second writer's put throws `ConflictError`.
|
|
640
|
+
* 6. On `ConflictError`: invalidate the head cache, sleep with
|
|
641
|
+
* bounded backoff + jitter, retry. After `MAX_APPEND_ATTEMPTS`
|
|
642
|
+
* retries throw {@link LedgerContentionError}.
|
|
643
|
+
* 7. On success: write the delta envelope (if any) at the same
|
|
644
|
+
* index. Update the head cache.
|
|
645
|
+
*
|
|
646
|
+
* Entry-first ordering matters: writing the delta first under
|
|
647
|
+
* contention would orphan delta records at indices the writer never
|
|
648
|
+
* actually claimed. The deltaHash is computed off the encrypted
|
|
649
|
+
* envelope's `_data` field, which doesn't require the envelope to
|
|
650
|
+
* be persisted.
|
|
651
|
+
*
|
|
652
|
+
* Stores with `casAtomic: false` (file, s3, r2 by default) silently
|
|
653
|
+
* accept the `expectedVersion: 0` argument and proceed without a
|
|
654
|
+
* CAS check. Concurrent appends against those stores remain
|
|
655
|
+
* best-effort — pair them with an advisory lock or with sync
|
|
656
|
+
* single-writer discipline.
|
|
657
|
+
*/
|
|
658
|
+
async append(input) {
|
|
659
|
+
let lastConflict;
|
|
660
|
+
for (let attempt = 0; attempt < MAX_APPEND_ATTEMPTS; attempt++) {
|
|
661
|
+
if (attempt > 0) {
|
|
662
|
+
this.headCache = void 0;
|
|
663
|
+
}
|
|
664
|
+
try {
|
|
665
|
+
return await this.appendOnce(input);
|
|
666
|
+
} catch (err) {
|
|
667
|
+
if (err instanceof ConflictError) {
|
|
668
|
+
lastConflict = err;
|
|
669
|
+
if (attempt < MAX_APPEND_ATTEMPTS - 1) {
|
|
670
|
+
await sleepBackoff(attempt);
|
|
671
|
+
}
|
|
672
|
+
continue;
|
|
673
|
+
}
|
|
674
|
+
throw err;
|
|
675
|
+
}
|
|
676
|
+
}
|
|
677
|
+
void lastConflict;
|
|
678
|
+
throw new LedgerContentionError(MAX_APPEND_ATTEMPTS);
|
|
679
|
+
}
|
|
680
|
+
/**
|
|
681
|
+
* One attempt at the append cycle. Throws `ConflictError` when the
|
|
682
|
+
* CAS check on the entry put fails — `append()` catches that and
|
|
683
|
+
* retries. Any other error propagates to the caller.
|
|
684
|
+
*/
|
|
685
|
+
async appendOnce(input) {
|
|
686
|
+
const cached = await this.getCachedHead();
|
|
687
|
+
const lastEntry = cached?.entry;
|
|
688
|
+
const prevHash = cached?.hash ?? "";
|
|
689
|
+
const nextIndex = lastEntry ? lastEntry.index + 1 : 0;
|
|
690
|
+
let deltaEnvelope;
|
|
691
|
+
let deltaHash;
|
|
692
|
+
if (input.delta !== void 0) {
|
|
693
|
+
deltaEnvelope = await this.encryptDelta(input.delta);
|
|
694
|
+
deltaHash = await sha256Hex(deltaEnvelope._data);
|
|
695
|
+
}
|
|
696
|
+
const entryBase = {
|
|
697
|
+
index: nextIndex,
|
|
698
|
+
prevHash,
|
|
699
|
+
op: input.op,
|
|
700
|
+
collection: input.collection,
|
|
701
|
+
id: input.id,
|
|
702
|
+
version: input.version,
|
|
703
|
+
ts: (/* @__PURE__ */ new Date()).toISOString(),
|
|
704
|
+
actor: input.actor === "" ? this.actor : input.actor,
|
|
705
|
+
payloadHash: input.payloadHash
|
|
706
|
+
};
|
|
707
|
+
const entry = deltaHash !== void 0 ? { ...entryBase, deltaHash } : entryBase;
|
|
708
|
+
const envelope = await this.encryptEntry(entry);
|
|
709
|
+
await this.adapter.put(
|
|
710
|
+
this.vault,
|
|
711
|
+
LEDGER_COLLECTION,
|
|
712
|
+
paddedIndex(entry.index),
|
|
713
|
+
envelope,
|
|
714
|
+
0
|
|
715
|
+
);
|
|
716
|
+
if (deltaEnvelope) {
|
|
717
|
+
await this.adapter.put(
|
|
718
|
+
this.vault,
|
|
719
|
+
LEDGER_DELTAS_COLLECTION,
|
|
720
|
+
paddedIndex(entry.index),
|
|
721
|
+
deltaEnvelope,
|
|
722
|
+
0
|
|
723
|
+
);
|
|
724
|
+
}
|
|
725
|
+
this.headCache = { entry, hash: await hashEntry(entry) };
|
|
726
|
+
return entry;
|
|
727
|
+
}
|
|
728
|
+
/**
|
|
729
|
+
* Load a delta payload by its entry index. Returns `null` if the
|
|
730
|
+
* entry at that index doesn't reference a delta (genesis puts and
|
|
731
|
+
* deletes leave the slot empty) or if the delta row is missing
|
|
732
|
+
* (possible after a `pruneHistory` fold).
|
|
733
|
+
*
|
|
734
|
+
* The caller is responsible for deciding what to do with a missing
|
|
735
|
+
* delta — `ledger.reconstruct()` uses it as a "stop walking
|
|
736
|
+
* backward" signal and falls back to the on-disk current value.
|
|
737
|
+
*/
|
|
738
|
+
async loadDelta(index) {
|
|
739
|
+
const envelope = await this.adapter.get(
|
|
740
|
+
this.vault,
|
|
741
|
+
LEDGER_DELTAS_COLLECTION,
|
|
742
|
+
paddedIndex(index)
|
|
743
|
+
);
|
|
744
|
+
if (!envelope) return null;
|
|
745
|
+
if (!this.encrypted) {
|
|
746
|
+
return JSON.parse(envelope._data);
|
|
747
|
+
}
|
|
748
|
+
const dek = await this.getDEK(LEDGER_COLLECTION);
|
|
749
|
+
const json = await decrypt(envelope._iv, envelope._data, dek);
|
|
750
|
+
return JSON.parse(json);
|
|
751
|
+
}
|
|
752
|
+
/** Encrypt a JSON Patch into an envelope for storage. Mirrors encryptEntry. */
|
|
753
|
+
async encryptDelta(patch) {
|
|
754
|
+
const json = JSON.stringify(patch);
|
|
755
|
+
if (!this.encrypted) {
|
|
756
|
+
return {
|
|
757
|
+
_noydb: NOYDB_FORMAT_VERSION,
|
|
758
|
+
_v: 1,
|
|
759
|
+
_ts: (/* @__PURE__ */ new Date()).toISOString(),
|
|
760
|
+
_iv: "",
|
|
761
|
+
_data: json,
|
|
762
|
+
_by: this.actor
|
|
763
|
+
};
|
|
764
|
+
}
|
|
765
|
+
const dek = await this.getDEK(LEDGER_COLLECTION);
|
|
766
|
+
const { iv, data } = await encrypt(json, dek);
|
|
767
|
+
return {
|
|
768
|
+
_noydb: NOYDB_FORMAT_VERSION,
|
|
769
|
+
_v: 1,
|
|
770
|
+
_ts: (/* @__PURE__ */ new Date()).toISOString(),
|
|
771
|
+
_iv: iv,
|
|
772
|
+
_data: data,
|
|
773
|
+
_by: this.actor
|
|
774
|
+
};
|
|
775
|
+
}
|
|
776
|
+
/**
|
|
777
|
+
* Read all entries in ascending-index order. Used internally by
|
|
778
|
+
* `append()`, `head()`, `verify()`, and `entries()`. Decryption is
|
|
779
|
+
* serial because the entries are tiny and the overhead of a Promise
|
|
780
|
+
* pool would dominate at realistic chain lengths (< 100K entries).
|
|
781
|
+
*/
|
|
782
|
+
async loadAllEntries() {
|
|
783
|
+
const keys = await this.adapter.list(this.vault, LEDGER_COLLECTION);
|
|
784
|
+
keys.sort();
|
|
785
|
+
const entries = [];
|
|
786
|
+
for (const key of keys) {
|
|
787
|
+
const envelope = await this.adapter.get(
|
|
788
|
+
this.vault,
|
|
789
|
+
LEDGER_COLLECTION,
|
|
790
|
+
key
|
|
791
|
+
);
|
|
792
|
+
if (!envelope) continue;
|
|
793
|
+
entries.push(await this.decryptEntry(envelope));
|
|
794
|
+
}
|
|
795
|
+
return entries;
|
|
796
|
+
}
|
|
797
|
+
/**
|
|
798
|
+
* Return the current head of the ledger: the last entry, its hash,
|
|
799
|
+
* and the total chain length. `null` on an empty ledger so callers
|
|
800
|
+
* can distinguish "no history yet" from "empty history".
|
|
801
|
+
*/
|
|
802
|
+
async head() {
|
|
803
|
+
const cached = await this.getCachedHead();
|
|
804
|
+
if (!cached) return null;
|
|
805
|
+
return {
|
|
806
|
+
entry: cached.entry,
|
|
807
|
+
hash: cached.hash,
|
|
808
|
+
length: cached.entry.index + 1
|
|
809
|
+
};
|
|
810
|
+
}
|
|
811
|
+
/**
|
|
812
|
+
* Return entries in the requested half-open range `[from, to)`.
|
|
813
|
+
* Defaults: `from = 0`, `to = length`. The indices are clipped to
|
|
814
|
+
* the valid range; no error is thrown for out-of-range queries.
|
|
815
|
+
*/
|
|
816
|
+
async entries(opts = {}) {
|
|
817
|
+
const all = await this.loadAllEntries();
|
|
818
|
+
const from = Math.max(0, opts.from ?? 0);
|
|
819
|
+
const to = Math.min(all.length, opts.to ?? all.length);
|
|
820
|
+
return all.slice(from, to);
|
|
821
|
+
}
|
|
822
|
+
/**
|
|
823
|
+
* Reconstruct a record's state at a given historical version by
|
|
824
|
+
* walking the ledger's delta chain backward from the current state.
|
|
825
|
+
*
|
|
826
|
+
* ## Algorithm
|
|
827
|
+
*
|
|
828
|
+
* Ledger deltas are stored in **reverse** form — each entry's
|
|
829
|
+
* patch describes how to undo that put, transforming the new
|
|
830
|
+
* record back into the previous one. `reconstruct` exploits this
|
|
831
|
+
* by:
|
|
832
|
+
*
|
|
833
|
+
* 1. Finding every ledger entry for `(collection, id)` in the
|
|
834
|
+
* chain, sorted by index ascending.
|
|
835
|
+
* 2. Starting from `current` (the present value of the record,
|
|
836
|
+
* as held by the caller — typically fetched via
|
|
837
|
+
* `Collection.get()`).
|
|
838
|
+
* 3. Walking entries in **descending** index order and applying
|
|
839
|
+
* each entry's reverse patch, stopping when we reach the
|
|
840
|
+
* entry whose version equals `atVersion`.
|
|
841
|
+
*
|
|
842
|
+
* The result is the record as it existed immediately AFTER the
|
|
843
|
+
* put at `atVersion`. To get the state at the genesis put
|
|
844
|
+
* (version 1), the walk runs all the way back through every put
|
|
845
|
+
* after the first.
|
|
846
|
+
*
|
|
847
|
+
* ## Caveats
|
|
848
|
+
*
|
|
849
|
+
* - **Delete entries** break the walk: once we see a delete, the
|
|
850
|
+
* record didn't exist before that point, so there's nothing to
|
|
851
|
+
* reconstruct. We return `null` in that case.
|
|
852
|
+
* - **Missing deltas** (e.g., after `pruneHistory` folds old
|
|
853
|
+
* entries into a base snapshot) also stop the walk. does
|
|
854
|
+
* not ship pruneHistory, so today this only happens if an entry
|
|
855
|
+
* was deleted out-of-band.
|
|
856
|
+
* - The caller MUST pass the correct current value. Passing a
|
|
857
|
+
* mutated object would corrupt the reconstruction — the patch
|
|
858
|
+
* chain is only valid against the exact state that was in
|
|
859
|
+
* effect when the most recent put happened.
|
|
860
|
+
*
|
|
861
|
+
* For, `reconstruct` is the only way to read a historical
|
|
862
|
+
* version via deltas. The legacy `_history` collection still
|
|
863
|
+
* holds full snapshots and `Collection.getVersion()` still reads
|
|
864
|
+
* from there — the two paths coexist until pruneHistory lands in
|
|
865
|
+
* a follow-up and delta becomes the default.
|
|
866
|
+
*/
|
|
867
|
+
async reconstruct(collection, id, current, atVersion) {
|
|
868
|
+
const all = await this.loadAllEntries();
|
|
869
|
+
const matching = all.filter(
|
|
870
|
+
(e) => e.collection === collection && e.id === id
|
|
871
|
+
);
|
|
872
|
+
if (matching.length === 0) {
|
|
873
|
+
return null;
|
|
874
|
+
}
|
|
875
|
+
let state = current;
|
|
876
|
+
for (let i = matching.length - 1; i >= 0; i--) {
|
|
877
|
+
const entry = matching[i];
|
|
878
|
+
if (!entry) continue;
|
|
879
|
+
if (entry.version === atVersion && entry.op !== "delete") {
|
|
880
|
+
return state;
|
|
881
|
+
}
|
|
882
|
+
if (entry.op === "delete") {
|
|
883
|
+
return null;
|
|
884
|
+
}
|
|
885
|
+
if (entry.deltaHash === void 0) {
|
|
886
|
+
if (entry.version === atVersion) return state;
|
|
887
|
+
return null;
|
|
888
|
+
}
|
|
889
|
+
const patch = await this.loadDelta(entry.index);
|
|
890
|
+
if (!patch) {
|
|
891
|
+
return null;
|
|
892
|
+
}
|
|
893
|
+
if (state === null) {
|
|
894
|
+
return null;
|
|
895
|
+
}
|
|
896
|
+
state = applyPatch(state, patch);
|
|
897
|
+
}
|
|
898
|
+
return null;
|
|
899
|
+
}
|
|
900
|
+
/**
|
|
901
|
+
* Walk the chain from genesis forward and verify every link.
|
|
902
|
+
*
|
|
903
|
+
* Returns `{ ok: true, head, length }` if every entry's `prevHash`
|
|
904
|
+
* matches the recomputed hash of its predecessor (and the genesis
|
|
905
|
+
* entry's `prevHash` is the empty string).
|
|
906
|
+
*
|
|
907
|
+
* Returns `{ ok: false, divergedAt, expected, actual }` on the first
|
|
908
|
+
* mismatch. `divergedAt` is the 0-based index of the BROKEN entry
|
|
909
|
+
* — entries before that index still verify cleanly; entries at and
|
|
910
|
+
* after `divergedAt` are untrustworthy.
|
|
911
|
+
*
|
|
912
|
+
* This method detects:
|
|
913
|
+
* - Mutated entry content (fields changed)
|
|
914
|
+
* - Reordered entries (if any adjacent pair swaps, the prevHash
|
|
915
|
+
* of the second no longer matches)
|
|
916
|
+
* - Inserted entries (the inserted entry's prevHash likely fails,
|
|
917
|
+
* and the following entry's prevHash definitely fails)
|
|
918
|
+
* - Deleted entries (the entry after the deletion sees a wrong
|
|
919
|
+
* prevHash)
|
|
920
|
+
*
|
|
921
|
+
* It does NOT detect:
|
|
922
|
+
* - Tampering with the DATA collections that bypassed the ledger
|
|
923
|
+
* entirely (e.g., an attacker who modifies records without
|
|
924
|
+
* appending matching ledger entries — this is why we also
|
|
925
|
+
* plan a `verifyIntegrity()` helper in a follow-up)
|
|
926
|
+
* - Truncation of the chain at the tail (dropping the last N
|
|
927
|
+
* entries leaves a shorter but still consistent chain). External
|
|
928
|
+
* anchoring of `head.hash` to a trusted service is the defense
|
|
929
|
+
* against this.
|
|
930
|
+
*/
|
|
931
|
+
async verify() {
|
|
932
|
+
const entries = await this.loadAllEntries();
|
|
933
|
+
let expectedPrevHash = "";
|
|
934
|
+
for (let i = 0; i < entries.length; i++) {
|
|
935
|
+
const entry = entries[i];
|
|
936
|
+
if (!entry) continue;
|
|
937
|
+
if (entry.prevHash !== expectedPrevHash) {
|
|
938
|
+
return {
|
|
939
|
+
ok: false,
|
|
940
|
+
divergedAt: i,
|
|
941
|
+
expected: expectedPrevHash,
|
|
942
|
+
actual: entry.prevHash
|
|
943
|
+
};
|
|
944
|
+
}
|
|
945
|
+
if (entry.index !== i) {
|
|
946
|
+
return {
|
|
947
|
+
ok: false,
|
|
948
|
+
divergedAt: i,
|
|
949
|
+
expected: `index=${i}`,
|
|
950
|
+
actual: `index=${entry.index}`
|
|
951
|
+
};
|
|
952
|
+
}
|
|
953
|
+
expectedPrevHash = await hashEntry(entry);
|
|
954
|
+
}
|
|
955
|
+
return {
|
|
956
|
+
ok: true,
|
|
957
|
+
head: expectedPrevHash,
|
|
958
|
+
length: entries.length
|
|
959
|
+
};
|
|
960
|
+
}
|
|
961
|
+
// ─── Encryption plumbing ─────────────────────────────────────────
|
|
962
|
+
/**
|
|
963
|
+
* Serialize + encrypt a ledger entry into an EncryptedEnvelope. The
|
|
964
|
+
* envelope's `_v` field is set to `entry.index + 1` so the usual
|
|
965
|
+
* optimistic-concurrency machinery has a reasonable version number
|
|
966
|
+
* to compare against (the ledger is append-only, so concurrent
|
|
967
|
+
* writes should always bump the index).
|
|
968
|
+
*/
|
|
969
|
+
async encryptEntry(entry) {
|
|
970
|
+
const json = canonicalJson(entry);
|
|
971
|
+
if (!this.encrypted) {
|
|
972
|
+
return {
|
|
973
|
+
_noydb: NOYDB_FORMAT_VERSION,
|
|
974
|
+
_v: entry.index + 1,
|
|
975
|
+
_ts: entry.ts,
|
|
976
|
+
_iv: "",
|
|
977
|
+
_data: json,
|
|
978
|
+
_by: entry.actor
|
|
979
|
+
};
|
|
980
|
+
}
|
|
981
|
+
const dek = await this.getDEK(LEDGER_COLLECTION);
|
|
982
|
+
const { iv, data } = await encrypt(json, dek);
|
|
983
|
+
return {
|
|
984
|
+
_noydb: NOYDB_FORMAT_VERSION,
|
|
985
|
+
_v: entry.index + 1,
|
|
986
|
+
_ts: entry.ts,
|
|
987
|
+
_iv: iv,
|
|
988
|
+
_data: data,
|
|
989
|
+
_by: entry.actor
|
|
990
|
+
};
|
|
991
|
+
}
|
|
992
|
+
/** Decrypt an envelope into a LedgerEntry. Throws on bad key / tamper. */
|
|
993
|
+
async decryptEntry(envelope) {
|
|
994
|
+
if (!this.encrypted) {
|
|
995
|
+
return JSON.parse(envelope._data);
|
|
996
|
+
}
|
|
997
|
+
const dek = await this.getDEK(LEDGER_COLLECTION);
|
|
998
|
+
const json = await decrypt(envelope._iv, envelope._data, dek);
|
|
999
|
+
return JSON.parse(json);
|
|
1000
|
+
}
|
|
1001
|
+
};
|
|
1002
|
+
/**
 * Exponential backoff with jitter for append retries: the base delay
 * doubles per attempt (5ms, 10ms, 20ms, …) and a random extra of up
 * to one base is added, so the actual sleep is in [base, 2*base).
 */
function sleepBackoff(attempt) {
  const baseDelayMs = 5 * 2 ** attempt;
  const jitterMs = Math.random() * baseDelayMs;
  return new Promise((resolve) => {
    setTimeout(resolve, baseDelayMs + jitterMs);
  });
}
|
|
1007
|
+
|
|
1008
|
+
// src/history/time-machine.ts
|
|
1009
|
+
/**
 * Read-only, point-in-time view over a whole vault. Holds the engine
 * and the target timestamp and hands out per-collection views.
 */
var VaultInstant = class {
  engine; // owning engine (adapter, crypto, ledger access)
  timestamp; // ISO-8601 instant this view is frozen at
  constructor(engine, timestamp) {
    this.engine = engine;
    this.timestamp = timestamp;
  }
  /** Get a point-in-time view of a collection. */
  collection(name) {
    const { engine, timestamp } = this;
    return new CollectionInstant(engine, timestamp, name);
  }
};
|
|
1021
|
+
/**
 * Read-only, point-in-time view of one collection. Reads resolve the
 * record state as of `targetTs`; all write methods throw
 * `ReadOnlyAtInstantError`.
 */
var CollectionInstant = class {
  constructor(engine, targetTs, name) {
    this.engine = engine;
    this.targetTs = targetTs;
    this.name = name;
  }
  engine; // owning engine (adapter, crypto, ledger access)
  targetTs; // ISO-8601 instant this view is frozen at
  name; // collection name within the vault
  /**
   * Return the record as it existed at the target timestamp, or
   * `null` if the record had not been created yet or had already been
   * deleted by then.
   */
  async get(id) {
    const envelope = await this.resolveEnvelope(id);
    if (!envelope) return null;
    const plaintext = this.engine.encrypted ? await decrypt(envelope._iv, envelope._data, await this.engine.getDEK(this.name)) : envelope._data;
    return JSON.parse(plaintext);
  }
  /**
   * IDs of records that existed (had at least one `put` and were not
   * subsequently deleted) at the target timestamp.
   *
   * Implemented as a linear scan over history + ledger. Performance
   * is bounded by total history size (not live-vault size), so the
   * memory-first vault-scale cap (1K–50K records × average history
   * depth) still applies.
   */
  async list() {
    const historyIds = await collectHistoryIds(this.engine.adapter, this.engine.name, this.name);
    const liveIds = await this.engine.adapter.list(this.engine.name, this.name);
    const candidateIds = /* @__PURE__ */ new Set([...historyIds, ...liveIds]);
    const alive = [];
    for (const id of candidateIds) {
      const env = await this.resolveEnvelope(id);
      if (env) alive.push(id);
    }
    return alive.sort();
  }
  // ── write guards ───────────────────────────────────────────────────
  // An instant view is read-only: every mutating method throws.
  async put(_id, _record) {
    throw new ReadOnlyAtInstantError("put", this.targetTs);
  }
  async delete(_id) {
    throw new ReadOnlyAtInstantError("delete", this.targetTs);
  }
  async update(_id, _patch) {
    throw new ReadOnlyAtInstantError("update", this.targetTs);
  }
  // ── internals ─────────────────────────────────────────────────────
  /**
   * Return the envelope that represents the record's state at
   * `targetTs`, accounting for deletes. `null` if the record didn't
   * exist at that instant.
   *
   * ## Why we use the ledger as the authoritative timeline
   *
   * The per-version history snapshots saved by `saveHistory()` do
   * carry a `_ts` field, but that timestamp is the moment the
   * snapshot was *captured* (i.e. the instant right before the
   * subsequent overwrite), not the original write time. The ledger,
   * by contrast, records `ts` at the moment of each `put` / `delete`
   * — it's the only source that tracks the real timeline. So:
   *
   * 1. Walk the ledger; find the latest entry for `(collection, id)`
   *    with `ts ≤ targetTs`.
   * 2. If that entry is a `delete`, the record was gone at the
   *    target instant — return null.
   * 3. Otherwise it's a `put` with a specific `version`. Load the
   *    envelope for that version from history, falling back to the
   *    live collection for the most recent version.
   *
   * ## Fallback when the ledger is disabled
   *
   * If the vault has history disabled, `getLedger()` returns null and
   * we fall back to comparing envelope `_ts` fields. This is
   * approximate and gets the *last write* right but may confuse the
   * intermediate versions; adopters needing accurate time-machine
   * reads should leave history enabled.
   */
  async resolveEnvelope(id) {
    const ledger = this.engine.getLedger();
    if (ledger) {
      return this.resolveViaLedger(id, ledger);
    }
    return this.resolveViaEnvelopeTs(id);
  }
  async resolveViaLedger(id, ledger) {
    const entries = await ledger.entries();
    let latest = null;
    for (const e of entries) {
      if (e.collection !== this.name || e.id !== id) continue;
      // NOTE(review): the early break assumes entry `ts` values are
      // non-decreasing in index order (true for an append-only ledger
      // barring writer clock skew) — confirm against append().
      if (e.ts > this.targetTs) break;
      latest = { op: e.op, version: e.version };
    }
    if (!latest) return null;
    if (latest.op === "delete") return null;
    return this.loadVersion(id, latest.version);
  }
  async resolveViaEnvelopeTs(id) {
    const history = await getHistory(
      this.engine.adapter,
      this.engine.name,
      this.name,
      id
    );
    const live = await this.engine.adapter.get(this.engine.name, this.name, id);
    // Dedupe by version (live wins over a same-version snapshot),
    // then sort newest-first by `_ts` and take the first envelope
    // at or before the target instant.
    const byVersion = /* @__PURE__ */ new Map();
    for (const e of history) byVersion.set(e._v, e);
    if (live) byVersion.set(live._v, live);
    const sorted = [...byVersion.values()].sort(
      (a, b) => a._ts < b._ts ? 1 : a._ts > b._ts ? -1 : 0
    );
    return sorted.find((e) => e._ts <= this.targetTs) ?? null;
  }
  /**
   * Fetch the envelope for a specific version. The live record (most
   * recent put) lives in the main collection; prior versions live in
   * `_history`. We check live first because the common case after a
   * delete is that we're trying to load the last-live version from
   * history, and skipping live for the current-version case avoids a
   * redundant lookup.
   */
  async loadVersion(id, version) {
    const live = await this.engine.adapter.get(this.engine.name, this.name, id);
    if (live && live._v === version) return live;
    const historyId2 = `${this.name}:${id}:${String(version).padStart(10, "0")}`;
    return await this.engine.adapter.get(this.engine.name, "_history", historyId2);
  }
};
|
|
1152
|
+
/**
 * Collect the distinct record IDs that have at least one history
 * snapshot for `collection`. History keys have the shape
 * `<collection>:<id>:<paddedVersion>`; the ID is everything between
 * the collection prefix and the LAST colon (so IDs containing `:`
 * survive intact). Keys without a version segment are skipped.
 */
async function collectHistoryIds(adapter, vault, collection) {
  const prefix = `${collection}:`;
  const allKeys = await adapter.list(vault, "_history");
  const ids = allKeys
    .filter((key) => key.startsWith(prefix))
    .map((key) => ({ key, sep: key.lastIndexOf(":") }))
    .filter(({ sep }) => sep > prefix.length)
    .map(({ key, sep }) => key.slice(prefix.length, sep));
  return [...new Set(ids)];
}
|
|
1165
|
+
|
|
1166
|
+
// src/history/active.ts
|
|
1167
|
+
/**
 * Build the history feature bundle: the module's history helpers plus
 * factories for the ledger store and the point-in-time vault view.
 * Returned as a plain object so the engine can feature-detect and
 * tree-shake history support.
 */
function withHistory() {
  // Factory for the hash-chained ledger backing delta history.
  const buildLedger = (opts) => new LedgerStore({
    adapter: opts.adapter,
    vault: opts.vault,
    encrypted: opts.encrypted,
    getDEK: opts.getDEK,
    actor: opts.actor
  });
  // Factory for a read-only view of the vault at a past instant.
  const buildVaultInstant = (engine, timestamp) => new VaultInstant(engine, timestamp);
  return {
    saveHistory,
    getHistoryEntries: getHistory,
    getVersionEnvelope,
    pruneHistory,
    clearHistory,
    envelopePayloadHash,
    computePatch,
    diff,
    buildLedger,
    buildVaultInstant
  };
}
|
|
1191
|
+
// Annotate the CommonJS export names for ESM import in node:
// `0 && (...)` is a dead branch — never executed at runtime — that
// exists only so Node's static CJS named-export scanner can discover
// these names when the bundle is imported from ESM.
0 && (module.exports = {
  CollectionInstant,
  LEDGER_COLLECTION,
  LEDGER_DELTAS_COLLECTION,
  LedgerStore,
  VaultInstant,
  applyPatch,
  canonicalJson,
  clearHistory,
  computePatch,
  diff,
  envelopePayloadHash,
  formatDiff,
  getHistory,
  getVersionEnvelope,
  hashEntry,
  paddedIndex,
  parseIndex,
  pruneHistory,
  saveHistory,
  sha256Hex,
  withHistory
});
//# sourceMappingURL=index.cjs.map
|