@noy-db/hub 0.1.0-pre.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +197 -0
- package/dist/aggregate/index.cjs +476 -0
- package/dist/aggregate/index.cjs.map +1 -0
- package/dist/aggregate/index.d.cts +38 -0
- package/dist/aggregate/index.d.ts +38 -0
- package/dist/aggregate/index.js +53 -0
- package/dist/aggregate/index.js.map +1 -0
- package/dist/blobs/index.cjs +1480 -0
- package/dist/blobs/index.cjs.map +1 -0
- package/dist/blobs/index.d.cts +45 -0
- package/dist/blobs/index.d.ts +45 -0
- package/dist/blobs/index.js +48 -0
- package/dist/blobs/index.js.map +1 -0
- package/dist/bundle/index.cjs +436 -0
- package/dist/bundle/index.cjs.map +1 -0
- package/dist/bundle/index.d.cts +7 -0
- package/dist/bundle/index.d.ts +7 -0
- package/dist/bundle/index.js +40 -0
- package/dist/bundle/index.js.map +1 -0
- package/dist/chunk-2QR2PQTT.js +217 -0
- package/dist/chunk-2QR2PQTT.js.map +1 -0
- package/dist/chunk-4OWFYIDQ.js +79 -0
- package/dist/chunk-4OWFYIDQ.js.map +1 -0
- package/dist/chunk-5AATM2M2.js +90 -0
- package/dist/chunk-5AATM2M2.js.map +1 -0
- package/dist/chunk-ACLDOTNQ.js +543 -0
- package/dist/chunk-ACLDOTNQ.js.map +1 -0
- package/dist/chunk-BTDCBVJW.js +160 -0
- package/dist/chunk-BTDCBVJW.js.map +1 -0
- package/dist/chunk-CIMZBAZB.js +72 -0
- package/dist/chunk-CIMZBAZB.js.map +1 -0
- package/dist/chunk-E445ICYI.js +365 -0
- package/dist/chunk-E445ICYI.js.map +1 -0
- package/dist/chunk-EXQRC2L4.js +722 -0
- package/dist/chunk-EXQRC2L4.js.map +1 -0
- package/dist/chunk-FZU343FL.js +32 -0
- package/dist/chunk-FZU343FL.js.map +1 -0
- package/dist/chunk-GJILMRPO.js +354 -0
- package/dist/chunk-GJILMRPO.js.map +1 -0
- package/dist/chunk-GOUT6DND.js +1285 -0
- package/dist/chunk-GOUT6DND.js.map +1 -0
- package/dist/chunk-J66GRPNH.js +111 -0
- package/dist/chunk-J66GRPNH.js.map +1 -0
- package/dist/chunk-M2F2JAWB.js +464 -0
- package/dist/chunk-M2F2JAWB.js.map +1 -0
- package/dist/chunk-M5INGEFC.js +84 -0
- package/dist/chunk-M5INGEFC.js.map +1 -0
- package/dist/chunk-M62XNWRA.js +72 -0
- package/dist/chunk-M62XNWRA.js.map +1 -0
- package/dist/chunk-MR4424N3.js +275 -0
- package/dist/chunk-MR4424N3.js.map +1 -0
- package/dist/chunk-NPC4LFV5.js +132 -0
- package/dist/chunk-NPC4LFV5.js.map +1 -0
- package/dist/chunk-NXFEYLVG.js +311 -0
- package/dist/chunk-NXFEYLVG.js.map +1 -0
- package/dist/chunk-R36SIKES.js +79 -0
- package/dist/chunk-R36SIKES.js.map +1 -0
- package/dist/chunk-TDR6T5CJ.js +381 -0
- package/dist/chunk-TDR6T5CJ.js.map +1 -0
- package/dist/chunk-UF3BUNQZ.js +1 -0
- package/dist/chunk-UF3BUNQZ.js.map +1 -0
- package/dist/chunk-UQFSPSWG.js +1109 -0
- package/dist/chunk-UQFSPSWG.js.map +1 -0
- package/dist/chunk-USKYUS74.js +793 -0
- package/dist/chunk-USKYUS74.js.map +1 -0
- package/dist/chunk-XCL3WP6J.js +121 -0
- package/dist/chunk-XCL3WP6J.js.map +1 -0
- package/dist/chunk-XHFOENR2.js +680 -0
- package/dist/chunk-XHFOENR2.js.map +1 -0
- package/dist/chunk-ZFKD4QMV.js +430 -0
- package/dist/chunk-ZFKD4QMV.js.map +1 -0
- package/dist/chunk-ZLMV3TUA.js +490 -0
- package/dist/chunk-ZLMV3TUA.js.map +1 -0
- package/dist/chunk-ZRG4V3F5.js +17 -0
- package/dist/chunk-ZRG4V3F5.js.map +1 -0
- package/dist/consent/index.cjs +204 -0
- package/dist/consent/index.cjs.map +1 -0
- package/dist/consent/index.d.cts +24 -0
- package/dist/consent/index.d.ts +24 -0
- package/dist/consent/index.js +23 -0
- package/dist/consent/index.js.map +1 -0
- package/dist/crdt/index.cjs +152 -0
- package/dist/crdt/index.cjs.map +1 -0
- package/dist/crdt/index.d.cts +30 -0
- package/dist/crdt/index.d.ts +30 -0
- package/dist/crdt/index.js +24 -0
- package/dist/crdt/index.js.map +1 -0
- package/dist/crypto-IVKU7YTT.js +44 -0
- package/dist/crypto-IVKU7YTT.js.map +1 -0
- package/dist/delegation-XDJCBTI2.js +16 -0
- package/dist/delegation-XDJCBTI2.js.map +1 -0
- package/dist/dev-unlock-CeXic1xC.d.cts +263 -0
- package/dist/dev-unlock-KrKkcqD3.d.ts +263 -0
- package/dist/hash-9KO1BGxh.d.cts +63 -0
- package/dist/hash-ChfJjRjQ.d.ts +63 -0
- package/dist/history/index.cjs +1215 -0
- package/dist/history/index.cjs.map +1 -0
- package/dist/history/index.d.cts +62 -0
- package/dist/history/index.d.ts +62 -0
- package/dist/history/index.js +79 -0
- package/dist/history/index.js.map +1 -0
- package/dist/i18n/index.cjs +746 -0
- package/dist/i18n/index.cjs.map +1 -0
- package/dist/i18n/index.d.cts +38 -0
- package/dist/i18n/index.d.ts +38 -0
- package/dist/i18n/index.js +55 -0
- package/dist/i18n/index.js.map +1 -0
- package/dist/index-BRHBCmLt.d.ts +1940 -0
- package/dist/index-C8kQtmOk.d.ts +380 -0
- package/dist/index-DN-J-5wT.d.cts +1940 -0
- package/dist/index-DhjMjz7L.d.cts +380 -0
- package/dist/index.cjs +14756 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +269 -0
- package/dist/index.d.ts +269 -0
- package/dist/index.js +6085 -0
- package/dist/index.js.map +1 -0
- package/dist/indexing/index.cjs +736 -0
- package/dist/indexing/index.cjs.map +1 -0
- package/dist/indexing/index.d.cts +36 -0
- package/dist/indexing/index.d.ts +36 -0
- package/dist/indexing/index.js +77 -0
- package/dist/indexing/index.js.map +1 -0
- package/dist/lazy-builder-BwEoBQZ9.d.ts +304 -0
- package/dist/lazy-builder-CZVLKh0Z.d.cts +304 -0
- package/dist/ledger-2NX4L7PN.js +33 -0
- package/dist/ledger-2NX4L7PN.js.map +1 -0
- package/dist/mime-magic-CBBSOkjm.d.cts +50 -0
- package/dist/mime-magic-CBBSOkjm.d.ts +50 -0
- package/dist/periods/index.cjs +1035 -0
- package/dist/periods/index.cjs.map +1 -0
- package/dist/periods/index.d.cts +21 -0
- package/dist/periods/index.d.ts +21 -0
- package/dist/periods/index.js +25 -0
- package/dist/periods/index.js.map +1 -0
- package/dist/predicate-SBHmi6D0.d.cts +161 -0
- package/dist/predicate-SBHmi6D0.d.ts +161 -0
- package/dist/query/index.cjs +1957 -0
- package/dist/query/index.cjs.map +1 -0
- package/dist/query/index.d.cts +3 -0
- package/dist/query/index.d.ts +3 -0
- package/dist/query/index.js +62 -0
- package/dist/query/index.js.map +1 -0
- package/dist/session/index.cjs +487 -0
- package/dist/session/index.cjs.map +1 -0
- package/dist/session/index.d.cts +45 -0
- package/dist/session/index.d.ts +45 -0
- package/dist/session/index.js +44 -0
- package/dist/session/index.js.map +1 -0
- package/dist/shadow/index.cjs +133 -0
- package/dist/shadow/index.cjs.map +1 -0
- package/dist/shadow/index.d.cts +16 -0
- package/dist/shadow/index.d.ts +16 -0
- package/dist/shadow/index.js +20 -0
- package/dist/shadow/index.js.map +1 -0
- package/dist/store/index.cjs +1069 -0
- package/dist/store/index.cjs.map +1 -0
- package/dist/store/index.d.cts +491 -0
- package/dist/store/index.d.ts +491 -0
- package/dist/store/index.js +34 -0
- package/dist/store/index.js.map +1 -0
- package/dist/strategy-BSxFXGzb.d.cts +110 -0
- package/dist/strategy-BSxFXGzb.d.ts +110 -0
- package/dist/strategy-D-SrOLCl.d.cts +548 -0
- package/dist/strategy-D-SrOLCl.d.ts +548 -0
- package/dist/sync/index.cjs +1062 -0
- package/dist/sync/index.cjs.map +1 -0
- package/dist/sync/index.d.cts +42 -0
- package/dist/sync/index.d.ts +42 -0
- package/dist/sync/index.js +28 -0
- package/dist/sync/index.js.map +1 -0
- package/dist/team/index.cjs +1233 -0
- package/dist/team/index.cjs.map +1 -0
- package/dist/team/index.d.cts +117 -0
- package/dist/team/index.d.ts +117 -0
- package/dist/team/index.js +39 -0
- package/dist/team/index.js.map +1 -0
- package/dist/tx/index.cjs +212 -0
- package/dist/tx/index.cjs.map +1 -0
- package/dist/tx/index.d.cts +20 -0
- package/dist/tx/index.d.ts +20 -0
- package/dist/tx/index.js +20 -0
- package/dist/tx/index.js.map +1 -0
- package/dist/types-BZpCZB8N.d.ts +7526 -0
- package/dist/types-Bfs0qr5F.d.cts +7526 -0
- package/dist/ulid-COREQ2RQ.js +9 -0
- package/dist/ulid-COREQ2RQ.js.map +1 -0
- package/dist/util/index.cjs +230 -0
- package/dist/util/index.cjs.map +1 -0
- package/dist/util/index.d.cts +77 -0
- package/dist/util/index.d.ts +77 -0
- package/dist/util/index.js +190 -0
- package/dist/util/index.js.map +1 -0
- package/package.json +244 -0
|
@@ -0,0 +1,1035 @@
|
|
|
1
|
+
"use strict";
// ---- esbuild/tsup CommonJS runtime helpers ----
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Lazy module initializer: `fn` holds exactly one named function (the
// module body). The first __init() call runs it, caches its return
// value in `res`, and clears `fn` so later calls are no-ops that just
// return the cached value.
var __esm = (fn, res) => function __init() {
  if (fn) {
    const body = fn[__getOwnPropNames(fn)[0]];
    res = body(fn = 0);
  }
  return res;
};
// Define lazy, enumerable getters on `target` for every key in `all`.
var __export = (target, all) => {
  for (const name in all) {
    __defProp(target, name, { get: all[name], enumerable: true });
  }
};
// Copy own properties of `from` onto `to` as live getters, skipping
// keys already present on `to` and the optional `except` key, and
// preserving each source property's enumerability.
var __copyProps = (to, from, except, desc) => {
  if (from && (typeof from === "object" || typeof from === "function")) {
    for (const key of __getOwnPropNames(from)) {
      if (!__hasOwnProp.call(to, key) && key !== except) {
        __defProp(to, key, {
          get: () => from[key],
          enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
        });
      }
    }
  }
  return to;
};
// Wrap an ESM namespace object as a CommonJS exports object tagged
// with `__esModule: true`.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
22
|
+
|
|
23
|
+
// src/types.ts
// Declared here, assigned by the lazy initializer below (bundler
// lazy-ESM pattern: module state lives in hoisted vars, the module
// body runs on first init_types() call).
var NOYDB_FORMAT_VERSION;
var init_types = __esm({
  "src/types.ts"() {
    "use strict";
    // Storage-format version constant for this build of the library.
    NOYDB_FORMAT_VERSION = 1;
  }
});
|
|
31
|
+
|
|
32
|
+
// src/errors.ts
// Error hierarchy: every domain error extends NoydbError and carries a
// stable machine-readable `code` alongside the human-readable message.
var NoydbError, DecryptionError, TamperedError, PeriodClosedError, ConflictError, LedgerContentionError, ValidationError;
var init_errors = __esm({
  "src/errors.ts"() {
    "use strict";
    // Base class for all library errors.
    NoydbError = class extends Error {
      /** Machine-readable error code. Stable across library versions. */
      code;
      constructor(code, message) {
        super(message);
        this.name = "NoydbError";
        this.code = code;
      }
    };
    // Decryption failed for a reason other than a tamper-detected
    // auth-tag mismatch (see decrypt() in src/crypto.ts). Code: DECRYPTION_FAILED.
    DecryptionError = class extends NoydbError {
      constructor(message = "Decryption failed") {
        super("DECRYPTION_FAILED", message);
        this.name = "DecryptionError";
      }
    };
    // Integrity check failed (e.g. AES-GCM auth tag mismatch). Code: TAMPERED.
    TamperedError = class extends NoydbError {
      constructor(message = "Data integrity check failed \u2014 record may have been tampered with") {
        super("TAMPERED", message);
        this.name = "TamperedError";
      }
    };
    // A write targeted a record sealed by a closed accounting period.
    // Carries the period name, its end date, and the record's last-write
    // timestamp. Code: PERIOD_CLOSED.
    PeriodClosedError = class extends NoydbError {
      periodName;
      endDate;
      recordTs;
      constructor(periodName, endDate, recordTs) {
        super(
          "PERIOD_CLOSED",
          `Cannot modify record (last written ${recordTs}) \u2014 sealed by closed period "${periodName}" (endDate: ${endDate}). Post a compensating entry in a new period instead.`
        );
        this.name = "PeriodClosedError";
        this.periodName = periodName;
        this.endDate = endDate;
        this.recordTs = recordTs;
      }
    };
    // Optimistic-concurrency (expectedVersion) check failed. Code: CONFLICT.
    ConflictError = class extends NoydbError {
      /** The actual stored version at the time of conflict. */
      version;
      constructor(version, message = "Version conflict") {
        super("CONFLICT", message);
        this.name = "ConflictError";
        this.version = version;
      }
    };
    // LedgerStore.append exhausted its CAS retry budget (see
    // MAX_APPEND_ATTEMPTS in src/history/ledger/store.ts). Code: LEDGER_CONTENTION.
    LedgerContentionError = class extends NoydbError {
      attempts;
      constructor(attempts) {
        super(
          "LEDGER_CONTENTION",
          `LedgerStore.append: failed to claim a chain slot after ${attempts} optimistic-CAS retries`
        );
        this.name = "LedgerContentionError";
        this.attempts = attempts;
      }
    };
    // Input failed validation. Code: VALIDATION_ERROR.
    ValidationError = class extends NoydbError {
      constructor(message = "Validation error") {
        super("VALIDATION_ERROR", message);
        this.name = "ValidationError";
      }
    };
  }
});
|
|
101
|
+
|
|
102
|
+
// src/crypto.ts
/**
 * AES-GCM-encrypt a UTF-8 string with the given data-encryption key.
 *
 * A fresh random IV is generated per call; IV and ciphertext are
 * returned base64-encoded (the GCM auth tag is embedded in the
 * ciphertext by WebCrypto).
 */
async function encrypt(plaintext, dek) {
  const iv = generateIV();
  const cipherBuffer = await subtle.encrypt(
    { name: "AES-GCM", iv },
    dek,
    new TextEncoder().encode(plaintext)
  );
  return {
    iv: bufferToBase64(iv),
    data: bufferToBase64(cipherBuffer)
  };
}
|
|
116
|
+
/**
 * Decrypt an AES-GCM ciphertext produced by `encrypt`.
 *
 * Throws TamperedError when WebCrypto reports an OperationError
 * (the GCM auth-tag check failed), and DecryptionError for any other
 * failure, preserving the underlying message when available.
 */
async function decrypt(ivBase64, dataBase64, dek) {
  // Base64 decoding happens outside the try so malformed input
  // surfaces as-is rather than being misreported as a crypto failure.
  const iv = base64ToBuffer(ivBase64);
  const payload = base64ToBuffer(dataBase64);
  try {
    const plainBytes = await subtle.decrypt({ name: "AES-GCM", iv }, dek, payload);
    return new TextDecoder().decode(plainBytes);
  } catch (err) {
    if (err instanceof Error && err.name === "OperationError") {
      throw new TamperedError();
    }
    throw new DecryptionError(err instanceof Error ? err.message : "Decryption failed");
  }
}
|
|
135
|
+
// Fresh random IV (nonce) for one AES-GCM encryption. IV_BYTES (12)
// is assigned in init_crypto below.
function generateIV() {
  return globalThis.crypto.getRandomValues(new Uint8Array(IV_BYTES));
}
|
|
138
|
+
/**
 * Encode bytes (Uint8Array or ArrayBuffer) as a base64 string.
 */
function bufferToBase64(buffer) {
  const bytes = buffer instanceof Uint8Array ? buffer : new Uint8Array(buffer);
  let binary = "";
  for (const byte of bytes) {
    binary += String.fromCharCode(byte);
  }
  return btoa(binary);
}
|
|
146
|
+
/**
 * Decode a base64 string into a Uint8Array.
 */
function base64ToBuffer(base64) {
  // atob yields one latin1 character per byte, so per-character
  // charCodeAt is a faithful inverse of bufferToBase64.
  const binary = atob(base64);
  return Uint8Array.from(binary, (ch) => ch.charCodeAt(0));
}
|
|
154
|
+
var IV_BYTES, subtle;
var init_crypto = __esm({
  "src/crypto.ts"() {
    "use strict";
    init_errors();
    // 12-byte (96-bit) IV used for AES-GCM above.
    IV_BYTES = 12;
    // WebCrypto handle; requires a runtime exposing globalThis.crypto
    // (modern browsers, Node 19+, workers).
    subtle = globalThis.crypto.subtle;
  }
});
|
|
163
|
+
|
|
164
|
+
// src/history/ledger/entry.ts
/**
 * Deterministically serialize a JSON value: object keys are emitted in
 * sorted order so equal values always produce an identical string
 * (required for stable hashing). Throws on anything without a faithful
 * JSON encoding: NaN/Infinity, BigInt, undefined, functions, symbols.
 */
function canonicalJson(value) {
  if (value === null) return "null";
  const kind = typeof value;
  switch (kind) {
    case "boolean":
      return value ? "true" : "false";
    case "number":
      if (!Number.isFinite(value)) {
        throw new Error(
          `canonicalJson: refusing to encode non-finite number ${String(value)}`
        );
      }
      return JSON.stringify(value);
    case "string":
      return JSON.stringify(value);
    case "bigint":
      throw new Error("canonicalJson: BigInt is not JSON-serializable");
    case "undefined":
    case "function":
      throw new Error(
        `canonicalJson: refusing to encode ${kind} \u2014 include all fields explicitly`
      );
  }
  if (Array.isArray(value)) {
    return "[" + value.map((item) => canonicalJson(item)).join(",") + "]";
  }
  if (kind === "object") {
    const parts = Object.keys(value)
      .sort()
      .map((key) => JSON.stringify(key) + ":" + canonicalJson(value[key]));
    return "{" + parts.join(",") + "}";
  }
  // Symbols (and any future exotic typeof) land here.
  throw new Error(`canonicalJson: unexpected value type: ${kind}`);
}
|
|
199
|
+
async function sha256Hex(input) {
|
|
200
|
+
const bytes = new TextEncoder().encode(input);
|
|
201
|
+
const digest = await globalThis.crypto.subtle.digest("SHA-256", bytes);
|
|
202
|
+
return bytesToHex(new Uint8Array(digest));
|
|
203
|
+
}
|
|
204
|
+
async function hashEntry(entry) {
|
|
205
|
+
return sha256Hex(canonicalJson(entry));
|
|
206
|
+
}
|
|
207
|
+
function bytesToHex(bytes) {
|
|
208
|
+
const hex = new Array(bytes.length);
|
|
209
|
+
for (let i = 0; i < bytes.length; i++) {
|
|
210
|
+
hex[i] = (bytes[i] ?? 0).toString(16).padStart(2, "0");
|
|
211
|
+
}
|
|
212
|
+
return hex.join("");
|
|
213
|
+
}
|
|
214
|
+
function paddedIndex(index) {
|
|
215
|
+
return String(index).padStart(10, "0");
|
|
216
|
+
}
|
|
217
|
+
function parseIndex(key) {
|
|
218
|
+
return Number.parseInt(key, 10);
|
|
219
|
+
}
|
|
220
|
+
// Lazy initializer for src/history/ledger/entry.ts. The module has no
// module-level state; this exists so importers can trigger it
// uniformly through the bundler's __esm pattern.
var init_entry = __esm({
  "src/history/ledger/entry.ts"() {
    "use strict";
  }
});
|
|
225
|
+
|
|
226
|
+
// src/history/ledger/patch.ts
/**
 * Compute a JSON-Patch-style op list (add/remove/replace) that
 * transforms `prev` into `next`. Paths use JSON Pointer syntax.
 */
function computePatch(prev, next) {
  const ops = [];
  diff(prev, next, "", ops);
  return ops;
}
/**
 * Recursive differ; appends ops to `out`. `path` is the JSON Pointer
 * prefix of the values being compared. Objects are diffed key by key;
 * arrays and primitives are replaced wholesale when they differ.
 */
function diff(prev, next, path, out) {
  if (prev === next) return;
  if (prev === null || next === null) {
    out.push({ op: "replace", path, value: next });
    return;
  }
  const wasArray = Array.isArray(prev);
  const isArray = Array.isArray(next);
  const wasObject = typeof prev === "object" && !wasArray;
  const isObject = typeof next === "object" && !isArray;
  // Container kind changed (array <-> object <-> primitive): replace.
  if (wasArray !== isArray || wasObject !== isObject) {
    out.push({ op: "replace", path, value: next });
    return;
  }
  if (wasArray) {
    // Arrays are treated as atomic values.
    if (!arrayDeepEqual(prev, next)) {
      out.push({ op: "replace", path, value: next });
    }
    return;
  }
  if (wasObject) {
    // Keys present before: removed, or recursively diffed.
    for (const key of Object.keys(prev)) {
      const childPath = path + "/" + escapePathSegment(key);
      if (key in next) {
        diff(prev[key], next[key], childPath, out);
      } else {
        out.push({ op: "remove", path: childPath });
      }
    }
    // Keys only present after: additions.
    for (const key of Object.keys(next)) {
      if (!(key in prev)) {
        out.push({
          op: "add",
          path: path + "/" + escapePathSegment(key),
          value: next[key]
        });
      }
    }
    return;
  }
  // Two differing primitives of the same typeof.
  out.push({ op: "replace", path, value: next });
}
// True when two arrays have equal length and pairwise deep-equal elements.
function arrayDeepEqual(a, b) {
  if (a.length !== b.length) return false;
  for (const [i, item] of a.entries()) {
    if (!deepEqual(item, b[i])) return false;
  }
  return true;
}
// Structural equality for JSON values (objects compared by own keys).
function deepEqual(a, b) {
  if (a === b) return true;
  if (a === null || b === null) return false;
  if (typeof a !== typeof b) return false;
  if (typeof a !== "object") return false;
  const aIsArray = Array.isArray(a);
  if (aIsArray !== Array.isArray(b)) return false;
  if (aIsArray) return arrayDeepEqual(a, b);
  const keysA = Object.keys(a);
  if (keysA.length !== Object.keys(b).length) return false;
  return keysA.every((key) => key in b && deepEqual(a[key], b[key]));
}
|
|
305
|
+
/**
 * Apply a JSON-Patch (subset: add/remove/replace) to `base`, returning
 * a new document. `base` is never mutated: the starting point is a
 * deep clone and every op copies the containers it touches.
 */
function applyPatch(base, patch) {
  let doc = clone(base);
  for (const op of patch) {
    doc = applyOp(doc, op);
  }
  return doc;
}
// Apply a single op. An empty path addresses the whole document:
// remove yields null, add/replace swap in the op's value.
function applyOp(doc, op) {
  if (op.path === "") {
    return op.op === "remove" ? null : clone(op.value);
  }
  return walkAndApply(doc, parsePath(op.path), op);
}
// Walk down `segments`, copying each container on the way, and apply
// `op` at the final segment.
function walkAndApply(doc, segments, op) {
  if (segments.length === 0) {
    throw new Error("walkAndApply: empty segments (internal error)");
  }
  const head = segments[0];
  const rest = segments.slice(1);
  if (head === undefined) throw new Error("walkAndApply: undefined segment");
  if (rest.length === 0) {
    return applyAtTerminal(doc, head, op);
  }
  if (Array.isArray(doc)) {
    const idx = parseArrayIndex(head, doc.length);
    const updated = doc.slice();
    updated[idx] = walkAndApply(doc[idx], rest, op);
    return updated;
  }
  if (doc !== null && typeof doc === "object") {
    if (!(head in doc)) {
      throw new Error(`applyPatch: path segment "${head}" not found in object`);
    }
    return { ...doc, [head]: walkAndApply(doc[head], rest, op) };
  }
  throw new Error(
    `applyPatch: cannot step into ${typeof doc} at segment "${head}"`
  );
}
// Apply `op` directly on `doc` at `segment` (the last path segment).
function applyAtTerminal(doc, segment, op) {
  if (Array.isArray(doc)) {
    // "-" means "append"; otherwise the index may be at most
    // doc.length (one past the end — valid only for add).
    const idx = segment === "-" ? doc.length : parseArrayIndex(segment, doc.length + 1);
    const updated = doc.slice();
    if (op.op === "remove") {
      updated.splice(idx, 1);
      return updated;
    }
    if (op.op === "add") {
      updated.splice(idx, 0, clone(op.value));
      return updated;
    }
    if (op.op === "replace") {
      if (idx >= doc.length) {
        throw new Error(
          `applyPatch: replace at out-of-bounds array index ${idx}`
        );
      }
      updated[idx] = clone(op.value);
      return updated;
    }
  }
  if (doc !== null && typeof doc === "object") {
    if (op.op === "remove") {
      if (!(segment in doc)) {
        throw new Error(
          `applyPatch: remove on missing key "${segment}"`
        );
      }
      const updated = { ...doc };
      delete updated[segment];
      return updated;
    }
    if (op.op === "add") {
      return { ...doc, [segment]: clone(op.value) };
    }
    if (op.op === "replace") {
      if (!(segment in doc)) {
        throw new Error(
          `applyPatch: replace on missing key "${segment}"`
        );
      }
      return { ...doc, [segment]: clone(op.value) };
    }
  }
  throw new Error(
    `applyPatch: cannot apply ${op.op} at terminal segment "${segment}"`
  );
}
// JSON Pointer escaping (RFC 6901): "~" -> "~0", "/" -> "~1".
function escapePathSegment(segment) {
  return segment.replaceAll("~", "~0").replaceAll("/", "~1");
}
// Inverse of escapePathSegment; note "~1" is decoded before "~0".
function unescapePathSegment(segment) {
  return segment.replaceAll("~1", "/").replaceAll("~0", "~");
}
// Split a JSON Pointer into unescaped segments; must start with "/".
function parsePath(path) {
  if (!path.startsWith("/")) {
    throw new Error(`applyPatch: path must start with '/', got "${path}"`);
  }
  return path.slice(1).split("/").map(unescapePathSegment);
}
// Parse an array index segment, enforcing 0 <= idx <= max.
function parseArrayIndex(segment, max) {
  if (!/^\d+$/.test(segment)) {
    throw new Error(
      `applyPatch: array index must be a non-negative integer, got "${segment}"`
    );
  }
  const idx = Number.parseInt(segment, 10);
  if (idx < 0 || idx > max) {
    throw new Error(
      `applyPatch: array index ${idx} out of range [0, ${max}]`
    );
  }
  return idx;
}
// Deep-clone a JSON value; null/undefined/primitives pass through.
function clone(value) {
  return typeof value === "object" && value !== null
    ? JSON.parse(JSON.stringify(value))
    : value;
}
|
|
430
|
+
// Lazy initializer for src/history/ledger/patch.ts. No module-level
// state; present only for uniform __esm-based module wiring.
var init_patch = __esm({
  "src/history/ledger/patch.ts"() {
    "use strict";
  }
});
|
|
435
|
+
|
|
436
|
+
// src/history/ledger/constants.ts
// Reserved collection names used by LedgerStore: chain entries live in
// LEDGER_COLLECTION, their delta payloads in LEDGER_DELTAS_COLLECTION
// (same padded index as the entry they belong to).
var LEDGER_COLLECTION, LEDGER_DELTAS_COLLECTION;
var init_constants = __esm({
  "src/history/ledger/constants.ts"() {
    "use strict";
    LEDGER_COLLECTION = "_ledger";
    LEDGER_DELTAS_COLLECTION = "_ledger_deltas";
  }
});
|
|
445
|
+
|
|
446
|
+
// src/history/ledger/hash.ts
/**
 * SHA-256 (hex) of an envelope's encrypted `_data` field. A missing or
 * falsy envelope hashes to the empty string.
 */
async function envelopePayloadHash(envelope) {
  return envelope ? sha256Hex(envelope._data) : "";
}
|
|
451
|
+
// Lazy initializer for src/history/ledger/hash.ts: runs the entry
// module's initializer first so sha256Hex is available.
var init_hash = __esm({
  "src/history/ledger/hash.ts"() {
    "use strict";
    init_entry();
  }
});
|
|
457
|
+
|
|
458
|
+
// src/history/ledger/store.ts
|
|
459
|
+
/**
 * Exponential backoff with random jitter for append retries: resolves
 * after between base and 2*base milliseconds, where
 * base = 5 * 2^attempt.
 */
function sleepBackoff(attempt) {
  const base = 5 * 2 ** attempt;
  const delayMs = base + Math.random() * base;
  return new Promise((resolve) => {
    setTimeout(resolve, delayMs);
  });
}
|
|
464
|
+
var MAX_APPEND_ATTEMPTS, LedgerStore;
|
|
465
|
+
var init_store = __esm({
|
|
466
|
+
"src/history/ledger/store.ts"() {
|
|
467
|
+
"use strict";
|
|
468
|
+
init_types();
|
|
469
|
+
init_crypto();
|
|
470
|
+
init_errors();
|
|
471
|
+
init_entry();
|
|
472
|
+
init_patch();
|
|
473
|
+
init_constants();
|
|
474
|
+
init_hash();
|
|
475
|
+
MAX_APPEND_ATTEMPTS = 8;
|
|
476
|
+
LedgerStore = class {
|
|
477
|
+
adapter;
|
|
478
|
+
vault;
|
|
479
|
+
encrypted;
|
|
480
|
+
getDEK;
|
|
481
|
+
actor;
|
|
482
|
+
/**
|
|
483
|
+
* In-memory cache of the chain head — the most recently appended
|
|
484
|
+
* entry along with its precomputed hash. Without this, every
|
|
485
|
+
* `append()` would re-load every prior entry to recompute the
|
|
486
|
+
* prevHash, making N puts O(N²) — a 1K-record stress test goes from
|
|
487
|
+
* < 100ms to a multi-second timeout.
|
|
488
|
+
*
|
|
489
|
+
* The cache is populated on first read (`append`, `head`, `verify`)
|
|
490
|
+
* and updated in-place on every successful `append`. Single-writer
|
|
491
|
+
* usage (the assumption) keeps it consistent. A second
|
|
492
|
+
* LedgerStore instance writing to the same vault would not
|
|
493
|
+
* see the first instance's appends in its cached state — that's the
|
|
494
|
+
* concurrency caveat documented at the class level.
|
|
495
|
+
*
|
|
496
|
+
* Sentinel `undefined` means "not yet loaded"; an explicit `null`
|
|
497
|
+
* value means "loaded and confirmed empty" — distinguishing these
|
|
498
|
+
* matters because an empty ledger is a valid state (genesis prevHash
|
|
499
|
+
* is the empty string), and we don't want to re-scan the adapter
|
|
500
|
+
* just because the chain is freshly initialized.
|
|
501
|
+
*/
|
|
502
|
+
headCache = void 0;
|
|
503
|
+
constructor(opts) {
|
|
504
|
+
this.adapter = opts.adapter;
|
|
505
|
+
this.vault = opts.vault;
|
|
506
|
+
this.encrypted = opts.encrypted;
|
|
507
|
+
this.getDEK = opts.getDEK;
|
|
508
|
+
this.actor = opts.actor;
|
|
509
|
+
}
|
|
510
|
+
/**
|
|
511
|
+
* Lazily load (or return cached) the current chain head. The cache
|
|
512
|
+
* sentinel is `undefined` until first access; after the first call,
|
|
513
|
+
* the cache holds either a `{ entry, hash }` for non-empty ledgers
|
|
514
|
+
* or `null` for empty ones.
|
|
515
|
+
*/
|
|
516
|
+
async getCachedHead() {
|
|
517
|
+
if (this.headCache !== void 0) return this.headCache;
|
|
518
|
+
const entries = await this.loadAllEntries();
|
|
519
|
+
const last = entries[entries.length - 1];
|
|
520
|
+
if (!last) {
|
|
521
|
+
this.headCache = null;
|
|
522
|
+
return null;
|
|
523
|
+
}
|
|
524
|
+
this.headCache = { entry: last, hash: await hashEntry(last) };
|
|
525
|
+
return this.headCache;
|
|
526
|
+
}
|
|
527
|
+
/**
|
|
528
|
+
* Append a new entry to the ledger. Returns the full entry that was
|
|
529
|
+
* written (with its assigned index and computed prevHash) so the
|
|
530
|
+
* caller can use the hash for downstream purposes (e.g., embedding
|
|
531
|
+
* in a verifiable backup).
|
|
532
|
+
*
|
|
533
|
+
* This is the **only** way to add entries. Direct adapter writes to
|
|
534
|
+
* `_ledger/` would bypass the chain math and would be caught by the
|
|
535
|
+
* next `verify()` call as a divergence.
|
|
536
|
+
*
|
|
537
|
+
* ## Multi-writer correctness
|
|
538
|
+
*
|
|
539
|
+
* Append is implemented as an optimistic-CAS retry loop. On every
|
|
540
|
+
* attempt:
|
|
541
|
+
*
|
|
542
|
+
* 1. Read fresh head (cache invalidated on retry).
|
|
543
|
+
* 2. Compute `nextIndex = head.index + 1`, `prevHash = hash(head)`.
|
|
544
|
+
* 3. Encrypt delta payload IN MEMORY (no adapter write yet) so we
|
|
545
|
+
* can compute `deltaHash` before claiming the chain slot.
|
|
546
|
+
* 4. Build + encrypt the entry envelope.
|
|
547
|
+
* 5. `adapter.put(_ledger, paddedIndex, envelope, expectedVersion: 0)`
|
|
548
|
+
* — the `expectedVersion: 0` asserts "this slot must not exist."
|
|
549
|
+
* Stores with `casAtomic: true` honor the CAS check; under
|
|
550
|
+
* contention the second writer's put throws `ConflictError`.
|
|
551
|
+
* 6. On `ConflictError`: invalidate the head cache, sleep with
|
|
552
|
+
* bounded backoff + jitter, retry. After `MAX_APPEND_ATTEMPTS`
|
|
553
|
+
* retries throw {@link LedgerContentionError}.
|
|
554
|
+
* 7. On success: write the delta envelope (if any) at the same
|
|
555
|
+
* index. Update the head cache.
|
|
556
|
+
*
|
|
557
|
+
* Entry-first ordering matters: writing the delta first under
|
|
558
|
+
* contention would orphan delta records at indices the writer never
|
|
559
|
+
* actually claimed. The deltaHash is computed off the encrypted
|
|
560
|
+
* envelope's `_data` field, which doesn't require the envelope to
|
|
561
|
+
* be persisted.
|
|
562
|
+
*
|
|
563
|
+
* Stores with `casAtomic: false` (file, s3, r2 by default) silently
|
|
564
|
+
* accept the `expectedVersion: 0` argument and proceed without a
|
|
565
|
+
* CAS check. Concurrent appends against those stores remain
|
|
566
|
+
* best-effort — pair them with an advisory lock or with sync
|
|
567
|
+
* single-writer discipline.
|
|
568
|
+
*/
|
|
569
|
+
async append(input) {
|
|
570
|
+
let lastConflict;
|
|
571
|
+
for (let attempt = 0; attempt < MAX_APPEND_ATTEMPTS; attempt++) {
|
|
572
|
+
if (attempt > 0) {
|
|
573
|
+
this.headCache = void 0;
|
|
574
|
+
}
|
|
575
|
+
try {
|
|
576
|
+
return await this.appendOnce(input);
|
|
577
|
+
} catch (err) {
|
|
578
|
+
if (err instanceof ConflictError) {
|
|
579
|
+
lastConflict = err;
|
|
580
|
+
if (attempt < MAX_APPEND_ATTEMPTS - 1) {
|
|
581
|
+
await sleepBackoff(attempt);
|
|
582
|
+
}
|
|
583
|
+
continue;
|
|
584
|
+
}
|
|
585
|
+
throw err;
|
|
586
|
+
}
|
|
587
|
+
}
|
|
588
|
+
void lastConflict;
|
|
589
|
+
throw new LedgerContentionError(MAX_APPEND_ATTEMPTS);
|
|
590
|
+
}
|
|
591
|
+
/**
 * One attempt at the append cycle. Throws `ConflictError` when the
 * CAS check on the entry put fails — `append()` catches that and
 * retries. Any other error propagates to the caller.
 */
async appendOnce(input) {
  // Derive the next chain position from the cached head; an empty
  // ledger starts at index 0 with an empty prevHash.
  const cached = await this.getCachedHead();
  const lastEntry = cached?.entry;
  const prevHash = cached?.hash ?? "";
  const nextIndex = lastEntry ? lastEntry.index + 1 : 0;
  let deltaEnvelope;
  let deltaHash;
  if (input.delta !== void 0) {
    // Encrypt the delta up front: its hash goes into the entry, but the
    // envelope itself is only persisted after the entry put succeeds.
    deltaEnvelope = await this.encryptDelta(input.delta);
    deltaHash = await sha256Hex(deltaEnvelope._data);
  }
  const entryBase = {
    index: nextIndex,
    prevHash,
    op: input.op,
    collection: input.collection,
    id: input.id,
    version: input.version,
    ts: (/* @__PURE__ */ new Date()).toISOString(),
    // An empty actor string means "use the store's default actor".
    actor: input.actor === "" ? this.actor : input.actor,
    payloadHash: input.payloadHash
  };
  // Attach deltaHash only when a delta was supplied, keeping the entry
  // shape minimal for entries that carry no delta.
  const entry = deltaHash !== void 0 ? { ...entryBase, deltaHash } : entryBase;
  const envelope = await this.encryptEntry(entry);
  // expectedVersion 0 means "create only": on CAS-capable stores a
  // concurrent writer at the same index makes this put throw ConflictError.
  await this.adapter.put(
    this.vault,
    LEDGER_COLLECTION,
    paddedIndex(entry.index),
    envelope,
    0
  );
  // Entry first, delta second — writing the delta first under contention
  // would orphan delta rows at indices this writer never claimed.
  if (deltaEnvelope) {
    await this.adapter.put(
      this.vault,
      LEDGER_DELTAS_COLLECTION,
      paddedIndex(entry.index),
      deltaEnvelope,
      0
    );
  }
  this.headCache = { entry, hash: await hashEntry(entry) };
  return entry;
}
|
|
639
|
+
/**
|
|
640
|
+
* Load a delta payload by its entry index. Returns `null` if the
|
|
641
|
+
* entry at that index doesn't reference a delta (genesis puts and
|
|
642
|
+
* deletes leave the slot empty) or if the delta row is missing
|
|
643
|
+
* (possible after a `pruneHistory` fold).
|
|
644
|
+
*
|
|
645
|
+
* The caller is responsible for deciding what to do with a missing
|
|
646
|
+
* delta — `ledger.reconstruct()` uses it as a "stop walking
|
|
647
|
+
* backward" signal and falls back to the on-disk current value.
|
|
648
|
+
*/
|
|
649
|
+
async loadDelta(index) {
|
|
650
|
+
const envelope = await this.adapter.get(
|
|
651
|
+
this.vault,
|
|
652
|
+
LEDGER_DELTAS_COLLECTION,
|
|
653
|
+
paddedIndex(index)
|
|
654
|
+
);
|
|
655
|
+
if (!envelope) return null;
|
|
656
|
+
if (!this.encrypted) {
|
|
657
|
+
return JSON.parse(envelope._data);
|
|
658
|
+
}
|
|
659
|
+
const dek = await this.getDEK(LEDGER_COLLECTION);
|
|
660
|
+
const json = await decrypt(envelope._iv, envelope._data, dek);
|
|
661
|
+
return JSON.parse(json);
|
|
662
|
+
}
|
|
663
|
+
/** Encrypt a JSON Patch into an envelope for storage. Mirrors encryptEntry. */
|
|
664
|
+
async encryptDelta(patch) {
|
|
665
|
+
const json = JSON.stringify(patch);
|
|
666
|
+
if (!this.encrypted) {
|
|
667
|
+
return {
|
|
668
|
+
_noydb: NOYDB_FORMAT_VERSION,
|
|
669
|
+
_v: 1,
|
|
670
|
+
_ts: (/* @__PURE__ */ new Date()).toISOString(),
|
|
671
|
+
_iv: "",
|
|
672
|
+
_data: json,
|
|
673
|
+
_by: this.actor
|
|
674
|
+
};
|
|
675
|
+
}
|
|
676
|
+
const dek = await this.getDEK(LEDGER_COLLECTION);
|
|
677
|
+
const { iv, data } = await encrypt(json, dek);
|
|
678
|
+
return {
|
|
679
|
+
_noydb: NOYDB_FORMAT_VERSION,
|
|
680
|
+
_v: 1,
|
|
681
|
+
_ts: (/* @__PURE__ */ new Date()).toISOString(),
|
|
682
|
+
_iv: iv,
|
|
683
|
+
_data: data,
|
|
684
|
+
_by: this.actor
|
|
685
|
+
};
|
|
686
|
+
}
|
|
687
|
+
/**
|
|
688
|
+
* Read all entries in ascending-index order. Used internally by
|
|
689
|
+
* `append()`, `head()`, `verify()`, and `entries()`. Decryption is
|
|
690
|
+
* serial because the entries are tiny and the overhead of a Promise
|
|
691
|
+
* pool would dominate at realistic chain lengths (< 100K entries).
|
|
692
|
+
*/
|
|
693
|
+
async loadAllEntries() {
|
|
694
|
+
const keys = await this.adapter.list(this.vault, LEDGER_COLLECTION);
|
|
695
|
+
keys.sort();
|
|
696
|
+
const entries = [];
|
|
697
|
+
for (const key of keys) {
|
|
698
|
+
const envelope = await this.adapter.get(
|
|
699
|
+
this.vault,
|
|
700
|
+
LEDGER_COLLECTION,
|
|
701
|
+
key
|
|
702
|
+
);
|
|
703
|
+
if (!envelope) continue;
|
|
704
|
+
entries.push(await this.decryptEntry(envelope));
|
|
705
|
+
}
|
|
706
|
+
return entries;
|
|
707
|
+
}
|
|
708
|
+
/**
|
|
709
|
+
* Return the current head of the ledger: the last entry, its hash,
|
|
710
|
+
* and the total chain length. `null` on an empty ledger so callers
|
|
711
|
+
* can distinguish "no history yet" from "empty history".
|
|
712
|
+
*/
|
|
713
|
+
async head() {
|
|
714
|
+
const cached = await this.getCachedHead();
|
|
715
|
+
if (!cached) return null;
|
|
716
|
+
return {
|
|
717
|
+
entry: cached.entry,
|
|
718
|
+
hash: cached.hash,
|
|
719
|
+
length: cached.entry.index + 1
|
|
720
|
+
};
|
|
721
|
+
}
|
|
722
|
+
/**
|
|
723
|
+
* Return entries in the requested half-open range `[from, to)`.
|
|
724
|
+
* Defaults: `from = 0`, `to = length`. The indices are clipped to
|
|
725
|
+
* the valid range; no error is thrown for out-of-range queries.
|
|
726
|
+
*/
|
|
727
|
+
async entries(opts = {}) {
|
|
728
|
+
const all = await this.loadAllEntries();
|
|
729
|
+
const from = Math.max(0, opts.from ?? 0);
|
|
730
|
+
const to = Math.min(all.length, opts.to ?? all.length);
|
|
731
|
+
return all.slice(from, to);
|
|
732
|
+
}
|
|
733
|
+
/**
|
|
734
|
+
* Reconstruct a record's state at a given historical version by
|
|
735
|
+
* walking the ledger's delta chain backward from the current state.
|
|
736
|
+
*
|
|
737
|
+
* ## Algorithm
|
|
738
|
+
*
|
|
739
|
+
* Ledger deltas are stored in **reverse** form — each entry's
|
|
740
|
+
* patch describes how to undo that put, transforming the new
|
|
741
|
+
* record back into the previous one. `reconstruct` exploits this
|
|
742
|
+
* by:
|
|
743
|
+
*
|
|
744
|
+
* 1. Finding every ledger entry for `(collection, id)` in the
|
|
745
|
+
* chain, sorted by index ascending.
|
|
746
|
+
* 2. Starting from `current` (the present value of the record,
|
|
747
|
+
* as held by the caller — typically fetched via
|
|
748
|
+
* `Collection.get()`).
|
|
749
|
+
* 3. Walking entries in **descending** index order and applying
|
|
750
|
+
* each entry's reverse patch, stopping when we reach the
|
|
751
|
+
* entry whose version equals `atVersion`.
|
|
752
|
+
*
|
|
753
|
+
* The result is the record as it existed immediately AFTER the
|
|
754
|
+
* put at `atVersion`. To get the state at the genesis put
|
|
755
|
+
* (version 1), the walk runs all the way back through every put
|
|
756
|
+
* after the first.
|
|
757
|
+
*
|
|
758
|
+
* ## Caveats
|
|
759
|
+
*
|
|
760
|
+
* - **Delete entries** break the walk: once we see a delete, the
|
|
761
|
+
* record didn't exist before that point, so there's nothing to
|
|
762
|
+
* reconstruct. We return `null` in that case.
|
|
763
|
+
* - **Missing deltas** (e.g., after `pruneHistory` folds old
* entries into a base snapshot) also stop the walk. This release does
* not ship `pruneHistory`, so today this only happens if an entry
* was deleted out-of-band.
|
|
767
|
+
* - The caller MUST pass the correct current value. Passing a
|
|
768
|
+
* mutated object would corrupt the reconstruction — the patch
|
|
769
|
+
* chain is only valid against the exact state that was in
|
|
770
|
+
* effect when the most recent put happened.
|
|
771
|
+
*
|
|
772
|
+
* For delta-based history, `reconstruct` is the only way to read a
* historical version via deltas.
|
|
774
|
+
* holds full snapshots and `Collection.getVersion()` still reads
|
|
775
|
+
* from there — the two paths coexist until pruneHistory lands in
|
|
776
|
+
* a follow-up and delta becomes the default.
|
|
777
|
+
*/
|
|
778
|
+
async reconstruct(collection, id, current, atVersion) {
  const all = await this.loadAllEntries();
  // Only this record's entries participate in the walk (ascending index,
  // inherited from loadAllEntries).
  const matching = all.filter(
    (e) => e.collection === collection && e.id === id
  );
  if (matching.length === 0) {
    return null;
  }
  // Walk backward from the caller-supplied current state, undoing one put
  // per step by applying that entry's reverse patch.
  let state = current;
  for (let i = matching.length - 1; i >= 0; i--) {
    const entry = matching[i];
    if (!entry) continue;
    // Reached the target put (and it isn't a delete): `state` is the
    // record as it existed immediately after that put.
    if (entry.version === atVersion && entry.op !== "delete") {
      return state;
    }
    // A delete means the record did not exist before this point —
    // nothing earlier to reconstruct.
    if (entry.op === "delete") {
      return null;
    }
    if (entry.deltaHash === void 0) {
      // Entry without a delta reference. If it IS the target version the
      // current state stands; otherwise the walk cannot continue.
      if (entry.version === atVersion) return state;
      return null;
    }
    const patch = await this.loadDelta(entry.index);
    if (!patch) {
      // Delta row missing (pruned or deleted out-of-band): stop walking.
      return null;
    }
    if (state === null) {
      // Cannot apply a reverse patch to a non-existent current value.
      return null;
    }
    state = applyPatch(state, patch);
  }
  return null;
}
|
|
811
|
+
/**
|
|
812
|
+
* Walk the chain from genesis forward and verify every link.
|
|
813
|
+
*
|
|
814
|
+
* Returns `{ ok: true, head, length }` if every entry's `prevHash`
|
|
815
|
+
* matches the recomputed hash of its predecessor (and the genesis
|
|
816
|
+
* entry's `prevHash` is the empty string).
|
|
817
|
+
*
|
|
818
|
+
* Returns `{ ok: false, divergedAt, expected, actual }` on the first
|
|
819
|
+
* mismatch. `divergedAt` is the 0-based index of the BROKEN entry
|
|
820
|
+
* — entries before that index still verify cleanly; entries at and
|
|
821
|
+
* after `divergedAt` are untrustworthy.
|
|
822
|
+
*
|
|
823
|
+
* This method detects:
|
|
824
|
+
* - Mutated entry content (fields changed)
|
|
825
|
+
* - Reordered entries (if any adjacent pair swaps, the prevHash
|
|
826
|
+
* of the second no longer matches)
|
|
827
|
+
* - Inserted entries (the inserted entry's prevHash likely fails,
|
|
828
|
+
* and the following entry's prevHash definitely fails)
|
|
829
|
+
* - Deleted entries (the entry after the deletion sees a wrong
|
|
830
|
+
* prevHash)
|
|
831
|
+
*
|
|
832
|
+
* It does NOT detect:
|
|
833
|
+
* - Tampering with the DATA collections that bypassed the ledger
|
|
834
|
+
* entirely (e.g., an attacker who modifies records without
|
|
835
|
+
* appending matching ledger entries — this is why we also
|
|
836
|
+
* plan a `verifyIntegrity()` helper in a follow-up)
|
|
837
|
+
* - Truncation of the chain at the tail (dropping the last N
|
|
838
|
+
* entries leaves a shorter but still consistent chain). External
|
|
839
|
+
* anchoring of `head.hash` to a trusted service is the defense
|
|
840
|
+
* against this.
|
|
841
|
+
*/
|
|
842
|
+
async verify() {
|
|
843
|
+
const entries = await this.loadAllEntries();
|
|
844
|
+
let expectedPrevHash = "";
|
|
845
|
+
for (let i = 0; i < entries.length; i++) {
|
|
846
|
+
const entry = entries[i];
|
|
847
|
+
if (!entry) continue;
|
|
848
|
+
if (entry.prevHash !== expectedPrevHash) {
|
|
849
|
+
return {
|
|
850
|
+
ok: false,
|
|
851
|
+
divergedAt: i,
|
|
852
|
+
expected: expectedPrevHash,
|
|
853
|
+
actual: entry.prevHash
|
|
854
|
+
};
|
|
855
|
+
}
|
|
856
|
+
if (entry.index !== i) {
|
|
857
|
+
return {
|
|
858
|
+
ok: false,
|
|
859
|
+
divergedAt: i,
|
|
860
|
+
expected: `index=${i}`,
|
|
861
|
+
actual: `index=${entry.index}`
|
|
862
|
+
};
|
|
863
|
+
}
|
|
864
|
+
expectedPrevHash = await hashEntry(entry);
|
|
865
|
+
}
|
|
866
|
+
return {
|
|
867
|
+
ok: true,
|
|
868
|
+
head: expectedPrevHash,
|
|
869
|
+
length: entries.length
|
|
870
|
+
};
|
|
871
|
+
}
|
|
872
|
+
// ─── Encryption plumbing ─────────────────────────────────────────
|
|
873
|
+
/**
|
|
874
|
+
* Serialize + encrypt a ledger entry into an EncryptedEnvelope. The
|
|
875
|
+
* envelope's `_v` field is set to `entry.index + 1` so the usual
|
|
876
|
+
* optimistic-concurrency machinery has a reasonable version number
|
|
877
|
+
* to compare against (the ledger is append-only, so concurrent
|
|
878
|
+
* writes should always bump the index).
|
|
879
|
+
*/
|
|
880
|
+
async encryptEntry(entry) {
|
|
881
|
+
const json = canonicalJson(entry);
|
|
882
|
+
if (!this.encrypted) {
|
|
883
|
+
return {
|
|
884
|
+
_noydb: NOYDB_FORMAT_VERSION,
|
|
885
|
+
_v: entry.index + 1,
|
|
886
|
+
_ts: entry.ts,
|
|
887
|
+
_iv: "",
|
|
888
|
+
_data: json,
|
|
889
|
+
_by: entry.actor
|
|
890
|
+
};
|
|
891
|
+
}
|
|
892
|
+
const dek = await this.getDEK(LEDGER_COLLECTION);
|
|
893
|
+
const { iv, data } = await encrypt(json, dek);
|
|
894
|
+
return {
|
|
895
|
+
_noydb: NOYDB_FORMAT_VERSION,
|
|
896
|
+
_v: entry.index + 1,
|
|
897
|
+
_ts: entry.ts,
|
|
898
|
+
_iv: iv,
|
|
899
|
+
_data: data,
|
|
900
|
+
_by: entry.actor
|
|
901
|
+
};
|
|
902
|
+
}
|
|
903
|
+
/** Decrypt an envelope into a LedgerEntry. Throws on bad key / tamper. */
|
|
904
|
+
async decryptEntry(envelope) {
|
|
905
|
+
if (!this.encrypted) {
|
|
906
|
+
return JSON.parse(envelope._data);
|
|
907
|
+
}
|
|
908
|
+
const dek = await this.getDEK(LEDGER_COLLECTION);
|
|
909
|
+
const json = await decrypt(envelope._iv, envelope._data, dek);
|
|
910
|
+
return JSON.parse(json);
|
|
911
|
+
}
|
|
912
|
+
};
|
|
913
|
+
}
|
|
914
|
+
});
|
|
915
|
+
|
|
916
|
+
// src/history/ledger/index.ts
|
|
917
|
+
// Namespace object for the lazily-initialized ledger module. The getter
// thunks keep each binding live so values assigned by init_ledger() below
// are visible to importers.
var ledger_exports = {};
__export(ledger_exports, {
  LEDGER_COLLECTION: () => LEDGER_COLLECTION,
  LEDGER_DELTAS_COLLECTION: () => LEDGER_DELTAS_COLLECTION,
  LedgerStore: () => LedgerStore,
  applyPatch: () => applyPatch,
  canonicalJson: () => canonicalJson,
  computePatch: () => computePatch,
  envelopePayloadHash: () => envelopePayloadHash,
  hashEntry: () => hashEntry,
  paddedIndex: () => paddedIndex,
  parseIndex: () => parseIndex,
  sha256Hex: () => sha256Hex
});
|
|
931
|
+
// Lazy one-shot initializer for src/history/ledger/index.ts: the first
// call runs the store/entry/patch sub-module initializers; later calls
// are no-ops (__esm memoizes).
var init_ledger = __esm({
  "src/history/ledger/index.ts"() {
    "use strict";
    init_store();
    init_entry();
    init_patch();
  }
});
|
|
939
|
+
|
|
940
|
+
// src/periods/index.ts
|
|
941
|
+
// Public surface of the periods entry point, exposed eagerly as the
// CommonJS module.exports below.
var periods_exports = {};
__export(periods_exports, {
  PERIODS_COLLECTION: () => PERIODS_COLLECTION,
  appendPeriodLedgerEntry: () => appendPeriodLedgerEntry,
  assertTsWritable: () => assertTsWritable,
  chainAnchor: () => chainAnchor,
  loadPeriods: () => loadPeriods,
  validatePeriodName: () => validatePeriodName,
  withPeriods: () => withPeriods
});
module.exports = __toCommonJS(periods_exports);
|
|
952
|
+
|
|
953
|
+
// src/periods/periods.ts
|
|
954
|
+
// Run the ledger and error module initializers before any period helper
// that relies on their exports executes.
init_ledger();
init_errors();
// Reserved collection name holding the period records.
var PERIODS_COLLECTION = "_periods";
|
|
957
|
+
/**
 * Load and decrypt every period record in the vault, sorted by
 * `closedAt` ascending.
 *
 * @param adapter  storage adapter with `list`/`get`
 * @param vault    vault identifier passed through to the adapter
 * @param decrypt2 async function mapping an envelope to a period record
 * @returns array of period records sorted by `closedAt`
 */
async function loadPeriods(adapter, vault, decrypt2) {
  const ids = await adapter.list(vault, PERIODS_COLLECTION);
  // The per-id reads are independent, so fetch them in parallel instead
  // of awaiting one at a time; Promise.all preserves the ids order.
  const envelopes = await Promise.all(
    ids.map((id) => adapter.get(vault, PERIODS_COLLECTION, id))
  );
  // Rows can vanish between list() and get(); skip missing envelopes.
  const records = await Promise.all(
    envelopes.filter((env) => env != null).map((env) => decrypt2(env))
  );
  records.sort((a, b) => a.closedAt.localeCompare(b.closedAt));
  return records;
}
|
|
967
|
+
// Compute the anchor linking a new period to the most recent closed one.
// An empty history anchors to the empty hash with no prior name.
async function chainAnchor(records) {
  const last = records.at(-1);
  if (!last) return { priorPeriodHash: "" };
  const hash = await sha256Hex(canonicalJson(last));
  return { priorPeriodName: last.name, priorPeriodHash: hash };
}
|
|
973
|
+
// Throw PeriodClosedError when a write would touch a closed period:
// either the record's configured date field, or (absent a date field)
// the existing row's timestamp, falls on or before the period's endDate.
function assertTsWritable(existing, incomingRecord, closedPeriods) {
  for (const period of closedPeriods) {
    if (period.kind !== "closed") continue;
    if (period.dateField) {
      // Date-field mode: inspect both sides of the write.
      const guard = (label, record) => {
        if (!record) return;
        const value = record[period.dateField];
        if (typeof value === "string" && value <= period.endDate) {
          throw new PeriodClosedError(period.name, period.endDate, `${label}[${period.dateField}]=${value}`);
        }
      };
      guard("existing", existing?.record ?? null);
      guard("incoming", incomingRecord);
    } else {
      // Timestamp mode: only the existing row's ts matters.
      const existingTs = existing?.ts ?? null;
      if (existingTs !== null && existingTs <= period.endDate) {
        throw new PeriodClosedError(period.name, period.endDate, existingTs);
      }
    }
  }
}
|
|
994
|
+
// Reject empty or duplicate period names before a period is created.
function validatePeriodName(name, existing) {
  if (name.length === 0) {
    throw new ValidationError("Period name cannot be empty.");
  }
  const duplicate = existing.find((p) => p.name === name);
  if (duplicate) {
    throw new ValidationError(`Period "${name}" already exists.`);
  }
}
|
|
1002
|
+
// Record a period write in the ledger. Ledger support is optional — a
// null/undefined ledger makes this a no-op.
async function appendPeriodLedgerEntry(ledger, actor, envelope, name) {
  if (!ledger) return;
  // Dynamic import of the ledger module, mirroring the bundler's lazy
  // init pattern.
  const ledgerModule = await Promise.resolve().then(() => (init_ledger(), ledger_exports));
  const payloadHash = await ledgerModule.envelopePayloadHash(envelope);
  await ledger.append({
    op: "put",
    collection: PERIODS_COLLECTION,
    id: name,
    version: envelope._v,
    actor,
    payloadHash
  });
}
|
|
1014
|
+
|
|
1015
|
+
// src/periods/active.ts
|
|
1016
|
+
// Bundle the period helpers into a single capability object, consumed by
// the hub when the periods feature is enabled.
function withPeriods() {
  return {
    loadPeriods,
    chainAnchor,
    assertTsWritable,
    validatePeriodName,
    appendPeriodLedgerEntry
  };
}
|
|
1025
|
+
// Annotate the CommonJS export names for ESM import in node:
|
|
1026
|
+
// The `0 &&` guard keeps this assignment dead at runtime; bundlers read
// the names statically to synthesize ESM named exports for the CJS file.
0 && (module.exports = {
  PERIODS_COLLECTION,
  appendPeriodLedgerEntry,
  assertTsWritable,
  chainAnchor,
  loadPeriods,
  validatePeriodName,
  withPeriods
});
|
|
1035
|
+
//# sourceMappingURL=index.cjs.map
|