@noy-db/hub 0.1.0-pre.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (195) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +197 -0
  3. package/dist/aggregate/index.cjs +476 -0
  4. package/dist/aggregate/index.cjs.map +1 -0
  5. package/dist/aggregate/index.d.cts +38 -0
  6. package/dist/aggregate/index.d.ts +38 -0
  7. package/dist/aggregate/index.js +53 -0
  8. package/dist/aggregate/index.js.map +1 -0
  9. package/dist/blobs/index.cjs +1480 -0
  10. package/dist/blobs/index.cjs.map +1 -0
  11. package/dist/blobs/index.d.cts +45 -0
  12. package/dist/blobs/index.d.ts +45 -0
  13. package/dist/blobs/index.js +48 -0
  14. package/dist/blobs/index.js.map +1 -0
  15. package/dist/bundle/index.cjs +436 -0
  16. package/dist/bundle/index.cjs.map +1 -0
  17. package/dist/bundle/index.d.cts +7 -0
  18. package/dist/bundle/index.d.ts +7 -0
  19. package/dist/bundle/index.js +40 -0
  20. package/dist/bundle/index.js.map +1 -0
  21. package/dist/chunk-2QR2PQTT.js +217 -0
  22. package/dist/chunk-2QR2PQTT.js.map +1 -0
  23. package/dist/chunk-4OWFYIDQ.js +79 -0
  24. package/dist/chunk-4OWFYIDQ.js.map +1 -0
  25. package/dist/chunk-5AATM2M2.js +90 -0
  26. package/dist/chunk-5AATM2M2.js.map +1 -0
  27. package/dist/chunk-ACLDOTNQ.js +543 -0
  28. package/dist/chunk-ACLDOTNQ.js.map +1 -0
  29. package/dist/chunk-BTDCBVJW.js +160 -0
  30. package/dist/chunk-BTDCBVJW.js.map +1 -0
  31. package/dist/chunk-CIMZBAZB.js +72 -0
  32. package/dist/chunk-CIMZBAZB.js.map +1 -0
  33. package/dist/chunk-E445ICYI.js +365 -0
  34. package/dist/chunk-E445ICYI.js.map +1 -0
  35. package/dist/chunk-EXQRC2L4.js +722 -0
  36. package/dist/chunk-EXQRC2L4.js.map +1 -0
  37. package/dist/chunk-FZU343FL.js +32 -0
  38. package/dist/chunk-FZU343FL.js.map +1 -0
  39. package/dist/chunk-GJILMRPO.js +354 -0
  40. package/dist/chunk-GJILMRPO.js.map +1 -0
  41. package/dist/chunk-GOUT6DND.js +1285 -0
  42. package/dist/chunk-GOUT6DND.js.map +1 -0
  43. package/dist/chunk-J66GRPNH.js +111 -0
  44. package/dist/chunk-J66GRPNH.js.map +1 -0
  45. package/dist/chunk-M2F2JAWB.js +464 -0
  46. package/dist/chunk-M2F2JAWB.js.map +1 -0
  47. package/dist/chunk-M5INGEFC.js +84 -0
  48. package/dist/chunk-M5INGEFC.js.map +1 -0
  49. package/dist/chunk-M62XNWRA.js +72 -0
  50. package/dist/chunk-M62XNWRA.js.map +1 -0
  51. package/dist/chunk-MR4424N3.js +275 -0
  52. package/dist/chunk-MR4424N3.js.map +1 -0
  53. package/dist/chunk-NPC4LFV5.js +132 -0
  54. package/dist/chunk-NPC4LFV5.js.map +1 -0
  55. package/dist/chunk-NXFEYLVG.js +311 -0
  56. package/dist/chunk-NXFEYLVG.js.map +1 -0
  57. package/dist/chunk-R36SIKES.js +79 -0
  58. package/dist/chunk-R36SIKES.js.map +1 -0
  59. package/dist/chunk-TDR6T5CJ.js +381 -0
  60. package/dist/chunk-TDR6T5CJ.js.map +1 -0
  61. package/dist/chunk-UF3BUNQZ.js +1 -0
  62. package/dist/chunk-UF3BUNQZ.js.map +1 -0
  63. package/dist/chunk-UQFSPSWG.js +1109 -0
  64. package/dist/chunk-UQFSPSWG.js.map +1 -0
  65. package/dist/chunk-USKYUS74.js +793 -0
  66. package/dist/chunk-USKYUS74.js.map +1 -0
  67. package/dist/chunk-XCL3WP6J.js +121 -0
  68. package/dist/chunk-XCL3WP6J.js.map +1 -0
  69. package/dist/chunk-XHFOENR2.js +680 -0
  70. package/dist/chunk-XHFOENR2.js.map +1 -0
  71. package/dist/chunk-ZFKD4QMV.js +430 -0
  72. package/dist/chunk-ZFKD4QMV.js.map +1 -0
  73. package/dist/chunk-ZLMV3TUA.js +490 -0
  74. package/dist/chunk-ZLMV3TUA.js.map +1 -0
  75. package/dist/chunk-ZRG4V3F5.js +17 -0
  76. package/dist/chunk-ZRG4V3F5.js.map +1 -0
  77. package/dist/consent/index.cjs +204 -0
  78. package/dist/consent/index.cjs.map +1 -0
  79. package/dist/consent/index.d.cts +24 -0
  80. package/dist/consent/index.d.ts +24 -0
  81. package/dist/consent/index.js +23 -0
  82. package/dist/consent/index.js.map +1 -0
  83. package/dist/crdt/index.cjs +152 -0
  84. package/dist/crdt/index.cjs.map +1 -0
  85. package/dist/crdt/index.d.cts +30 -0
  86. package/dist/crdt/index.d.ts +30 -0
  87. package/dist/crdt/index.js +24 -0
  88. package/dist/crdt/index.js.map +1 -0
  89. package/dist/crypto-IVKU7YTT.js +44 -0
  90. package/dist/crypto-IVKU7YTT.js.map +1 -0
  91. package/dist/delegation-XDJCBTI2.js +16 -0
  92. package/dist/delegation-XDJCBTI2.js.map +1 -0
  93. package/dist/dev-unlock-CeXic1xC.d.cts +263 -0
  94. package/dist/dev-unlock-KrKkcqD3.d.ts +263 -0
  95. package/dist/hash-9KO1BGxh.d.cts +63 -0
  96. package/dist/hash-ChfJjRjQ.d.ts +63 -0
  97. package/dist/history/index.cjs +1215 -0
  98. package/dist/history/index.cjs.map +1 -0
  99. package/dist/history/index.d.cts +62 -0
  100. package/dist/history/index.d.ts +62 -0
  101. package/dist/history/index.js +79 -0
  102. package/dist/history/index.js.map +1 -0
  103. package/dist/i18n/index.cjs +746 -0
  104. package/dist/i18n/index.cjs.map +1 -0
  105. package/dist/i18n/index.d.cts +38 -0
  106. package/dist/i18n/index.d.ts +38 -0
  107. package/dist/i18n/index.js +55 -0
  108. package/dist/i18n/index.js.map +1 -0
  109. package/dist/index-BRHBCmLt.d.ts +1940 -0
  110. package/dist/index-C8kQtmOk.d.ts +380 -0
  111. package/dist/index-DN-J-5wT.d.cts +1940 -0
  112. package/dist/index-DhjMjz7L.d.cts +380 -0
  113. package/dist/index.cjs +14756 -0
  114. package/dist/index.cjs.map +1 -0
  115. package/dist/index.d.cts +269 -0
  116. package/dist/index.d.ts +269 -0
  117. package/dist/index.js +6085 -0
  118. package/dist/index.js.map +1 -0
  119. package/dist/indexing/index.cjs +736 -0
  120. package/dist/indexing/index.cjs.map +1 -0
  121. package/dist/indexing/index.d.cts +36 -0
  122. package/dist/indexing/index.d.ts +36 -0
  123. package/dist/indexing/index.js +77 -0
  124. package/dist/indexing/index.js.map +1 -0
  125. package/dist/lazy-builder-BwEoBQZ9.d.ts +304 -0
  126. package/dist/lazy-builder-CZVLKh0Z.d.cts +304 -0
  127. package/dist/ledger-2NX4L7PN.js +33 -0
  128. package/dist/ledger-2NX4L7PN.js.map +1 -0
  129. package/dist/mime-magic-CBBSOkjm.d.cts +50 -0
  130. package/dist/mime-magic-CBBSOkjm.d.ts +50 -0
  131. package/dist/periods/index.cjs +1035 -0
  132. package/dist/periods/index.cjs.map +1 -0
  133. package/dist/periods/index.d.cts +21 -0
  134. package/dist/periods/index.d.ts +21 -0
  135. package/dist/periods/index.js +25 -0
  136. package/dist/periods/index.js.map +1 -0
  137. package/dist/predicate-SBHmi6D0.d.cts +161 -0
  138. package/dist/predicate-SBHmi6D0.d.ts +161 -0
  139. package/dist/query/index.cjs +1957 -0
  140. package/dist/query/index.cjs.map +1 -0
  141. package/dist/query/index.d.cts +3 -0
  142. package/dist/query/index.d.ts +3 -0
  143. package/dist/query/index.js +62 -0
  144. package/dist/query/index.js.map +1 -0
  145. package/dist/session/index.cjs +487 -0
  146. package/dist/session/index.cjs.map +1 -0
  147. package/dist/session/index.d.cts +45 -0
  148. package/dist/session/index.d.ts +45 -0
  149. package/dist/session/index.js +44 -0
  150. package/dist/session/index.js.map +1 -0
  151. package/dist/shadow/index.cjs +133 -0
  152. package/dist/shadow/index.cjs.map +1 -0
  153. package/dist/shadow/index.d.cts +16 -0
  154. package/dist/shadow/index.d.ts +16 -0
  155. package/dist/shadow/index.js +20 -0
  156. package/dist/shadow/index.js.map +1 -0
  157. package/dist/store/index.cjs +1069 -0
  158. package/dist/store/index.cjs.map +1 -0
  159. package/dist/store/index.d.cts +491 -0
  160. package/dist/store/index.d.ts +491 -0
  161. package/dist/store/index.js +34 -0
  162. package/dist/store/index.js.map +1 -0
  163. package/dist/strategy-BSxFXGzb.d.cts +110 -0
  164. package/dist/strategy-BSxFXGzb.d.ts +110 -0
  165. package/dist/strategy-D-SrOLCl.d.cts +548 -0
  166. package/dist/strategy-D-SrOLCl.d.ts +548 -0
  167. package/dist/sync/index.cjs +1062 -0
  168. package/dist/sync/index.cjs.map +1 -0
  169. package/dist/sync/index.d.cts +42 -0
  170. package/dist/sync/index.d.ts +42 -0
  171. package/dist/sync/index.js +28 -0
  172. package/dist/sync/index.js.map +1 -0
  173. package/dist/team/index.cjs +1233 -0
  174. package/dist/team/index.cjs.map +1 -0
  175. package/dist/team/index.d.cts +117 -0
  176. package/dist/team/index.d.ts +117 -0
  177. package/dist/team/index.js +39 -0
  178. package/dist/team/index.js.map +1 -0
  179. package/dist/tx/index.cjs +212 -0
  180. package/dist/tx/index.cjs.map +1 -0
  181. package/dist/tx/index.d.cts +20 -0
  182. package/dist/tx/index.d.ts +20 -0
  183. package/dist/tx/index.js +20 -0
  184. package/dist/tx/index.js.map +1 -0
  185. package/dist/types-BZpCZB8N.d.ts +7526 -0
  186. package/dist/types-Bfs0qr5F.d.cts +7526 -0
  187. package/dist/ulid-COREQ2RQ.js +9 -0
  188. package/dist/ulid-COREQ2RQ.js.map +1 -0
  189. package/dist/util/index.cjs +230 -0
  190. package/dist/util/index.cjs.map +1 -0
  191. package/dist/util/index.d.cts +77 -0
  192. package/dist/util/index.d.ts +77 -0
  193. package/dist/util/index.js +190 -0
  194. package/dist/util/index.js.map +1 -0
  195. package/package.json +244 -0
@@ -0,0 +1,160 @@
1
+ import {
2
+ ConflictError
3
+ } from "./chunk-ACLDOTNQ.js";
4
+
5
// src/tx/transaction.ts
var TxContext = class {
  /** @internal Staged ops, in the order the body declared them. */
  _ops = [];
  /** @internal Owning database handle. */
  _db;
  /** @internal */
  constructor(db) {
    this._db = db;
  }
  /** Scope subsequent `collection()` calls to the named vault. */
  vault(name) {
    return new TxVault(this, this._db.vault(name));
  }
};
21
var TxVault = class {
  /** @internal Owning transaction context. */
  _ctx;
  /** @internal Underlying vault handle. */
  _vault;
  /** @internal */
  constructor(ctx, vault) {
    this._ctx = ctx;
    this._vault = vault;
  }
  /** Scope subsequent op calls to the named collection. */
  collection(name) {
    const underlying = this._vault.collection(name);
    return new TxCollection(this._ctx, this._vault, underlying, name);
  }
};
37
var TxCollection = class {
  /** @internal Owning transaction context. */
  _ctx;
  /** @internal Vault the collection belongs to. */
  _vault;
  /** @internal Underlying collection handle (used for committed reads). */
  _coll;
  /** @internal Collection name, recorded on every staged op. */
  _name;
  /** @internal */
  constructor(ctx, vault, coll, name) {
    this._ctx = ctx;
    this._vault = vault;
    this._coll = coll;
    this._name = name;
  }
  /**
   * Read the current committed value, or the most-recently-staged
   * value from the same transaction if one exists.
   */
  async get(id) {
    const ops = this._ctx._ops;
    // Walk newest-to-oldest so the latest staged write for this id wins.
    for (let i = ops.length - 1; i >= 0; i--) {
      const staged = ops[i];
      const sameTarget =
        staged.vaultName === this._vault.name &&
        staged.collectionName === this._name &&
        staged.id === id;
      if (sameTarget) {
        return staged.type === "delete" ? null : staged.record;
      }
    }
    return this._coll.get(id);
  }
  /**
   * Stage a put. Does not write until the transaction body returns.
   * Supply `{ expectedVersion }` to enforce optimistic concurrency
   * during the commit pre-flight.
   */
  put(id, record, options) {
    const staged = {
      type: "put",
      vaultName: this._vault.name,
      collectionName: this._name,
      id,
      record
    };
    const expected = options?.expectedVersion;
    if (expected !== void 0) staged.expectedVersion = expected;
    this._ctx._ops.push(staged);
  }
  /**
   * Stage a delete. Does not write until the transaction body returns.
   * Supply `{ expectedVersion }` to enforce optimistic concurrency
   * during the commit pre-flight.
   */
  delete(id, options) {
    const staged = {
      type: "delete",
      vaultName: this._vault.name,
      collectionName: this._name,
      id
    };
    const expected = options?.expectedVersion;
    if (expected !== void 0) staged.expectedVersion = expected;
    this._ctx._ops.push(staged);
  }
};
99
async function runTransaction(db, fn) {
  const ctx = new TxContext(db);
  const bodyResult = await fn(ctx);
  const ops = ctx._ops;
  if (ops.length === 0) return bodyResult;
  const store = db._store;
  // Phase 1 — snapshot each touched envelope once and enforce any
  // caller-supplied expectedVersion before a single write happens.
  const snapshots = /* @__PURE__ */ new Map();
  for (const op of ops) {
    const key = keyOf(op);
    if (!snapshots.has(key)) {
      snapshots.set(key, await store.get(op.vaultName, op.collectionName, op.id));
    }
    if (op.expectedVersion === void 0) continue;
    const actual = (snapshots.get(key) ?? null)?._v ?? 0;
    if (actual !== op.expectedVersion) {
      throw new ConflictError(
        actual,
        `Transaction pre-flight: ${op.vaultName}/${op.collectionName}/${op.id} expected v${op.expectedVersion}, found v${actual}`
      );
    }
  }
  // Phase 2 — apply ops in declaration order through the Collection
  // layer, remembering each success so a mid-batch failure can unwind.
  const applied = [];
  try {
    for (const op of ops) {
      const coll = db.vault(op.vaultName).collection(op.collectionName);
      const priorEnvelope = snapshots.get(keyOf(op)) ?? null;
      if (op.type === "put") {
        await coll.put(op.id, op.record);
      } else {
        await coll.delete(op.id);
      }
      applied.push({ op, priorEnvelope });
    }
    return bodyResult;
  } catch (err) {
    // Phase 3 — best-effort revert, newest-first, straight through the
    // raw store. Revert errors are deliberately swallowed so the
    // original failure stays visible to the caller.
    for (let i = applied.length - 1; i >= 0; i--) {
      const { op, priorEnvelope } = applied[i];
      try {
        if (priorEnvelope) {
          await store.put(op.vaultName, op.collectionName, op.id, priorEnvelope);
        } else {
          await store.delete(op.vaultName, op.collectionName, op.id);
        }
      } catch {
      }
    }
    throw err;
  }
}
150
// NUL-joined composite key: NUL cannot appear in vault/collection/id
// names coming from JSON, so the three parts cannot collide.
function keyOf(op) {
  return [op.vaultName, op.collectionName, op.id].join("\0");
}
153
+
154
+ export {
155
+ TxContext,
156
+ TxVault,
157
+ TxCollection,
158
+ runTransaction
159
+ };
160
+ //# sourceMappingURL=chunk-BTDCBVJW.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/tx/transaction.ts"],"sourcesContent":["/**\n * Multi-record atomic transactions.\n *\n * Lets an application stage writes across two or more collections (or\n * vaults) and commit them all-or-nothing.\n *\n * ```ts\n * await db.transaction(async (tx) => {\n * const inv = tx.vault('acme').collection<Invoice>('invoices')\n * const pay = tx.vault('acme').collection<Payment>('payments')\n * await inv.put(invoiceId, { ...invoice, status: 'paid' })\n * await pay.put(paymentId, { invoiceId, amount, paidAt })\n * })\n * // If the body throws before returning: nothing persisted.\n * // If the body returns: all puts committed; any CAS mismatch rolls\n * // the batch back and surfaces as ConflictError.\n * ```\n *\n * ## Atomicity semantics\n *\n * Ops are buffered during the body. On body-return the hub:\n *\n * 1. **Pre-flight** — re-reads every touched envelope and enforces\n * any caller-supplied `expectedVersion`. A mismatch throws\n * `ConflictError` with *no* writes performed.\n * 2. **Execute** — calls `Collection.put()` / `.delete()` for each\n * staged op in declaration order. History snapshots, ledger\n * appends, and change events fire as normal per op.\n * 3. **Unwind on failure** — if step 2 throws mid-batch, each\n * already-committed op is reverted via the raw store (restoring\n * the captured prior envelope, or deleting if none existed). The\n * ledger is NOT rewritten — audit history preserves the partial\n * commit and the revert.\n *\n * **Crash window.** Steps 2–3 are not a storage-layer transaction —\n * if the process dies between two executed ops, the on-disk state is\n * partial. True all-or-nothing atomicity requires a store that\n * implements `NoydbStore.tx()` (DynamoDB `TransactWriteItems`,\n * IndexedDB `readwrite` transaction, …). 
This executor declares\n * that future integration point via the `tx?()` method + the\n * `StoreCapabilities.txAtomic` bit, but does not yet delegate\n * to it — the cascade into `Fork · Stores` tracks the per-adapter\n * wire-up.\n *\n * ## Not covered\n *\n * - Cross-sync-peer atomicity. Transactions commit against the\n * primary store only; the sync engine pushes on its normal\n * schedule. For cross-peer two-phase commit use `SyncTransaction`\n * via `db.transaction(vaultName)`.\n * - Read-your-writes within the body. `tx.collection().get(id)`\n * returns the most-recently-staged value for that id when one\n * exists; if no staged op has touched the id, it reads the current\n * committed state. Version numbers returned by `get` reflect the\n * pre-transaction state (staged puts have no version yet).\n *\n * @module\n */\n\nimport type { Noydb } from '../noydb.js'\nimport type { Vault } from '../vault.js'\nimport type { Collection } from '../collection.js'\nimport type { EncryptedEnvelope } from '../types.js'\nimport { ConflictError } from '../errors.js'\n\n/** One op buffered inside a running `TxContext`. @internal */\ninterface StagedOp {\n type: 'put' | 'delete'\n vaultName: string\n collectionName: string\n id: string\n record?: unknown\n expectedVersion?: number\n}\n\n/**\n * Transaction handle passed to the user's body. Use\n * `tx.vault(name).collection<T>(name)` to get a per-collection\n * facade; its `put`/`delete`/`get` calls stage ops against the tx.\n */\nexport class TxContext {\n /** @internal */\n readonly _ops: StagedOp[] = []\n /** @internal */\n readonly _db: Noydb\n\n /** @internal */\n constructor(db: Noydb) {\n this._db = db\n }\n\n /** Scope subsequent `collection()` calls to the named vault. */\n vault(name: string): TxVault {\n const v = this._db.vault(name)\n return new TxVault(this, v)\n }\n}\n\n/** Per-vault facade inside a running transaction. 
*/\nexport class TxVault {\n /** @internal */\n readonly _ctx: TxContext\n /** @internal */\n readonly _vault: Vault\n\n /** @internal */\n constructor(ctx: TxContext, vault: Vault) {\n this._ctx = ctx\n this._vault = vault\n }\n\n /** Scope subsequent op calls to the named collection. */\n collection<T>(name: string): TxCollection<T> {\n const c = this._vault.collection<T>(name)\n return new TxCollection<T>(this._ctx, this._vault, c, name)\n }\n}\n\n/** Per-collection facade inside a running transaction. */\nexport class TxCollection<T> {\n /** @internal */\n readonly _ctx: TxContext\n /** @internal */\n readonly _vault: Vault\n /** @internal */\n readonly _coll: Collection<T>\n /** @internal */\n readonly _name: string\n\n /** @internal */\n constructor(ctx: TxContext, vault: Vault, coll: Collection<T>, name: string) {\n this._ctx = ctx\n this._vault = vault\n this._coll = coll\n this._name = name\n }\n\n /**\n * Read the current committed value, or the most-recently-staged\n * value from the same transaction if one exists.\n */\n async get(id: string): Promise<T | null> {\n for (let i = this._ctx._ops.length - 1; i >= 0; i--) {\n const op = this._ctx._ops[i]!\n if (\n op.vaultName === this._vault.name &&\n op.collectionName === this._name &&\n op.id === id\n ) {\n if (op.type === 'delete') return null\n return op.record as T\n }\n }\n return this._coll.get(id)\n }\n\n /**\n * Stage a put. Does not write until the transaction body returns.\n * Supply `{ expectedVersion }` to enforce optimistic concurrency\n * during the commit pre-flight.\n */\n put(id: string, record: T, options?: { expectedVersion?: number }): void {\n const op: StagedOp = {\n type: 'put',\n vaultName: this._vault.name,\n collectionName: this._name,\n id,\n record,\n }\n if (options?.expectedVersion !== undefined) op.expectedVersion = options.expectedVersion\n this._ctx._ops.push(op)\n }\n\n /**\n * Stage a delete. 
Does not write until the transaction body returns.\n * Supply `{ expectedVersion }` to enforce optimistic concurrency\n * during the commit pre-flight.\n */\n delete(id: string, options?: { expectedVersion?: number }): void {\n const op: StagedOp = {\n type: 'delete',\n vaultName: this._vault.name,\n collectionName: this._name,\n id,\n }\n if (options?.expectedVersion !== undefined) op.expectedVersion = options.expectedVersion\n this._ctx._ops.push(op)\n }\n}\n\n/**\n * Commit plan: pre-flight check + execution + revert plan. Returned\n * from `runTransaction`.\n *\n * @internal — exposed only for the `Collection.putMany({atomic:true})`\n * wire-up so the bulk path can share the executor without creating\n * an outer TxContext.\n */\nexport async function runTransaction<T>(\n db: Noydb,\n fn: (tx: TxContext) => Promise<T> | T,\n): Promise<T> {\n const ctx = new TxContext(db)\n const bodyResult = await fn(ctx)\n\n if (ctx._ops.length === 0) return bodyResult\n\n // Phase 1 — pre-flight: snapshot every touched envelope and enforce\n // any caller-supplied expectedVersion. Same (vault, coll, id) touched\n // more than once in one tx snapshots only the *initial* committed\n // state; the in-order replay in Phase 2 takes care of successor ops.\n const priorEnvelopes = new Map<string, EncryptedEnvelope | null>()\n const store = db._store\n for (const op of ctx._ops) {\n const key = keyOf(op)\n if (!priorEnvelopes.has(key)) {\n const env = await store.get(op.vaultName, op.collectionName, op.id)\n priorEnvelopes.set(key, env)\n }\n if (op.expectedVersion !== undefined) {\n const env = priorEnvelopes.get(key) ?? null\n const actual = env?._v ?? 
0\n if (actual !== op.expectedVersion) {\n throw new ConflictError(\n actual,\n `Transaction pre-flight: ${op.vaultName}/${op.collectionName}/${op.id} ` +\n `expected v${op.expectedVersion}, found v${actual}`,\n )\n }\n }\n }\n\n // Phase 2 — execute via the Collection layer so history snapshots,\n // ledger entries, and change events fire normally. We capture each\n // successful op so a mid-batch throw can revert in Phase 3.\n const executed: Array<{ op: StagedOp; priorEnvelope: EncryptedEnvelope | null }> = []\n try {\n for (const op of ctx._ops) {\n const coll = db.vault(op.vaultName).collection(op.collectionName)\n const key = keyOf(op)\n const prior = priorEnvelopes.get(key) ?? null\n if (op.type === 'put') {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n await coll.put(op.id, op.record as any)\n } else {\n await coll.delete(op.id)\n }\n executed.push({ op, priorEnvelope: prior })\n }\n return bodyResult\n } catch (err) {\n // Phase 3 — best-effort revert. Restore captured prior envelopes\n // via the raw store to avoid re-firing Collection-level side\n // effects (we don't want a cascade of change events undoing\n // themselves). The ledger is left as-is: each committed op\n // appended an entry; the revert is deliberately not recorded as a\n // compensating entry because 's contract is \"atomic or not at\n // all\" from the caller's view, not \"every write visible in the\n // audit trail.\" Auditors who need the intermediate state can still\n // reconstruct it by walking the ledger through the failed-tx\n // timestamp.\n for (const { op, priorEnvelope } of executed.slice().reverse()) {\n try {\n if (priorEnvelope) {\n await store.put(op.vaultName, op.collectionName, op.id, priorEnvelope)\n } else {\n await store.delete(op.vaultName, op.collectionName, op.id)\n }\n } catch {\n // swallow — best-effort. 
Surfacing the revert error would\n // mask the original one that triggered the rollback.\n }\n }\n throw err\n }\n}\n\nfunction keyOf(op: StagedOp): string {\n return `${op.vaultName}\\x00${op.collectionName}\\x00${op.id}`\n}\n"],"mappings":";;;;;AAgFO,IAAM,YAAN,MAAgB;AAAA;AAAA,EAEZ,OAAmB,CAAC;AAAA;AAAA,EAEpB;AAAA;AAAA,EAGT,YAAY,IAAW;AACrB,SAAK,MAAM;AAAA,EACb;AAAA;AAAA,EAGA,MAAM,MAAuB;AAC3B,UAAM,IAAI,KAAK,IAAI,MAAM,IAAI;AAC7B,WAAO,IAAI,QAAQ,MAAM,CAAC;AAAA,EAC5B;AACF;AAGO,IAAM,UAAN,MAAc;AAAA;AAAA,EAEV;AAAA;AAAA,EAEA;AAAA;AAAA,EAGT,YAAY,KAAgB,OAAc;AACxC,SAAK,OAAO;AACZ,SAAK,SAAS;AAAA,EAChB;AAAA;AAAA,EAGA,WAAc,MAA+B;AAC3C,UAAM,IAAI,KAAK,OAAO,WAAc,IAAI;AACxC,WAAO,IAAI,aAAgB,KAAK,MAAM,KAAK,QAAQ,GAAG,IAAI;AAAA,EAC5D;AACF;AAGO,IAAM,eAAN,MAAsB;AAAA;AAAA,EAElB;AAAA;AAAA,EAEA;AAAA;AAAA,EAEA;AAAA;AAAA,EAEA;AAAA;AAAA,EAGT,YAAY,KAAgB,OAAc,MAAqB,MAAc;AAC3E,SAAK,OAAO;AACZ,SAAK,SAAS;AACd,SAAK,QAAQ;AACb,SAAK,QAAQ;AAAA,EACf;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,IAAI,IAA+B;AACvC,aAAS,IAAI,KAAK,KAAK,KAAK,SAAS,GAAG,KAAK,GAAG,KAAK;AACnD,YAAM,KAAK,KAAK,KAAK,KAAK,CAAC;AAC3B,UACE,GAAG,cAAc,KAAK,OAAO,QAC7B,GAAG,mBAAmB,KAAK,SAC3B,GAAG,OAAO,IACV;AACA,YAAI,GAAG,SAAS,SAAU,QAAO;AACjC,eAAO,GAAG;AAAA,MACZ;AAAA,IACF;AACA,WAAO,KAAK,MAAM,IAAI,EAAE;AAAA,EAC1B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,IAAI,IAAY,QAAW,SAA8C;AACvE,UAAM,KAAe;AAAA,MACnB,MAAM;AAAA,MACN,WAAW,KAAK,OAAO;AAAA,MACvB,gBAAgB,KAAK;AAAA,MACrB;AAAA,MACA;AAAA,IACF;AACA,QAAI,SAAS,oBAAoB,OAAW,IAAG,kBAAkB,QAAQ;AACzE,SAAK,KAAK,KAAK,KAAK,EAAE;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,OAAO,IAAY,SAA8C;AAC/D,UAAM,KAAe;AAAA,MACnB,MAAM;AAAA,MACN,WAAW,KAAK,OAAO;AAAA,MACvB,gBAAgB,KAAK;AAAA,MACrB;AAAA,IACF;AACA,QAAI,SAAS,oBAAoB,OAAW,IAAG,kBAAkB,QAAQ;AACzE,SAAK,KAAK,KAAK,KAAK,EAAE;AAAA,EACxB;AACF;AAUA,eAAsB,eACpB,IACA,IACY;AACZ,QAAM,MAAM,IAAI,UAAU,EAAE;AAC5B,QAAM,aAAa,MAAM,GAAG,GAAG;AAE/B,MAAI,IAAI,KAAK,WAAW,EAAG,QAAO;AAMlC,QAAM,iBAAiB,oBAAI,IAAsC;AACjE,QAAM,QAAQ,GAAG;AACjB,aAAW,MAAM,IAAI,MAAM;AACzB,UAAM,MAAM,MAAM,EAAE;AACpB,QAAI,CAAC,eAAe,IAAI,GAAG,GAAG;AA
C5B,YAAM,MAAM,MAAM,MAAM,IAAI,GAAG,WAAW,GAAG,gBAAgB,GAAG,EAAE;AAClE,qBAAe,IAAI,KAAK,GAAG;AAAA,IAC7B;AACA,QAAI,GAAG,oBAAoB,QAAW;AACpC,YAAM,MAAM,eAAe,IAAI,GAAG,KAAK;AACvC,YAAM,SAAS,KAAK,MAAM;AAC1B,UAAI,WAAW,GAAG,iBAAiB;AACjC,cAAM,IAAI;AAAA,UACR;AAAA,UACA,2BAA2B,GAAG,SAAS,IAAI,GAAG,cAAc,IAAI,GAAG,EAAE,cACtD,GAAG,eAAe,YAAY,MAAM;AAAA,QACrD;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAKA,QAAM,WAA6E,CAAC;AACpF,MAAI;AACF,eAAW,MAAM,IAAI,MAAM;AACzB,YAAM,OAAO,GAAG,MAAM,GAAG,SAAS,EAAE,WAAW,GAAG,cAAc;AAChE,YAAM,MAAM,MAAM,EAAE;AACpB,YAAM,QAAQ,eAAe,IAAI,GAAG,KAAK;AACzC,UAAI,GAAG,SAAS,OAAO;AAErB,cAAM,KAAK,IAAI,GAAG,IAAI,GAAG,MAAa;AAAA,MACxC,OAAO;AACL,cAAM,KAAK,OAAO,GAAG,EAAE;AAAA,MACzB;AACA,eAAS,KAAK,EAAE,IAAI,eAAe,MAAM,CAAC;AAAA,IAC5C;AACA,WAAO;AAAA,EACT,SAAS,KAAK;AAWZ,eAAW,EAAE,IAAI,cAAc,KAAK,SAAS,MAAM,EAAE,QAAQ,GAAG;AAC9D,UAAI;AACF,YAAI,eAAe;AACjB,gBAAM,MAAM,IAAI,GAAG,WAAW,GAAG,gBAAgB,GAAG,IAAI,aAAa;AAAA,QACvE,OAAO;AACL,gBAAM,MAAM,OAAO,GAAG,WAAW,GAAG,gBAAgB,GAAG,EAAE;AAAA,QAC3D;AAAA,MACF,QAAQ;AAAA,MAGR;AAAA,IACF;AACA,UAAM;AAAA,EACR;AACF;AAEA,SAAS,MAAM,IAAsB;AACnC,SAAO,GAAG,GAAG,SAAS,KAAO,GAAG,cAAc,KAAO,GAAG,EAAE;AAC5D;","names":[]}
@@ -0,0 +1,72 @@
1
// src/history/ledger/entry.ts
// Deterministic (sort-stable) JSON encoder: object keys are emitted in
// sorted order so the same value always hashes to the same bytes.
// Values JSON cannot represent (NaN/Infinity, BigInt, undefined,
// functions) are rejected loudly rather than silently dropped.
function canonicalJson(value) {
  if (value === null) return "null";
  switch (typeof value) {
    case "boolean":
      return value ? "true" : "false";
    case "number":
      if (!Number.isFinite(value)) {
        throw new Error(
          `canonicalJson: refusing to encode non-finite number ${String(value)}`
        );
      }
      return JSON.stringify(value);
    case "string":
      return JSON.stringify(value);
    case "bigint":
      throw new Error("canonicalJson: BigInt is not JSON-serializable");
    case "undefined":
    case "function":
      throw new Error(
        `canonicalJson: refusing to encode ${typeof value} \u2014 include all fields explicitly`
      );
  }
  if (Array.isArray(value)) {
    const items = value.map((element) => canonicalJson(element));
    return "[" + items.join(",") + "]";
  }
  if (typeof value === "object") {
    const parts = Object.keys(value)
      .sort()
      .map((key) => JSON.stringify(key) + ":" + canonicalJson(value[key]));
    return "{" + parts.join(",") + "}";
  }
  throw new Error(`canonicalJson: unexpected value type: ${typeof value}`);
}
36
+ async function sha256Hex(input) {
37
+ const bytes = new TextEncoder().encode(input);
38
+ const digest = await globalThis.crypto.subtle.digest("SHA-256", bytes);
39
+ return bytesToHex(new Uint8Array(digest));
40
+ }
41
+ async function hashEntry(entry) {
42
+ return sha256Hex(canonicalJson(entry));
43
+ }
44
+ function bytesToHex(bytes) {
45
+ const hex = new Array(bytes.length);
46
+ for (let i = 0; i < bytes.length; i++) {
47
+ hex[i] = (bytes[i] ?? 0).toString(16).padStart(2, "0");
48
+ }
49
+ return hex.join("");
50
+ }
51
// Ledger adapter keys must sort lexicographically in numeric order.
// A negative or non-integer index would silently produce a malformed
// key (e.g. String(-1).padStart(10, "0") === "00000000-1") that breaks
// that invariant, so reject it up front instead of corrupting the chain.
function paddedIndex(index) {
  if (!Number.isInteger(index) || index < 0) {
    throw new Error(
      `paddedIndex: index must be a non-negative integer, got ${String(index)}`
    );
  }
  return String(index).padStart(10, "0");
}
54
// Inverse of paddedIndex: base-10 parse that tolerates the zero padding.
function parseIndex(key) {
  const index = Number.parseInt(key, 10);
  return index;
}
57
+
58
// src/history/ledger/hash.ts
// Hash of a record's stored (encrypted) envelope bytes; a missing
// envelope hashes to the empty string.
async function envelopePayloadHash(envelope) {
  if (!envelope) return "";
  const ciphertext = envelope._data;
  return sha256Hex(ciphertext);
}
63
+
64
+ export {
65
+ canonicalJson,
66
+ sha256Hex,
67
+ hashEntry,
68
+ paddedIndex,
69
+ parseIndex,
70
+ envelopePayloadHash
71
+ };
72
+ //# sourceMappingURL=chunk-CIMZBAZB.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/history/ledger/entry.ts","../src/history/ledger/hash.ts"],"sourcesContent":["/**\n * Ledger entry shape + canonical JSON + sha256 helpers.\n *\n * This file holds the PURE primitives used by the hash-chained ledger:\n * the entry type, the deterministic (sort-stable) JSON encoder, and\n * the sha256 hasher that produces `prevHash` and `ledger.head()`.\n *\n * Everything here is validator-free and side-effect free — the only\n * runtime dep is Web Crypto's `subtle.digest` for the sha256 call,\n * which we already use for every other hashing operation in the core.\n *\n * The hash chain property works like this:\n *\n * hash(entry[i]) = sha256(canonicalJSON(entry[i]))\n * entry[i+1].prevHash = hash(entry[i])\n *\n * Any modification to `entry[i]` (field values, field order, whitespace)\n * produces a different `hash(entry[i])`, which means `entry[i+1]`'s\n * stored `prevHash` no longer matches the recomputed hash, which means\n * `verify()` returns `{ ok: false, divergedAt: i + 1 }`. The chain is\n * append-only and tamper-evident without external anchoring.\n */\n\n/**\n * A single ledger entry in its plaintext form — what gets serialized,\n * hashed, and then encrypted with the ledger DEK before being written\n * to the `_ledger/` adapter collection.\n *\n * ## Why hash the ciphertext, not the plaintext?\n *\n * `payloadHash` is the sha256 of the record's ENCRYPTED envelope bytes,\n * not its plaintext. This matters:\n *\n * 1. **Zero-knowledge preserved.** A user (or a third party) can\n * verify the ledger against the stored envelopes without any\n * decryption keys. The adapter layer already holds only\n * ciphertext, so hashing the ciphertext keeps the ledger at the\n * same privacy level as the adapter.\n *\n * 2. **Determinism.** Plaintext → ciphertext is randomized by the\n * fresh per-write IV, so `hash(plaintext)` would need extra\n * normalization. 
`hash(ciphertext)` is already deterministic and\n * unique per write.\n *\n * 3. **Detection property.** If an attacker modifies even one byte of\n * the stored ciphertext (trying to flip a record), the hash\n * changes, the ledger's recorded `payloadHash` no longer matches,\n * and a data-integrity check fails. We don't do that check in\n * `verify()` today, but the\n * hook is there for a future `verifyIntegrity()` follow-up.\n *\n * Fields marked `op`, `collection`, `id`, `version`, `ts`, `actor` are\n * plaintext METADATA about the operation — NOT the record itself. The\n * entry is still encrypted at rest via the ledger DEK, but adapters\n * could theoretically infer operation patterns from the sizes and\n * timestamps. This is an accepted trade-off for the tamper-evidence\n * property; full ORAM-level privacy is out of scope for noy-db.\n */\nexport interface LedgerEntry {\n /**\n * Zero-based sequential position of this entry in the chain. The\n * canonical adapter key is this number zero-padded to 10 digits\n * (`\"0000000001\"`) so lexicographic ordering matches numeric order.\n */\n readonly index: number\n\n /**\n * Hex-encoded sha256 of the canonical JSON of the PREVIOUS entry.\n * The genesis entry (index 0) has `prevHash === ''` — the first\n * entry in a fresh vault has nothing to point back to.\n */\n readonly prevHash: string\n\n /**\n * Which kind of mutation this entry records. only supports\n * data operations (`put`, `delete`). Access-control operations\n * (`grant`, `revoke`, `rotate`) will be added in a follow-up once\n * the keyring write path is instrumented — that's tracked in the\n * epic issue.\n */\n readonly op: 'put' | 'delete'\n\n /** The collection the mutation targeted. */\n readonly collection: string\n\n /** The record id the mutation targeted. */\n readonly id: string\n\n /**\n * The record version AFTER this mutation. 
For `put` this is the\n * newly assigned version; for `delete` this is the version that\n * was deleted (the last version visible to reads).\n */\n readonly version: number\n\n /** ISO timestamp of the mutation. */\n readonly ts: string\n\n /** User id of the actor who performed the mutation. */\n readonly actor: string\n\n /**\n * Hex-encoded sha256 of the encrypted envelope's `_data` field.\n * For `put`, this is the hash of the new ciphertext. For `delete`,\n * it's the hash of the last visible ciphertext at deletion time,\n * or the empty string if nothing was there to delete. Hashing the\n * ciphertext (not the plaintext) preserves zero-knowledge — see\n * the file docstring.\n */\n readonly payloadHash: string\n\n /**\n * Optional hex-encoded sha256 of the encrypted JSON Patch delta\n * blob stored alongside this entry in `_ledger_deltas/`. Present\n * only for `put` operations that had a previous version — the\n * genesis put of a new record, and every `delete`, leave this\n * field undefined.\n *\n * The delta payload itself lives in a sibling internal collection\n * (`_ledger_deltas/<paddedIndex>`) and is encrypted with the\n * ledger DEK. Callers use `ledger.loadDelta(index)` to decrypt and\n * deserialize it when reconstructing a historical version.\n *\n * Why optional instead of always-present: the first put of a\n * record has no previous version to diff against, so storing an\n * empty patch would be noise. For deletes there's no \"next\" state\n * to describe with a delta. 
Both cases set this field to undefined.\n *\n * Note: the canonical-JSON hasher treats `undefined` as invalid\n * (it's one of the guard rails), so on the wire this field is\n * either `{ deltaHash: '<hex>' }` or absent from the JSON\n * entirely — never `{ deltaHash: undefined }`.\n */\n readonly deltaHash?: string\n}\n\n/**\n * Canonical (sort-stable) JSON encoder.\n *\n * This function is the load-bearing primitive of the hash chain:\n * `sha256(canonicalJSON(entry))` must produce the same hex string\n * every time, on every machine, for the same logical entry — otherwise\n * `verify()` would return `{ ok: false }` on cross-platform reads.\n *\n * JavaScript's `JSON.stringify` is almost canonical, but NOT quite:\n * it preserves the insertion order of object keys, which means\n * `{a:1,b:2}` and `{b:2,a:1}` serialize differently. We fix this by\n * recursively walking objects and sorting their keys before\n * concatenation.\n *\n * Arrays keep their original order (reordering them would change\n * semantics). Numbers, strings, booleans, and `null` use the default\n * JSON encoding. `undefined` and functions are rejected — ledger\n * entries are plain data, and silently dropping `undefined` would\n * break the \"same input → same hash\" property if a caller forgot to\n * omit a field.\n *\n * Performance: one pass per nesting level; O(n log n) for key sorting\n * at each object. Entries are small (< 1 KB) so this is negligible\n * compared to the sha256 call.\n */\nexport function canonicalJson(value: unknown): string {\n if (value === null) return 'null'\n if (typeof value === 'boolean') return value ? 
'true' : 'false'\n if (typeof value === 'number') {\n if (!Number.isFinite(value)) {\n throw new Error(\n `canonicalJson: refusing to encode non-finite number ${String(value)}`,\n )\n }\n return JSON.stringify(value)\n }\n if (typeof value === 'string') return JSON.stringify(value)\n if (typeof value === 'bigint') {\n throw new Error('canonicalJson: BigInt is not JSON-serializable')\n }\n if (typeof value === 'undefined' || typeof value === 'function') {\n throw new Error(\n `canonicalJson: refusing to encode ${typeof value} — include all fields explicitly`,\n )\n }\n if (Array.isArray(value)) {\n return '[' + value.map((v) => canonicalJson(v)).join(',') + ']'\n }\n if (typeof value === 'object') {\n const obj = value as Record<string, unknown>\n const keys = Object.keys(obj).sort()\n const parts: string[] = []\n for (const key of keys) {\n parts.push(JSON.stringify(key) + ':' + canonicalJson(obj[key]))\n }\n return '{' + parts.join(',') + '}'\n }\n throw new Error(`canonicalJson: unexpected value type: ${typeof value}`)\n}\n\n/**\n * Compute a hex-encoded sha256 of a string via Web Crypto's subtle API.\n *\n * We use hex (not base64) for hashes because hex is case-insensitive,\n * fixed-length (64 chars), and easier to compare visually in debug\n * output. Base64 would save a few bytes in storage but every encrypted\n * ledger entry is already much larger than the hash itself.\n */\nexport async function sha256Hex(input: string): Promise<string> {\n const bytes = new TextEncoder().encode(input)\n const digest = await globalThis.crypto.subtle.digest('SHA-256', bytes)\n return bytesToHex(new Uint8Array(digest))\n}\n\n/**\n * Compute the canonical hash of a ledger entry. 
Short wrapper around\n * `canonicalJson` + `sha256Hex`; callers use this instead of composing\n * the two functions every time, so any future change to the hashing\n * pipeline (e.g., adding a domain-separation prefix) lives in one place.\n */\nexport async function hashEntry(entry: LedgerEntry): Promise<string> {\n return sha256Hex(canonicalJson(entry))\n}\n\n/** Convert a Uint8Array to a lowercase hex string. */\nfunction bytesToHex(bytes: Uint8Array): string {\n const hex = new Array<string>(bytes.length)\n for (let i = 0; i < bytes.length; i++) {\n // Non-null assertion: indexing a Uint8Array within bounds always\n // returns a number, but the compiler's noUncheckedIndexedAccess\n // flag widens it to `number | undefined`. Safe here by construction.\n hex[i] = (bytes[i] ?? 0).toString(16).padStart(2, '0')\n }\n return hex.join('')\n}\n\n/**\n * Pad an index to the canonical 10-digit form used as the adapter key.\n * Ten digits is enough for ~10 billion ledger entries per vault\n * — far beyond any realistic use case, but cheap enough that the extra\n * digits don't hurt storage.\n */\nexport function paddedIndex(index: number): string {\n return String(index).padStart(10, '0')\n}\n\n/** Parse a padded adapter key back into a number. Returns NaN on malformed input. */\nexport function parseIndex(key: string): number {\n return Number.parseInt(key, 10)\n}\n","/**\n * Envelope payload hash — pinned in its own leaf module so consumers\n * (DictionaryHandle, the active history strategy) can import it\n * without dragging in the `LedgerStore` class.\n *\n * see `constants.ts` for the broader rationale.\n *\n * @internal\n */\n\nimport type { EncryptedEnvelope } from '../../types.js'\nimport { sha256Hex } from './entry.js'\n\n/**\n * Compute the `payloadHash` value for an encrypted envelope. 
Used by\n * `LedgerStore.append` for both put (hash the new envelope) and\n * delete (hash the previous envelope) paths, and by\n * `DictionaryHandle` so its ledger entries match the same contract.\n *\n * Returns the empty string when there is no envelope (delete of a\n * never-existed record). The empty string tolerated by the ledger\n * entry's `payloadHash` field as the canonical \"nothing here\" value.\n */\nexport async function envelopePayloadHash(\n envelope: EncryptedEnvelope | null,\n): Promise<string> {\n if (!envelope) return ''\n // `_data` is a base64 string for encrypted envelopes and the raw\n // JSON for plaintext ones. Both are strings, so a single sha256Hex\n // call works for both modes — the hash value differs between\n // encrypted/plaintext compartments because the bytes on disk\n // differ.\n return sha256Hex(envelope._data)\n}\n"],"mappings":";AAiKO,SAAS,cAAc,OAAwB;AACpD,MAAI,UAAU,KAAM,QAAO;AAC3B,MAAI,OAAO,UAAU,UAAW,QAAO,QAAQ,SAAS;AACxD,MAAI,OAAO,UAAU,UAAU;AAC7B,QAAI,CAAC,OAAO,SAAS,KAAK,GAAG;AAC3B,YAAM,IAAI;AAAA,QACR,uDAAuD,OAAO,KAAK,CAAC;AAAA,MACtE;AAAA,IACF;AACA,WAAO,KAAK,UAAU,KAAK;AAAA,EAC7B;AACA,MAAI,OAAO,UAAU,SAAU,QAAO,KAAK,UAAU,KAAK;AAC1D,MAAI,OAAO,UAAU,UAAU;AAC7B,UAAM,IAAI,MAAM,gDAAgD;AAAA,EAClE;AACA,MAAI,OAAO,UAAU,eAAe,OAAO,UAAU,YAAY;AAC/D,UAAM,IAAI;AAAA,MACR,qCAAqC,OAAO,KAAK;AAAA,IACnD;AAAA,EACF;AACA,MAAI,MAAM,QAAQ,KAAK,GAAG;AACxB,WAAO,MAAM,MAAM,IAAI,CAAC,MAAM,cAAc,CAAC,CAAC,EAAE,KAAK,GAAG,IAAI;AAAA,EAC9D;AACA,MAAI,OAAO,UAAU,UAAU;AAC7B,UAAM,MAAM;AACZ,UAAM,OAAO,OAAO,KAAK,GAAG,EAAE,KAAK;AACnC,UAAM,QAAkB,CAAC;AACzB,eAAW,OAAO,MAAM;AACtB,YAAM,KAAK,KAAK,UAAU,GAAG,IAAI,MAAM,cAAc,IAAI,GAAG,CAAC,CAAC;AAAA,IAChE;AACA,WAAO,MAAM,MAAM,KAAK,GAAG,IAAI;AAAA,EACjC;AACA,QAAM,IAAI,MAAM,yCAAyC,OAAO,KAAK,EAAE;AACzE;AAUA,eAAsB,UAAU,OAAgC;AAC9D,QAAM,QAAQ,IAAI,YAAY,EAAE,OAAO,KAAK;AAC5C,QAAM,SAAS,MAAM,WAAW,OAAO,OAAO,OAAO,WAAW,KAAK;AACrE,SAAO,WAAW,IAAI,WAAW,MAAM,CAAC;AAC1C;AAQA,eAAsB,UAAU,OAAqC;AACnE,SAAO,UAAU,cAAc,KAAK,CAAC;AACvC;AAGA,SAAS,WAAW,OAA2B;AAC7C,
QAAM,MAAM,IAAI,MAAc,MAAM,MAAM;AAC1C,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AAIrC,QAAI,CAAC,KAAK,MAAM,CAAC,KAAK,GAAG,SAAS,EAAE,EAAE,SAAS,GAAG,GAAG;AAAA,EACvD;AACA,SAAO,IAAI,KAAK,EAAE;AACpB;AAQO,SAAS,YAAY,OAAuB;AACjD,SAAO,OAAO,KAAK,EAAE,SAAS,IAAI,GAAG;AACvC;AAGO,SAAS,WAAW,KAAqB;AAC9C,SAAO,OAAO,SAAS,KAAK,EAAE;AAChC;;;AC9NA,eAAsB,oBACpB,UACiB;AACjB,MAAI,CAAC,SAAU,QAAO;AAMtB,SAAO,UAAU,SAAS,KAAK;AACjC;","names":[]}
@@ -0,0 +1,365 @@
1
+ import {
2
+ generateULID
3
+ } from "./chunk-FZU343FL.js";
4
+ import {
5
+ base64ToBuffer,
6
+ bufferToBase64
7
+ } from "./chunk-MR4424N3.js";
8
+ import {
9
+ SessionExpiredError,
10
+ SessionNotFoundError,
11
+ SessionPolicyError,
12
+ ValidationError
13
+ } from "./chunk-ACLDOTNQ.js";
14
+
15
// src/session/session.ts

// Web Crypto handle used for every session-key operation below.
var subtle = globalThis.crypto.subtle;
// Default session lifetime: one hour, in milliseconds.
var DEFAULT_TTL_MS = 60 * 60 * 1e3;
// In-memory map of sessionId -> AES-GCM CryptoKey. Keys never leave
// this module; losing the Map (e.g. page reload) makes every issued
// token undecryptable, which is the intended tab-scoped lifetime.
var sessionKeyStore = /* @__PURE__ */ new Map();
19
/**
 * Create a session: generate a non-extractable AES-GCM key for this tab,
 * encrypt the keyring material under it, and hand back a portable token
 * plus the session id. The key itself stays in `sessionKeyStore`.
 *
 * @param {object} keyring - Unlocked keyring (userId, role, deks Map, salt, ...).
 * @param {string} vault - Vault name the session is bound to.
 * @param {object} [options] - `{ ttlMs }`, defaults to DEFAULT_TTL_MS.
 * @returns {Promise<{ token: object, sessionId: string }>}
 */
async function createSession(keyring, vault, options = {}) {
  const ttl = options.ttsMs === undefined ? options.ttlMs ?? DEFAULT_TTL_MS : options.ttlMs ?? DEFAULT_TTL_MS;
  const id = generateULID();
  const expiry = new Date(Date.now() + ttl).toISOString();
  // Non-extractable: the tab-scope security invariant — this key can
  // never be exported, so the token is useless outside this module.
  const tabKey = await subtle.generateKey(
    { name: "AES-GCM", length: 256 },
    false,
    ["encrypt", "decrypt"]
  );
  // Export each collection DEK to raw bytes so it can ride inside the
  // encrypted payload.
  const exportedDeks = {};
  for (const [name, dek] of keyring.deks) {
    const raw = await subtle.exportKey("raw", dek);
    exportedDeks[name] = bufferToBase64(raw);
  }
  const serialized = JSON.stringify({
    userId: keyring.userId,
    displayName: keyring.displayName,
    role: keyring.role,
    permissions: keyring.permissions,
    deks: exportedDeks,
    salt: bufferToBase64(keyring.salt)
  });
  // Fresh 96-bit IV per session, as AES-GCM requires.
  const nonce = globalThis.crypto.getRandomValues(new Uint8Array(12));
  const sealed = await subtle.encrypt(
    { name: "AES-GCM", iv: nonce },
    tabKey,
    new TextEncoder().encode(serialized)
  );
  const token = {
    _noydb_session: 1,
    sessionId: id,
    userId: keyring.userId,
    vault,
    role: keyring.role,
    expiresAt: expiry,
    wrappedKek: bufferToBase64(sealed),
    kekIv: bufferToBase64(nonce)
  };
  sessionKeyStore.set(id, tabKey);
  return { token, sessionId: id };
}
61
/**
 * Resolve a session token back into a usable keyring-shaped object.
 *
 * @param {object} token - Token previously returned by `createSession`.
 * @returns {Promise<object>} Keyring material with `kek: null`.
 * @throws {SessionExpiredError} when the token's `expiresAt` has passed
 *   (the in-memory key is dropped as a side effect).
 * @throws {SessionNotFoundError} when the key is missing from this tab
 *   or decryption fails (wrong/forged token).
 */
async function resolveSession(token) {
  const expiryMs = new Date(token.expiresAt).getTime();
  if (Date.now() > expiryMs) {
    sessionKeyStore.delete(token.sessionId);
    throw new SessionExpiredError(token.sessionId);
  }
  const sessionKey = sessionKeyStore.get(token.sessionId);
  if (!sessionKey) {
    throw new SessionNotFoundError(token.sessionId);
  }
  // Decode outside the try so malformed base64 surfaces as-is rather
  // than being masked as a not-found error.
  const iv = base64ToBuffer(token.kekIv);
  const ciphertext = base64ToBuffer(token.wrappedKek);
  let decrypted;
  try {
    decrypted = await subtle.decrypt({ name: "AES-GCM", iv }, sessionKey, ciphertext);
  } catch {
    // AES-GCM auth failure: treat a tampered token like a missing session.
    throw new SessionNotFoundError(token.sessionId);
  }
  const payload = JSON.parse(new TextDecoder().decode(decrypted));
  // Re-import each DEK from its raw-byte form.
  const deks = new Map();
  for (const [name, encoded] of Object.entries(payload.deks)) {
    const dek = await subtle.importKey(
      "raw",
      base64ToBuffer(encoded),
      { name: "AES-GCM", length: 256 },
      true,
      ["encrypt", "decrypt"]
    );
    deks.set(name, dek);
  }
  return {
    userId: payload.userId,
    displayName: payload.displayName,
    role: payload.role,
    permissions: payload.permissions,
    deks,
    kek: null,
    // KEK not available in session context
    salt: base64ToBuffer(payload.salt)
  };
}
105
// Drop the in-memory session key for `sessionId`. The corresponding
// token's ciphertext becomes undecryptable, so the session is dead.
function revokeSession(sessionId) {
  sessionKeyStore.delete(sessionId);
}
108
/**
 * Whether a token is still usable: not past `expiresAt` AND its key is
 * still held in this tab's `sessionKeyStore`.
 *
 * @param {object} token - Session token to check.
 * @returns {boolean}
 */
function isSessionAlive(token) {
  const expired = Date.now() > new Date(token.expiresAt).getTime();
  return !expired && sessionKeyStore.has(token.sessionId);
}
112
// Drop every in-memory session key (e.g. global logout or test teardown).
function revokeAllSessions() {
  sessionKeyStore.clear();
}
115
// Number of session keys currently held in memory. Note: tokens that
// expired but were never resolved or revoked still count — nothing
// sweeps the store except resolveSession's expiry check.
function activeSessionCount() {
  return sessionKeyStore.size;
}
118
+
119
+ // src/session/session-policy.ts
120
/**
 * Enforces a session policy: idle timeout, absolute timeout, optional
 * lock-on-background, and per-operation re-auth requirements. On expiry
 * it tears down its timers, revokes the in-memory session key, and
 * notifies the owner via `onRevoke(reason)`.
 */
var PolicyEnforcer = class {
  // Policy being enforced: idleTimeoutMs / absoluteTimeoutMs /
  // requireReAuthFor / lockOnBackground.
  policy;
  // Session this enforcer guards; handed to revokeSession() on expiry.
  sessionId;
  // Owner callback, called with "idle" | "absolute" | "background".
  onRevoke;
  // Epoch ms at construction; basis for the absolute timeout.
  createdAt;
  // Epoch ms of the latest touch(); basis for the idle timeout.
  lastActivityAt;
  // Pending timer handles / visibility listener, null when not armed.
  idleTimer = null;
  absoluteTimer = null;
  visibilityHandler = null;
  constructor(opts) {
    this.policy = opts.policy;
    this.sessionId = opts.sessionId;
    this.onRevoke = opts.onRevoke;
    this.createdAt = Date.now();
    this.lastActivityAt = Date.now();
    // Arm everything up front; timers are no-ops for unset policy fields.
    this.scheduleIdleTimer();
    this.scheduleAbsoluteTimer();
    this.registerBackgroundLock();
  }
  /**
   * Record an activity timestamp and reset the idle timer.
   * Call this at the top of every Noydb public method.
   */
  touch() {
    this.lastActivityAt = Date.now();
    this.scheduleIdleTimer();
  }
  /**
   * Check whether the given operation is allowed under the active policy.
   * Throws `SessionPolicyError` if the operation requires re-authentication.
   * Throws `SessionExpiredError` if the absolute timeout has been exceeded
   * (defensive check in case the timer fired before the call arrived).
   *
   * This is a synchronous check — callers don't await it.
   */
  checkOperation(op) {
    const { absoluteTimeoutMs } = this.policy;
    if (absoluteTimeoutMs !== void 0 && Date.now() - this.createdAt >= absoluteTimeoutMs) {
      this.expire("absolute");
      throw new SessionExpiredError(this.sessionId);
    }
    const required = this.policy.requireReAuthFor ?? [];
    if (required.includes(op)) {
      throw new SessionPolicyError(op);
    }
  }
  /**
   * Tear down timers and background-lock listener. Call from `Noydb.close()`
   * and whenever the session is revoked externally.
   */
  destroy() {
    if (this.idleTimer) {
      clearTimeout(this.idleTimer);
      this.idleTimer = null;
    }
    if (this.absoluteTimer) {
      clearTimeout(this.absoluteTimer);
      this.absoluteTimer = null;
    }
    if (this.visibilityHandler && typeof document !== "undefined") {
      document.removeEventListener("visibilitychange", this.visibilityHandler);
      this.visibilityHandler = null;
    }
  }
  /** How long since the last activity, in ms. */
  get idleMs() {
    return Date.now() - this.lastActivityAt;
  }
  /** How long since session creation, in ms. */
  get ageMs() {
    return Date.now() - this.createdAt;
  }
  // ── Private ──────────────────────────────────────────────────────────
  // (Re)arm the idle timer; no-op when the policy has no idle timeout.
  // Called from the constructor and from every touch().
  scheduleIdleTimer() {
    const { idleTimeoutMs } = this.policy;
    if (!idleTimeoutMs) return;
    if (this.idleTimer) clearTimeout(this.idleTimer);
    this.idleTimer = setTimeout(() => {
      this.expire("idle");
    }, idleTimeoutMs);
  }
  // Arm the absolute-lifetime timer; no-op when the policy has none.
  scheduleAbsoluteTimer() {
    const { absoluteTimeoutMs } = this.policy;
    if (!absoluteTimeoutMs) return;
    if (this.absoluteTimer) clearTimeout(this.absoluteTimer);
    this.absoluteTimer = setTimeout(() => {
      this.expire("absolute");
    }, absoluteTimeoutMs);
  }
  // Expire the session when the tab is hidden, if the policy asks for it.
  // Skipped entirely outside browser-like environments (no `document`).
  registerBackgroundLock() {
    if (!this.policy.lockOnBackground) return;
    if (typeof document === "undefined") return;
    this.visibilityHandler = () => {
      if (document.hidden) {
        this.expire("background");
      }
    };
    document.addEventListener("visibilitychange", this.visibilityHandler);
  }
  // Common expiry path: tear down, revoke the session key, notify owner.
  expire(reason) {
    this.destroy();
    revokeSession(this.sessionId);
    this.onRevoke(reason);
  }
};
225
// Factory wrapper so callers construct enforcers without touching the
// PolicyEnforcer class directly.
function createEnforcer(opts) {
  return new PolicyEnforcer(opts);
}
228
/**
 * Validate a session policy's timeout fields before it is enforced.
 *
 * Uses `Number.isFinite` rather than a bare `typeof` check: `NaN` and
 * `Infinity` are of type "number" and `NaN <= 0` is false, so the old
 * form let them slip through and silently disable `setTimeout`-based
 * expiry. Both are now rejected like any other invalid value.
 *
 * @param {object} policy - `{ idleTimeoutMs?, absoluteTimeoutMs?, ... }`.
 * @throws {Error} if a timeout is present but not a finite positive
 *   number, or if `idleTimeoutMs >= absoluteTimeoutMs`.
 */
function validateSessionPolicy(policy) {
  const { idleTimeoutMs, absoluteTimeoutMs } = policy;
  if (idleTimeoutMs !== void 0 && (!Number.isFinite(idleTimeoutMs) || idleTimeoutMs <= 0)) {
    throw new Error(`SessionPolicy.idleTimeoutMs must be a positive number, got ${idleTimeoutMs}`);
  }
  if (absoluteTimeoutMs !== void 0 && (!Number.isFinite(absoluteTimeoutMs) || absoluteTimeoutMs <= 0)) {
    throw new Error(`SessionPolicy.absoluteTimeoutMs must be a positive number, got ${absoluteTimeoutMs}`);
  }
  if (idleTimeoutMs !== void 0 && absoluteTimeoutMs !== void 0 && idleTimeoutMs >= absoluteTimeoutMs) {
    throw new Error(
      `SessionPolicy.idleTimeoutMs (${idleTimeoutMs}ms) must be less than absoluteTimeoutMs (${absoluteTimeoutMs}ms)`
    );
  }
}
242
+
243
// src/session/dev-unlock.ts

// Exact acknowledgement string callers must pass to enableDevUnlock —
// forcing the full phrase into their source makes the opt-out auditable.
var REQUIRED_ACKNOWLEDGE = "I-UNDERSTAND-THIS-DISABLES-UNLOCK-SECURITY";
// Prefix for the local/sessionStorage keys that hold dev-unlock payloads.
var STORAGE_PREFIX = "noydb:dev-unlock:";
246
/**
 * Refuse to run dev unlock outside a development environment.
 *
 * Three independent gates: NODE_ENV, a Vite production marker, and —
 * when a browser location exists — a loopback/.local hostname check.
 *
 * @throws {ValidationError} when any production signal is detected or
 *   the page is served from a non-local hostname.
 */
function assertDevEnvironment() {
  const nodeSaysProduction = typeof process !== "undefined" && process.env.NODE_ENV === "production";
  if (nodeSaysProduction) {
    throw new ValidationError(
      'devUnlock is not available in production builds. process.env.NODE_ENV is "production".'
    );
  }
  if (typeof globalThis !== "undefined" && globalThis.__vite_is_production__ === true) {
    throw new ValidationError("devUnlock is not available in production builds.");
  }
  // No browser location (e.g. tests, workers): nothing more to check.
  if (typeof window === "undefined" || typeof window.location === "undefined") return;
  const host = window.location.hostname;
  const isLoopback = host === "localhost" || host === "127.0.0.1" || host === "::1";
  if (isLoopback || host.endsWith(".local")) return;
  throw new ValidationError(
    `devUnlock is only available on localhost. Current hostname: "${host}". Set NODE_ENV=development and run on localhost to use dev unlock.`
  );
}
264
// Namespaced storage key: "noydb:dev-unlock:<vault>:<userId>".
function storageKey(vault, userId) {
  return `${STORAGE_PREFIX}${vault}:${userId}`;
}
267
/**
 * Pick the Web Storage area for dev-unlock payloads.
 *
 * @param {boolean|undefined} persistAcrossTabs - true -> localStorage
 *   (survives tab close), otherwise sessionStorage (per-tab).
 * @returns {Storage}
 * @throws {ValidationError} outside a browser environment.
 */
function resolveStorage(persistAcrossTabs) {
  if (typeof window !== "undefined") {
    return persistAcrossTabs ? window.localStorage : window.sessionStorage;
  }
  throw new ValidationError("devUnlock requires a browser environment (window.sessionStorage / window.localStorage).");
}
273
/**
 * Persist the keyring's DEKs in Web Storage in PLAINTEXT so the vault
 * unlocks without a password — development convenience only.
 *
 * @param {string} vault - Vault name.
 * @param {string} userId - User the payload belongs to.
 * @param {object} keyring - Unlocked keyring whose DEKs get exported.
 * @param {object} options - `{ acknowledge, persistAcrossTabs? }`.
 * @throws {ValidationError} when the acknowledgement string is wrong or
 *   the environment is not a local development one.
 */
async function enableDevUnlock(vault, userId, keyring, options) {
  if (options.acknowledge !== REQUIRED_ACKNOWLEDGE) {
    throw new ValidationError(
      `devUnlock requires acknowledge: '${REQUIRED_ACKNOWLEDGE}'. Got: '${options.acknowledge}'. This is intentional \u2014 the full string must appear in your source.`
    );
  }
  assertDevEnvironment();
  const storage = resolveStorage(options.persistAcrossTabs);
  // Export every collection DEK to raw bytes for the plaintext payload.
  const exportedDeks = {};
  for (const [name, dek] of keyring.deks) {
    const raw = await globalThis.crypto.subtle.exportKey("raw", dek);
    exportedDeks[name] = bufferToBase64(raw);
  }
  const record = JSON.stringify({
    _noydb_dev_unlock: 1,
    userId: keyring.userId,
    displayName: keyring.displayName,
    role: keyring.role,
    permissions: keyring.permissions,
    deks: exportedDeks,
    salt: bufferToBase64(keyring.salt)
  });
  storage.setItem(storageKey(vault, userId), record);
  // Loud, unmissable console banner — this mode stores key material in
  // the clear and must never ship to production.
  console.warn(
    "%c\u26A0\uFE0F NOYDB DEV UNLOCK ACTIVE \u26A0\uFE0F",
    "color: red; font-size: 16px; font-weight: bold",
    `

Compartment "${vault}" user "${userId}" is stored in ${options.persistAcrossTabs ? "localStorage" : "sessionStorage"} in PLAINTEXT DEKs.
This is ONLY safe for local development. Never use in production.
Call clearDevUnlock() to remove.`
  );
}
306
/**
 * Load a previously stored dev-unlock payload and rebuild the keyring
 * material from it.
 *
 * @param {string} vault - Vault name.
 * @param {string} userId - User the payload was stored for.
 * @param {object} [options] - `{ persistAcrossTabs? }`, must match the
 *   value used at enable time so the same storage area is read.
 * @returns {Promise<object|null>} Keyring-shaped object with `kek: null`,
 *   or null when absent, unparsable, or not a dev-unlock record.
 */
async function loadDevUnlock(vault, userId, options = {}) {
  if (typeof window === "undefined") return null;
  const raw = resolveStorage(options.persistAcrossTabs).getItem(storageKey(vault, userId));
  if (!raw) return null;
  let record;
  try {
    record = JSON.parse(raw);
  } catch {
    // Corrupt storage entry: behave as if nothing was stored.
    return null;
  }
  if (record._noydb_dev_unlock !== 1) return null;
  // Re-import each DEK; extractable so it can be re-exported later.
  const deks = new Map();
  for (const [collection, encoded] of Object.entries(record.deks)) {
    const dek = await globalThis.crypto.subtle.importKey(
      "raw",
      base64ToBuffer(encoded),
      { name: "AES-GCM", length: 256 },
      true,
      ["encrypt", "decrypt"]
    );
    deks.set(collection, dek);
  }
  return {
    userId: record.userId,
    displayName: record.displayName,
    role: record.role,
    permissions: record.permissions,
    deks,
    kek: null,
    salt: base64ToBuffer(record.salt)
  };
}
339
/**
 * Remove a stored dev-unlock payload. No-op outside the browser.
 *
 * @param {string} vault - Vault name.
 * @param {string} userId - User whose payload is removed.
 * @param {object} [options] - `{ persistAcrossTabs? }`, must match enable time.
 */
function clearDevUnlock(vault, userId, options = {}) {
  if (typeof window === "undefined") return;
  resolveStorage(options.persistAcrossTabs).removeItem(storageKey(vault, userId));
}
344
/**
 * Whether a dev-unlock payload exists for this vault/user pair.
 * Always false outside the browser.
 *
 * @param {string} vault - Vault name.
 * @param {string} userId - User to check.
 * @param {object} [options] - `{ persistAcrossTabs? }`, must match enable time.
 * @returns {boolean}
 */
function isDevUnlockActive(vault, userId, options = {}) {
  if (typeof window === "undefined") return false;
  const stored = resolveStorage(options.persistAcrossTabs).getItem(storageKey(vault, userId));
  return stored !== null;
}
349
+
350
+ export {
351
+ createSession,
352
+ resolveSession,
353
+ revokeSession,
354
+ isSessionAlive,
355
+ revokeAllSessions,
356
+ activeSessionCount,
357
+ PolicyEnforcer,
358
+ createEnforcer,
359
+ validateSessionPolicy,
360
+ enableDevUnlock,
361
+ loadDevUnlock,
362
+ clearDevUnlock,
363
+ isDevUnlockActive
364
+ };
365
+ //# sourceMappingURL=chunk-E445ICYI.js.map