@noy-db/hub 0.1.0-pre.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (195) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +197 -0
  3. package/dist/aggregate/index.cjs +476 -0
  4. package/dist/aggregate/index.cjs.map +1 -0
  5. package/dist/aggregate/index.d.cts +38 -0
  6. package/dist/aggregate/index.d.ts +38 -0
  7. package/dist/aggregate/index.js +53 -0
  8. package/dist/aggregate/index.js.map +1 -0
  9. package/dist/blobs/index.cjs +1480 -0
  10. package/dist/blobs/index.cjs.map +1 -0
  11. package/dist/blobs/index.d.cts +45 -0
  12. package/dist/blobs/index.d.ts +45 -0
  13. package/dist/blobs/index.js +48 -0
  14. package/dist/blobs/index.js.map +1 -0
  15. package/dist/bundle/index.cjs +436 -0
  16. package/dist/bundle/index.cjs.map +1 -0
  17. package/dist/bundle/index.d.cts +7 -0
  18. package/dist/bundle/index.d.ts +7 -0
  19. package/dist/bundle/index.js +40 -0
  20. package/dist/bundle/index.js.map +1 -0
  21. package/dist/chunk-2QR2PQTT.js +217 -0
  22. package/dist/chunk-2QR2PQTT.js.map +1 -0
  23. package/dist/chunk-4OWFYIDQ.js +79 -0
  24. package/dist/chunk-4OWFYIDQ.js.map +1 -0
  25. package/dist/chunk-5AATM2M2.js +90 -0
  26. package/dist/chunk-5AATM2M2.js.map +1 -0
  27. package/dist/chunk-ACLDOTNQ.js +543 -0
  28. package/dist/chunk-ACLDOTNQ.js.map +1 -0
  29. package/dist/chunk-BTDCBVJW.js +160 -0
  30. package/dist/chunk-BTDCBVJW.js.map +1 -0
  31. package/dist/chunk-CIMZBAZB.js +72 -0
  32. package/dist/chunk-CIMZBAZB.js.map +1 -0
  33. package/dist/chunk-E445ICYI.js +365 -0
  34. package/dist/chunk-E445ICYI.js.map +1 -0
  35. package/dist/chunk-EXQRC2L4.js +722 -0
  36. package/dist/chunk-EXQRC2L4.js.map +1 -0
  37. package/dist/chunk-FZU343FL.js +32 -0
  38. package/dist/chunk-FZU343FL.js.map +1 -0
  39. package/dist/chunk-GJILMRPO.js +354 -0
  40. package/dist/chunk-GJILMRPO.js.map +1 -0
  41. package/dist/chunk-GOUT6DND.js +1285 -0
  42. package/dist/chunk-GOUT6DND.js.map +1 -0
  43. package/dist/chunk-J66GRPNH.js +111 -0
  44. package/dist/chunk-J66GRPNH.js.map +1 -0
  45. package/dist/chunk-M2F2JAWB.js +464 -0
  46. package/dist/chunk-M2F2JAWB.js.map +1 -0
  47. package/dist/chunk-M5INGEFC.js +84 -0
  48. package/dist/chunk-M5INGEFC.js.map +1 -0
  49. package/dist/chunk-M62XNWRA.js +72 -0
  50. package/dist/chunk-M62XNWRA.js.map +1 -0
  51. package/dist/chunk-MR4424N3.js +275 -0
  52. package/dist/chunk-MR4424N3.js.map +1 -0
  53. package/dist/chunk-NPC4LFV5.js +132 -0
  54. package/dist/chunk-NPC4LFV5.js.map +1 -0
  55. package/dist/chunk-NXFEYLVG.js +311 -0
  56. package/dist/chunk-NXFEYLVG.js.map +1 -0
  57. package/dist/chunk-R36SIKES.js +79 -0
  58. package/dist/chunk-R36SIKES.js.map +1 -0
  59. package/dist/chunk-TDR6T5CJ.js +381 -0
  60. package/dist/chunk-TDR6T5CJ.js.map +1 -0
  61. package/dist/chunk-UF3BUNQZ.js +1 -0
  62. package/dist/chunk-UF3BUNQZ.js.map +1 -0
  63. package/dist/chunk-UQFSPSWG.js +1109 -0
  64. package/dist/chunk-UQFSPSWG.js.map +1 -0
  65. package/dist/chunk-USKYUS74.js +793 -0
  66. package/dist/chunk-USKYUS74.js.map +1 -0
  67. package/dist/chunk-XCL3WP6J.js +121 -0
  68. package/dist/chunk-XCL3WP6J.js.map +1 -0
  69. package/dist/chunk-XHFOENR2.js +680 -0
  70. package/dist/chunk-XHFOENR2.js.map +1 -0
  71. package/dist/chunk-ZFKD4QMV.js +430 -0
  72. package/dist/chunk-ZFKD4QMV.js.map +1 -0
  73. package/dist/chunk-ZLMV3TUA.js +490 -0
  74. package/dist/chunk-ZLMV3TUA.js.map +1 -0
  75. package/dist/chunk-ZRG4V3F5.js +17 -0
  76. package/dist/chunk-ZRG4V3F5.js.map +1 -0
  77. package/dist/consent/index.cjs +204 -0
  78. package/dist/consent/index.cjs.map +1 -0
  79. package/dist/consent/index.d.cts +24 -0
  80. package/dist/consent/index.d.ts +24 -0
  81. package/dist/consent/index.js +23 -0
  82. package/dist/consent/index.js.map +1 -0
  83. package/dist/crdt/index.cjs +152 -0
  84. package/dist/crdt/index.cjs.map +1 -0
  85. package/dist/crdt/index.d.cts +30 -0
  86. package/dist/crdt/index.d.ts +30 -0
  87. package/dist/crdt/index.js +24 -0
  88. package/dist/crdt/index.js.map +1 -0
  89. package/dist/crypto-IVKU7YTT.js +44 -0
  90. package/dist/crypto-IVKU7YTT.js.map +1 -0
  91. package/dist/delegation-XDJCBTI2.js +16 -0
  92. package/dist/delegation-XDJCBTI2.js.map +1 -0
  93. package/dist/dev-unlock-CeXic1xC.d.cts +263 -0
  94. package/dist/dev-unlock-KrKkcqD3.d.ts +263 -0
  95. package/dist/hash-9KO1BGxh.d.cts +63 -0
  96. package/dist/hash-ChfJjRjQ.d.ts +63 -0
  97. package/dist/history/index.cjs +1215 -0
  98. package/dist/history/index.cjs.map +1 -0
  99. package/dist/history/index.d.cts +62 -0
  100. package/dist/history/index.d.ts +62 -0
  101. package/dist/history/index.js +79 -0
  102. package/dist/history/index.js.map +1 -0
  103. package/dist/i18n/index.cjs +746 -0
  104. package/dist/i18n/index.cjs.map +1 -0
  105. package/dist/i18n/index.d.cts +38 -0
  106. package/dist/i18n/index.d.ts +38 -0
  107. package/dist/i18n/index.js +55 -0
  108. package/dist/i18n/index.js.map +1 -0
  109. package/dist/index-BRHBCmLt.d.ts +1940 -0
  110. package/dist/index-C8kQtmOk.d.ts +380 -0
  111. package/dist/index-DN-J-5wT.d.cts +1940 -0
  112. package/dist/index-DhjMjz7L.d.cts +380 -0
  113. package/dist/index.cjs +14756 -0
  114. package/dist/index.cjs.map +1 -0
  115. package/dist/index.d.cts +269 -0
  116. package/dist/index.d.ts +269 -0
  117. package/dist/index.js +6085 -0
  118. package/dist/index.js.map +1 -0
  119. package/dist/indexing/index.cjs +736 -0
  120. package/dist/indexing/index.cjs.map +1 -0
  121. package/dist/indexing/index.d.cts +36 -0
  122. package/dist/indexing/index.d.ts +36 -0
  123. package/dist/indexing/index.js +77 -0
  124. package/dist/indexing/index.js.map +1 -0
  125. package/dist/lazy-builder-BwEoBQZ9.d.ts +304 -0
  126. package/dist/lazy-builder-CZVLKh0Z.d.cts +304 -0
  127. package/dist/ledger-2NX4L7PN.js +33 -0
  128. package/dist/ledger-2NX4L7PN.js.map +1 -0
  129. package/dist/mime-magic-CBBSOkjm.d.cts +50 -0
  130. package/dist/mime-magic-CBBSOkjm.d.ts +50 -0
  131. package/dist/periods/index.cjs +1035 -0
  132. package/dist/periods/index.cjs.map +1 -0
  133. package/dist/periods/index.d.cts +21 -0
  134. package/dist/periods/index.d.ts +21 -0
  135. package/dist/periods/index.js +25 -0
  136. package/dist/periods/index.js.map +1 -0
  137. package/dist/predicate-SBHmi6D0.d.cts +161 -0
  138. package/dist/predicate-SBHmi6D0.d.ts +161 -0
  139. package/dist/query/index.cjs +1957 -0
  140. package/dist/query/index.cjs.map +1 -0
  141. package/dist/query/index.d.cts +3 -0
  142. package/dist/query/index.d.ts +3 -0
  143. package/dist/query/index.js +62 -0
  144. package/dist/query/index.js.map +1 -0
  145. package/dist/session/index.cjs +487 -0
  146. package/dist/session/index.cjs.map +1 -0
  147. package/dist/session/index.d.cts +45 -0
  148. package/dist/session/index.d.ts +45 -0
  149. package/dist/session/index.js +44 -0
  150. package/dist/session/index.js.map +1 -0
  151. package/dist/shadow/index.cjs +133 -0
  152. package/dist/shadow/index.cjs.map +1 -0
  153. package/dist/shadow/index.d.cts +16 -0
  154. package/dist/shadow/index.d.ts +16 -0
  155. package/dist/shadow/index.js +20 -0
  156. package/dist/shadow/index.js.map +1 -0
  157. package/dist/store/index.cjs +1069 -0
  158. package/dist/store/index.cjs.map +1 -0
  159. package/dist/store/index.d.cts +491 -0
  160. package/dist/store/index.d.ts +491 -0
  161. package/dist/store/index.js +34 -0
  162. package/dist/store/index.js.map +1 -0
  163. package/dist/strategy-BSxFXGzb.d.cts +110 -0
  164. package/dist/strategy-BSxFXGzb.d.ts +110 -0
  165. package/dist/strategy-D-SrOLCl.d.cts +548 -0
  166. package/dist/strategy-D-SrOLCl.d.ts +548 -0
  167. package/dist/sync/index.cjs +1062 -0
  168. package/dist/sync/index.cjs.map +1 -0
  169. package/dist/sync/index.d.cts +42 -0
  170. package/dist/sync/index.d.ts +42 -0
  171. package/dist/sync/index.js +28 -0
  172. package/dist/sync/index.js.map +1 -0
  173. package/dist/team/index.cjs +1233 -0
  174. package/dist/team/index.cjs.map +1 -0
  175. package/dist/team/index.d.cts +117 -0
  176. package/dist/team/index.d.ts +117 -0
  177. package/dist/team/index.js +39 -0
  178. package/dist/team/index.js.map +1 -0
  179. package/dist/tx/index.cjs +212 -0
  180. package/dist/tx/index.cjs.map +1 -0
  181. package/dist/tx/index.d.cts +20 -0
  182. package/dist/tx/index.d.ts +20 -0
  183. package/dist/tx/index.js +20 -0
  184. package/dist/tx/index.js.map +1 -0
  185. package/dist/types-BZpCZB8N.d.ts +7526 -0
  186. package/dist/types-Bfs0qr5F.d.cts +7526 -0
  187. package/dist/ulid-COREQ2RQ.js +9 -0
  188. package/dist/ulid-COREQ2RQ.js.map +1 -0
  189. package/dist/util/index.cjs +230 -0
  190. package/dist/util/index.cjs.map +1 -0
  191. package/dist/util/index.d.cts +77 -0
  192. package/dist/util/index.d.ts +77 -0
  193. package/dist/util/index.js +190 -0
  194. package/dist/util/index.js.map +1 -0
  195. package/package.json +244 -0
@@ -0,0 +1,79 @@
1
+ import {
2
+ ensureCollectionDEK
3
+ } from "./chunk-M2F2JAWB.js";
4
+ import {
5
+ NOYDB_FORMAT_VERSION
6
+ } from "./chunk-ZRG4V3F5.js";
7
+ import {
8
+ decrypt,
9
+ encrypt
10
+ } from "./chunk-MR4424N3.js";
11
+ import {
12
+ PermissionDeniedError
13
+ } from "./chunk-ACLDOTNQ.js";
14
+
15
// src/team/sync-credentials.ts
// Reserved collection name for encrypted sync credentials; records here are
// only reachable through the dedicated API in this module.
// `const` instead of `var`: the binding is never reassigned.
const SYNC_CREDENTIALS_COLLECTION = "_sync_credentials";
17
/**
 * Access gate for the sync-credential API: only the `owner` and `admin`
 * keyring roles may proceed.
 *
 * @param {object} keyring - Unlocked keyring; only `.role` is consulted.
 * @throws {PermissionDeniedError} when the role is anything else.
 */
function requireAdminAccess(keyring) {
  const { role } = keyring;
  const privileged = role === "owner" || role === "admin";
  if (privileged) return;
  throw new PermissionDeniedError(
    `Sync credentials require owner or admin role. Current role: "${role}"`
  );
}
24
/**
 * Store or overwrite a sync credential, encrypted with the collection DEK.
 * The record ID is `credential.adapterId`; the envelope version is bumped
 * from any existing record and the prior version is passed to `adapter.put`
 * as the optimistic-concurrency check. Requires owner or admin role.
 *
 * @param {object} adapter    - Storage adapter (get/put).
 * @param {string} vault      - Vault identifier.
 * @param {object} keyring    - Unlocked keyring (role + userId).
 * @param {object} credential - Credential payload; must carry `adapterId`.
 * @returns {Promise<void>}
 */
async function putCredential(adapter, vault, keyring, credential) {
  requireAdminAccess(keyring);
  const dekFor = await ensureCollectionDEK(adapter, vault, keyring);
  const dek = await dekFor(SYNC_CREDENTIALS_COLLECTION);
  // Encrypt the full credential as one JSON blob.
  const ciphertext = await encrypt(JSON.stringify(credential), dek);
  // Look up any prior record so the version chain stays monotonic.
  const prior = await adapter.get(vault, SYNC_CREDENTIALS_COLLECTION, credential.adapterId);
  const envelope = {
    _noydb: NOYDB_FORMAT_VERSION,
    _v: prior ? prior._v + 1 : 1,
    _ts: new Date().toISOString(),
    _iv: ciphertext.iv,
    _data: ciphertext.data,
    _by: keyring.userId
  };
  // Expected version is undefined on first write (create semantics).
  const expectedVersion = prior ? prior._v : void 0;
  await adapter.put(
    vault,
    SYNC_CREDENTIALS_COLLECTION,
    credential.adapterId,
    envelope,
    expectedVersion
  );
}
47
/**
 * Load and decrypt the sync credential stored under `adapterId`.
 * Requires owner or admin role.
 *
 * @param {object} adapter   - Storage adapter (get).
 * @param {string} vault     - Vault identifier.
 * @param {object} keyring   - Unlocked keyring.
 * @param {string} adapterId - Record ID within the reserved collection.
 * @returns {Promise<object|null>} parsed credential, or null when absent.
 */
async function getCredential(adapter, vault, keyring, adapterId) {
  requireAdminAccess(keyring);
  const dekFor = await ensureCollectionDEK(adapter, vault, keyring);
  const dek = await dekFor(SYNC_CREDENTIALS_COLLECTION);
  const envelope = await adapter.get(vault, SYNC_CREDENTIALS_COLLECTION, adapterId);
  if (!envelope) {
    return null;
  }
  const plaintext = await decrypt(envelope._iv, envelope._data, dek);
  return JSON.parse(plaintext);
}
56
/**
 * Remove the sync credential stored under `adapterId`.
 * Delegates directly to the adapter, so deleting a missing record behaves
 * however `adapter.delete` does. Requires owner or admin role.
 *
 * @param {object} adapter   - Storage adapter (delete).
 * @param {string} vault     - Vault identifier.
 * @param {object} keyring   - Unlocked keyring.
 * @param {string} adapterId - Record ID to remove.
 * @returns {Promise<void>}
 */
async function deleteCredential(adapter, vault, keyring, adapterId) {
  requireAdminAccess(keyring);
  await adapter.delete(vault, SYNC_CREDENTIALS_COLLECTION, adapterId);
}
60
/**
 * Enumerate the adapter IDs that have stored credentials — IDs only,
 * never decrypted payloads. Requires owner or admin role.
 *
 * @param {object} adapter - Storage adapter (list).
 * @param {string} vault   - Vault identifier.
 * @param {object} keyring - Unlocked keyring.
 * @returns {Promise<string[]>}
 */
async function listCredentials(adapter, vault, keyring) {
  requireAdminAccess(keyring);
  // Hand back the adapter's promise directly; no post-processing needed.
  return adapter.list(vault, SYNC_CREDENTIALS_COLLECTION);
}
64
/**
 * Report whether a credential exists and whether its access token is past
 * its `expiresAt` timestamp. Role enforcement happens inside the
 * `getCredential` call this delegates to.
 *
 * @param {object} adapter   - Storage adapter.
 * @param {string} vault     - Vault identifier.
 * @param {object} keyring   - Unlocked keyring.
 * @param {string} adapterId - Record ID to inspect.
 * @returns {Promise<{exists: false} | {exists: true, expired: boolean}>}
 */
async function credentialStatus(adapter, vault, keyring, adapterId) {
  const credential = await getCredential(adapter, vault, keyring, adapterId);
  if (!credential) {
    return { exists: false };
  }
  // Missing expiresAt means the token never expires.
  let expired = false;
  if (credential.expiresAt) {
    expired = Date.now() > new Date(credential.expiresAt).getTime();
  }
  return { exists: true, expired };
}
70
+
71
+ export {
72
+ SYNC_CREDENTIALS_COLLECTION,
73
+ putCredential,
74
+ getCredential,
75
+ deleteCredential,
76
+ listCredentials,
77
+ credentialStatus
78
+ };
79
+ //# sourceMappingURL=chunk-4OWFYIDQ.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/team/sync-credentials.ts"],"sourcesContent":["/**\n * _sync_credentials reserved collection —\n *\n * Stores per-adapter OAuth tokens (and any other long-lived sync secrets) as\n * encrypted records inside the vault itself. Tokens are wrapped with the\n * compartment's own DEK, live on disk as ciphertext like any other record, and\n * are accessed only through the dedicated API in this module — never via\n * `vault.collection('_sync_credentials')`.\n *\n * Design decisions\n * ────────────────\n *\n * **Why a reserved collection, not a separate store?**\n * The compartment's existing encryption stack (AES-256-GCM + collection DEK)\n * is exactly the right primitive for protecting OAuth tokens at rest. Using a\n * separate store would require a new encryption surface, new adapter calls,\n * and a new backup/restore path — all of which already exist for collections.\n *\n * **Why not exposed as a regular collection?**\n * The same reason `_keyring` and `_ledger` aren't: they have invariants that\n * must be enforced (naming scheme, no cross-user leakage, no schema\n * validation, no history/ledger writes for privacy). Routing through a\n * dedicated API enforces those invariants.\n *\n * **Token lifecycle:**\n * - `putCredential(vault, adapterId, token)` — store or overwrite\n * - `getCredential(vault, adapterId)` — load and decrypt\n * - `deleteCredential(vault, adapterId)` — remove\n * - `listCredentials(vault)` — enumerate adapter IDs (not tokens)\n *\n * The `adapterId` is the record ID within the `_sync_credentials` collection.\n * It should be a stable, human-readable identifier for the adapter instance\n * (e.g. `'google-drive'`, `'dropbox'`, `'s3-prod'`).\n *\n * **ACL:** only `owner` and `admin` roles can read/write sync credentials.\n * Operators, viewers, and clients cannot call this API. 
The check is made\n * against the caller's keyring role at call time.\n */\n\nimport type { NoydbStore, EncryptedEnvelope } from '../types.js'\nimport { NOYDB_FORMAT_VERSION } from '../types.js'\nimport type { UnlockedKeyring } from './keyring.js'\nimport { encrypt, decrypt } from '../crypto.js'\nimport { ensureCollectionDEK } from './keyring.js'\nimport { PermissionDeniedError } from '../errors.js'\n\n/** The reserved collection name. Never collides with user collections. */\nexport const SYNC_CREDENTIALS_COLLECTION = '_sync_credentials'\n\n// ─── Token types ──────────────────────────────────────────────────────\n\n/**\n * An OAuth/auth token stored in `_sync_credentials`.\n *\n * Fields mirror the OAuth2 token response shape. `customData` is an escape\n * hatch for adapter-specific secrets (API keys, connection strings, etc.)\n * that don't fit the OAuth2 shape.\n */\nexport interface SyncCredential {\n /** Stable identifier for the adapter instance (e.g. 'google-drive'). */\n readonly adapterId: string\n /** OAuth token type, usually 'Bearer'. */\n readonly tokenType: string\n /** The access token. Expires at `expiresAt` if set. */\n readonly accessToken: string\n /** Long-lived refresh token for renewing the access token. */\n readonly refreshToken?: string\n /** ISO timestamp when `accessToken` expires. Absent means \"no expiry\". */\n readonly expiresAt?: string\n /** Space-separated OAuth scopes. */\n readonly scopes?: string\n /** Adapter-specific opaque data (API keys, endpoints, etc.). */\n readonly customData?: Record<string, string>\n}\n\n// ─── Access check ─────────────────────────────────────────────────────\n\nfunction requireAdminAccess(keyring: UnlockedKeyring): void {\n if (keyring.role !== 'owner' && keyring.role !== 'admin') {\n throw new PermissionDeniedError(\n `Sync credentials require owner or admin role. 
Current role: \"${keyring.role}\"`,\n )\n }\n}\n\n// ─── Public API ────────────────────────────────────────────────────────\n\n/**\n * Store or overwrite a sync credential for the given adapter.\n *\n * The credential is encrypted with the `_sync_credentials` collection DEK\n * (auto-generated on first use). The record ID is the `adapterId`.\n *\n * Requires owner or admin role.\n */\nexport async function putCredential(\n adapter: NoydbStore,\n vault: string,\n keyring: UnlockedKeyring,\n credential: SyncCredential,\n): Promise<void> {\n requireAdminAccess(keyring)\n\n const getDek = await ensureCollectionDEK(adapter, vault, keyring)\n const dek = await getDek(SYNC_CREDENTIALS_COLLECTION)\n\n const { iv, data } = await encrypt(JSON.stringify(credential), dek)\n\n const existing = await adapter.get(vault, SYNC_CREDENTIALS_COLLECTION, credential.adapterId)\n const version = existing ? existing._v + 1 : 1\n\n const envelope: EncryptedEnvelope = {\n _noydb: NOYDB_FORMAT_VERSION,\n _v: version,\n _ts: new Date().toISOString(),\n _iv: iv,\n _data: data,\n _by: keyring.userId,\n }\n\n await adapter.put(\n vault,\n SYNC_CREDENTIALS_COLLECTION,\n credential.adapterId,\n envelope,\n existing ? 
existing._v : undefined,\n )\n}\n\n/**\n * Load and decrypt a sync credential for the given adapter ID.\n *\n * Returns `null` if no credential exists for this adapter.\n * Requires owner or admin role.\n */\nexport async function getCredential(\n adapter: NoydbStore,\n vault: string,\n keyring: UnlockedKeyring,\n adapterId: string,\n): Promise<SyncCredential | null> {\n requireAdminAccess(keyring)\n\n const getDek = await ensureCollectionDEK(adapter, vault, keyring)\n const dek = await getDek(SYNC_CREDENTIALS_COLLECTION)\n\n const envelope = await adapter.get(vault, SYNC_CREDENTIALS_COLLECTION, adapterId)\n if (!envelope) return null\n\n const plaintext = await decrypt(envelope._iv, envelope._data, dek)\n return JSON.parse(plaintext) as SyncCredential\n}\n\n/**\n * Delete a sync credential by adapter ID.\n *\n * No-op if the credential doesn't exist. Requires owner or admin role.\n */\nexport async function deleteCredential(\n adapter: NoydbStore,\n vault: string,\n keyring: UnlockedKeyring,\n adapterId: string,\n): Promise<void> {\n requireAdminAccess(keyring)\n await adapter.delete(vault, SYNC_CREDENTIALS_COLLECTION, adapterId)\n}\n\n/**\n * List all adapter IDs that have stored credentials.\n *\n * Returns only the IDs, never the credential payloads. 
Useful for\n * displaying \"connected adapters\" in UI without decrypting tokens.\n * Requires owner or admin role.\n */\nexport async function listCredentials(\n adapter: NoydbStore,\n vault: string,\n keyring: UnlockedKeyring,\n): Promise<string[]> {\n requireAdminAccess(keyring)\n return adapter.list(vault, SYNC_CREDENTIALS_COLLECTION)\n}\n\n/**\n * Check whether a credential exists and whether its access token has expired.\n *\n * Returns `{ exists: false }` if no credential is stored, or\n * `{ exists: true, expired: boolean }` based on the `expiresAt` field.\n * Requires owner or admin role.\n */\nexport async function credentialStatus(\n adapter: NoydbStore,\n vault: string,\n keyring: UnlockedKeyring,\n adapterId: string,\n): Promise<{ exists: false } | { exists: true; expired: boolean }> {\n const credential = await getCredential(adapter, vault, keyring, adapterId)\n if (!credential) return { exists: false }\n\n const expired = credential.expiresAt\n ? Date.now() > new Date(credential.expiresAt).getTime()\n : false\n\n return { exists: true, expired 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;AA+CO,IAAM,8BAA8B;AA8B3C,SAAS,mBAAmB,SAAgC;AAC1D,MAAI,QAAQ,SAAS,WAAW,QAAQ,SAAS,SAAS;AACxD,UAAM,IAAI;AAAA,MACR,gEAAgE,QAAQ,IAAI;AAAA,IAC9E;AAAA,EACF;AACF;AAYA,eAAsB,cACpB,SACA,OACA,SACA,YACe;AACf,qBAAmB,OAAO;AAE1B,QAAM,SAAS,MAAM,oBAAoB,SAAS,OAAO,OAAO;AAChE,QAAM,MAAM,MAAM,OAAO,2BAA2B;AAEpD,QAAM,EAAE,IAAI,KAAK,IAAI,MAAM,QAAQ,KAAK,UAAU,UAAU,GAAG,GAAG;AAElE,QAAM,WAAW,MAAM,QAAQ,IAAI,OAAO,6BAA6B,WAAW,SAAS;AAC3F,QAAM,UAAU,WAAW,SAAS,KAAK,IAAI;AAE7C,QAAM,WAA8B;AAAA,IAClC,QAAQ;AAAA,IACR,IAAI;AAAA,IACJ,MAAK,oBAAI,KAAK,GAAE,YAAY;AAAA,IAC5B,KAAK;AAAA,IACL,OAAO;AAAA,IACP,KAAK,QAAQ;AAAA,EACf;AAEA,QAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA,WAAW;AAAA,IACX;AAAA,IACA,WAAW,SAAS,KAAK;AAAA,EAC3B;AACF;AAQA,eAAsB,cACpB,SACA,OACA,SACA,WACgC;AAChC,qBAAmB,OAAO;AAE1B,QAAM,SAAS,MAAM,oBAAoB,SAAS,OAAO,OAAO;AAChE,QAAM,MAAM,MAAM,OAAO,2BAA2B;AAEpD,QAAM,WAAW,MAAM,QAAQ,IAAI,OAAO,6BAA6B,SAAS;AAChF,MAAI,CAAC,SAAU,QAAO;AAEtB,QAAM,YAAY,MAAM,QAAQ,SAAS,KAAK,SAAS,OAAO,GAAG;AACjE,SAAO,KAAK,MAAM,SAAS;AAC7B;AAOA,eAAsB,iBACpB,SACA,OACA,SACA,WACe;AACf,qBAAmB,OAAO;AAC1B,QAAM,QAAQ,OAAO,OAAO,6BAA6B,SAAS;AACpE;AASA,eAAsB,gBACpB,SACA,OACA,SACmB;AACnB,qBAAmB,OAAO;AAC1B,SAAO,QAAQ,KAAK,OAAO,2BAA2B;AACxD;AASA,eAAsB,iBACpB,SACA,OACA,SACA,WACiE;AACjE,QAAM,aAAa,MAAM,cAAc,SAAS,OAAO,SAAS,SAAS;AACzE,MAAI,CAAC,WAAY,QAAO,EAAE,QAAQ,MAAM;AAExC,QAAM,UAAU,WAAW,YACvB,KAAK,IAAI,IAAI,IAAI,KAAK,WAAW,SAAS,EAAE,QAAQ,IACpD;AAEJ,SAAO,EAAE,QAAQ,MAAM,QAAQ;AACjC;","names":[]}
@@ -0,0 +1,90 @@
1
+ import {
2
+ canonicalJson,
3
+ sha256Hex
4
+ } from "./chunk-CIMZBAZB.js";
5
+ import {
6
+ PeriodClosedError,
7
+ ValidationError
8
+ } from "./chunk-ACLDOTNQ.js";
9
+
10
// src/periods/periods.ts
// Reserved collection name holding closed-period metadata.
// `const` instead of `var`: the binding is never reassigned.
const PERIODS_COLLECTION = "_periods";
12
/**
 * Load and decrypt every record in the reserved `_periods` collection,
 * sorted ascending by `closedAt` (string comparison via localeCompare).
 * Records the adapter lists but cannot fetch (get returns falsy) are skipped.
 *
 * @param {object}   adapter - Storage adapter (list/get).
 * @param {string}   vault   - Vault identifier.
 * @param {Function} decrypt - Async envelope → plaintext-record decoder.
 * @returns {Promise<object[]>}
 */
async function loadPeriods(adapter, vault, decrypt) {
  const ids = await adapter.list(vault, PERIODS_COLLECTION);
  const records = [];
  // Sequential fetch keeps adapter call ordering deterministic.
  for (const id of ids) {
    const envelope = await adapter.get(vault, PERIODS_COLLECTION, id);
    if (!envelope) continue;
    records.push(await decrypt(envelope));
  }
  records.sort((left, right) => left.closedAt.localeCompare(right.closedAt));
  return records;
}
22
/**
 * Compute the hash-chain anchor for the next period from the last record in
 * `records`: its name plus the SHA-256 of its canonical JSON. An empty list
 * yields an empty `priorPeriodHash` and no `priorPeriodName`.
 *
 * @param {object[]} records - Period records, already sorted.
 * @returns {Promise<{priorPeriodName?: string, priorPeriodHash: string}>}
 */
async function chainAnchor(records) {
  const tail = records[records.length - 1];
  if (!tail) {
    return { priorPeriodHash: "" };
  }
  const digest = await sha256Hex(canonicalJson(tail));
  return { priorPeriodName: tail.name, priorPeriodHash: digest };
}
28
+ function assertTsWritable(existing, incomingRecord, closedPeriods) {
29
+ for (const p of closedPeriods) {
30
+ if (p.kind !== "closed") continue;
31
+ if (p.dateField) {
32
+ const checkRecord = (label, r) => {
33
+ if (!r) return;
34
+ const v = r[p.dateField];
35
+ if (typeof v === "string" && v <= p.endDate) {
36
+ throw new PeriodClosedError(p.name, p.endDate, `${label}[${p.dateField}]=${v}`);
37
+ }
38
+ };
39
+ checkRecord("existing", existing?.record ?? null);
40
+ checkRecord("incoming", incomingRecord);
41
+ continue;
42
+ }
43
+ const existingTs = existing?.ts ?? null;
44
+ if (existingTs !== null && existingTs <= p.endDate) {
45
+ throw new PeriodClosedError(p.name, p.endDate, existingTs);
46
+ }
47
+ }
48
+ }
49
/**
 * Validate a proposed period name: it must be non-empty and unique among
 * the already-stored periods.
 *
 * @param {string}   name     - Candidate period name.
 * @param {object[]} existing - Stored period records (checked via `.name`).
 * @throws {ValidationError} on empty or duplicate names.
 */
function validatePeriodName(name, existing) {
  if (name.length === 0) {
    throw new ValidationError("Period name cannot be empty.");
  }
  const taken = existing.some((period) => period.name === name);
  if (taken) {
    throw new ValidationError(`Period "${name}" already exists.`);
  }
}
57
/**
 * Append a tamper-evident ledger entry for a period write. No-op when the
 * vault has no ledger. The payload hasher is loaded lazily (dynamic import)
 * so vaults without a ledger never pay for the ledger chunk.
 *
 * @param {object|null} ledger   - LedgerStore, or falsy to skip.
 * @param {string}      actor    - User performing the write.
 * @param {object}      envelope - Encrypted envelope just written.
 * @param {string}      name     - Period name (record ID in `_periods`).
 * @returns {Promise<void>}
 */
async function appendPeriodLedgerEntry(ledger, actor, envelope, name) {
  if (!ledger) return;
  const ledgerModule = await import("./ledger-2NX4L7PN.js");
  const payloadHash = await ledgerModule.envelopePayloadHash(envelope);
  await ledger.append({
    op: "put",
    collection: PERIODS_COLLECTION,
    id: name,
    version: envelope._v,
    actor,
    payloadHash
  });
}
69
+
70
// src/periods/active.ts
/**
 * Bundle the period helpers into a single API object (same property names,
 * same insertion order as the individual exports).
 *
 * @returns {{loadPeriods: Function, chainAnchor: Function, assertTsWritable: Function, validatePeriodName: Function, appendPeriodLedgerEntry: Function}}
 */
function withPeriods() {
  const api = {
    loadPeriods,
    chainAnchor,
    assertTsWritable,
    validatePeriodName,
    appendPeriodLedgerEntry
  };
  return api;
}
80
+
81
+ export {
82
+ PERIODS_COLLECTION,
83
+ loadPeriods,
84
+ chainAnchor,
85
+ assertTsWritable,
86
+ validatePeriodName,
87
+ appendPeriodLedgerEntry,
88
+ withPeriods
89
+ };
90
+ //# sourceMappingURL=chunk-5AATM2M2.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/periods/periods.ts","../src/periods/active.ts"],"sourcesContent":["/**\n * Accounting-period closure + opening.\n *\n * A closed period seals every record whose envelope `_ts` is at or\n * before the period's `endDate`: further writes (`put` / `delete`)\n * against such records throw {@link PeriodClosedError}. The period\n * itself is stored as a record in the reserved `_periods` collection\n * and written through the normal ledger-instrumented path, so every\n * closure appends a tamper-evident entry to the vault's hash chain.\n *\n * ## Closure model\n *\n * ```\n * vault.closePeriod({ name: 'FY2026-Q1', endDate: '2026-03-31' })\n * └─► PeriodRecord written to _periods/<name>\n * ├─ priorPeriodName / priorPeriodHash — chain to last close\n * ├─ closedAt / closedBy — provenance\n * └─ normal ledger append fires (LedgerStore.append)\n * ```\n *\n * Enforcement (`assertTsWritable`) is vault-local: the Vault caches\n * the list of closed periods on first read and consults that cache in\n * the `Collection.put` / `.delete` path via the `periodGuard` hook.\n *\n * ## Opening model\n *\n * ```\n * vault.openPeriod({\n * name: 'FY2026-Q2',\n * startDate: '2026-04-01',\n * fromPeriod: 'FY2026-Q1',\n * carryForward: async (priorView) => Record<string, Record<string, unknown>>,\n * })\n * ```\n *\n * `carryForward` receives a read-only `VaultInstant` anchored at the\n * prior period's `endDate` (built via `vault.at(endDate)`) so the\n * callback can compute closing aggregates from the sealed state. The\n * returned `{ [collectionName]: { [id]: record } }` map is written\n * before the new `PeriodRecord` lands — opening balances materialise\n * as normal records with fresh timestamps that fall outside every\n * closed period.\n *\n * ## Not covered\n *\n * - Partial re-opening of a closed period. 
If an auditor needs to\n * make a correction inside a sealed period, the sanctioned path is\n * a compensating entry in the NEW period, not an unlock of the\n * old one.\n * - Automatic period rollover. `closePeriod` / `openPeriod` are\n * deliberately explicit operator calls so the caller decides when\n * the boundary lands.\n *\n * @module\n */\n\nimport type { NoydbStore, EncryptedEnvelope } from '../types.js'\nimport type { LedgerStore } from '../history/ledger/index.js'\nimport { sha256Hex, canonicalJson } from '../history/ledger/index.js'\nimport { PeriodClosedError, ValidationError } from '../errors.js'\n\n/** The reserved collection name holding closed-period metadata. */\nexport const PERIODS_COLLECTION = '_periods'\n\n/**\n * Stored record for one closed or opened accounting period. One entry\n * per period, keyed by `name` in the reserved `_periods` collection.\n *\n * The hash chain between periods is computed at read time by\n * `loadPeriods()` — each record carries the name + hash of its\n * predecessor so a tamper with any period's record breaks the chain\n * into the next one, the same way the ledger's `prevHash` works.\n */\nexport interface PeriodRecord {\n /** Human-readable name (e.g., `'FY2026-Q1'`). Unique per vault. */\n readonly name: string\n /**\n * Role discriminator. A period is `'closed'` from the moment its\n * `closedAt` is recorded; `'opened'` marks a period whose opening\n * entries have been carried forward via {@link openPeriod}. Many\n * workflows will produce one opened period per closed period (the\n * opened one is the SUCCESSOR — its `startDate` equals the prior\n * `endDate + 1 day`).\n */\n readonly kind: 'closed' | 'opened'\n /** ISO date — inclusive upper bound for records belonging to this period. */\n readonly endDate: string\n /** ISO date — lower bound (present on opened periods only). */\n readonly startDate?: string\n /**\n * Record field carrying the business date (e.g. 
`'date'` on an\n * invoice, `'paidAt'` on a payment). The guard compares\n * `record[dateField]` against `endDate` — NOT the envelope `_ts`.\n * Accounting entries booked late (business date `2026-01-15`,\n * write-time `2026-04-22`) still get sealed when Q1 closes at\n * `2026-03-31` because the comparison uses the business date.\n *\n * Optional for backwards compat. When absent, the guard falls back\n * to envelope `_ts` — that's a write-time seal, appropriate for\n * content that doesn't carry a logical business date (e.g. system\n * settings) but almost never right for accounting ledgers.\n */\n readonly dateField?: string\n /** ISO timestamp recorded at `closePeriod()` / `openPeriod()` call time. */\n readonly closedAt: string\n /** userId of the keyring that invoked the close/open. */\n readonly closedBy: string\n /** Name of the prior period this one chains to, if any. */\n readonly priorPeriodName?: string\n /** sha256(canonicalJson(priorPeriod)) — empty for the first period. */\n readonly priorPeriodHash: string\n /**\n * Opened periods only — the names of the collections whose\n * carry-forward aggregates were written by {@link openPeriod}.\n * Recorded for auditability so a future `verifyPeriodChain()` can\n * cross-check the opening balances against the closing snapshot.\n */\n readonly openingCollections?: readonly string[]\n}\n\n/** Options for `vault.closePeriod()`. */\nexport interface ClosePeriodOptions {\n /** Human-readable name. Must not collide with an existing period. */\n readonly name: string\n /**\n * Inclusive upper cutoff. A record is sealed when its\n * `record[dateField]` (or, if absent, the envelope `_ts`) is at or\n * before this ISO timestamp.\n */\n readonly endDate: string\n /**\n * Record field carrying the business date used for period\n * membership. Recommended for accounting workflows — e.g. 
an\n * invoice booked late (write-time after close) is still sealed\n * when its `invoice.date` falls inside the closed period.\n *\n * Omit to use envelope `_ts` (write-time seal). This fallback\n * rarely matches real-world accounting semantics; prefer passing\n * an explicit `dateField`.\n */\n readonly dateField?: string\n}\n\n/** Options for `vault.openPeriod()`. */\nexport interface OpenPeriodOptions<TCollections = Record<string, Record<string, unknown>>> {\n /** Human-readable name for the new period. Must be unique. */\n readonly name: string\n /** ISO lower bound of the new period (usually prior `endDate + 1 day`). */\n readonly startDate: string\n /**\n * Name of the prior CLOSED period this one chains from. The prior\n * period's record is verified to exist and to be `kind: 'closed'`;\n * its `endDate` is made available to the `carryForward` callback.\n */\n readonly fromPeriod: string\n /**\n * Receives a read-only facade over the vault's CURRENT state,\n * plus the prior period's `endDate`. Accounting semantics: after\n * a period closes, records with `record[dateField] <= endDate`\n * are frozen — current state equals closing state, so a caller\n * can compute closing balances by querying the live collection\n * with a `where('date', '<=', priorEndDate)` filter.\n *\n * Returns opening-balance records keyed by collection name.\n * Example:\n *\n * ```ts\n * carryForward: async (ctx) => {\n * const closing = await ctx.collection<Journal>('journal')\n * .query().where('date', '<=', ctx.priorEndDate).toArray()\n * const opening: Record<string, Journal> = {}\n * for (const entry of closing) {\n * opening[`OB-${entry.id}`] = { ...entry, date: '2026-04-01' }\n * }\n * return { journal: opening }\n * }\n * ```\n */\n readonly carryForward: (\n ctx: CarryForwardContext,\n ) => Promise<TCollections> | TCollections\n}\n\n/**\n * Context passed to `OpenPeriodOptions.carryForward`. 
Exposes a\n * read-only subset of the live vault (`collection(name).get/list`)\n * plus the prior period's `endDate` so business-date filters can\n * be built by the caller.\n *\n * Writes go via the return value, not via the facade — the\n * `collection()` here is deliberately restricted to reads.\n */\nexport interface CarryForwardContext {\n /** The prior period's `endDate` — the boundary of the closing snapshot. */\n readonly priorEndDate: string\n /** Read-only collection facade over current vault state. */\n collection<T = unknown>(name: string): ReadOnlyCollection<T>\n}\n\n/** Minimum read surface exposed to `carryForward`. */\nexport interface ReadOnlyCollection<T> {\n get(id: string): Promise<T | null>\n list(): Promise<T[]>\n}\n\n/**\n * Load every period record currently stored on the adapter.\n * Decrypting is the caller's responsibility (we return plain records\n * so the vault can use its own `_periods` DEK).\n *\n * @internal — called by Vault methods that need the closed-period\n * cache. 
Not part of the public API surface.\n */\nexport async function loadPeriods(\n adapter: NoydbStore,\n vault: string,\n decrypt: (envelope: EncryptedEnvelope) => Promise<PeriodRecord>,\n): Promise<PeriodRecord[]> {\n const ids = await adapter.list(vault, PERIODS_COLLECTION)\n const records: PeriodRecord[] = []\n for (const id of ids) {\n const env = await adapter.get(vault, PERIODS_COLLECTION, id)\n if (env) records.push(await decrypt(env))\n }\n // Stable order by closedAt so chain verification is reproducible.\n records.sort((a, b) => a.closedAt.localeCompare(b.closedAt))\n return records\n}\n\n/**\n * Given the current ordered period list, pick the last entry that\n * belongs to the hash chain — used as the `priorPeriodHash` anchor\n * for the next closure/opening.\n *\n * @internal\n */\nexport async function chainAnchor(\n records: readonly PeriodRecord[],\n): Promise<{ priorPeriodName?: string; priorPeriodHash: string }> {\n const last = records[records.length - 1]\n if (!last) return { priorPeriodHash: '' }\n const hash = await sha256Hex(canonicalJson(last as unknown as Record<string, unknown>))\n return { priorPeriodName: last.name, priorPeriodHash: hash }\n}\n\n/**\n * Throw `PeriodClosedError` if the record being touched falls within\n * any closed period.\n *\n * Three signals, evaluated per period:\n *\n * 1. If the period declares a `dateField`, the guard reads\n * `record[dateField]` on BOTH the existing (prior) record AND the\n * incoming (new) record. Either comparing `<= endDate` triggers\n * the error — callers cannot slide a record into a closed period\n * by editing its date field.\n * 2. If the period has no `dateField`, the guard falls back to the\n * envelope `_ts` of the existing record. Fresh inserts (no\n * existing envelope) pass.\n * 3. 
For a delete, only the existing side is checked.\n *\n * @internal\n */\nexport function assertTsWritable(\n existing: { ts: string | null; record: Record<string, unknown> | null } | null,\n incomingRecord: Record<string, unknown> | null,\n closedPeriods: readonly PeriodRecord[],\n): void {\n for (const p of closedPeriods) {\n if (p.kind !== 'closed') continue\n if (p.dateField) {\n const checkRecord = (label: string, r: Record<string, unknown> | null): void => {\n if (!r) return\n const v = r[p.dateField!]\n if (typeof v === 'string' && v <= p.endDate) {\n throw new PeriodClosedError(p.name, p.endDate, `${label}[${p.dateField}]=${v}`)\n }\n }\n checkRecord('existing', existing?.record ?? null)\n checkRecord('incoming', incomingRecord)\n continue\n }\n // Fallback: write-time seal via envelope _ts.\n const existingTs = existing?.ts ?? null\n if (existingTs !== null && existingTs <= p.endDate) {\n throw new PeriodClosedError(p.name, p.endDate, existingTs)\n }\n }\n}\n\n/**\n * Sanity-check a proposed period name + endDate against existing\n * records. Shared by closePeriod / openPeriod so the two pathways\n * produce identical diagnostics.\n *\n * @internal\n */\nexport function validatePeriodName(\n name: string,\n existing: readonly PeriodRecord[],\n): void {\n if (name.length === 0) {\n throw new ValidationError('Period name cannot be empty.')\n }\n if (existing.some((p) => p.name === name)) {\n throw new ValidationError(`Period \"${name}\" already exists.`)\n }\n}\n\n/**\n * Wire a reserved-collection ledger append for a period record. 
The\n * period itself is stored via the adapter as an encrypted envelope;\n * the ledger entry is a normal `put` with the period's payloadHash,\n * so period closures inherit the chain's tamper-evidence.\n *\n * @internal\n */\nexport async function appendPeriodLedgerEntry(\n ledger: LedgerStore | null,\n actor: string,\n envelope: EncryptedEnvelope,\n name: string,\n): Promise<void> {\n if (!ledger) return\n const { envelopePayloadHash } = await import('../history/ledger/index.js')\n await ledger.append({\n op: 'put',\n collection: PERIODS_COLLECTION,\n id: name,\n version: envelope._v,\n actor,\n payloadHash: await envelopePayloadHash(envelope),\n })\n}\n","/**\n * Active periods strategy factory. Only reachable through the\n * `@noy-db/hub/periods` subpath.\n */\n\nimport {\n loadPeriods,\n chainAnchor,\n assertTsWritable,\n validatePeriodName,\n appendPeriodLedgerEntry,\n} from './periods.js'\nimport type { PeriodsStrategy } from './strategy.js'\n\n/**\n * Build the default periods strategy. 
Pass into\n * `createNoydb({ periodsStrategy: withPeriods() })` to enable\n * `vault.closePeriod()` / `vault.openPeriod()` / write-guards.\n */\nexport function withPeriods(): PeriodsStrategy {\n return {\n loadPeriods,\n chainAnchor,\n assertTsWritable,\n validatePeriodName,\n appendPeriodLedgerEntry,\n }\n}\n"],"mappings":";;;;;;;;;;AA8DO,IAAM,qBAAqB;AAuJlC,eAAsB,YACpB,SACA,OACA,SACyB;AACzB,QAAM,MAAM,MAAM,QAAQ,KAAK,OAAO,kBAAkB;AACxD,QAAM,UAA0B,CAAC;AACjC,aAAW,MAAM,KAAK;AACpB,UAAM,MAAM,MAAM,QAAQ,IAAI,OAAO,oBAAoB,EAAE;AAC3D,QAAI,IAAK,SAAQ,KAAK,MAAM,QAAQ,GAAG,CAAC;AAAA,EAC1C;AAEA,UAAQ,KAAK,CAAC,GAAG,MAAM,EAAE,SAAS,cAAc,EAAE,QAAQ,CAAC;AAC3D,SAAO;AACT;AASA,eAAsB,YACpB,SACgE;AAChE,QAAM,OAAO,QAAQ,QAAQ,SAAS,CAAC;AACvC,MAAI,CAAC,KAAM,QAAO,EAAE,iBAAiB,GAAG;AACxC,QAAM,OAAO,MAAM,UAAU,cAAc,IAA0C,CAAC;AACtF,SAAO,EAAE,iBAAiB,KAAK,MAAM,iBAAiB,KAAK;AAC7D;AAoBO,SAAS,iBACd,UACA,gBACA,eACM;AACN,aAAW,KAAK,eAAe;AAC7B,QAAI,EAAE,SAAS,SAAU;AACzB,QAAI,EAAE,WAAW;AACf,YAAM,cAAc,CAAC,OAAe,MAA4C;AAC9E,YAAI,CAAC,EAAG;AACR,cAAM,IAAI,EAAE,EAAE,SAAU;AACxB,YAAI,OAAO,MAAM,YAAY,KAAK,EAAE,SAAS;AAC3C,gBAAM,IAAI,kBAAkB,EAAE,MAAM,EAAE,SAAS,GAAG,KAAK,IAAI,EAAE,SAAS,KAAK,CAAC,EAAE;AAAA,QAChF;AAAA,MACF;AACA,kBAAY,YAAY,UAAU,UAAU,IAAI;AAChD,kBAAY,YAAY,cAAc;AACtC;AAAA,IACF;AAEA,UAAM,aAAa,UAAU,MAAM;AACnC,QAAI,eAAe,QAAQ,cAAc,EAAE,SAAS;AAClD,YAAM,IAAI,kBAAkB,EAAE,MAAM,EAAE,SAAS,UAAU;AAAA,IAC3D;AAAA,EACF;AACF;AASO,SAAS,mBACd,MACA,UACM;AACN,MAAI,KAAK,WAAW,GAAG;AACrB,UAAM,IAAI,gBAAgB,8BAA8B;AAAA,EAC1D;AACA,MAAI,SAAS,KAAK,CAAC,MAAM,EAAE,SAAS,IAAI,GAAG;AACzC,UAAM,IAAI,gBAAgB,WAAW,IAAI,mBAAmB;AAAA,EAC9D;AACF;AAUA,eAAsB,wBACpB,QACA,OACA,UACA,MACe;AACf,MAAI,CAAC,OAAQ;AACb,QAAM,EAAE,oBAAoB,IAAI,MAAM,OAAO,sBAA4B;AACzE,QAAM,OAAO,OAAO;AAAA,IAClB,IAAI;AAAA,IACJ,YAAY;AAAA,IACZ,IAAI;AAAA,IACJ,SAAS,SAAS;AAAA,IAClB;AAAA,IACA,aAAa,MAAM,oBAAoB,QAAQ;AAAA,EACjD,CAAC;AACH;;;AC1TO,SAAS,cAA+B;AAC7C,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;","names":[]}