emdash 0.1.1 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (202)
  1. package/dist/{apply-kC39ev1Z.mjs → apply-Bqoekfbe.mjs} +57 -10
  2. package/dist/apply-Bqoekfbe.mjs.map +1 -0
  3. package/dist/astro/index.d.mts +23 -9
  4. package/dist/astro/index.d.mts.map +1 -1
  5. package/dist/astro/index.mjs +90 -25
  6. package/dist/astro/index.mjs.map +1 -1
  7. package/dist/astro/middleware/auth.d.mts +3 -3
  8. package/dist/astro/middleware/auth.d.mts.map +1 -1
  9. package/dist/astro/middleware/auth.mjs +126 -55
  10. package/dist/astro/middleware/auth.mjs.map +1 -1
  11. package/dist/astro/middleware/redirect.mjs +2 -2
  12. package/dist/astro/middleware/request-context.mjs +1 -1
  13. package/dist/astro/middleware.d.mts.map +1 -1
  14. package/dist/astro/middleware.mjs +80 -41
  15. package/dist/astro/middleware.mjs.map +1 -1
  16. package/dist/astro/types.d.mts +27 -6
  17. package/dist/astro/types.d.mts.map +1 -1
  18. package/dist/{byline-CL847F26.mjs → byline-BGj9p9Ht.mjs} +53 -31
  19. package/dist/byline-BGj9p9Ht.mjs.map +1 -0
  20. package/dist/{bylines-C2a-2TGt.mjs → bylines-BihaoIDY.mjs} +12 -10
  21. package/dist/{bylines-C2a-2TGt.mjs.map → bylines-BihaoIDY.mjs.map} +1 -1
  22. package/dist/cli/index.mjs +17 -14
  23. package/dist/cli/index.mjs.map +1 -1
  24. package/dist/{config-CKE8p9xM.mjs → config-Cq8H0SfX.mjs} +2 -10
  25. package/dist/{config-CKE8p9xM.mjs.map → config-Cq8H0SfX.mjs.map} +1 -1
  26. package/dist/{content-D6C2WsZC.mjs → content-BsBoyj8G.mjs} +35 -5
  27. package/dist/content-BsBoyj8G.mjs.map +1 -0
  28. package/dist/db/index.mjs +2 -2
  29. package/dist/{default-Cyi4aAxu.mjs → default-WYlzADZL.mjs} +1 -1
  30. package/dist/{default-Cyi4aAxu.mjs.map → default-WYlzADZL.mjs.map} +1 -1
  31. package/dist/{dialect-helpers-B9uSp2GJ.mjs → dialect-helpers-DhTzaUxP.mjs} +4 -1
  32. package/dist/dialect-helpers-DhTzaUxP.mjs.map +1 -0
  33. package/dist/{error-Cxz0tQeO.mjs → error-DrxtnGPg.mjs} +1 -1
  34. package/dist/{error-Cxz0tQeO.mjs.map → error-DrxtnGPg.mjs.map} +1 -1
  35. package/dist/{index-CLBc4gw-.d.mts → index-Cff7AimE.d.mts} +77 -15
  36. package/dist/index-Cff7AimE.d.mts.map +1 -0
  37. package/dist/index.d.mts +6 -6
  38. package/dist/index.mjs +19 -19
  39. package/dist/{load-yOOlckBj.mjs → load-Veizk2cT.mjs} +1 -1
  40. package/dist/{load-yOOlckBj.mjs.map → load-Veizk2cT.mjs.map} +1 -1
  41. package/dist/{loader-fz8Q_3EO.mjs → loader-BmYdf3Dr.mjs} +4 -2
  42. package/dist/loader-BmYdf3Dr.mjs.map +1 -0
  43. package/dist/{manifest-schema-CL8DWO9b.mjs → manifest-schema-CuMio1A9.mjs} +1 -1
  44. package/dist/{manifest-schema-CL8DWO9b.mjs.map → manifest-schema-CuMio1A9.mjs.map} +1 -1
  45. package/dist/media/local-runtime.d.mts +4 -4
  46. package/dist/page/index.d.mts +10 -1
  47. package/dist/page/index.d.mts.map +1 -1
  48. package/dist/page/index.mjs +8 -4
  49. package/dist/page/index.mjs.map +1 -1
  50. package/dist/plugins/adapt-sandbox-entry.d.mts +3 -3
  51. package/dist/plugins/adapt-sandbox-entry.mjs +1 -1
  52. package/dist/{query-BVYN0PJ6.mjs → query-sesiOndV.mjs} +20 -8
  53. package/dist/{query-BVYN0PJ6.mjs.map → query-sesiOndV.mjs.map} +1 -1
  54. package/dist/{redirect-DIfIni3r.mjs → redirect-DUAk-Yl_.mjs} +9 -2
  55. package/dist/redirect-DUAk-Yl_.mjs.map +1 -0
  56. package/dist/{registry-BNYQKX_d.mjs → registry-DU18yVo0.mjs} +14 -4
  57. package/dist/registry-DU18yVo0.mjs.map +1 -0
  58. package/dist/{runner-BraqvGYk.mjs → runner-Biufrii2.mjs} +157 -132
  59. package/dist/runner-Biufrii2.mjs.map +1 -0
  60. package/dist/runner-EAtf0ZIe.d.mts.map +1 -1
  61. package/dist/runtime.d.mts +3 -3
  62. package/dist/runtime.mjs +2 -2
  63. package/dist/{search-C1gg67nN.mjs → search-BXB-jfu2.mjs} +241 -109
  64. package/dist/search-BXB-jfu2.mjs.map +1 -0
  65. package/dist/seed/index.d.mts +1 -1
  66. package/dist/seed/index.mjs +10 -10
  67. package/dist/seo/index.d.mts +1 -1
  68. package/dist/storage/local.d.mts +1 -1
  69. package/dist/storage/local.mjs +1 -1
  70. package/dist/storage/s3.d.mts +11 -3
  71. package/dist/storage/s3.d.mts.map +1 -1
  72. package/dist/storage/s3.mjs +76 -15
  73. package/dist/storage/s3.mjs.map +1 -1
  74. package/dist/{tokens-DpgrkrXK.mjs → tokens-DrB-W6Q-.mjs} +1 -1
  75. package/dist/{tokens-DpgrkrXK.mjs.map → tokens-DrB-W6Q-.mjs.map} +1 -1
  76. package/dist/{types-BRuPJGdV.d.mts → types-BbsYgi_R.d.mts} +3 -1
  77. package/dist/types-BbsYgi_R.d.mts.map +1 -0
  78. package/dist/{types-CUBbjgmP.mjs → types-Bec-r_3_.mjs} +1 -1
  79. package/dist/types-Bec-r_3_.mjs.map +1 -0
  80. package/dist/{types-DaNLHo_T.d.mts → types-C1-PVaS_.d.mts} +14 -6
  81. package/dist/types-C1-PVaS_.d.mts.map +1 -0
  82. package/dist/types-CMMN0pNg.mjs.map +1 -1
  83. package/dist/{types-BQo5JS0J.d.mts → types-CaKte3hR.d.mts} +78 -6
  84. package/dist/types-CaKte3hR.d.mts.map +1 -0
  85. package/dist/{types-CiA5Gac0.mjs → types-DuNbGKjF.mjs} +1 -1
  86. package/dist/{types-CiA5Gac0.mjs.map → types-DuNbGKjF.mjs.map} +1 -1
  87. package/dist/{validate-_rsF-Dx_.mjs → validate-CXnRKfJK.mjs} +2 -2
  88. package/dist/{validate-_rsF-Dx_.mjs.map → validate-CXnRKfJK.mjs.map} +1 -1
  89. package/dist/{validate-CqRJb_xU.mjs → validate-VPnKoIzW.mjs} +11 -11
  90. package/dist/{validate-CqRJb_xU.mjs.map → validate-VPnKoIzW.mjs.map} +1 -1
  91. package/dist/{validate-HtxZeaBi.d.mts → validate-bfg9OR6N.d.mts} +2 -2
  92. package/dist/{validate-HtxZeaBi.d.mts.map → validate-bfg9OR6N.d.mts.map} +1 -1
  93. package/dist/version-REAapfsU.mjs +7 -0
  94. package/dist/version-REAapfsU.mjs.map +1 -0
  95. package/package.json +6 -6
  96. package/src/api/csrf.ts +13 -2
  97. package/src/api/handlers/content.ts +7 -0
  98. package/src/api/handlers/dashboard.ts +4 -8
  99. package/src/api/handlers/device-flow.ts +55 -37
  100. package/src/api/handlers/index.ts +6 -1
  101. package/src/api/handlers/redirects.ts +95 -3
  102. package/src/api/handlers/seo.ts +48 -21
  103. package/src/api/public-url.ts +84 -0
  104. package/src/api/schemas/content.ts +2 -2
  105. package/src/api/schemas/menus.ts +12 -2
  106. package/src/api/schemas/redirects.ts +1 -0
  107. package/src/astro/integration/index.ts +30 -7
  108. package/src/astro/integration/routes.ts +13 -2
  109. package/src/astro/integration/runtime.ts +7 -5
  110. package/src/astro/integration/vite-config.ts +55 -9
  111. package/src/astro/middleware/auth.ts +60 -56
  112. package/src/astro/middleware/csp.ts +25 -0
  113. package/src/astro/middleware.ts +31 -3
  114. package/src/astro/routes/PluginRegistry.tsx +8 -2
  115. package/src/astro/routes/admin.astro +7 -2
  116. package/src/astro/routes/api/admin/users/[id]/disable.ts +18 -12
  117. package/src/astro/routes/api/admin/users/[id]/index.ts +26 -5
  118. package/src/astro/routes/api/auth/invite/complete.ts +3 -2
  119. package/src/astro/routes/api/auth/oauth/[provider]/callback.ts +2 -1
  120. package/src/astro/routes/api/auth/oauth/[provider].ts +2 -1
  121. package/src/astro/routes/api/auth/passkey/options.ts +3 -2
  122. package/src/astro/routes/api/auth/passkey/register/options.ts +3 -2
  123. package/src/astro/routes/api/auth/passkey/register/verify.ts +3 -2
  124. package/src/astro/routes/api/auth/passkey/verify.ts +3 -2
  125. package/src/astro/routes/api/auth/signup/complete.ts +3 -2
  126. package/src/astro/routes/api/comments/[collection]/[contentId]/index.ts +2 -0
  127. package/src/astro/routes/api/content/[collection]/index.ts +31 -3
  128. package/src/astro/routes/api/import/wordpress/execute.ts +9 -0
  129. package/src/astro/routes/api/import/wordpress/rewrite-urls.ts +2 -0
  130. package/src/astro/routes/api/import/wordpress-plugin/execute.ts +10 -0
  131. package/src/astro/routes/api/manifest.ts +4 -1
  132. package/src/astro/routes/api/media/providers/[providerId]/[itemId].ts +7 -2
  133. package/src/astro/routes/api/oauth/authorize.ts +12 -7
  134. package/src/astro/routes/api/oauth/device/code.ts +5 -1
  135. package/src/astro/routes/api/setup/admin-verify.ts +3 -2
  136. package/src/astro/routes/api/setup/admin.ts +3 -2
  137. package/src/astro/routes/api/setup/dev-bypass.ts +2 -1
  138. package/src/astro/routes/api/setup/index.ts +3 -2
  139. package/src/astro/routes/api/snapshot.ts +2 -1
  140. package/src/astro/routes/api/themes/preview.ts +2 -1
  141. package/src/astro/routes/api/well-known/auth.ts +1 -0
  142. package/src/astro/routes/api/well-known/oauth-authorization-server.ts +3 -2
  143. package/src/astro/routes/api/well-known/oauth-protected-resource.ts +3 -2
  144. package/src/astro/routes/robots.txt.ts +5 -1
  145. package/src/astro/routes/sitemap-[collection].xml.ts +104 -0
  146. package/src/astro/routes/sitemap.xml.ts +18 -23
  147. package/src/astro/storage/adapters.ts +19 -5
  148. package/src/astro/storage/types.ts +12 -4
  149. package/src/astro/types.ts +28 -1
  150. package/src/auth/passkey-config.ts +6 -10
  151. package/src/bylines/index.ts +13 -10
  152. package/src/cli/commands/login.ts +5 -2
  153. package/src/components/InlinePortableTextEditor.tsx +5 -3
  154. package/src/content/converters/portable-text-to-prosemirror.ts +50 -2
  155. package/src/database/dialect-helpers.ts +3 -0
  156. package/src/database/migrations/034_published_at_index.ts +29 -0
  157. package/src/database/migrations/runner.ts +2 -0
  158. package/src/database/repositories/byline.ts +48 -42
  159. package/src/database/repositories/content.ts +28 -1
  160. package/src/database/repositories/options.ts +9 -3
  161. package/src/database/repositories/redirect.ts +13 -0
  162. package/src/database/repositories/seo.ts +34 -17
  163. package/src/database/repositories/types.ts +2 -0
  164. package/src/database/validate.ts +10 -10
  165. package/src/emdash-runtime.ts +66 -19
  166. package/src/import/index.ts +1 -1
  167. package/src/import/sources/wxr.ts +45 -2
  168. package/src/index.ts +10 -1
  169. package/src/loader.ts +2 -0
  170. package/src/mcp/server.ts +85 -5
  171. package/src/menus/index.ts +6 -1
  172. package/src/page/context.ts +13 -1
  173. package/src/page/jsonld.ts +10 -6
  174. package/src/page/seo-contributions.ts +1 -1
  175. package/src/plugins/context.ts +145 -35
  176. package/src/plugins/manager.ts +12 -0
  177. package/src/plugins/types.ts +80 -4
  178. package/src/query.ts +18 -0
  179. package/src/redirects/loops.ts +318 -0
  180. package/src/schema/registry.ts +8 -0
  181. package/src/search/fts-manager.ts +4 -0
  182. package/src/settings/index.ts +64 -0
  183. package/src/storage/s3.ts +94 -25
  184. package/src/storage/types.ts +13 -5
  185. package/src/utils/chunks.ts +17 -0
  186. package/src/utils/slugify.ts +11 -0
  187. package/src/version.ts +12 -0
  188. package/dist/apply-kC39ev1Z.mjs.map +0 -1
  189. package/dist/byline-CL847F26.mjs.map +0 -1
  190. package/dist/content-D6C2WsZC.mjs.map +0 -1
  191. package/dist/dialect-helpers-B9uSp2GJ.mjs.map +0 -1
  192. package/dist/index-CLBc4gw-.d.mts.map +0 -1
  193. package/dist/loader-fz8Q_3EO.mjs.map +0 -1
  194. package/dist/redirect-DIfIni3r.mjs.map +0 -1
  195. package/dist/registry-BNYQKX_d.mjs.map +0 -1
  196. package/dist/runner-BraqvGYk.mjs.map +0 -1
  197. package/dist/search-C1gg67nN.mjs.map +0 -1
  198. package/dist/types-BQo5JS0J.d.mts.map +0 -1
  199. package/dist/types-BRuPJGdV.d.mts.map +0 -1
  200. package/dist/types-CUBbjgmP.mjs.map +0 -1
  201. package/dist/types-DaNLHo_T.d.mts.map +0 -1
  202. /package/src/astro/routes/api/media/file/{[key].ts → [...key].ts} +0 -0
@@ -1,3 +1,3 @@
1
1
  import "../types-DRjfYOEv.mjs";
2
- import { _ as SeedTaxonomyTerm, a as applySeed, b as ValidationResult, c as SeedCollection, d as SeedFile, f as SeedMenu, g as SeedTaxonomy, h as SeedSection, i as defaultSeed, l as SeedContentEntry, m as SeedRedirect, n as loadSeed, o as SeedApplyOptions, p as SeedMenuItem, r as loadUserSeed, s as SeedApplyResult, t as validateSeed, u as SeedField, v as SeedWidget, y as SeedWidgetArea } from "../validate-HtxZeaBi.mjs";
2
+ import { _ as SeedTaxonomyTerm, a as applySeed, b as ValidationResult, c as SeedCollection, d as SeedFile, f as SeedMenu, g as SeedTaxonomy, h as SeedSection, i as defaultSeed, l as SeedContentEntry, m as SeedRedirect, n as loadSeed, o as SeedApplyOptions, p as SeedMenuItem, r as loadUserSeed, s as SeedApplyResult, t as validateSeed, u as SeedField, v as SeedWidget, y as SeedWidgetArea } from "../validate-bfg9OR6N.mjs";
3
3
  export { type SeedApplyOptions, type SeedApplyResult, type SeedCollection, type SeedContentEntry, type SeedField, type SeedFile, type SeedMenu, type SeedMenuItem, type SeedRedirect, type SeedSection, type SeedTaxonomy, type SeedTaxonomyTerm, type SeedWidget, type SeedWidgetArea, type ValidationResult, applySeed, defaultSeed, loadSeed, loadUserSeed, validateSeed };
@@ -1,15 +1,15 @@
1
- import "../dialect-helpers-B9uSp2GJ.mjs";
2
- import "../content-D6C2WsZC.mjs";
1
+ import "../dialect-helpers-DhTzaUxP.mjs";
2
+ import "../content-BsBoyj8G.mjs";
3
3
  import "../base64-MBPo9ozB.mjs";
4
4
  import "../types-CMMN0pNg.mjs";
5
5
  import "../media-DqHVh136.mjs";
6
- import { t as applySeed } from "../apply-kC39ev1Z.mjs";
7
- import "../registry-BNYQKX_d.mjs";
8
- import "../redirect-DIfIni3r.mjs";
9
- import "../byline-CL847F26.mjs";
10
- import "../loader-fz8Q_3EO.mjs";
11
- import { t as validateSeed } from "../validate-_rsF-Dx_.mjs";
12
- import { t as defaultSeed } from "../default-Cyi4aAxu.mjs";
13
- import { n as loadUserSeed, t as loadSeed } from "../load-yOOlckBj.mjs";
6
+ import { t as applySeed } from "../apply-Bqoekfbe.mjs";
7
+ import "../registry-DU18yVo0.mjs";
8
+ import "../redirect-DUAk-Yl_.mjs";
9
+ import "../byline-BGj9p9Ht.mjs";
10
+ import "../loader-BmYdf3Dr.mjs";
11
+ import { t as validateSeed } from "../validate-CXnRKfJK.mjs";
12
+ import { t as defaultSeed } from "../default-WYlzADZL.mjs";
13
+ import { n as loadUserSeed, t as loadSeed } from "../load-Veizk2cT.mjs";
14
14
 
15
15
  export { applySeed, defaultSeed, loadSeed, loadUserSeed, validateSeed };
@@ -1,4 +1,4 @@
1
- import { i as ContentSeo } from "../types-BRuPJGdV.mjs";
1
+ import { i as ContentSeo } from "../types-BbsYgi_R.mjs";
2
2
 
3
3
  //#region src/seo/index.d.ts
4
4
  /**
@@ -1,4 +1,4 @@
1
- import { a as ListOptions, d as Storage, l as SignedUploadOptions, n as DownloadResult, o as ListResult, p as UploadResult, s as LocalStorageConfig, u as SignedUploadUrl } from "../types-DaNLHo_T.mjs";
1
+ import { a as ListOptions, d as Storage, l as SignedUploadOptions, n as DownloadResult, o as ListResult, p as UploadResult, s as LocalStorageConfig, u as SignedUploadUrl } from "../types-C1-PVaS_.mjs";
2
2
 
3
3
  //#region src/storage/local.d.ts
4
4
  /**
@@ -1,4 +1,4 @@
1
- import { t as EmDashStorageError } from "../types-CUBbjgmP.mjs";
1
+ import { t as EmDashStorageError } from "../types-Bec-r_3_.mjs";
2
2
  import mime from "mime/lite";
3
3
  import * as path from "node:path";
4
4
  import { createReadStream, existsSync } from "node:fs";
@@ -1,6 +1,13 @@
1
- import { a as ListOptions, c as S3StorageConfig, d as Storage, l as SignedUploadOptions, n as DownloadResult, o as ListResult, p as UploadResult, u as SignedUploadUrl } from "../types-DaNLHo_T.mjs";
1
+ import { a as ListOptions, c as S3StorageConfig, d as Storage, l as SignedUploadOptions, n as DownloadResult, o as ListResult, p as UploadResult, u as SignedUploadUrl } from "../types-C1-PVaS_.mjs";
2
2
 
3
3
  //#region src/storage/s3.d.ts
4
+ /**
5
+ * Build the merged config: for each field, use the explicit value if present,
6
+ * otherwise fall back to the corresponding S3_* env var. Validate once on the
7
+ * final merged result so a malformed env var never breaks the build when the
8
+ * caller provides that field explicitly.
9
+ */
10
+ declare function resolveS3Config(partial: Record<string, unknown>): S3StorageConfig;
4
11
  /**
5
12
  * S3-compatible storage implementation
6
13
  */
@@ -24,9 +31,10 @@ declare class S3Storage implements Storage {
24
31
  }
25
32
  /**
26
33
  * Create S3 storage adapter
27
- * This is the factory function called at runtime
34
+ * This is the factory function called at runtime.
35
+ * Config fields are merged with S3_* env vars; env vars fill in any missing fields.
28
36
  */
29
37
  declare function createStorage(config: Record<string, unknown>): Storage;
30
38
  //#endregion
31
- export { S3Storage, createStorage };
39
+ export { S3Storage, createStorage, resolveS3Config };
32
40
  //# sourceMappingURL=s3.d.mts.map
@@ -1 +1 @@
1
- {"version":3,"file":"s3.d.mts","names":[],"sources":["../../src/storage/s3.ts"],"mappings":";;;;;;cAwCa,SAAA,YAAqB,OAAA;EAAA,QACzB,MAAA;EAAA,QACA,MAAA;EAAA,QACA,SAAA;EAAA,QACA,QAAA;cAEI,MAAA,EAAQ,eAAA;EAiBd,MAAA,CAAO,OAAA;IACZ,GAAA;IACA,IAAA,EAAM,MAAA,GAAS,UAAA,GAAa,cAAA,CAAe,UAAA;IAC3C,WAAA;EAAA,IACG,OAAA,CAAQ,YAAA;EAoCN,QAAA,CAAS,GAAA,WAAc,OAAA,CAAQ,cAAA;EAgC/B,MAAA,CAAO,GAAA,WAAc,OAAA;EAgBrB,MAAA,CAAO,GAAA,WAAc,OAAA;EAiBrB,IAAA,CAAK,OAAA,GAAS,WAAA,GAAmB,OAAA,CAAQ,UAAA;EA4BzC,kBAAA,CAAmB,OAAA,EAAS,mBAAA,GAAsB,OAAA,CAAQ,eAAA;EAiChE,YAAA,CAAa,GAAA;AAAA;;;;;iBAaE,aAAA,CAAc,MAAA,EAAQ,MAAA,oBAA0B,OAAA"}
1
+ {"version":3,"file":"s3.d.mts","names":[],"sources":["../../src/storage/s3.ts"],"mappings":";;;;;;;;AA0HA;iBA3DgB,eAAA,CAAgB,OAAA,EAAS,MAAA,oBAA0B,eAAA;;;;cA2DtD,SAAA,YAAqB,OAAA;EAAA,QACzB,MAAA;EAAA,QACA,MAAA;EAAA,QACA,SAAA;EAAA,QACA,QAAA;cAEI,MAAA,EAAQ,eAAA;EAqBd,MAAA,CAAO,OAAA;IACZ,GAAA;IACA,IAAA,EAAM,MAAA,GAAS,UAAA,GAAa,cAAA,CAAe,UAAA;IAC3C,WAAA;EAAA,IACG,OAAA,CAAQ,YAAA;EAoCN,QAAA,CAAS,GAAA,WAAc,OAAA,CAAQ,cAAA;EAgC/B,MAAA,CAAO,GAAA,WAAc,OAAA;EAgBrB,MAAA,CAAO,GAAA,WAAc,OAAA;EAiBrB,IAAA,CAAK,OAAA,GAAS,WAAA,GAAmB,OAAA,CAAQ,UAAA;EA4BzC,kBAAA,CAAmB,OAAA,EAAS,mBAAA,GAAsB,OAAA,CAAQ,eAAA;EAiChE,YAAA,CAAa,GAAA;AAAA;;;;;;iBAcE,aAAA,CAAc,MAAA,EAAQ,MAAA,oBAA0B,OAAA"}
@@ -1,4 +1,5 @@
1
- import { t as EmDashStorageError } from "../types-CUBbjgmP.mjs";
1
+ import { t as EmDashStorageError } from "../types-Bec-r_3_.mjs";
2
+ import { z } from "zod";
2
3
  import { DeleteObjectCommand, GetObjectCommand, HeadObjectCommand, ListObjectsV2Command, PutObjectCommand, S3Client } from "@aws-sdk/client-s3";
3
4
  import { getSignedUrl } from "@aws-sdk/s3-request-presigner";
4
5
 
@@ -9,6 +10,74 @@ import { getSignedUrl } from "@aws-sdk/s3-request-presigner";
9
10
  * Uses the AWS SDK v3 for S3 operations.
10
11
  * Works with AWS S3, Cloudflare R2, Minio, and other S3-compatible services.
11
12
  */
13
+ const ENV_KEYS = {
14
+ endpoint: "S3_ENDPOINT",
15
+ bucket: "S3_BUCKET",
16
+ accessKeyId: "S3_ACCESS_KEY_ID",
17
+ secretAccessKey: "S3_SECRET_ACCESS_KEY",
18
+ region: "S3_REGION",
19
+ publicUrl: "S3_PUBLIC_URL"
20
+ };
21
+ function fail(msg) {
22
+ throw new EmDashStorageError(msg, "MISSING_S3_CONFIG");
23
+ }
24
+ const s3ConfigSchema = z.object({
25
+ endpoint: z.url({
26
+ protocol: /^https?$/,
27
+ error: "is not a valid http/https URL"
28
+ }).optional(),
29
+ bucket: z.string().optional(),
30
+ accessKeyId: z.string().optional(),
31
+ secretAccessKey: z.string().optional(),
32
+ region: z.string().optional(),
33
+ publicUrl: z.string().optional()
34
+ });
35
+ function isConfigKey(key) {
36
+ return typeof key === "string" && key in ENV_KEYS;
37
+ }
38
+ /**
39
+ * Build the merged config: for each field, use the explicit value if present,
40
+ * otherwise fall back to the corresponding S3_* env var. Validate once on the
41
+ * final merged result so a malformed env var never breaks the build when the
42
+ * caller provides that field explicitly.
43
+ */
44
+ function resolveS3Config(partial) {
45
+ const raw = {};
46
+ for (const [field, envKey] of Object.entries(ENV_KEYS)) {
47
+ const explicit = partial[field];
48
+ if (explicit !== void 0 && explicit !== "") {
49
+ raw[field] = explicit;
50
+ continue;
51
+ }
52
+ const envVal = typeof process !== "undefined" && process.env ? process.env[envKey] : void 0;
53
+ if (envVal !== void 0 && envVal !== "") raw[field] = envVal;
54
+ }
55
+ const result = s3ConfigSchema.safeParse(raw);
56
+ if (!result.success) {
57
+ const issue = result.error.issues[0];
58
+ const pathKey = issue?.path[0];
59
+ if (!issue || !isConfigKey(pathKey)) fail("S3 config validation failed");
60
+ fail(`${partial[pathKey] !== void 0 && partial[pathKey] !== "" ? `s3({ ${pathKey} })` : ENV_KEYS[pathKey]} ${issue.message}`);
61
+ }
62
+ const merged = result.data;
63
+ const endpoint = merged.endpoint;
64
+ const bucket = merged.bucket;
65
+ if (!endpoint || !bucket) {
66
+ const missing = [];
67
+ if (!endpoint) missing.push(`endpoint: set ${ENV_KEYS.endpoint} or pass endpoint to s3({...})`);
68
+ if (!bucket) missing.push(`bucket: set ${ENV_KEYS.bucket} or pass bucket to s3({...})`);
69
+ fail(`missing required S3 config: ${missing.join("; ")}`);
70
+ }
71
+ const accessKeyId = merged.accessKeyId;
72
+ const secretAccessKey = merged.secretAccessKey;
73
+ if (accessKeyId && !secretAccessKey) fail(`S3 credentials incomplete: accessKeyId is set but secretAccessKey is missing (set ${ENV_KEYS.secretAccessKey} or pass secretAccessKey to s3({...}))`);
74
+ if (secretAccessKey && !accessKeyId) fail(`S3 credentials incomplete: secretAccessKey is set but accessKeyId is missing (set ${ENV_KEYS.accessKeyId} or pass accessKeyId to s3({...}))`);
75
+ return {
76
+ ...merged,
77
+ endpoint,
78
+ bucket
79
+ };
80
+ }
12
81
  const TRAILING_SLASH_PATTERN = /\/$/;
13
82
  /** Type guard for AWS SDK errors (have a `name` property) */
14
83
  function hasErrorName(error) {
@@ -29,10 +98,10 @@ var S3Storage = class {
29
98
  this.client = new S3Client({
30
99
  endpoint: config.endpoint,
31
100
  region: config.region || "auto",
32
- credentials: {
101
+ ...config.accessKeyId && config.secretAccessKey ? { credentials: {
33
102
  accessKeyId: config.accessKeyId,
34
103
  secretAccessKey: config.secretAccessKey
35
- },
104
+ } } : {},
36
105
  forcePathStyle: true
37
106
  });
38
107
  }
@@ -155,21 +224,13 @@ var S3Storage = class {
155
224
  };
156
225
  /**
157
226
  * Create S3 storage adapter
158
- * This is the factory function called at runtime
227
+ * This is the factory function called at runtime.
228
+ * Config fields are merged with S3_* env vars; env vars fill in any missing fields.
159
229
  */
160
230
  function createStorage(config) {
161
- const { endpoint, bucket, accessKeyId, secretAccessKey, region, publicUrl } = config;
162
- if (typeof endpoint !== "string" || typeof bucket !== "string" || typeof accessKeyId !== "string" || typeof secretAccessKey !== "string") throw new Error("S3Storage requires 'endpoint', 'bucket', 'accessKeyId', and 'secretAccessKey' string config values");
163
- return new S3Storage({
164
- endpoint,
165
- bucket,
166
- accessKeyId,
167
- secretAccessKey,
168
- region: typeof region === "string" ? region : void 0,
169
- publicUrl: typeof publicUrl === "string" ? publicUrl : void 0
170
- });
231
+ return new S3Storage(resolveS3Config(config));
171
232
  }
172
233
 
173
234
  //#endregion
174
- export { S3Storage, createStorage };
235
+ export { S3Storage, createStorage, resolveS3Config };
175
236
  //# sourceMappingURL=s3.mjs.map
@@ -1 +1 @@
1
- {"version":3,"file":"s3.mjs","names":[],"sources":["../../src/storage/s3.ts"],"sourcesContent":["/**\n * S3-Compatible Storage Implementation\n *\n * Uses the AWS SDK v3 for S3 operations.\n * Works with AWS S3, Cloudflare R2, Minio, and other S3-compatible services.\n */\n\nimport {\n\tS3Client,\n\tPutObjectCommand,\n\tGetObjectCommand,\n\tDeleteObjectCommand,\n\tHeadObjectCommand,\n\tListObjectsV2Command,\n\ttype ListObjectsV2Response,\n} from \"@aws-sdk/client-s3\";\nimport { getSignedUrl } from \"@aws-sdk/s3-request-presigner\";\n\nimport type {\n\tStorage,\n\tS3StorageConfig,\n\tUploadResult,\n\tDownloadResult,\n\tListResult,\n\tListOptions,\n\tSignedUploadUrl,\n\tSignedUploadOptions,\n} from \"./types.js\";\nimport { EmDashStorageError } from \"./types.js\";\n\nconst TRAILING_SLASH_PATTERN = /\\/$/;\n\n/** Type guard for AWS SDK errors (have a `name` property) */\nfunction hasErrorName(error: unknown): error is Error & { name: string } {\n\treturn error instanceof Error && typeof error.name === \"string\";\n}\n\n/**\n * S3-compatible storage implementation\n */\nexport class S3Storage implements Storage {\n\tprivate client: S3Client;\n\tprivate bucket: string;\n\tprivate publicUrl?: string;\n\tprivate endpoint: string;\n\n\tconstructor(config: S3StorageConfig) {\n\t\tthis.bucket = config.bucket;\n\t\tthis.publicUrl = config.publicUrl;\n\t\tthis.endpoint = config.endpoint;\n\n\t\tthis.client = new S3Client({\n\t\t\tendpoint: config.endpoint,\n\t\t\tregion: config.region || \"auto\",\n\t\t\tcredentials: {\n\t\t\t\taccessKeyId: config.accessKeyId,\n\t\t\t\tsecretAccessKey: config.secretAccessKey,\n\t\t\t},\n\t\t\t// Required for R2 and some S3-compatible services\n\t\t\tforcePathStyle: true,\n\t\t});\n\t}\n\n\tasync upload(options: {\n\t\tkey: string;\n\t\tbody: Buffer | Uint8Array | ReadableStream<Uint8Array>;\n\t\tcontentType: string;\n\t}): Promise<UploadResult> {\n\t\ttry {\n\t\t\t// Convert ReadableStream to Buffer if needed\n\t\t\tlet body: Buffer | 
Uint8Array;\n\t\t\tif (options.body instanceof ReadableStream) {\n\t\t\t\tconst chunks: Uint8Array[] = [];\n\t\t\t\tconst reader = options.body.getReader();\n\t\t\t\twhile (true) {\n\t\t\t\t\tconst { done, value } = await reader.read();\n\t\t\t\t\tif (done) break;\n\t\t\t\t\tchunks.push(value);\n\t\t\t\t}\n\t\t\t\tbody = Buffer.concat(chunks);\n\t\t\t} else {\n\t\t\t\tbody = options.body;\n\t\t\t}\n\n\t\t\tawait this.client.send(\n\t\t\t\tnew PutObjectCommand({\n\t\t\t\t\tBucket: this.bucket,\n\t\t\t\t\tKey: options.key,\n\t\t\t\t\tBody: body,\n\t\t\t\t\tContentType: options.contentType,\n\t\t\t\t}),\n\t\t\t);\n\n\t\t\treturn {\n\t\t\t\tkey: options.key,\n\t\t\t\turl: this.getPublicUrl(options.key),\n\t\t\t\tsize: body.length,\n\t\t\t};\n\t\t} catch (error) {\n\t\t\tthrow new EmDashStorageError(`Failed to upload file: ${options.key}`, \"UPLOAD_FAILED\", error);\n\t\t}\n\t}\n\n\tasync download(key: string): Promise<DownloadResult> {\n\t\ttry {\n\t\t\tconst response = await this.client.send(\n\t\t\t\tnew GetObjectCommand({\n\t\t\t\t\tBucket: this.bucket,\n\t\t\t\t\tKey: key,\n\t\t\t\t}),\n\t\t\t);\n\n\t\t\tif (!response.Body) {\n\t\t\t\tthrow new EmDashStorageError(`File not found: ${key}`, \"NOT_FOUND\");\n\t\t\t}\n\n\t\t\t// Convert SDK stream to web ReadableStream\n\t\t\tconst body = response.Body.transformToWebStream();\n\n\t\t\treturn {\n\t\t\t\tbody,\n\t\t\t\tcontentType: response.ContentType || \"application/octet-stream\",\n\t\t\t\tsize: response.ContentLength || 0,\n\t\t\t};\n\t\t} catch (error) {\n\t\t\tif (\n\t\t\t\terror instanceof EmDashStorageError ||\n\t\t\t\t(hasErrorName(error) && error.name === \"NoSuchKey\")\n\t\t\t) {\n\t\t\t\tthrow new EmDashStorageError(`File not found: ${key}`, \"NOT_FOUND\", error);\n\t\t\t}\n\t\t\tthrow new EmDashStorageError(`Failed to download file: ${key}`, \"DOWNLOAD_FAILED\", error);\n\t\t}\n\t}\n\n\tasync delete(key: string): Promise<void> {\n\t\ttry {\n\t\t\tawait this.client.send(\n\t\t\t\tnew 
DeleteObjectCommand({\n\t\t\t\t\tBucket: this.bucket,\n\t\t\t\t\tKey: key,\n\t\t\t\t}),\n\t\t\t);\n\t\t} catch (error) {\n\t\t\t// S3 delete is idempotent, so we ignore \"not found\" errors\n\t\t\tif (!hasErrorName(error) || error.name !== \"NoSuchKey\") {\n\t\t\t\tthrow new EmDashStorageError(`Failed to delete file: ${key}`, \"DELETE_FAILED\", error);\n\t\t\t}\n\t\t}\n\t}\n\n\tasync exists(key: string): Promise<boolean> {\n\t\ttry {\n\t\t\tawait this.client.send(\n\t\t\t\tnew HeadObjectCommand({\n\t\t\t\t\tBucket: this.bucket,\n\t\t\t\t\tKey: key,\n\t\t\t\t}),\n\t\t\t);\n\t\t\treturn true;\n\t\t} catch (error) {\n\t\t\tif (hasErrorName(error) && error.name === \"NotFound\") {\n\t\t\t\treturn false;\n\t\t\t}\n\t\t\tthrow new EmDashStorageError(`Failed to check file existence: ${key}`, \"HEAD_FAILED\", error);\n\t\t}\n\t}\n\n\tasync list(options: ListOptions = {}): Promise<ListResult> {\n\t\ttry {\n\t\t\t// eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- S3 client.send returns generic output; narrowing to ListObjectsV2Response\n\t\t\tconst response = (await this.client.send(\n\t\t\t\tnew ListObjectsV2Command({\n\t\t\t\t\tBucket: this.bucket,\n\t\t\t\t\tPrefix: options.prefix,\n\t\t\t\t\tMaxKeys: options.limit,\n\t\t\t\t\tContinuationToken: options.cursor,\n\t\t\t\t}),\n\t\t\t)) as ListObjectsV2Response;\n\n\t\t\treturn {\n\t\t\t\tfiles: (response.Contents || []).map(\n\t\t\t\t\t(item: { Key?: string; Size?: number; LastModified?: Date; ETag?: string }) => ({\n\t\t\t\t\t\tkey: item.Key!,\n\t\t\t\t\t\tsize: item.Size || 0,\n\t\t\t\t\t\tlastModified: item.LastModified || new Date(),\n\t\t\t\t\t\tetag: item.ETag,\n\t\t\t\t\t}),\n\t\t\t\t),\n\t\t\t\tnextCursor: response.NextContinuationToken,\n\t\t\t};\n\t\t} catch (error) {\n\t\t\tthrow new EmDashStorageError(\"Failed to list files\", \"LIST_FAILED\", error);\n\t\t}\n\t}\n\n\tasync getSignedUploadUrl(options: SignedUploadOptions): Promise<SignedUploadUrl> {\n\t\ttry {\n\t\t\tconst expiresIn = 
options.expiresIn || 3600; // 1 hour default\n\n\t\t\tconst command = new PutObjectCommand({\n\t\t\t\tBucket: this.bucket,\n\t\t\t\tKey: options.key,\n\t\t\t\tContentType: options.contentType,\n\t\t\t\tContentLength: options.size,\n\t\t\t});\n\n\t\t\tconst url = await getSignedUrl(this.client, command, { expiresIn });\n\n\t\t\tconst expiresAt = new Date(Date.now() + expiresIn * 1000).toISOString();\n\n\t\t\treturn {\n\t\t\t\turl,\n\t\t\t\tmethod: \"PUT\",\n\t\t\t\theaders: {\n\t\t\t\t\t\"Content-Type\": options.contentType,\n\t\t\t\t\t...(options.size ? { \"Content-Length\": String(options.size) } : {}),\n\t\t\t\t},\n\t\t\t\texpiresAt,\n\t\t\t};\n\t\t} catch (error) {\n\t\t\tthrow new EmDashStorageError(\n\t\t\t\t`Failed to generate signed URL for: ${options.key}`,\n\t\t\t\t\"SIGNED_URL_FAILED\",\n\t\t\t\terror,\n\t\t\t);\n\t\t}\n\t}\n\n\tgetPublicUrl(key: string): string {\n\t\tif (this.publicUrl) {\n\t\t\treturn `${this.publicUrl.replace(TRAILING_SLASH_PATTERN, \"\")}/${key}`;\n\t\t}\n\t\t// Default to endpoint + bucket + key\n\t\treturn `${this.endpoint.replace(TRAILING_SLASH_PATTERN, \"\")}/${this.bucket}/${key}`;\n\t}\n}\n\n/**\n * Create S3 storage adapter\n * This is the factory function called at runtime\n */\nexport function createStorage(config: Record<string, unknown>): Storage {\n\tconst { endpoint, bucket, accessKeyId, secretAccessKey, region, publicUrl } = config;\n\tif (\n\t\ttypeof endpoint !== \"string\" ||\n\t\ttypeof bucket !== \"string\" ||\n\t\ttypeof accessKeyId !== \"string\" ||\n\t\ttypeof secretAccessKey !== \"string\"\n\t) {\n\t\tthrow new Error(\n\t\t\t\"S3Storage requires 'endpoint', 'bucket', 'accessKeyId', and 'secretAccessKey' string config values\",\n\t\t);\n\t}\n\treturn new S3Storage({\n\t\tendpoint,\n\t\tbucket,\n\t\taccessKeyId,\n\t\tsecretAccessKey,\n\t\tregion: typeof region === \"string\" ? region : undefined,\n\t\tpublicUrl: typeof publicUrl === \"string\" ? 
publicUrl : undefined,\n\t});\n}\n"],"mappings":";;;;;;;;;;;AA8BA,MAAM,yBAAyB;;AAG/B,SAAS,aAAa,OAAmD;AACxE,QAAO,iBAAiB,SAAS,OAAO,MAAM,SAAS;;;;;AAMxD,IAAa,YAAb,MAA0C;CACzC,AAAQ;CACR,AAAQ;CACR,AAAQ;CACR,AAAQ;CAER,YAAY,QAAyB;AACpC,OAAK,SAAS,OAAO;AACrB,OAAK,YAAY,OAAO;AACxB,OAAK,WAAW,OAAO;AAEvB,OAAK,SAAS,IAAI,SAAS;GAC1B,UAAU,OAAO;GACjB,QAAQ,OAAO,UAAU;GACzB,aAAa;IACZ,aAAa,OAAO;IACpB,iBAAiB,OAAO;IACxB;GAED,gBAAgB;GAChB,CAAC;;CAGH,MAAM,OAAO,SAIa;AACzB,MAAI;GAEH,IAAI;AACJ,OAAI,QAAQ,gBAAgB,gBAAgB;IAC3C,MAAM,SAAuB,EAAE;IAC/B,MAAM,SAAS,QAAQ,KAAK,WAAW;AACvC,WAAO,MAAM;KACZ,MAAM,EAAE,MAAM,UAAU,MAAM,OAAO,MAAM;AAC3C,SAAI,KAAM;AACV,YAAO,KAAK,MAAM;;AAEnB,WAAO,OAAO,OAAO,OAAO;SAE5B,QAAO,QAAQ;AAGhB,SAAM,KAAK,OAAO,KACjB,IAAI,iBAAiB;IACpB,QAAQ,KAAK;IACb,KAAK,QAAQ;IACb,MAAM;IACN,aAAa,QAAQ;IACrB,CAAC,CACF;AAED,UAAO;IACN,KAAK,QAAQ;IACb,KAAK,KAAK,aAAa,QAAQ,IAAI;IACnC,MAAM,KAAK;IACX;WACO,OAAO;AACf,SAAM,IAAI,mBAAmB,0BAA0B,QAAQ,OAAO,iBAAiB,MAAM;;;CAI/F,MAAM,SAAS,KAAsC;AACpD,MAAI;GACH,MAAM,WAAW,MAAM,KAAK,OAAO,KAClC,IAAI,iBAAiB;IACpB,QAAQ,KAAK;IACb,KAAK;IACL,CAAC,CACF;AAED,OAAI,CAAC,SAAS,KACb,OAAM,IAAI,mBAAmB,mBAAmB,OAAO,YAAY;AAMpE,UAAO;IACN,MAHY,SAAS,KAAK,sBAAsB;IAIhD,aAAa,SAAS,eAAe;IACrC,MAAM,SAAS,iBAAiB;IAChC;WACO,OAAO;AACf,OACC,iBAAiB,sBAChB,aAAa,MAAM,IAAI,MAAM,SAAS,YAEvC,OAAM,IAAI,mBAAmB,mBAAmB,OAAO,aAAa,MAAM;AAE3E,SAAM,IAAI,mBAAmB,4BAA4B,OAAO,mBAAmB,MAAM;;;CAI3F,MAAM,OAAO,KAA4B;AACxC,MAAI;AACH,SAAM,KAAK,OAAO,KACjB,IAAI,oBAAoB;IACvB,QAAQ,KAAK;IACb,KAAK;IACL,CAAC,CACF;WACO,OAAO;AAEf,OAAI,CAAC,aAAa,MAAM,IAAI,MAAM,SAAS,YAC1C,OAAM,IAAI,mBAAmB,0BAA0B,OAAO,iBAAiB,MAAM;;;CAKxF,MAAM,OAAO,KAA+B;AAC3C,MAAI;AACH,SAAM,KAAK,OAAO,KACjB,IAAI,kBAAkB;IACrB,QAAQ,KAAK;IACb,KAAK;IACL,CAAC,CACF;AACD,UAAO;WACC,OAAO;AACf,OAAI,aAAa,MAAM,IAAI,MAAM,SAAS,WACzC,QAAO;AAER,SAAM,IAAI,mBAAmB,mCAAmC,OAAO,eAAe,MAAM;;;CAI9F,MAAM,KAAK,UAAuB,EAAE,EAAuB;AAC1D,MAAI;GAEH,MAAM,WAAY,MAAM,KAAK,OAAO,KACnC,IAAI,qBAAqB;IACxB,QAAQ,KAAK;IACb,QAAQ,QAAQ;IAChB,SAAS,QAAQ;IACjB,mBAAmB,QAAQ;IAC3B,CAAC,CACF;AAED,UAAO;IACN,QAAQ,SAAS,YAAY,EAAE,EAAE,
KAC/B,UAA+E;KAC/E,KAAK,KAAK;KACV,MAAM,KAAK,QAAQ;KACnB,cAAc,KAAK,gCAAgB,IAAI,MAAM;KAC7C,MAAM,KAAK;KACX,EACD;IACD,YAAY,SAAS;IACrB;WACO,OAAO;AACf,SAAM,IAAI,mBAAmB,wBAAwB,eAAe,MAAM;;;CAI5E,MAAM,mBAAmB,SAAwD;AAChF,MAAI;GACH,MAAM,YAAY,QAAQ,aAAa;GAEvC,MAAM,UAAU,IAAI,iBAAiB;IACpC,QAAQ,KAAK;IACb,KAAK,QAAQ;IACb,aAAa,QAAQ;IACrB,eAAe,QAAQ;IACvB,CAAC;GAEF,MAAM,MAAM,MAAM,aAAa,KAAK,QAAQ,SAAS,EAAE,WAAW,CAAC;GAEnE,MAAM,YAAY,IAAI,KAAK,KAAK,KAAK,GAAG,YAAY,IAAK,CAAC,aAAa;AAEvE,UAAO;IACN;IACA,QAAQ;IACR,SAAS;KACR,gBAAgB,QAAQ;KACxB,GAAI,QAAQ,OAAO,EAAE,kBAAkB,OAAO,QAAQ,KAAK,EAAE,GAAG,EAAE;KAClE;IACD;IACA;WACO,OAAO;AACf,SAAM,IAAI,mBACT,sCAAsC,QAAQ,OAC9C,qBACA,MACA;;;CAIH,aAAa,KAAqB;AACjC,MAAI,KAAK,UACR,QAAO,GAAG,KAAK,UAAU,QAAQ,wBAAwB,GAAG,CAAC,GAAG;AAGjE,SAAO,GAAG,KAAK,SAAS,QAAQ,wBAAwB,GAAG,CAAC,GAAG,KAAK,OAAO,GAAG;;;;;;;AAQhF,SAAgB,cAAc,QAA0C;CACvE,MAAM,EAAE,UAAU,QAAQ,aAAa,iBAAiB,QAAQ,cAAc;AAC9E,KACC,OAAO,aAAa,YACpB,OAAO,WAAW,YAClB,OAAO,gBAAgB,YACvB,OAAO,oBAAoB,SAE3B,OAAM,IAAI,MACT,qGACA;AAEF,QAAO,IAAI,UAAU;EACpB;EACA;EACA;EACA;EACA,QAAQ,OAAO,WAAW,WAAW,SAAS;EAC9C,WAAW,OAAO,cAAc,WAAW,YAAY;EACvD,CAAC"}
1
+ {"version":3,"file":"s3.mjs","names":[],"sources":["../../src/storage/s3.ts"],"sourcesContent":["/**\n * S3-Compatible Storage Implementation\n *\n * Uses the AWS SDK v3 for S3 operations.\n * Works with AWS S3, Cloudflare R2, Minio, and other S3-compatible services.\n */\n\nimport {\n\tS3Client,\n\tPutObjectCommand,\n\tGetObjectCommand,\n\tDeleteObjectCommand,\n\tHeadObjectCommand,\n\tListObjectsV2Command,\n\ttype ListObjectsV2Response,\n} from \"@aws-sdk/client-s3\";\nimport { getSignedUrl } from \"@aws-sdk/s3-request-presigner\";\nimport { z } from \"zod\";\n\nimport type {\n\tStorage,\n\tS3StorageConfig,\n\tUploadResult,\n\tDownloadResult,\n\tListResult,\n\tListOptions,\n\tSignedUploadUrl,\n\tSignedUploadOptions,\n} from \"./types.js\";\nimport { EmDashStorageError } from \"./types.js\";\n\nconst ENV_KEYS = {\n\tendpoint: \"S3_ENDPOINT\",\n\tbucket: \"S3_BUCKET\",\n\taccessKeyId: \"S3_ACCESS_KEY_ID\",\n\tsecretAccessKey: \"S3_SECRET_ACCESS_KEY\",\n\tregion: \"S3_REGION\",\n\tpublicUrl: \"S3_PUBLIC_URL\",\n} as const satisfies Record<keyof S3StorageConfig, string>;\n\nfunction fail(msg: string): never {\n\tthrow new EmDashStorageError(msg, \"MISSING_S3_CONFIG\");\n}\n\nconst s3ConfigSchema = z.object({\n\tendpoint: z.url({ protocol: /^https?$/, error: \"is not a valid http/https URL\" }).optional(),\n\tbucket: z.string().optional(),\n\taccessKeyId: z.string().optional(),\n\tsecretAccessKey: z.string().optional(),\n\tregion: z.string().optional(),\n\tpublicUrl: z.string().optional(),\n});\n\nfunction isConfigKey(key: unknown): key is keyof S3StorageConfig {\n\treturn typeof key === \"string\" && key in ENV_KEYS;\n}\n\n/**\n * Build the merged config: for each field, use the explicit value if present,\n * otherwise fall back to the corresponding S3_* env var. 
Validate once on the\n * final merged result so a malformed env var never breaks the build when the\n * caller provides that field explicitly.\n */\nexport function resolveS3Config(partial: Record<string, unknown>): S3StorageConfig {\n\tconst raw: Record<string, unknown> = {};\n\tfor (const [field, envKey] of Object.entries(ENV_KEYS)) {\n\t\tconst explicit = partial[field];\n\t\tif (explicit !== undefined && explicit !== \"\") {\n\t\t\traw[field] = explicit;\n\t\t\tcontinue;\n\t\t}\n\t\tconst envVal = typeof process !== \"undefined\" && process.env ? process.env[envKey] : undefined;\n\t\tif (envVal !== undefined && envVal !== \"\") {\n\t\t\traw[field] = envVal;\n\t\t}\n\t}\n\n\tconst result = s3ConfigSchema.safeParse(raw);\n\tif (!result.success) {\n\t\tconst issue = result.error.issues[0];\n\t\tconst pathKey = issue?.path[0];\n\t\tif (!issue || !isConfigKey(pathKey)) fail(\"S3 config validation failed\");\n\t\tconst fromExplicit = partial[pathKey] !== undefined && partial[pathKey] !== \"\";\n\t\tconst label = fromExplicit ? 
`s3({ ${pathKey} })` : ENV_KEYS[pathKey];\n\t\tfail(`${label} ${issue.message}`);\n\t}\n\tconst merged = result.data;\n\n\tconst endpoint = merged.endpoint;\n\tconst bucket = merged.bucket;\n\tif (!endpoint || !bucket) {\n\t\tconst missing: string[] = [];\n\t\tif (!endpoint) missing.push(`endpoint: set ${ENV_KEYS.endpoint} or pass endpoint to s3({...})`);\n\t\tif (!bucket) missing.push(`bucket: set ${ENV_KEYS.bucket} or pass bucket to s3({...})`);\n\t\tfail(`missing required S3 config: ${missing.join(\"; \")}`);\n\t}\n\tconst accessKeyId = merged.accessKeyId;\n\tconst secretAccessKey = merged.secretAccessKey;\n\tif (accessKeyId && !secretAccessKey) {\n\t\tfail(\n\t\t\t`S3 credentials incomplete: accessKeyId is set but secretAccessKey is missing (set ${ENV_KEYS.secretAccessKey} or pass secretAccessKey to s3({...}))`,\n\t\t);\n\t}\n\tif (secretAccessKey && !accessKeyId) {\n\t\tfail(\n\t\t\t`S3 credentials incomplete: secretAccessKey is set but accessKeyId is missing (set ${ENV_KEYS.accessKeyId} or pass accessKeyId to s3({...}))`,\n\t\t);\n\t}\n\n\treturn { ...merged, endpoint, bucket };\n}\n\nconst TRAILING_SLASH_PATTERN = /\\/$/;\n\n/** Type guard for AWS SDK errors (have a `name` property) */\nfunction hasErrorName(error: unknown): error is Error & { name: string } {\n\treturn error instanceof Error && typeof error.name === \"string\";\n}\n\n/**\n * S3-compatible storage implementation\n */\nexport class S3Storage implements Storage {\n\tprivate client: S3Client;\n\tprivate bucket: string;\n\tprivate publicUrl?: string;\n\tprivate endpoint: string;\n\n\tconstructor(config: S3StorageConfig) {\n\t\tthis.bucket = config.bucket;\n\t\tthis.publicUrl = config.publicUrl;\n\t\tthis.endpoint = config.endpoint;\n\n\t\tthis.client = new S3Client({\n\t\t\tendpoint: config.endpoint,\n\t\t\tregion: config.region || \"auto\",\n\t\t\t...(config.accessKeyId && config.secretAccessKey\n\t\t\t\t? 
{\n\t\t\t\t\t\tcredentials: {\n\t\t\t\t\t\t\taccessKeyId: config.accessKeyId,\n\t\t\t\t\t\t\tsecretAccessKey: config.secretAccessKey,\n\t\t\t\t\t\t},\n\t\t\t\t\t}\n\t\t\t\t: {}),\n\t\t\t// Required for R2 and some S3-compatible services\n\t\t\tforcePathStyle: true,\n\t\t} as ConstructorParameters<typeof S3Client>[0]);\n\t}\n\n\tasync upload(options: {\n\t\tkey: string;\n\t\tbody: Buffer | Uint8Array | ReadableStream<Uint8Array>;\n\t\tcontentType: string;\n\t}): Promise<UploadResult> {\n\t\ttry {\n\t\t\t// Convert ReadableStream to Buffer if needed\n\t\t\tlet body: Buffer | Uint8Array;\n\t\t\tif (options.body instanceof ReadableStream) {\n\t\t\t\tconst chunks: Uint8Array[] = [];\n\t\t\t\tconst reader = options.body.getReader();\n\t\t\t\twhile (true) {\n\t\t\t\t\tconst { done, value } = await reader.read();\n\t\t\t\t\tif (done) break;\n\t\t\t\t\tchunks.push(value);\n\t\t\t\t}\n\t\t\t\tbody = Buffer.concat(chunks);\n\t\t\t} else {\n\t\t\t\tbody = options.body;\n\t\t\t}\n\n\t\t\tawait this.client.send(\n\t\t\t\tnew PutObjectCommand({\n\t\t\t\t\tBucket: this.bucket,\n\t\t\t\t\tKey: options.key,\n\t\t\t\t\tBody: body,\n\t\t\t\t\tContentType: options.contentType,\n\t\t\t\t}),\n\t\t\t);\n\n\t\t\treturn {\n\t\t\t\tkey: options.key,\n\t\t\t\turl: this.getPublicUrl(options.key),\n\t\t\t\tsize: body.length,\n\t\t\t};\n\t\t} catch (error) {\n\t\t\tthrow new EmDashStorageError(`Failed to upload file: ${options.key}`, \"UPLOAD_FAILED\", error);\n\t\t}\n\t}\n\n\tasync download(key: string): Promise<DownloadResult> {\n\t\ttry {\n\t\t\tconst response = await this.client.send(\n\t\t\t\tnew GetObjectCommand({\n\t\t\t\t\tBucket: this.bucket,\n\t\t\t\t\tKey: key,\n\t\t\t\t}),\n\t\t\t);\n\n\t\t\tif (!response.Body) {\n\t\t\t\tthrow new EmDashStorageError(`File not found: ${key}`, \"NOT_FOUND\");\n\t\t\t}\n\n\t\t\t// Convert SDK stream to web ReadableStream\n\t\t\tconst body = response.Body.transformToWebStream();\n\n\t\t\treturn {\n\t\t\t\tbody,\n\t\t\t\tcontentType: response.ContentType 
|| \"application/octet-stream\",\n\t\t\t\tsize: response.ContentLength || 0,\n\t\t\t};\n\t\t} catch (error) {\n\t\t\tif (\n\t\t\t\terror instanceof EmDashStorageError ||\n\t\t\t\t(hasErrorName(error) && error.name === \"NoSuchKey\")\n\t\t\t) {\n\t\t\t\tthrow new EmDashStorageError(`File not found: ${key}`, \"NOT_FOUND\", error);\n\t\t\t}\n\t\t\tthrow new EmDashStorageError(`Failed to download file: ${key}`, \"DOWNLOAD_FAILED\", error);\n\t\t}\n\t}\n\n\tasync delete(key: string): Promise<void> {\n\t\ttry {\n\t\t\tawait this.client.send(\n\t\t\t\tnew DeleteObjectCommand({\n\t\t\t\t\tBucket: this.bucket,\n\t\t\t\t\tKey: key,\n\t\t\t\t}),\n\t\t\t);\n\t\t} catch (error) {\n\t\t\t// S3 delete is idempotent, so we ignore \"not found\" errors\n\t\t\tif (!hasErrorName(error) || error.name !== \"NoSuchKey\") {\n\t\t\t\tthrow new EmDashStorageError(`Failed to delete file: ${key}`, \"DELETE_FAILED\", error);\n\t\t\t}\n\t\t}\n\t}\n\n\tasync exists(key: string): Promise<boolean> {\n\t\ttry {\n\t\t\tawait this.client.send(\n\t\t\t\tnew HeadObjectCommand({\n\t\t\t\t\tBucket: this.bucket,\n\t\t\t\t\tKey: key,\n\t\t\t\t}),\n\t\t\t);\n\t\t\treturn true;\n\t\t} catch (error) {\n\t\t\tif (hasErrorName(error) && error.name === \"NotFound\") {\n\t\t\t\treturn false;\n\t\t\t}\n\t\t\tthrow new EmDashStorageError(`Failed to check file existence: ${key}`, \"HEAD_FAILED\", error);\n\t\t}\n\t}\n\n\tasync list(options: ListOptions = {}): Promise<ListResult> {\n\t\ttry {\n\t\t\t// eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- S3 client.send returns generic output; narrowing to ListObjectsV2Response\n\t\t\tconst response = (await this.client.send(\n\t\t\t\tnew ListObjectsV2Command({\n\t\t\t\t\tBucket: this.bucket,\n\t\t\t\t\tPrefix: options.prefix,\n\t\t\t\t\tMaxKeys: options.limit,\n\t\t\t\t\tContinuationToken: options.cursor,\n\t\t\t\t}),\n\t\t\t)) as ListObjectsV2Response;\n\n\t\t\treturn {\n\t\t\t\tfiles: (response.Contents || []).map(\n\t\t\t\t\t(item: { Key?: 
string; Size?: number; LastModified?: Date; ETag?: string }) => ({\n\t\t\t\t\t\tkey: item.Key!,\n\t\t\t\t\t\tsize: item.Size || 0,\n\t\t\t\t\t\tlastModified: item.LastModified || new Date(),\n\t\t\t\t\t\tetag: item.ETag,\n\t\t\t\t\t}),\n\t\t\t\t),\n\t\t\t\tnextCursor: response.NextContinuationToken,\n\t\t\t};\n\t\t} catch (error) {\n\t\t\tthrow new EmDashStorageError(\"Failed to list files\", \"LIST_FAILED\", error);\n\t\t}\n\t}\n\n\tasync getSignedUploadUrl(options: SignedUploadOptions): Promise<SignedUploadUrl> {\n\t\ttry {\n\t\t\tconst expiresIn = options.expiresIn || 3600; // 1 hour default\n\n\t\t\tconst command = new PutObjectCommand({\n\t\t\t\tBucket: this.bucket,\n\t\t\t\tKey: options.key,\n\t\t\t\tContentType: options.contentType,\n\t\t\t\tContentLength: options.size,\n\t\t\t});\n\n\t\t\tconst url = await getSignedUrl(this.client, command, { expiresIn });\n\n\t\t\tconst expiresAt = new Date(Date.now() + expiresIn * 1000).toISOString();\n\n\t\t\treturn {\n\t\t\t\turl,\n\t\t\t\tmethod: \"PUT\",\n\t\t\t\theaders: {\n\t\t\t\t\t\"Content-Type\": options.contentType,\n\t\t\t\t\t...(options.size ? 
{ \"Content-Length\": String(options.size) } : {}),\n\t\t\t\t},\n\t\t\t\texpiresAt,\n\t\t\t};\n\t\t} catch (error) {\n\t\t\tthrow new EmDashStorageError(\n\t\t\t\t`Failed to generate signed URL for: ${options.key}`,\n\t\t\t\t\"SIGNED_URL_FAILED\",\n\t\t\t\terror,\n\t\t\t);\n\t\t}\n\t}\n\n\tgetPublicUrl(key: string): string {\n\t\tif (this.publicUrl) {\n\t\t\treturn `${this.publicUrl.replace(TRAILING_SLASH_PATTERN, \"\")}/${key}`;\n\t\t}\n\t\t// Default to endpoint + bucket + key\n\t\treturn `${this.endpoint.replace(TRAILING_SLASH_PATTERN, \"\")}/${this.bucket}/${key}`;\n\t}\n}\n\n/**\n * Create S3 storage adapter\n * This is the factory function called at runtime.\n * Config fields are merged with S3_* env vars; env vars fill in any missing fields.\n */\nexport function createStorage(config: Record<string, unknown>): Storage {\n\treturn new S3Storage(resolveS3Config(config));\n}\n"],"mappings":";;;;;;;;;;;;AA+BA,MAAM,WAAW;CAChB,UAAU;CACV,QAAQ;CACR,aAAa;CACb,iBAAiB;CACjB,QAAQ;CACR,WAAW;CACX;AAED,SAAS,KAAK,KAAoB;AACjC,OAAM,IAAI,mBAAmB,KAAK,oBAAoB;;AAGvD,MAAM,iBAAiB,EAAE,OAAO;CAC/B,UAAU,EAAE,IAAI;EAAE,UAAU;EAAY,OAAO;EAAiC,CAAC,CAAC,UAAU;CAC5F,QAAQ,EAAE,QAAQ,CAAC,UAAU;CAC7B,aAAa,EAAE,QAAQ,CAAC,UAAU;CAClC,iBAAiB,EAAE,QAAQ,CAAC,UAAU;CACtC,QAAQ,EAAE,QAAQ,CAAC,UAAU;CAC7B,WAAW,EAAE,QAAQ,CAAC,UAAU;CAChC,CAAC;AAEF,SAAS,YAAY,KAA4C;AAChE,QAAO,OAAO,QAAQ,YAAY,OAAO;;;;;;;;AAS1C,SAAgB,gBAAgB,SAAmD;CAClF,MAAM,MAA+B,EAAE;AACvC,MAAK,MAAM,CAAC,OAAO,WAAW,OAAO,QAAQ,SAAS,EAAE;EACvD,MAAM,WAAW,QAAQ;AACzB,MAAI,aAAa,UAAa,aAAa,IAAI;AAC9C,OAAI,SAAS;AACb;;EAED,MAAM,SAAS,OAAO,YAAY,eAAe,QAAQ,MAAM,QAAQ,IAAI,UAAU;AACrF,MAAI,WAAW,UAAa,WAAW,GACtC,KAAI,SAAS;;CAIf,MAAM,SAAS,eAAe,UAAU,IAAI;AAC5C,KAAI,CAAC,OAAO,SAAS;EACpB,MAAM,QAAQ,OAAO,MAAM,OAAO;EAClC,MAAM,UAAU,OAAO,KAAK;AAC5B,MAAI,CAAC,SAAS,CAAC,YAAY,QAAQ,CAAE,MAAK,8BAA8B;AAGxE,OAAK,GAFgB,QAAQ,aAAa,UAAa,QAAQ,aAAa,KAC/C,QAAQ,QAAQ,OAAO,SAAS,SAC/C,GAAG,MAAM,UAAU;;CAElC,MAAM,SAAS,OAAO;CAEtB,MAAM,WAAW,OAAO;CACxB,MAAM,SAAS,OAAO;AACtB,KAAI,CAAC,YAAY,CAAC,QAAQ;E
ACzB,MAAM,UAAoB,EAAE;AAC5B,MAAI,CAAC,SAAU,SAAQ,KAAK,iBAAiB,SAAS,SAAS,gCAAgC;AAC/F,MAAI,CAAC,OAAQ,SAAQ,KAAK,eAAe,SAAS,OAAO,8BAA8B;AACvF,OAAK,+BAA+B,QAAQ,KAAK,KAAK,GAAG;;CAE1D,MAAM,cAAc,OAAO;CAC3B,MAAM,kBAAkB,OAAO;AAC/B,KAAI,eAAe,CAAC,gBACnB,MACC,qFAAqF,SAAS,gBAAgB,wCAC9G;AAEF,KAAI,mBAAmB,CAAC,YACvB,MACC,qFAAqF,SAAS,YAAY,oCAC1G;AAGF,QAAO;EAAE,GAAG;EAAQ;EAAU;EAAQ;;AAGvC,MAAM,yBAAyB;;AAG/B,SAAS,aAAa,OAAmD;AACxE,QAAO,iBAAiB,SAAS,OAAO,MAAM,SAAS;;;;;AAMxD,IAAa,YAAb,MAA0C;CACzC,AAAQ;CACR,AAAQ;CACR,AAAQ;CACR,AAAQ;CAER,YAAY,QAAyB;AACpC,OAAK,SAAS,OAAO;AACrB,OAAK,YAAY,OAAO;AACxB,OAAK,WAAW,OAAO;AAEvB,OAAK,SAAS,IAAI,SAAS;GAC1B,UAAU,OAAO;GACjB,QAAQ,OAAO,UAAU;GACzB,GAAI,OAAO,eAAe,OAAO,kBAC9B,EACA,aAAa;IACZ,aAAa,OAAO;IACpB,iBAAiB,OAAO;IACxB,EACD,GACA,EAAE;GAEL,gBAAgB;GAChB,CAA8C;;CAGhD,MAAM,OAAO,SAIa;AACzB,MAAI;GAEH,IAAI;AACJ,OAAI,QAAQ,gBAAgB,gBAAgB;IAC3C,MAAM,SAAuB,EAAE;IAC/B,MAAM,SAAS,QAAQ,KAAK,WAAW;AACvC,WAAO,MAAM;KACZ,MAAM,EAAE,MAAM,UAAU,MAAM,OAAO,MAAM;AAC3C,SAAI,KAAM;AACV,YAAO,KAAK,MAAM;;AAEnB,WAAO,OAAO,OAAO,OAAO;SAE5B,QAAO,QAAQ;AAGhB,SAAM,KAAK,OAAO,KACjB,IAAI,iBAAiB;IACpB,QAAQ,KAAK;IACb,KAAK,QAAQ;IACb,MAAM;IACN,aAAa,QAAQ;IACrB,CAAC,CACF;AAED,UAAO;IACN,KAAK,QAAQ;IACb,KAAK,KAAK,aAAa,QAAQ,IAAI;IACnC,MAAM,KAAK;IACX;WACO,OAAO;AACf,SAAM,IAAI,mBAAmB,0BAA0B,QAAQ,OAAO,iBAAiB,MAAM;;;CAI/F,MAAM,SAAS,KAAsC;AACpD,MAAI;GACH,MAAM,WAAW,MAAM,KAAK,OAAO,KAClC,IAAI,iBAAiB;IACpB,QAAQ,KAAK;IACb,KAAK;IACL,CAAC,CACF;AAED,OAAI,CAAC,SAAS,KACb,OAAM,IAAI,mBAAmB,mBAAmB,OAAO,YAAY;AAMpE,UAAO;IACN,MAHY,SAAS,KAAK,sBAAsB;IAIhD,aAAa,SAAS,eAAe;IACrC,MAAM,SAAS,iBAAiB;IAChC;WACO,OAAO;AACf,OACC,iBAAiB,sBAChB,aAAa,MAAM,IAAI,MAAM,SAAS,YAEvC,OAAM,IAAI,mBAAmB,mBAAmB,OAAO,aAAa,MAAM;AAE3E,SAAM,IAAI,mBAAmB,4BAA4B,OAAO,mBAAmB,MAAM;;;CAI3F,MAAM,OAAO,KAA4B;AACxC,MAAI;AACH,SAAM,KAAK,OAAO,KACjB,IAAI,oBAAoB;IACvB,QAAQ,KAAK;IACb,KAAK;IACL,CAAC,CACF;WACO,OAAO;AAEf,OAAI,CAAC,aAAa,MAAM,IAAI,MAAM,SAAS,YAC1C,OAAM,IAAI,mBAAmB,0BAA0B,OAAO,iBAAiB,MAAM;;;CAKxF,MAAM,OAAO,KAA+B;AAC3C,MAAI;AACH,SAAM,KAAK,OAAO,KACjB,IAAI,kBAAkB;IACrB,QAAQ,KA
AK;IACb,KAAK;IACL,CAAC,CACF;AACD,UAAO;WACC,OAAO;AACf,OAAI,aAAa,MAAM,IAAI,MAAM,SAAS,WACzC,QAAO;AAER,SAAM,IAAI,mBAAmB,mCAAmC,OAAO,eAAe,MAAM;;;CAI9F,MAAM,KAAK,UAAuB,EAAE,EAAuB;AAC1D,MAAI;GAEH,MAAM,WAAY,MAAM,KAAK,OAAO,KACnC,IAAI,qBAAqB;IACxB,QAAQ,KAAK;IACb,QAAQ,QAAQ;IAChB,SAAS,QAAQ;IACjB,mBAAmB,QAAQ;IAC3B,CAAC,CACF;AAED,UAAO;IACN,QAAQ,SAAS,YAAY,EAAE,EAAE,KAC/B,UAA+E;KAC/E,KAAK,KAAK;KACV,MAAM,KAAK,QAAQ;KACnB,cAAc,KAAK,gCAAgB,IAAI,MAAM;KAC7C,MAAM,KAAK;KACX,EACD;IACD,YAAY,SAAS;IACrB;WACO,OAAO;AACf,SAAM,IAAI,mBAAmB,wBAAwB,eAAe,MAAM;;;CAI5E,MAAM,mBAAmB,SAAwD;AAChF,MAAI;GACH,MAAM,YAAY,QAAQ,aAAa;GAEvC,MAAM,UAAU,IAAI,iBAAiB;IACpC,QAAQ,KAAK;IACb,KAAK,QAAQ;IACb,aAAa,QAAQ;IACrB,eAAe,QAAQ;IACvB,CAAC;GAEF,MAAM,MAAM,MAAM,aAAa,KAAK,QAAQ,SAAS,EAAE,WAAW,CAAC;GAEnE,MAAM,YAAY,IAAI,KAAK,KAAK,KAAK,GAAG,YAAY,IAAK,CAAC,aAAa;AAEvE,UAAO;IACN;IACA,QAAQ;IACR,SAAS;KACR,gBAAgB,QAAQ;KACxB,GAAI,QAAQ,OAAO,EAAE,kBAAkB,OAAO,QAAQ,KAAK,EAAE,GAAG,EAAE;KAClE;IACD;IACA;WACO,OAAO;AACf,SAAM,IAAI,mBACT,sCAAsC,QAAQ,OAC9C,qBACA,MACA;;;CAIH,aAAa,KAAqB;AACjC,MAAI,KAAK,UACR,QAAO,GAAG,KAAK,UAAU,QAAQ,wBAAwB,GAAG,CAAC,GAAG;AAGjE,SAAO,GAAG,KAAK,SAAS,QAAQ,wBAAwB,GAAG,CAAC,GAAG,KAAK,OAAO,GAAG;;;;;;;;AAShF,SAAgB,cAAc,QAA0C;AACvE,QAAO,IAAI,UAAU,gBAAgB,OAAO,CAAC"}
@@ -168,4 +168,4 @@ function parseContentId(contentId) {
168
168
 
169
169
  //#endregion
170
170
  export { parseContentId as n, verifyPreviewToken as r, generatePreviewToken as t };
171
- //# sourceMappingURL=tokens-DpgrkrXK.mjs.map
171
+ //# sourceMappingURL=tokens-DrB-W6Q-.mjs.map
@@ -1 +1 @@
1
- {"version":3,"file":"tokens-DpgrkrXK.mjs","names":[],"sources":["../src/preview/tokens.ts"],"sourcesContent":["/**\n * Preview token generation and verification\n *\n * Tokens are compact, URL-safe, and HMAC-signed.\n * Format: base64url(JSON payload).base64url(HMAC signature)\n *\n * Payload: { cid: contentId, exp: expiryTimestamp, iat: issuedAt }\n */\n\nimport { encodeBase64url, decodeBase64url } from \"../utils/base64.js\";\n\n// Regex pattern for duration parsing\nconst DURATION_PATTERN = /^(\\d+)([smhdw])$/;\n\n/**\n * Preview token payload\n */\nexport interface PreviewTokenPayload {\n\t/** Content ID in format \"collection:id\" (e.g., \"posts:abc123\") */\n\tcid: string;\n\t/** Expiry timestamp (seconds since epoch) */\n\texp: number;\n\t/** Issued at timestamp (seconds since epoch) */\n\tiat: number;\n}\n\n/**\n * Options for generating a preview token\n */\nexport interface GeneratePreviewTokenOptions {\n\t/** Content ID in format \"collection:id\" */\n\tcontentId: string;\n\t/** How long the token is valid. Accepts \"1h\", \"30m\", \"1d\", or seconds as number. Default: \"1h\" */\n\texpiresIn?: string | number;\n\t/** Secret key for signing. Should be from environment variable. */\n\tsecret: string;\n}\n\n/**\n * Parse duration string to seconds\n * Supports: \"1h\", \"30m\", \"1d\", \"2w\", or raw seconds\n */\nfunction parseDuration(duration: string | number): number {\n\tif (typeof duration === \"number\") {\n\t\treturn duration;\n\t}\n\n\tconst match = duration.match(DURATION_PATTERN);\n\tif (!match) {\n\t\tthrow new Error(\n\t\t\t`Invalid duration format: \"${duration}\". 
Use \"1h\", \"30m\", \"1d\", \"2w\", or seconds.`,\n\t\t);\n\t}\n\n\tconst value = parseInt(match[1], 10);\n\tconst unit = match[2];\n\n\tswitch (unit) {\n\t\tcase \"s\":\n\t\t\treturn value;\n\t\tcase \"m\":\n\t\t\treturn value * 60;\n\t\tcase \"h\":\n\t\t\treturn value * 60 * 60;\n\t\tcase \"d\":\n\t\t\treturn value * 60 * 60 * 24;\n\t\tcase \"w\":\n\t\t\treturn value * 60 * 60 * 24 * 7;\n\t\tdefault:\n\t\t\tthrow new Error(`Unknown duration unit: ${unit}`);\n\t}\n}\n\n/**\n * Create HMAC-SHA256 signature using Web Crypto API\n */\nasync function createSignature(data: string, secret: string): Promise<Uint8Array> {\n\tconst encoder = new TextEncoder();\n\tconst key = await crypto.subtle.importKey(\n\t\t\"raw\",\n\t\tencoder.encode(secret),\n\t\t{ name: \"HMAC\", hash: \"SHA-256\" },\n\t\tfalse,\n\t\t[\"sign\"],\n\t);\n\tconst signature = await crypto.subtle.sign(\"HMAC\", key, encoder.encode(data));\n\treturn new Uint8Array(signature);\n}\n\n/**\n * Verify HMAC-SHA256 signature\n */\nasync function verifySignature(\n\tdata: string,\n\tsignature: Uint8Array,\n\tsecret: string,\n): Promise<boolean> {\n\tconst encoder = new TextEncoder();\n\tconst key = await crypto.subtle.importKey(\n\t\t\"raw\",\n\t\tencoder.encode(secret),\n\t\t{ name: \"HMAC\", hash: \"SHA-256\" },\n\t\tfalse,\n\t\t[\"verify\"],\n\t);\n\t// Create a new ArrayBuffer from the signature to satisfy BufferSource typing\n\t// (Uint8Array.buffer is ArrayBufferLike which includes SharedArrayBuffer)\n\tconst sigBuffer: ArrayBuffer = new ArrayBuffer(signature.byteLength);\n\tnew Uint8Array(sigBuffer).set(signature);\n\treturn crypto.subtle.verify(\"HMAC\", key, sigBuffer, encoder.encode(data));\n}\n\n/**\n * Generate a preview token for content\n *\n * @example\n * ```ts\n * const token = await generatePreviewToken({\n * contentId: \"posts:abc123\",\n * expiresIn: \"1h\",\n * secret: process.env.PREVIEW_SECRET!,\n * });\n * ```\n */\nexport async function generatePreviewToken(options: 
GeneratePreviewTokenOptions): Promise<string> {\n\tconst { contentId, expiresIn = \"1h\", secret } = options;\n\n\tif (!secret) {\n\t\tthrow new Error(\"Preview secret is required\");\n\t}\n\n\tif (!contentId || !contentId.includes(\":\")) {\n\t\tthrow new Error('Content ID must be in format \"collection:id\"');\n\t}\n\n\tconst now = Math.floor(Date.now() / 1000);\n\tconst duration = parseDuration(expiresIn);\n\n\tconst payload: PreviewTokenPayload = {\n\t\tcid: contentId,\n\t\texp: now + duration,\n\t\tiat: now,\n\t};\n\n\t// Encode payload\n\tconst payloadJson = JSON.stringify(payload);\n\tconst encodedPayload = encodeBase64url(new TextEncoder().encode(payloadJson));\n\n\t// Sign it\n\tconst signature = await createSignature(encodedPayload, secret);\n\tconst encodedSignature = encodeBase64url(signature);\n\n\treturn `${encodedPayload}.${encodedSignature}`;\n}\n\n/**\n * Result of verifying a preview token\n */\nexport type VerifyPreviewTokenResult =\n\t| { valid: true; payload: PreviewTokenPayload }\n\t| { valid: false; error: \"invalid\" | \"expired\" | \"malformed\" | \"none\" };\n\n/**\n * Options for verifyPreviewToken\n */\nexport type VerifyPreviewTokenOptions = {\n\t/** Secret key for verifying tokens */\n\tsecret: string;\n} & (\n\t| { /** URL to extract _preview token from */ url: URL }\n\t| {\n\t\t\t/** Preview token string (can be null) */ token: string | null | undefined;\n\t }\n);\n\n/**\n * Verify a preview token and return the payload\n *\n * @example\n * ```ts\n * // With URL (extracts _preview query param)\n * const result = await verifyPreviewToken({\n * url: Astro.url,\n * secret: import.meta.env.PREVIEW_SECRET,\n * });\n *\n * // With token directly\n * const result = await verifyPreviewToken({\n * token: someToken,\n * secret: import.meta.env.PREVIEW_SECRET,\n * });\n *\n * if (result.valid) {\n * console.log(result.payload.cid); // \"posts:abc123\"\n * }\n * ```\n */\nexport async function verifyPreviewToken(\n\toptions: 
VerifyPreviewTokenOptions,\n): Promise<VerifyPreviewTokenResult> {\n\tconst { secret } = options;\n\n\tif (!secret) {\n\t\tthrow new Error(\"Preview secret is required\");\n\t}\n\n\t// Extract token from URL or use provided token\n\tconst token = \"url\" in options ? options.url.searchParams.get(\"_preview\") : options.token;\n\n\t// Handle null/undefined token\n\tif (!token) {\n\t\treturn { valid: false, error: \"none\" };\n\t}\n\n\t// Split token into payload and signature\n\tconst parts = token.split(\".\");\n\tif (parts.length !== 2) {\n\t\treturn { valid: false, error: \"malformed\" };\n\t}\n\n\tconst [encodedPayload, encodedSignature] = parts;\n\n\t// Verify signature\n\tlet signature: Uint8Array;\n\ttry {\n\t\tsignature = decodeBase64url(encodedSignature);\n\t} catch {\n\t\treturn { valid: false, error: \"malformed\" };\n\t}\n\n\tconst isValid = await verifySignature(encodedPayload, signature, secret);\n\tif (!isValid) {\n\t\treturn { valid: false, error: \"invalid\" };\n\t}\n\n\t// Decode and parse payload\n\tlet payload: PreviewTokenPayload;\n\ttry {\n\t\tconst payloadBytes = decodeBase64url(encodedPayload);\n\t\tconst payloadJson = new TextDecoder().decode(payloadBytes);\n\t\tpayload = JSON.parse(payloadJson);\n\t} catch {\n\t\treturn { valid: false, error: \"malformed\" };\n\t}\n\n\t// Check required fields\n\tif (\n\t\ttypeof payload.cid !== \"string\" ||\n\t\ttypeof payload.exp !== \"number\" ||\n\t\ttypeof payload.iat !== \"number\"\n\t) {\n\t\treturn { valid: false, error: \"malformed\" };\n\t}\n\n\t// Check expiry\n\tconst now = Math.floor(Date.now() / 1000);\n\tif (payload.exp < now) {\n\t\treturn { valid: false, error: \"expired\" };\n\t}\n\n\treturn { valid: true, payload };\n}\n\n/**\n * Parse a content ID into collection and id\n */\nexport function parseContentId(contentId: string): {\n\tcollection: string;\n\tid: string;\n} {\n\tconst colonIndex = contentId.indexOf(\":\");\n\tif (colonIndex === -1) {\n\t\tthrow new Error('Content ID must be 
in format \"collection:id\"');\n\t}\n\treturn {\n\t\tcollection: contentId.slice(0, colonIndex),\n\t\tid: contentId.slice(colonIndex + 1),\n\t};\n}\n"],"mappings":";;;;;;;;;;;AAYA,MAAM,mBAAmB;;;;;AA8BzB,SAAS,cAAc,UAAmC;AACzD,KAAI,OAAO,aAAa,SACvB,QAAO;CAGR,MAAM,QAAQ,SAAS,MAAM,iBAAiB;AAC9C,KAAI,CAAC,MACJ,OAAM,IAAI,MACT,6BAA6B,SAAS,6CACtC;CAGF,MAAM,QAAQ,SAAS,MAAM,IAAI,GAAG;CACpC,MAAM,OAAO,MAAM;AAEnB,SAAQ,MAAR;EACC,KAAK,IACJ,QAAO;EACR,KAAK,IACJ,QAAO,QAAQ;EAChB,KAAK,IACJ,QAAO,QAAQ,KAAK;EACrB,KAAK,IACJ,QAAO,QAAQ,KAAK,KAAK;EAC1B,KAAK,IACJ,QAAO,QAAQ,KAAK,KAAK,KAAK;EAC/B,QACC,OAAM,IAAI,MAAM,0BAA0B,OAAO;;;;;;AAOpD,eAAe,gBAAgB,MAAc,QAAqC;CACjF,MAAM,UAAU,IAAI,aAAa;CACjC,MAAM,MAAM,MAAM,OAAO,OAAO,UAC/B,OACA,QAAQ,OAAO,OAAO,EACtB;EAAE,MAAM;EAAQ,MAAM;EAAW,EACjC,OACA,CAAC,OAAO,CACR;CACD,MAAM,YAAY,MAAM,OAAO,OAAO,KAAK,QAAQ,KAAK,QAAQ,OAAO,KAAK,CAAC;AAC7E,QAAO,IAAI,WAAW,UAAU;;;;;AAMjC,eAAe,gBACd,MACA,WACA,QACmB;CACnB,MAAM,UAAU,IAAI,aAAa;CACjC,MAAM,MAAM,MAAM,OAAO,OAAO,UAC/B,OACA,QAAQ,OAAO,OAAO,EACtB;EAAE,MAAM;EAAQ,MAAM;EAAW,EACjC,OACA,CAAC,SAAS,CACV;CAGD,MAAM,YAAyB,IAAI,YAAY,UAAU,WAAW;AACpE,KAAI,WAAW,UAAU,CAAC,IAAI,UAAU;AACxC,QAAO,OAAO,OAAO,OAAO,QAAQ,KAAK,WAAW,QAAQ,OAAO,KAAK,CAAC;;;;;;;;;;;;;;AAe1E,eAAsB,qBAAqB,SAAuD;CACjG,MAAM,EAAE,WAAW,YAAY,MAAM,WAAW;AAEhD,KAAI,CAAC,OACJ,OAAM,IAAI,MAAM,6BAA6B;AAG9C,KAAI,CAAC,aAAa,CAAC,UAAU,SAAS,IAAI,CACzC,OAAM,IAAI,MAAM,iDAA+C;CAGhE,MAAM,MAAM,KAAK,MAAM,KAAK,KAAK,GAAG,IAAK;CAGzC,MAAM,UAA+B;EACpC,KAAK;EACL,KAAK,MAJW,cAAc,UAAU;EAKxC,KAAK;EACL;CAGD,MAAM,cAAc,KAAK,UAAU,QAAQ;CAC3C,MAAM,iBAAiB,gBAAgB,IAAI,aAAa,CAAC,OAAO,YAAY,CAAC;AAM7E,QAAO,GAAG,eAAe,GAFA,gBADP,MAAM,gBAAgB,gBAAgB,OAAO,CACZ;;;;;;;;;;;;;;;;;;;;;;;;AA+CpD,eAAsB,mBACrB,SACoC;CACpC,MAAM,EAAE,WAAW;AAEnB,KAAI,CAAC,OACJ,OAAM,IAAI,MAAM,6BAA6B;CAI9C,MAAM,QAAQ,SAAS,UAAU,QAAQ,IAAI,aAAa,IAAI,WAAW,GAAG,QAAQ;AAGpF,KAAI,CAAC,MACJ,QAAO;EAAE,OAAO;EAAO,OAAO;EAAQ;CAIvC,MAAM,QAAQ,MAAM,MAAM,IAAI;AAC9B,KAAI,MAAM,WAAW,EACpB,QAAO;EAAE,OAAO;EAAO,OAAO;EAAa;CAG5C,MAAM,CAAC,gBAAgB,oBAAoB;CAG3C,IAAI;AACJ,KAAI;AACH,cAAY,gBA
AgB,iBAAiB;SACtC;AACP,SAAO;GAAE,OAAO;GAAO,OAAO;GAAa;;AAI5C,KAAI,CADY,MAAM,gBAAgB,gBAAgB,WAAW,OAAO,CAEvE,QAAO;EAAE,OAAO;EAAO,OAAO;EAAW;CAI1C,IAAI;AACJ,KAAI;EACH,MAAM,eAAe,gBAAgB,eAAe;EACpD,MAAM,cAAc,IAAI,aAAa,CAAC,OAAO,aAAa;AAC1D,YAAU,KAAK,MAAM,YAAY;SAC1B;AACP,SAAO;GAAE,OAAO;GAAO,OAAO;GAAa;;AAI5C,KACC,OAAO,QAAQ,QAAQ,YACvB,OAAO,QAAQ,QAAQ,YACvB,OAAO,QAAQ,QAAQ,SAEvB,QAAO;EAAE,OAAO;EAAO,OAAO;EAAa;CAI5C,MAAM,MAAM,KAAK,MAAM,KAAK,KAAK,GAAG,IAAK;AACzC,KAAI,QAAQ,MAAM,IACjB,QAAO;EAAE,OAAO;EAAO,OAAO;EAAW;AAG1C,QAAO;EAAE,OAAO;EAAM;EAAS;;;;;AAMhC,SAAgB,eAAe,WAG7B;CACD,MAAM,aAAa,UAAU,QAAQ,IAAI;AACzC,KAAI,eAAe,GAClB,OAAM,IAAI,MAAM,iDAA+C;AAEhE,QAAO;EACN,YAAY,UAAU,MAAM,GAAG,WAAW;EAC1C,IAAI,UAAU,MAAM,aAAa,EAAE;EACnC"}
1
+ {"version":3,"file":"tokens-DrB-W6Q-.mjs","names":[],"sources":["../src/preview/tokens.ts"],"sourcesContent":["/**\n * Preview token generation and verification\n *\n * Tokens are compact, URL-safe, and HMAC-signed.\n * Format: base64url(JSON payload).base64url(HMAC signature)\n *\n * Payload: { cid: contentId, exp: expiryTimestamp, iat: issuedAt }\n */\n\nimport { encodeBase64url, decodeBase64url } from \"../utils/base64.js\";\n\n// Regex pattern for duration parsing\nconst DURATION_PATTERN = /^(\\d+)([smhdw])$/;\n\n/**\n * Preview token payload\n */\nexport interface PreviewTokenPayload {\n\t/** Content ID in format \"collection:id\" (e.g., \"posts:abc123\") */\n\tcid: string;\n\t/** Expiry timestamp (seconds since epoch) */\n\texp: number;\n\t/** Issued at timestamp (seconds since epoch) */\n\tiat: number;\n}\n\n/**\n * Options for generating a preview token\n */\nexport interface GeneratePreviewTokenOptions {\n\t/** Content ID in format \"collection:id\" */\n\tcontentId: string;\n\t/** How long the token is valid. Accepts \"1h\", \"30m\", \"1d\", or seconds as number. Default: \"1h\" */\n\texpiresIn?: string | number;\n\t/** Secret key for signing. Should be from environment variable. */\n\tsecret: string;\n}\n\n/**\n * Parse duration string to seconds\n * Supports: \"1h\", \"30m\", \"1d\", \"2w\", or raw seconds\n */\nfunction parseDuration(duration: string | number): number {\n\tif (typeof duration === \"number\") {\n\t\treturn duration;\n\t}\n\n\tconst match = duration.match(DURATION_PATTERN);\n\tif (!match) {\n\t\tthrow new Error(\n\t\t\t`Invalid duration format: \"${duration}\". 
Use \"1h\", \"30m\", \"1d\", \"2w\", or seconds.`,\n\t\t);\n\t}\n\n\tconst value = parseInt(match[1], 10);\n\tconst unit = match[2];\n\n\tswitch (unit) {\n\t\tcase \"s\":\n\t\t\treturn value;\n\t\tcase \"m\":\n\t\t\treturn value * 60;\n\t\tcase \"h\":\n\t\t\treturn value * 60 * 60;\n\t\tcase \"d\":\n\t\t\treturn value * 60 * 60 * 24;\n\t\tcase \"w\":\n\t\t\treturn value * 60 * 60 * 24 * 7;\n\t\tdefault:\n\t\t\tthrow new Error(`Unknown duration unit: ${unit}`);\n\t}\n}\n\n/**\n * Create HMAC-SHA256 signature using Web Crypto API\n */\nasync function createSignature(data: string, secret: string): Promise<Uint8Array> {\n\tconst encoder = new TextEncoder();\n\tconst key = await crypto.subtle.importKey(\n\t\t\"raw\",\n\t\tencoder.encode(secret),\n\t\t{ name: \"HMAC\", hash: \"SHA-256\" },\n\t\tfalse,\n\t\t[\"sign\"],\n\t);\n\tconst signature = await crypto.subtle.sign(\"HMAC\", key, encoder.encode(data));\n\treturn new Uint8Array(signature);\n}\n\n/**\n * Verify HMAC-SHA256 signature\n */\nasync function verifySignature(\n\tdata: string,\n\tsignature: Uint8Array,\n\tsecret: string,\n): Promise<boolean> {\n\tconst encoder = new TextEncoder();\n\tconst key = await crypto.subtle.importKey(\n\t\t\"raw\",\n\t\tencoder.encode(secret),\n\t\t{ name: \"HMAC\", hash: \"SHA-256\" },\n\t\tfalse,\n\t\t[\"verify\"],\n\t);\n\t// Create a new ArrayBuffer from the signature to satisfy BufferSource typing\n\t// (Uint8Array.buffer is ArrayBufferLike which includes SharedArrayBuffer)\n\tconst sigBuffer: ArrayBuffer = new ArrayBuffer(signature.byteLength);\n\tnew Uint8Array(sigBuffer).set(signature);\n\treturn crypto.subtle.verify(\"HMAC\", key, sigBuffer, encoder.encode(data));\n}\n\n/**\n * Generate a preview token for content\n *\n * @example\n * ```ts\n * const token = await generatePreviewToken({\n * contentId: \"posts:abc123\",\n * expiresIn: \"1h\",\n * secret: process.env.PREVIEW_SECRET!,\n * });\n * ```\n */\nexport async function generatePreviewToken(options: 
GeneratePreviewTokenOptions): Promise<string> {\n\tconst { contentId, expiresIn = \"1h\", secret } = options;\n\n\tif (!secret) {\n\t\tthrow new Error(\"Preview secret is required\");\n\t}\n\n\tif (!contentId || !contentId.includes(\":\")) {\n\t\tthrow new Error('Content ID must be in format \"collection:id\"');\n\t}\n\n\tconst now = Math.floor(Date.now() / 1000);\n\tconst duration = parseDuration(expiresIn);\n\n\tconst payload: PreviewTokenPayload = {\n\t\tcid: contentId,\n\t\texp: now + duration,\n\t\tiat: now,\n\t};\n\n\t// Encode payload\n\tconst payloadJson = JSON.stringify(payload);\n\tconst encodedPayload = encodeBase64url(new TextEncoder().encode(payloadJson));\n\n\t// Sign it\n\tconst signature = await createSignature(encodedPayload, secret);\n\tconst encodedSignature = encodeBase64url(signature);\n\n\treturn `${encodedPayload}.${encodedSignature}`;\n}\n\n/**\n * Result of verifying a preview token\n */\nexport type VerifyPreviewTokenResult =\n\t| { valid: true; payload: PreviewTokenPayload }\n\t| { valid: false; error: \"invalid\" | \"expired\" | \"malformed\" | \"none\" };\n\n/**\n * Options for verifyPreviewToken\n */\nexport type VerifyPreviewTokenOptions = {\n\t/** Secret key for verifying tokens */\n\tsecret: string;\n} & (\n\t| { /** URL to extract _preview token from */ url: URL }\n\t| {\n\t\t\t/** Preview token string (can be null) */ token: string | null | undefined;\n\t }\n);\n\n/**\n * Verify a preview token and return the payload\n *\n * @example\n * ```ts\n * // With URL (extracts _preview query param)\n * const result = await verifyPreviewToken({\n * url: Astro.url,\n * secret: import.meta.env.PREVIEW_SECRET,\n * });\n *\n * // With token directly\n * const result = await verifyPreviewToken({\n * token: someToken,\n * secret: import.meta.env.PREVIEW_SECRET,\n * });\n *\n * if (result.valid) {\n * console.log(result.payload.cid); // \"posts:abc123\"\n * }\n * ```\n */\nexport async function verifyPreviewToken(\n\toptions: 
VerifyPreviewTokenOptions,\n): Promise<VerifyPreviewTokenResult> {\n\tconst { secret } = options;\n\n\tif (!secret) {\n\t\tthrow new Error(\"Preview secret is required\");\n\t}\n\n\t// Extract token from URL or use provided token\n\tconst token = \"url\" in options ? options.url.searchParams.get(\"_preview\") : options.token;\n\n\t// Handle null/undefined token\n\tif (!token) {\n\t\treturn { valid: false, error: \"none\" };\n\t}\n\n\t// Split token into payload and signature\n\tconst parts = token.split(\".\");\n\tif (parts.length !== 2) {\n\t\treturn { valid: false, error: \"malformed\" };\n\t}\n\n\tconst [encodedPayload, encodedSignature] = parts;\n\n\t// Verify signature\n\tlet signature: Uint8Array;\n\ttry {\n\t\tsignature = decodeBase64url(encodedSignature);\n\t} catch {\n\t\treturn { valid: false, error: \"malformed\" };\n\t}\n\n\tconst isValid = await verifySignature(encodedPayload, signature, secret);\n\tif (!isValid) {\n\t\treturn { valid: false, error: \"invalid\" };\n\t}\n\n\t// Decode and parse payload\n\tlet payload: PreviewTokenPayload;\n\ttry {\n\t\tconst payloadBytes = decodeBase64url(encodedPayload);\n\t\tconst payloadJson = new TextDecoder().decode(payloadBytes);\n\t\tpayload = JSON.parse(payloadJson);\n\t} catch {\n\t\treturn { valid: false, error: \"malformed\" };\n\t}\n\n\t// Check required fields\n\tif (\n\t\ttypeof payload.cid !== \"string\" ||\n\t\ttypeof payload.exp !== \"number\" ||\n\t\ttypeof payload.iat !== \"number\"\n\t) {\n\t\treturn { valid: false, error: \"malformed\" };\n\t}\n\n\t// Check expiry\n\tconst now = Math.floor(Date.now() / 1000);\n\tif (payload.exp < now) {\n\t\treturn { valid: false, error: \"expired\" };\n\t}\n\n\treturn { valid: true, payload };\n}\n\n/**\n * Parse a content ID into collection and id\n */\nexport function parseContentId(contentId: string): {\n\tcollection: string;\n\tid: string;\n} {\n\tconst colonIndex = contentId.indexOf(\":\");\n\tif (colonIndex === -1) {\n\t\tthrow new Error('Content ID must be 
in format \"collection:id\"');\n\t}\n\treturn {\n\t\tcollection: contentId.slice(0, colonIndex),\n\t\tid: contentId.slice(colonIndex + 1),\n\t};\n}\n"],"mappings":";;;;;;;;;;;AAYA,MAAM,mBAAmB;;;;;AA8BzB,SAAS,cAAc,UAAmC;AACzD,KAAI,OAAO,aAAa,SACvB,QAAO;CAGR,MAAM,QAAQ,SAAS,MAAM,iBAAiB;AAC9C,KAAI,CAAC,MACJ,OAAM,IAAI,MACT,6BAA6B,SAAS,6CACtC;CAGF,MAAM,QAAQ,SAAS,MAAM,IAAI,GAAG;CACpC,MAAM,OAAO,MAAM;AAEnB,SAAQ,MAAR;EACC,KAAK,IACJ,QAAO;EACR,KAAK,IACJ,QAAO,QAAQ;EAChB,KAAK,IACJ,QAAO,QAAQ,KAAK;EACrB,KAAK,IACJ,QAAO,QAAQ,KAAK,KAAK;EAC1B,KAAK,IACJ,QAAO,QAAQ,KAAK,KAAK,KAAK;EAC/B,QACC,OAAM,IAAI,MAAM,0BAA0B,OAAO;;;;;;AAOpD,eAAe,gBAAgB,MAAc,QAAqC;CACjF,MAAM,UAAU,IAAI,aAAa;CACjC,MAAM,MAAM,MAAM,OAAO,OAAO,UAC/B,OACA,QAAQ,OAAO,OAAO,EACtB;EAAE,MAAM;EAAQ,MAAM;EAAW,EACjC,OACA,CAAC,OAAO,CACR;CACD,MAAM,YAAY,MAAM,OAAO,OAAO,KAAK,QAAQ,KAAK,QAAQ,OAAO,KAAK,CAAC;AAC7E,QAAO,IAAI,WAAW,UAAU;;;;;AAMjC,eAAe,gBACd,MACA,WACA,QACmB;CACnB,MAAM,UAAU,IAAI,aAAa;CACjC,MAAM,MAAM,MAAM,OAAO,OAAO,UAC/B,OACA,QAAQ,OAAO,OAAO,EACtB;EAAE,MAAM;EAAQ,MAAM;EAAW,EACjC,OACA,CAAC,SAAS,CACV;CAGD,MAAM,YAAyB,IAAI,YAAY,UAAU,WAAW;AACpE,KAAI,WAAW,UAAU,CAAC,IAAI,UAAU;AACxC,QAAO,OAAO,OAAO,OAAO,QAAQ,KAAK,WAAW,QAAQ,OAAO,KAAK,CAAC;;;;;;;;;;;;;;AAe1E,eAAsB,qBAAqB,SAAuD;CACjG,MAAM,EAAE,WAAW,YAAY,MAAM,WAAW;AAEhD,KAAI,CAAC,OACJ,OAAM,IAAI,MAAM,6BAA6B;AAG9C,KAAI,CAAC,aAAa,CAAC,UAAU,SAAS,IAAI,CACzC,OAAM,IAAI,MAAM,iDAA+C;CAGhE,MAAM,MAAM,KAAK,MAAM,KAAK,KAAK,GAAG,IAAK;CAGzC,MAAM,UAA+B;EACpC,KAAK;EACL,KAAK,MAJW,cAAc,UAAU;EAKxC,KAAK;EACL;CAGD,MAAM,cAAc,KAAK,UAAU,QAAQ;CAC3C,MAAM,iBAAiB,gBAAgB,IAAI,aAAa,CAAC,OAAO,YAAY,CAAC;AAM7E,QAAO,GAAG,eAAe,GAFA,gBADP,MAAM,gBAAgB,gBAAgB,OAAO,CACZ;;;;;;;;;;;;;;;;;;;;;;;;AA+CpD,eAAsB,mBACrB,SACoC;CACpC,MAAM,EAAE,WAAW;AAEnB,KAAI,CAAC,OACJ,OAAM,IAAI,MAAM,6BAA6B;CAI9C,MAAM,QAAQ,SAAS,UAAU,QAAQ,IAAI,aAAa,IAAI,WAAW,GAAG,QAAQ;AAGpF,KAAI,CAAC,MACJ,QAAO;EAAE,OAAO;EAAO,OAAO;EAAQ;CAIvC,MAAM,QAAQ,MAAM,MAAM,IAAI;AAC9B,KAAI,MAAM,WAAW,EACpB,QAAO;EAAE,OAAO;EAAO,OAAO;EAAa;CAG5C,MAAM,CAAC,gBAAgB,oBAAoB;CAG3C,IAAI;AACJ,KAAI;AACH,cAAY,gBA
AgB,iBAAiB;SACtC;AACP,SAAO;GAAE,OAAO;GAAO,OAAO;GAAa;;AAI5C,KAAI,CADY,MAAM,gBAAgB,gBAAgB,WAAW,OAAO,CAEvE,QAAO;EAAE,OAAO;EAAO,OAAO;EAAW;CAI1C,IAAI;AACJ,KAAI;EACH,MAAM,eAAe,gBAAgB,eAAe;EACpD,MAAM,cAAc,IAAI,aAAa,CAAC,OAAO,aAAa;AAC1D,YAAU,KAAK,MAAM,YAAY;SAC1B;AACP,SAAO;GAAE,OAAO;GAAO,OAAO;GAAa;;AAI5C,KACC,OAAO,QAAQ,QAAQ,YACvB,OAAO,QAAQ,QAAQ,YACvB,OAAO,QAAQ,QAAQ,SAEvB,QAAO;EAAE,OAAO;EAAO,OAAO;EAAa;CAI5C,MAAM,MAAM,KAAK,MAAM,KAAK,KAAK,GAAG,IAAK;AACzC,KAAI,QAAQ,MAAM,IACjB,QAAO;EAAE,OAAO;EAAO,OAAO;EAAW;AAG1C,QAAO;EAAE,OAAO;EAAM;EAAS;;;;;AAMhC,SAAgB,eAAe,WAG7B;CACD,MAAM,aAAa,UAAU,QAAQ,IAAI;AACzC,KAAI,eAAe,GAClB,OAAM,IAAI,MAAM,iDAA+C;AAEhE,QAAO;EACN,YAAY,UAAU,MAAM,GAAG,WAAW;EAC1C,IAAI,UAAU,MAAM,aAAa,EAAE;EACnC"}
@@ -9,6 +9,8 @@ interface CreateContentInput {
9
9
  locale?: string;
10
10
  translationOf?: string;
11
11
  publishedAt?: string | null;
12
+ /** Override created_at (ISO 8601). Used by importers to preserve original dates. */
13
+ createdAt?: string | null;
12
14
  }
13
15
  interface UpdateContentInput {
14
16
  data?: Record<string, unknown>;
@@ -99,4 +101,4 @@ declare class EmDashValidationError extends Error {
99
101
  }
100
102
  //#endregion
101
103
  export { ContentSeoInput as a, FindManyOptions as c, ContentSeo as i, FindManyResult as l, ContentBylineCredit as n, CreateContentInput as o, ContentItem as r, EmDashValidationError as s, BylineSummary as t, UpdateContentInput as u };
102
- //# sourceMappingURL=types-BRuPJGdV.d.mts.map
104
+ //# sourceMappingURL=types-BbsYgi_R.d.mts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"types-BbsYgi_R.d.mts","names":[],"sources":["../src/database/repositories/types.ts"],"mappings":";UAEiB,kBAAA;EAChB,IAAA;EACA,IAAA;EACA,IAAA,EAAM,MAAA;EACN,MAAA;EACA,QAAA;EACA,eAAA;EACA,MAAA;EACA,aAAA;EACA,WAAA;EAJA;EAMA,SAAA;AAAA;AAAA,UAGgB,kBAAA;EAChB,IAAA,GAAO,MAAA;EACP,MAAA;EACA,IAAA;EACA,WAAA;EACA,WAAA;EACA,QAAA;EACA,eAAA;AAAA;;UAIgB,UAAA;EAChB,KAAA;EACA,WAAA;EACA,KAAA;EACA,SAAA;EACA,OAAA;AAAA;;UAIgB,eAAA;EAChB,KAAA;EACA,WAAA;EACA,KAAA;EACA,SAAA;EACA,OAAA;AAAA;AAAA,UAGgB,aAAA;EAChB,EAAA;EACA,IAAA;EACA,WAAA;EACA,GAAA;EACA,aAAA;EACA,UAAA;EACA,MAAA;EACA,OAAA;EACA,SAAA;EACA,SAAA;AAAA;AAAA,UAGgB,mBAAA;EAChB,MAAA,EAAQ,aAAA;EACR,SAAA;EACA,SAAA;EAhBgB;EAkBhB,MAAA;AAAA;AAAA,UAGgB,eAAA;EAChB,KAAA;IACC,MAAA;IACA,QAAA;IACA,MAAA;EAAA;EAED,OAAA;IACC,KAAA;IACA,SAAA;EAAA;EAED,KAAA;EACA,MAAA;AAAA;AAAA,UAGgB,cAAA;EAChB,KAAA,EAAO,CAAA;EACP,UAAA;AAAA;AAAA,UAqBgB,WAAA;EAChB,EAAA;EACA,IAAA;EACA,IAAA;EACA,MAAA;EACA,IAAA,EAAM,MAAA;EACN,QAAA;EACA,eAAA;EACA,MAAA,GAAS,aAAA;EACT,OAAA,GAAU,mBAAA;EACV,SAAA;EACA,SAAA;EACA,WAAA;EACA,WAAA;EACA,cAAA;EACA,eAAA;EACA,OAAA;EACA,MAAA;EACA,gBAAA;EAzC+B;EA2C/B,GAAA,GAAM,UAAA;AAAA;AAAA,cAGM,qBAAA,SAA8B,KAAA;EAGlC,OAAA;cADP,OAAA,UACO,OAAA;AAAA"}
@@ -13,4 +13,4 @@ var EmDashStorageError = class extends Error {
13
13
 
14
14
  //#endregion
15
15
  export { EmDashStorageError as t };
16
- //# sourceMappingURL=types-CUBbjgmP.mjs.map
16
+ //# sourceMappingURL=types-Bec-r_3_.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"types-Bec-r_3_.mjs","names":[],"sources":["../src/storage/types.ts"],"sourcesContent":["/**\n * Storage Layer Types\n *\n * Defines the interface for S3-compatible storage backends.\n * Works with R2, AWS S3, Minio, and other S3-compatible services.\n */\n\n/**\n * Storage configuration for S3-compatible backends\n */\nexport interface S3StorageConfig {\n\t/** S3 endpoint URL (e.g., \"https://xxx.r2.cloudflarestorage.com\") */\n\tendpoint: string;\n\t/** Bucket name */\n\tbucket: string;\n\t/**\n\t * AWS access key ID.\n\t * May be resolved from the `S3_ACCESS_KEY_ID` env var at runtime on Node.\n\t * Must be provided together with `secretAccessKey`, or both omitted.\n\t */\n\taccessKeyId?: string;\n\t/**\n\t * AWS secret access key.\n\t * May be resolved from the `S3_SECRET_ACCESS_KEY` env var at runtime on Node.\n\t * Must be provided together with `accessKeyId`, or both omitted.\n\t */\n\tsecretAccessKey?: string;\n\t/** Optional region (defaults to \"auto\") */\n\tregion?: string;\n\t/** Optional public URL prefix for generated URLs (e.g., CDN URL) */\n\tpublicUrl?: string;\n}\n\n/**\n * Local filesystem storage for development\n */\nexport interface LocalStorageConfig {\n\t/** Directory path for storing files */\n\tdirectory: string;\n\t/** Base URL for serving files */\n\tbaseUrl: string;\n}\n\n/**\n * Storage adapter descriptor (serializable config)\n */\nexport interface StorageDescriptor {\n\t/** Module path exporting createStorage function */\n\tentrypoint: string;\n\t/** Serializable config passed to createStorage at runtime */\n\tconfig: Record<string, unknown>;\n}\n\n/**\n * Factory function signature for storage adapters\n *\n * Each adapter accesses its own bindings directly:\n * - R2: imports from cloudflare:workers\n * - S3: uses credentials from config\n * - Local: uses filesystem path from config\n */\nexport type CreateStorageFn = (config: Record<string, unknown>) => Storage;\n\n/**\n * Upload result\n */\nexport interface 
UploadResult {\n\t/** Storage key (path within bucket) */\n\tkey: string;\n\t/** Public URL to access the file */\n\turl: string;\n\t/** File size in bytes */\n\tsize: number;\n}\n\n/**\n * Download result\n */\nexport interface DownloadResult {\n\t/** File content as readable stream */\n\tbody: ReadableStream<Uint8Array>;\n\t/** MIME type */\n\tcontentType: string;\n\t/** File size in bytes */\n\tsize: number;\n}\n\n/**\n * Signed URL for direct upload\n */\nexport interface SignedUploadUrl {\n\t/** Signed URL for PUT request */\n\turl: string;\n\t/** HTTP method (always PUT) */\n\tmethod: \"PUT\";\n\t/** Headers to include in the upload request */\n\theaders: Record<string, string>;\n\t/** URL expiration time (ISO string) */\n\texpiresAt: string;\n}\n\n/**\n * Options for generating signed upload URL\n */\nexport interface SignedUploadOptions {\n\t/** Storage key (path within bucket) */\n\tkey: string;\n\t/** MIME type of the file */\n\tcontentType: string;\n\t/** File size in bytes (for content-length validation) */\n\tsize?: number;\n\t/** URL expiration in seconds (default: 3600) */\n\texpiresIn?: number;\n}\n\n/**\n * File listing result\n */\nexport interface ListResult {\n\t/** List of files */\n\tfiles: FileInfo[];\n\t/** Cursor for next page (if more results) */\n\tnextCursor?: string;\n}\n\n/**\n * File info from listing\n */\nexport interface FileInfo {\n\t/** Storage key */\n\tkey: string;\n\t/** File size in bytes */\n\tsize: number;\n\t/** Last modified date */\n\tlastModified: Date;\n\t/** ETag (content hash) */\n\tetag?: string;\n}\n\n/**\n * Options for listing files\n */\nexport interface ListOptions {\n\t/** Filter by key prefix */\n\tprefix?: string;\n\t/** Maximum results per page */\n\tlimit?: number;\n\t/** Cursor from previous list call */\n\tcursor?: string;\n}\n\n/**\n * Storage interface\n *\n * All storage backends must implement this interface.\n */\nexport interface Storage {\n\t/**\n\t * Upload a file to storage\n\t 
*/\n\tupload(options: {\n\t\tkey: string;\n\t\tbody: Buffer | Uint8Array | ReadableStream<Uint8Array>;\n\t\tcontentType: string;\n\t}): Promise<UploadResult>;\n\n\t/**\n\t * Download a file from storage\n\t */\n\tdownload(key: string): Promise<DownloadResult>;\n\n\t/**\n\t * Delete a file from storage\n\t * Idempotent - does not throw if file doesn't exist\n\t */\n\tdelete(key: string): Promise<void>;\n\n\t/**\n\t * Check if a file exists\n\t */\n\texists(key: string): Promise<boolean>;\n\n\t/**\n\t * List files in storage\n\t */\n\tlist(options?: ListOptions): Promise<ListResult>;\n\n\t/**\n\t * Generate a signed URL for direct upload\n\t * Client uploads directly to storage, bypassing the server\n\t */\n\tgetSignedUploadUrl(options: SignedUploadOptions): Promise<SignedUploadUrl>;\n\n\t/**\n\t * Get public URL for a file\n\t */\n\tgetPublicUrl(key: string): string;\n}\n\n/**\n * Storage error with additional context\n */\nexport class EmDashStorageError extends Error {\n\tconstructor(\n\t\tmessage: string,\n\t\tpublic code: string,\n\t\tpublic override cause?: unknown,\n\t) {\n\t\tsuper(message);\n\t\tthis.name = \"EmDashStorageError\";\n\t}\n}\n"],"mappings":";;;;AA0MA,IAAa,qBAAb,cAAwC,MAAM;CAC7C,YACC,SACA,AAAO,MACP,AAAgB,OACf;AACD,QAAM,QAAQ;EAHP;EACS;AAGhB,OAAK,OAAO"}
@@ -13,11 +13,19 @@ interface S3StorageConfig {
13
13
  endpoint: string;
14
14
  /** Bucket name */
15
15
  bucket: string;
16
- /** AWS access key ID */
17
- accessKeyId: string;
18
- /** AWS secret access key */
19
- secretAccessKey: string;
20
- /** Optional region (defaults to "auto" for R2) */
16
+ /**
17
+ * AWS access key ID.
18
+ * May be resolved from the `S3_ACCESS_KEY_ID` env var at runtime on Node.
19
+ * Must be provided together with `secretAccessKey`, or both omitted.
20
+ */
21
+ accessKeyId?: string;
22
+ /**
23
+ * AWS secret access key.
24
+ * May be resolved from the `S3_SECRET_ACCESS_KEY` env var at runtime on Node.
25
+ * Must be provided together with `accessKeyId`, or both omitted.
26
+ */
27
+ secretAccessKey?: string;
28
+ /** Optional region (defaults to "auto") */
21
29
  region?: string;
22
30
  /** Optional public URL prefix for generated URLs (e.g., CDN URL) */
23
31
  publicUrl?: string;
@@ -181,4 +189,4 @@ declare class EmDashStorageError extends Error {
181
189
  }
182
190
  //#endregion
183
191
  export { ListOptions as a, S3StorageConfig as c, Storage as d, StorageDescriptor as f, FileInfo as i, SignedUploadOptions as l, DownloadResult as n, ListResult as o, UploadResult as p, EmDashStorageError as r, LocalStorageConfig as s, CreateStorageFn as t, SignedUploadUrl as u };
184
- //# sourceMappingURL=types-DaNLHo_T.d.mts.map
192
+ //# sourceMappingURL=types-C1-PVaS_.d.mts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"types-C1-PVaS_.d.mts","names":[],"sources":["../src/storage/types.ts"],"mappings":";;AAUA;;;;;;;;UAAiB,eAAA;EAoBhB;EAlBA,QAAA;EAkBS;EAhBT,MAAA;EAsBkC;;;;AAUnC;EA1BC,WAAA;;;;;;EAMA,eAAA;EAwBc;EAtBd,MAAA;EAiC0B;EA/B1B,SAAA;AAAA;;;;UAMgB,kBAAA;EAyByD;EAvBzE,SAAA;EA4B4B;EA1B5B,OAAA;AAAA;;;;UAMgB,iBAAA;EA0BZ;EAxBJ,UAAA;EA8B8B;EA5B9B,MAAA,EAAQ,MAAA;AAAA;;;;;;;;AAwCT;KA7BY,eAAA,IAAmB,MAAA,EAAQ,MAAA,sBAA4B,OAAA;;;;UAKlD,YAAA;EA8BhB;EA5BA,GAAA;EA8BA;EA5BA,GAAA;EA4BS;EA1BT,IAAA;AAAA;;;;UAMgB,cAAA;EAgChB;EA9BA,IAAA,EAAM,cAAA,CAAe,UAAA;EAgCZ;EA9BT,WAAA;EAoCgB;EAlChB,IAAA;AAAA;;;;UAMgB,eAAA;EAgCN;EA9BV,GAAA;EAoCgB;EAlChB,MAAA;;EAEA,OAAA,EAAS,MAAA;EAkCT;EAhCA,SAAA;AAAA;;;;UAMgB,mBAAA;EAsCA;EApChB,GAAA;;EAEA,WAAA;EAoCA;EAlCA,IAAA;EAsCA;EApCA,SAAA;AAAA;AA4CD;;;AAAA,UAtCiB,UAAA;EA4CA;EA1ChB,KAAA,EAAO,QAAA;EA0CsB;EAxC7B,UAAA;AAAA;;;;UAMgB,QAAA;EAyDD;EAvDf,GAAA;EAuD6B;EArD7B,IAAA;EA2D0D;EAzD1D,YAAA,EAAc,IAAA;EAyD2C;EAvDzD,IAAA;AAAA;;;;UAMgB,WAAA;EAoBa;EAlB7B,MAAA;EAmBC;EAjBD,KAAA;EAkBI;EAhBJ,MAAA;AAAA;;;;;;UAQgB,OAAA;EAwBhB;;;EApBA,MAAA,CAAO,OAAA;IACN,GAAA;IACA,IAAA,EAAM,MAAA,GAAS,UAAA,GAAa,cAAA,CAAe,UAAA;IAC3C,WAAA;EAAA,IACG,OAAA,CAAQ,YAAA;EA2BZ;;;EAtBA,QAAA,CAAS,GAAA,WAAc,OAAA,CAAQ,cAAA;EAsB2B;;;;EAhB1D,MAAA,CAAO,GAAA,WAAc,OAAA;EA2BT;;;EAtBZ,MAAA,CAAO,GAAA,WAAc,OAAA;EAsBkB;;;EAjBvC,IAAA,CAAK,OAAA,GAAU,WAAA,GAAc,OAAA,CAAQ,UAAA;EAmBpC;;;;EAbD,kBAAA,CAAmB,OAAA,EAAS,mBAAA,GAAsB,OAAA,CAAQ,eAAA;;;;EAK1D,YAAA,CAAa,GAAA;AAAA;;;;cAMD,kBAAA,SAA2B,KAAA;EAG/B,IAAA;EACS,KAAA;cAFhB,OAAA,UACO,IAAA,UACS,KAAA;AAAA"}
@@ -1 +1 @@
1
- {"version":3,"file":"types-CMMN0pNg.mjs","names":[],"sources":["../src/database/repositories/types.ts"],"sourcesContent":["import { encodeBase64, decodeBase64 } from \"../../utils/base64.js\";\n\nexport interface CreateContentInput {\n\ttype: string;\n\tslug?: string | null;\n\tdata: Record<string, unknown>;\n\tstatus?: string;\n\tauthorId?: string;\n\tprimaryBylineId?: string | null;\n\tlocale?: string;\n\ttranslationOf?: string;\n\tpublishedAt?: string | null;\n}\n\nexport interface UpdateContentInput {\n\tdata?: Record<string, unknown>;\n\tstatus?: string;\n\tslug?: string | null;\n\tpublishedAt?: string | null;\n\tscheduledAt?: string | null;\n\tauthorId?: string | null;\n\tprimaryBylineId?: string | null;\n}\n\n/** SEO fields for content items */\nexport interface ContentSeo {\n\ttitle: string | null;\n\tdescription: string | null;\n\timage: string | null;\n\tcanonical: string | null;\n\tnoIndex: boolean;\n}\n\n/** Input for updating SEO fields on content */\nexport interface ContentSeoInput {\n\ttitle?: string | null;\n\tdescription?: string | null;\n\timage?: string | null;\n\tcanonical?: string | null;\n\tnoIndex?: boolean;\n}\n\nexport interface BylineSummary {\n\tid: string;\n\tslug: string;\n\tdisplayName: string;\n\tbio: string | null;\n\tavatarMediaId: string | null;\n\twebsiteUrl: string | null;\n\tuserId: string | null;\n\tisGuest: boolean;\n\tcreatedAt: string;\n\tupdatedAt: string;\n}\n\nexport interface ContentBylineCredit {\n\tbyline: BylineSummary;\n\tsortOrder: number;\n\troleLabel: string | null;\n\t/** Whether this credit was explicitly assigned or inferred from authorId */\n\tsource?: \"explicit\" | \"inferred\";\n}\n\nexport interface FindManyOptions {\n\twhere?: {\n\t\tstatus?: string;\n\t\tauthorId?: string;\n\t\tlocale?: string;\n\t};\n\torderBy?: {\n\t\tfield: string;\n\t\tdirection: \"asc\" | \"desc\";\n\t};\n\tlimit?: number;\n\tcursor?: string; // Base64-encoded JSON: {orderValue: string, id: string}\n}\n\nexport interface 
FindManyResult<T> {\n\titems: T[];\n\tnextCursor?: string; // Base64-encoded JSON: {orderValue: string, id: string}\n}\n\n/** Encode a cursor from order value + id */\nexport function encodeCursor(orderValue: string, id: string): string {\n\treturn encodeBase64(JSON.stringify({ orderValue, id }));\n}\n\n/** Decode a cursor to order value + id. Returns null if invalid. */\nexport function decodeCursor(cursor: string): { orderValue: string; id: string } | null {\n\ttry {\n\t\tconst parsed = JSON.parse(decodeBase64(cursor));\n\t\tif (typeof parsed.orderValue === \"string\" && typeof parsed.id === \"string\") {\n\t\t\treturn parsed;\n\t\t}\n\t\treturn null;\n\t} catch {\n\t\treturn null;\n\t}\n}\n\nexport interface ContentItem {\n\tid: string;\n\ttype: string;\n\tslug: string | null;\n\tstatus: string;\n\tdata: Record<string, unknown>;\n\tauthorId: string | null;\n\tprimaryBylineId: string | null;\n\tbyline?: BylineSummary | null;\n\tbylines?: ContentBylineCredit[];\n\tcreatedAt: string;\n\tupdatedAt: string;\n\tpublishedAt: string | null;\n\tscheduledAt: string | null;\n\tliveRevisionId: string | null;\n\tdraftRevisionId: string | null;\n\tversion: number;\n\tlocale: string | null;\n\ttranslationGroup: string | null;\n\t/** SEO metadata — only populated for collections with `has_seo` enabled */\n\tseo?: ContentSeo;\n}\n\nexport class EmDashValidationError extends Error {\n\tconstructor(\n\t\tmessage: string,\n\t\tpublic details?: unknown,\n\t) {\n\t\tsuper(message);\n\t\tthis.name = \"EmDashValidationError\";\n\t}\n}\n"],"mappings":";;;;AAmFA,SAAgB,aAAa,YAAoB,IAAoB;AACpE,QAAO,aAAa,KAAK,UAAU;EAAE;EAAY;EAAI,CAAC,CAAC;;;AAIxD,SAAgB,aAAa,QAA2D;AACvF,KAAI;EACH,MAAM,SAAS,KAAK,MAAM,aAAa,OAAO,CAAC;AAC/C,MAAI,OAAO,OAAO,eAAe,YAAY,OAAO,OAAO,OAAO,SACjE,QAAO;AAER,SAAO;SACA;AACP,SAAO;;;AA2BT,IAAa,wBAAb,cAA2C,MAAM;CAChD,YACC,SACA,AAAO,SACN;AACD,QAAM,QAAQ;EAFP;AAGP,OAAK,OAAO"}
1
+ {"version":3,"file":"types-CMMN0pNg.mjs","names":[],"sources":["../src/database/repositories/types.ts"],"sourcesContent":["import { encodeBase64, decodeBase64 } from \"../../utils/base64.js\";\n\nexport interface CreateContentInput {\n\ttype: string;\n\tslug?: string | null;\n\tdata: Record<string, unknown>;\n\tstatus?: string;\n\tauthorId?: string;\n\tprimaryBylineId?: string | null;\n\tlocale?: string;\n\ttranslationOf?: string;\n\tpublishedAt?: string | null;\n\t/** Override created_at (ISO 8601). Used by importers to preserve original dates. */\n\tcreatedAt?: string | null;\n}\n\nexport interface UpdateContentInput {\n\tdata?: Record<string, unknown>;\n\tstatus?: string;\n\tslug?: string | null;\n\tpublishedAt?: string | null;\n\tscheduledAt?: string | null;\n\tauthorId?: string | null;\n\tprimaryBylineId?: string | null;\n}\n\n/** SEO fields for content items */\nexport interface ContentSeo {\n\ttitle: string | null;\n\tdescription: string | null;\n\timage: string | null;\n\tcanonical: string | null;\n\tnoIndex: boolean;\n}\n\n/** Input for updating SEO fields on content */\nexport interface ContentSeoInput {\n\ttitle?: string | null;\n\tdescription?: string | null;\n\timage?: string | null;\n\tcanonical?: string | null;\n\tnoIndex?: boolean;\n}\n\nexport interface BylineSummary {\n\tid: string;\n\tslug: string;\n\tdisplayName: string;\n\tbio: string | null;\n\tavatarMediaId: string | null;\n\twebsiteUrl: string | null;\n\tuserId: string | null;\n\tisGuest: boolean;\n\tcreatedAt: string;\n\tupdatedAt: string;\n}\n\nexport interface ContentBylineCredit {\n\tbyline: BylineSummary;\n\tsortOrder: number;\n\troleLabel: string | null;\n\t/** Whether this credit was explicitly assigned or inferred from authorId */\n\tsource?: \"explicit\" | \"inferred\";\n}\n\nexport interface FindManyOptions {\n\twhere?: {\n\t\tstatus?: string;\n\t\tauthorId?: string;\n\t\tlocale?: string;\n\t};\n\torderBy?: {\n\t\tfield: string;\n\t\tdirection: \"asc\" | 
\"desc\";\n\t};\n\tlimit?: number;\n\tcursor?: string; // Base64-encoded JSON: {orderValue: string, id: string}\n}\n\nexport interface FindManyResult<T> {\n\titems: T[];\n\tnextCursor?: string; // Base64-encoded JSON: {orderValue: string, id: string}\n}\n\n/** Encode a cursor from order value + id */\nexport function encodeCursor(orderValue: string, id: string): string {\n\treturn encodeBase64(JSON.stringify({ orderValue, id }));\n}\n\n/** Decode a cursor to order value + id. Returns null if invalid. */\nexport function decodeCursor(cursor: string): { orderValue: string; id: string } | null {\n\ttry {\n\t\tconst parsed = JSON.parse(decodeBase64(cursor));\n\t\tif (typeof parsed.orderValue === \"string\" && typeof parsed.id === \"string\") {\n\t\t\treturn parsed;\n\t\t}\n\t\treturn null;\n\t} catch {\n\t\treturn null;\n\t}\n}\n\nexport interface ContentItem {\n\tid: string;\n\ttype: string;\n\tslug: string | null;\n\tstatus: string;\n\tdata: Record<string, unknown>;\n\tauthorId: string | null;\n\tprimaryBylineId: string | null;\n\tbyline?: BylineSummary | null;\n\tbylines?: ContentBylineCredit[];\n\tcreatedAt: string;\n\tupdatedAt: string;\n\tpublishedAt: string | null;\n\tscheduledAt: string | null;\n\tliveRevisionId: string | null;\n\tdraftRevisionId: string | null;\n\tversion: number;\n\tlocale: string | null;\n\ttranslationGroup: string | null;\n\t/** SEO metadata — only populated for collections with `has_seo` enabled */\n\tseo?: ContentSeo;\n}\n\nexport class EmDashValidationError extends Error {\n\tconstructor(\n\t\tmessage: string,\n\t\tpublic details?: unknown,\n\t) {\n\t\tsuper(message);\n\t\tthis.name = 
\"EmDashValidationError\";\n\t}\n}\n"],"mappings":";;;;AAqFA,SAAgB,aAAa,YAAoB,IAAoB;AACpE,QAAO,aAAa,KAAK,UAAU;EAAE;EAAY;EAAI,CAAC,CAAC;;;AAIxD,SAAgB,aAAa,QAA2D;AACvF,KAAI;EACH,MAAM,SAAS,KAAK,MAAM,aAAa,OAAO,CAAC;AAC/C,MAAI,OAAO,OAAO,eAAe,YAAY,OAAO,OAAO,OAAO,SACjE,QAAO;AAER,SAAO;SACA;AACP,SAAO;;;AA2BT,IAAa,wBAAb,cAA2C,MAAM;CAChD,YACC,SACA,AAAO,SACN;AACD,QAAM,QAAQ;EAFP;AAGP,OAAK,OAAO"}
@@ -207,6 +207,32 @@ interface KVAccess {
207
207
  value: unknown;
208
208
  }>>;
209
209
  }
210
+ /**
211
+ * SEO metadata for a content item, as stored in the core SEO panel.
212
+ *
213
+ * Only present on items in collections with `has_seo = 1`. For collections
214
+ * without SEO enabled, `ContentItem.seo` is `undefined`.
215
+ */
216
+ interface ContentItemSeo {
217
+ title: string | null;
218
+ description: string | null;
219
+ image: string | null;
220
+ canonical: string | null;
221
+ noIndex: boolean;
222
+ }
223
+ /**
224
+ * SEO input accepted by content write operations.
225
+ *
226
+ * All fields are optional — only fields that are present overwrite existing
227
+ * values. An empty object is treated as a no-op.
228
+ */
229
+ interface ContentItemSeoInput {
230
+ title?: string | null;
231
+ description?: string | null;
232
+ image?: string | null;
233
+ canonical?: string | null;
234
+ noIndex?: boolean;
235
+ }
210
236
  /**
211
237
  * Content item returned from content API
212
238
  */
@@ -214,6 +240,11 @@ interface ContentItem {
214
240
  id: string;
215
241
  type: string;
216
242
  data: Record<string, unknown>;
243
+ /**
244
+ * SEO metadata, populated when the collection has SEO enabled
245
+ * (`has_seo = 1`). `undefined` for non-SEO collections.
246
+ */
247
+ seo?: ContentItemSeo;
217
248
  createdAt: string;
218
249
  updatedAt: string;
219
250
  }
@@ -225,22 +256,33 @@ interface ContentListOptions {
225
256
  cursor?: string;
226
257
  orderBy?: Record<string, "asc" | "desc">;
227
258
  }
259
+ /**
260
+ * Input accepted by `content.create` / `content.update`.
261
+ *
262
+ * Most entries are field slugs mapped to their values. The reserved `seo`
263
+ * key is extracted and routed to the core SEO panel (the `_emdash_seo`
264
+ * table), matching the shape accepted by the REST API. Passing `seo` for a
265
+ * collection that does not have SEO enabled throws a validation error.
266
+ */
267
+ type ContentWriteInput = Record<string, unknown> & {
268
+ seo?: ContentItemSeoInput;
269
+ };
228
270
  /**
229
271
  * Content access interface - capability-gated
230
272
  */
231
273
  interface ContentAccess {
232
274
  get(collection: string, id: string): Promise<ContentItem | null>;
233
275
  list(collection: string, options?: ContentListOptions): Promise<PaginatedResult<ContentItem>>;
234
- create?(collection: string, data: Record<string, unknown>): Promise<ContentItem>;
235
- update?(collection: string, id: string, data: Record<string, unknown>): Promise<ContentItem>;
276
+ create?(collection: string, data: ContentWriteInput): Promise<ContentItem>;
277
+ update?(collection: string, id: string, data: ContentWriteInput): Promise<ContentItem>;
236
278
  delete?(collection: string, id: string): Promise<boolean>;
237
279
  }
238
280
  /**
239
281
  * Full content access with write operations
240
282
  */
241
283
  interface ContentAccessWithWrite extends ContentAccess {
242
- create(collection: string, data: Record<string, unknown>): Promise<ContentItem>;
243
- update(collection: string, id: string, data: Record<string, unknown>): Promise<ContentItem>;
284
+ create(collection: string, data: ContentWriteInput): Promise<ContentItem>;
285
+ update(collection: string, id: string, data: ContentWriteInput): Promise<ContentItem>;
244
286
  delete(collection: string, id: string): Promise<boolean>;
245
287
  }
246
288
  /**
@@ -674,6 +716,16 @@ type LifecycleHandler = (event: LifecycleEvent, ctx: PluginContext) => Promise<v
674
716
  type UninstallHandler = (event: UninstallEvent, ctx: PluginContext) => Promise<void>;
675
717
  /** Placement targets for page fragment contributions */
676
718
  type PagePlacement = "head" | "body:start" | "body:end";
719
+ /**
720
+ * A single breadcrumb trail item. Used by `PublicPageContext.breadcrumbs`
721
+ * so themes can publish breadcrumb trails that SEO plugins consume.
722
+ */
723
+ interface BreadcrumbItem {
724
+ /** Display name for this crumb (e.g. "Home", "Blog", "My Post"). */
725
+ name: string;
726
+ /** Absolute or root-relative URL for this crumb. */
727
+ url: string;
728
+ }
677
729
  /**
678
730
  * Describes the page being rendered. Passed to page hooks so plugins
679
731
  * can decide what to contribute without fetching content themselves.
@@ -684,7 +736,10 @@ interface PublicPageContext {
684
736
  locale: string | null;
685
737
  kind: "content" | "custom";
686
738
  pageType: string;
739
+ /** Full document title for the rendered page */
687
740
  title: string | null;
741
+ /** Page-only title for OG/Twitter/JSON-LD headline output */
742
+ pageTitle?: string | null;
688
743
  description: string | null;
689
744
  canonical: string | null;
690
745
  image: string | null;
@@ -708,6 +763,23 @@ interface PublicPageContext {
708
763
  };
709
764
  /** Site name for structured data and og:site_name */
710
765
  siteName?: string;
766
+ /**
767
+ * Optional breadcrumb trail for this page, root first. When set,
768
+ * SEO plugins should use this verbatim rather than deriving a trail
769
+ * from `path`. Themes typically populate this at the point they
770
+ * build the context (e.g. from a content hierarchy walk, taxonomy
771
+ * lookup, or per-`pageType` routing logic).
772
+ *
773
+ * Semantics for consumers:
774
+ * - `undefined` — theme has no opinion; consumer falls back to
775
+ * its own derivation.
776
+ * - `[]` — this page has no breadcrumbs (e.g. homepage); consumer
777
+ * should skip `BreadcrumbList` emission entirely.
778
+ * - Non-empty array — used verbatim for `BreadcrumbList` output.
779
+ */
780
+ breadcrumbs?: BreadcrumbItem[];
781
+ /** Public-facing site URL (origin) for structured data */
782
+ siteUrl?: string;
711
783
  }
712
784
  interface PageMetadataEvent {
713
785
  page: PublicPageContext;
@@ -1120,5 +1192,5 @@ interface PluginManifest {
1120
1192
  admin: PluginAdminConfig;
1121
1193
  }
1122
1194
  //#endregion
1123
- export { StandardRouteEntry as $, PageMetadataLinkRel as A, PluginRoute as B, ModerationDecision as C, PageMetadataContribution as D, PageFragmentHandler as E, PluginCapability as F, RequestMeta as G, PortableTextBlockConfig as H, PluginContext as I, ResolvedPluginHooks as J, ResolvedHook as K, PluginDefinition as L, PluginAdminConfig as M, PluginAdminExports as N, PageMetadataEvent as O, PluginAdminPage as P, StandardPluginDefinition as Q, PluginHooks as R, MediaUploadEvent as S, PageFragmentEvent as T, PortableTextBlockField as U, PluginStorageConfig as V, PublicPageContext as W, StandardHookEntry as X, RouteContext as Y, StandardHookHandler as Z, HttpAccess as _, CommentAfterModerateHandler as a, MediaAccess as b, CommentModerateEvent as c, ContentHookEvent as d, StandardRouteHandler as et, CronEvent as f, HookName as g, HookConfig as h, CommentAfterModerateEvent as i, Element as it, PagePlacement as j, PageMetadataHandler as k, CommentModerateHandler as l, FieldWidgetConfig as m, CommentAfterCreateEvent as n, StoredComment as nt, CommentBeforeCreateEvent as o, EmailMessage as p, ResolvedPlugin as q, CommentAfterCreateHandler as r, isStandardPluginDefinition as rt, CommentBeforeCreateHandler as s, CollectionCommentSettings as t, StorageCollection as tt, ContentAccess as u, KVAccess as v, PageFragmentContribution as w, MediaItem as x, LogAccess as y, PluginManifest as z };
1124
- //# sourceMappingURL=types-BQo5JS0J.d.mts.map
1195
+ export { StandardPluginDefinition as $, PageMetadataHandler as A, PluginManifest as B, MediaUploadEvent as C, PageFragmentHandler as D, PageFragmentEvent as E, PluginAdminPage as F, PublicPageContext as G, PluginStorageConfig as H, PluginCapability as I, ResolvedPlugin as J, RequestMeta as K, PluginContext as L, PagePlacement as M, PluginAdminConfig as N, PageMetadataContribution as O, PluginAdminExports as P, StandardHookHandler as Q, PluginDefinition as R, MediaItem as S, PageFragmentContribution as T, PortableTextBlockConfig as U, PluginRoute as V, PortableTextBlockField as W, RouteContext as X, ResolvedPluginHooks as Y, StandardHookEntry as Z, HookName as _, CommentAfterModerateEvent as a, Element as at, LogAccess as b, CommentBeforeCreateHandler as c, ContentAccess as d, StandardRouteEntry as et, ContentHookEvent as f, HookConfig as g, FieldWidgetConfig as h, CommentAfterCreateHandler as i, isStandardPluginDefinition as it, PageMetadataLinkRel as j, PageMetadataEvent as k, CommentModerateEvent as l, EmailMessage as m, CollectionCommentSettings as n, StorageCollection as nt, CommentAfterModerateHandler as o, CronEvent as p, ResolvedHook as q, CommentAfterCreateEvent as r, StoredComment as rt, CommentBeforeCreateEvent as s, BreadcrumbItem as t, StandardRouteHandler as tt, CommentModerateHandler as u, HttpAccess as v, ModerationDecision as w, MediaAccess as x, KVAccess as y, PluginHooks as z };
1196
+ //# sourceMappingURL=types-CaKte3hR.d.mts.map