emdash 0.9.0 → 0.10.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (195)
  1. package/dist/{adapters-DoNJiveC.d.mts → adapters-BktHA7EO.d.mts} +1 -1
  2. package/dist/{adapters-DoNJiveC.d.mts.map → adapters-BktHA7EO.d.mts.map} +1 -1
  3. package/dist/{apply-BzltprvY.mjs → apply-UsrFuO7l.mjs} +156 -254
  4. package/dist/apply-UsrFuO7l.mjs.map +1 -0
  5. package/dist/astro/index.d.mts +6 -6
  6. package/dist/astro/index.mjs +10 -2
  7. package/dist/astro/index.mjs.map +1 -1
  8. package/dist/astro/middleware/auth.d.mts +5 -5
  9. package/dist/astro/middleware/auth.mjs +5 -5
  10. package/dist/astro/middleware/redirect.mjs +5 -5
  11. package/dist/astro/middleware/request-context.mjs +4 -4
  12. package/dist/astro/middleware/setup.mjs +1 -1
  13. package/dist/astro/middleware.mjs +35 -34
  14. package/dist/astro/middleware.mjs.map +1 -1
  15. package/dist/astro/types.d.mts +8 -9
  16. package/dist/astro/types.d.mts.map +1 -1
  17. package/dist/{base64-BRICGH2l.mjs → base64-MBPo9ozB.mjs} +1 -1
  18. package/dist/{base64-BRICGH2l.mjs.map → base64-MBPo9ozB.mjs.map} +1 -1
  19. package/dist/{byline-BSaNL1w7.mjs → byline-C3vnhIpU.mjs} +4 -4
  20. package/dist/{byline-BSaNL1w7.mjs.map → byline-C3vnhIpU.mjs.map} +1 -1
  21. package/dist/{bylines-CvJ3PYz2.mjs → bylines-esI7ioa9.mjs} +5 -5
  22. package/dist/{bylines-CvJ3PYz2.mjs.map → bylines-esI7ioa9.mjs.map} +1 -1
  23. package/dist/{cache-C6N_hhN7.mjs → cache-fTzxgMFJ.mjs} +3 -3
  24. package/dist/{cache-C6N_hhN7.mjs.map → cache-fTzxgMFJ.mjs.map} +1 -1
  25. package/dist/{chunks-NBQVDOci.mjs → chunks-Da2-b-oA.mjs} +2 -2
  26. package/dist/{chunks-NBQVDOci.mjs.map → chunks-Da2-b-oA.mjs.map} +1 -1
  27. package/dist/cli/index.mjs +251 -79
  28. package/dist/cli/index.mjs.map +1 -1
  29. package/dist/client/cf-access.d.mts +1 -1
  30. package/dist/client/index.d.mts +1 -1
  31. package/dist/client/index.mjs +1 -1
  32. package/dist/{config-BI0V3ICQ.mjs → config-CVssduLe.mjs} +1 -1
  33. package/dist/{config-BI0V3ICQ.mjs.map → config-CVssduLe.mjs.map} +1 -1
  34. package/dist/{content-8lOYF0pr.mjs → content-C7G4QXkK.mjs} +14 -3
  35. package/dist/content-C7G4QXkK.mjs.map +1 -0
  36. package/dist/db/index.d.mts +3 -3
  37. package/dist/db/index.mjs +1 -1
  38. package/dist/db/libsql.d.mts +1 -1
  39. package/dist/db/postgres.d.mts +1 -1
  40. package/dist/db/sqlite.d.mts +1 -1
  41. package/dist/{db-errors-WRezodiz.mjs → db-errors-B7P2pSCn.mjs} +1 -1
  42. package/dist/{db-errors-WRezodiz.mjs.map → db-errors-B7P2pSCn.mjs.map} +1 -1
  43. package/dist/{default-D8ksjWhO.mjs → default-pHuz9WF6.mjs} +1 -1
  44. package/dist/{default-D8ksjWhO.mjs.map → default-pHuz9WF6.mjs.map} +1 -1
  45. package/dist/{error-D_-tqP-I.mjs → error-DqnRMM5z.mjs} +1 -1
  46. package/dist/{error-D_-tqP-I.mjs.map → error-DqnRMM5z.mjs.map} +1 -1
  47. package/dist/{index-BFRaVcD6.d.mts → index-DjPMOfO0.d.mts} +82 -67
  48. package/dist/index-DjPMOfO0.d.mts.map +1 -0
  49. package/dist/index.d.mts +10 -10
  50. package/dist/index.mjs +28 -27
  51. package/dist/{load-DDqMMvZL.mjs → load-sXRuM7Us.mjs} +2 -2
  52. package/dist/{load-DDqMMvZL.mjs.map → load-sXRuM7Us.mjs.map} +1 -1
  53. package/dist/{loader-CKLbBnhK.mjs → loader-Bx2_9-5e.mjs} +31 -6
  54. package/dist/loader-Bx2_9-5e.mjs.map +1 -0
  55. package/dist/{manifest-schema-DqWNC3lM.mjs → manifest-schema-CXAbd1vH.mjs} +1 -1
  56. package/dist/{manifest-schema-DqWNC3lM.mjs.map → manifest-schema-CXAbd1vH.mjs.map} +1 -1
  57. package/dist/media/index.d.mts +1 -1
  58. package/dist/media/index.mjs +1 -1
  59. package/dist/media/local-runtime.d.mts +7 -7
  60. package/dist/media/local-runtime.mjs +3 -3
  61. package/dist/{media-BW32b4gi.mjs → media-D8FbNsl0.mjs} +2 -2
  62. package/dist/{media-BW32b4gi.mjs.map → media-D8FbNsl0.mjs.map} +1 -1
  63. package/dist/{mode-ier8jbBk.mjs → mode-YhqNVef_.mjs} +1 -1
  64. package/dist/{mode-ier8jbBk.mjs.map → mode-YhqNVef_.mjs.map} +1 -1
  65. package/dist/{options-BVp3UsTS.mjs → options-nPxWnrya.mjs} +1 -1
  66. package/dist/{options-BVp3UsTS.mjs.map → options-nPxWnrya.mjs.map} +1 -1
  67. package/dist/page/index.d.mts +2 -2
  68. package/dist/{patterns-CrCYkMBb.mjs → patterns-DsUZ4uxI.mjs} +1 -1
  69. package/dist/{patterns-CrCYkMBb.mjs.map → patterns-DsUZ4uxI.mjs.map} +1 -1
  70. package/dist/{placeholder-BE4o_2dc.d.mts → placeholder-CDPtkelt.d.mts} +1 -1
  71. package/dist/{placeholder-BE4o_2dc.d.mts.map → placeholder-CDPtkelt.d.mts.map} +1 -1
  72. package/dist/{placeholder-CIJejMlK.mjs → placeholder-Ci0RLeCk.mjs} +1 -1
  73. package/dist/{placeholder-CIJejMlK.mjs.map → placeholder-Ci0RLeCk.mjs.map} +1 -1
  74. package/dist/plugins/adapt-sandbox-entry.d.mts +5 -5
  75. package/dist/plugins/adapt-sandbox-entry.mjs +2 -2
  76. package/dist/{public-url-DByxYjUw.mjs → public-url-B1AxbbbQ.mjs} +1 -1
  77. package/dist/{public-url-DByxYjUw.mjs.map → public-url-B1AxbbbQ.mjs.map} +1 -1
  78. package/dist/{query-Cg9ZKRQ0.mjs → query-Bo-msrmu.mjs} +13 -13
  79. package/dist/{query-Cg9ZKRQ0.mjs.map → query-Bo-msrmu.mjs.map} +1 -1
  80. package/dist/{redirect-BhUBKRc1.mjs → redirect-C5H7VGIX.mjs} +3 -3
  81. package/dist/{redirect-BhUBKRc1.mjs.map → redirect-C5H7VGIX.mjs.map} +1 -1
  82. package/dist/{registry-Dw70ChxB.mjs → registry-Beb7wxFc.mjs} +5 -5
  83. package/dist/{registry-Dw70ChxB.mjs.map → registry-Beb7wxFc.mjs.map} +1 -1
  84. package/dist/{request-cache-B-bmkipQ.mjs → request-cache-C-tIpYIw.mjs} +1 -1
  85. package/dist/{request-cache-B-bmkipQ.mjs.map → request-cache-C-tIpYIw.mjs.map} +1 -1
  86. package/dist/{runner-Bnoj7vjK.d.mts → runner-Clwe4Mme.d.mts} +2 -2
  87. package/dist/{runner-Bnoj7vjK.d.mts.map → runner-Clwe4Mme.d.mts.map} +1 -1
  88. package/dist/{runner-C7ADox5q.mjs → runner-DMnlIkh4.mjs} +433 -138
  89. package/dist/runner-DMnlIkh4.mjs.map +1 -0
  90. package/dist/runtime.d.mts +6 -6
  91. package/dist/runtime.mjs +3 -3
  92. package/dist/{search-dOGEccMa.mjs → search-DkN-BqsS.mjs} +164 -92
  93. package/dist/search-DkN-BqsS.mjs.map +1 -0
  94. package/dist/{secrets-CW3reAnU.mjs → secrets-CZ8rxLX3.mjs} +3 -3
  95. package/dist/{secrets-CW3reAnU.mjs.map → secrets-CZ8rxLX3.mjs.map} +1 -1
  96. package/dist/seed/index.d.mts +2 -2
  97. package/dist/seed/index.mjs +15 -14
  98. package/dist/seo/index.d.mts +1 -1
  99. package/dist/storage/local.d.mts +1 -1
  100. package/dist/storage/local.mjs +1 -1
  101. package/dist/storage/s3.d.mts +1 -1
  102. package/dist/storage/s3.mjs +1 -1
  103. package/dist/taxonomies-CTtewrSQ.mjs +407 -0
  104. package/dist/taxonomies-CTtewrSQ.mjs.map +1 -0
  105. package/dist/taxonomy-DSxx2K2L.mjs +218 -0
  106. package/dist/taxonomy-DSxx2K2L.mjs.map +1 -0
  107. package/dist/{tokens-D7zMmWi2.mjs → tokens-CyRDPVW2.mjs} +2 -2
  108. package/dist/{tokens-D7zMmWi2.mjs.map → tokens-CyRDPVW2.mjs.map} +1 -1
  109. package/dist/{transaction-Cn2rjY78.mjs → transaction-D44LBXvU.mjs} +1 -1
  110. package/dist/{transaction-Cn2rjY78.mjs.map → transaction-D44LBXvU.mjs.map} +1 -1
  111. package/dist/{transport-DNEfeMaU.d.mts → transport-DX_5rpsq.d.mts} +1 -1
  112. package/dist/{transport-DNEfeMaU.d.mts.map → transport-DX_5rpsq.d.mts.map} +1 -1
  113. package/dist/{transport-BeMCmin1.mjs → transport-xpzIjCIB.mjs} +1 -1
  114. package/dist/{transport-BeMCmin1.mjs.map → transport-xpzIjCIB.mjs.map} +1 -1
  115. package/dist/{types-CRxNbK-Z.mjs → types-BIgulNsW.mjs} +2 -2
  116. package/dist/{types-CRxNbK-Z.mjs.map → types-BIgulNsW.mjs.map} +1 -1
  117. package/dist/{types-CJsYGpco.d.mts → types-B_CXXnzh.d.mts} +1 -1
  118. package/dist/{types-CJsYGpco.d.mts.map → types-B_CXXnzh.d.mts.map} +1 -1
  119. package/dist/{types-M78DQ1lx.d.mts → types-C-aFbqmA.d.mts} +1 -1
  120. package/dist/{types-M78DQ1lx.d.mts.map → types-C-aFbqmA.d.mts.map} +1 -1
  121. package/dist/{types-4fVtCIm0.mjs → types-CoO6mpV3.mjs} +1 -1
  122. package/dist/{types-4fVtCIm0.mjs.map → types-CoO6mpV3.mjs.map} +1 -1
  123. package/dist/{types-BuBIptGk.d.mts → types-D19uBYWn.d.mts} +149 -4
  124. package/dist/types-D19uBYWn.d.mts.map +1 -0
  125. package/dist/{types-BSyXeCFW.d.mts → types-Dl1fgFjn.d.mts} +1 -1
  126. package/dist/{types-BSyXeCFW.d.mts.map → types-Dl1fgFjn.d.mts.map} +1 -1
  127. package/dist/{types-CrtWgIvl.d.mts → types-Dtx1mSMX.d.mts} +9 -1
  128. package/dist/types-Dtx1mSMX.d.mts.map +1 -0
  129. package/dist/{types-CIOg5AR8.mjs → types-Eg829jj9.mjs} +1 -1
  130. package/dist/{types-CIOg5AR8.mjs.map → types-Eg829jj9.mjs.map} +1 -1
  131. package/dist/{types-CDbKp7ND.mjs → types-K-EkEQCI.mjs} +1 -1
  132. package/dist/{types-CDbKp7ND.mjs.map → types-K-EkEQCI.mjs.map} +1 -1
  133. package/dist/{validate-Baqf0slj.mjs → validate-CBIbxM3L.mjs} +14 -10
  134. package/dist/validate-CBIbxM3L.mjs.map +1 -0
  135. package/dist/{validate-BfQh_C_y.d.mts → validate-DHGwADqO.d.mts} +18 -5
  136. package/dist/validate-DHGwADqO.d.mts.map +1 -0
  137. package/dist/{validation-BfEI7tNe.mjs → validation-B1NYiEos.mjs} +5 -5
  138. package/dist/{validation-BfEI7tNe.mjs.map → validation-B1NYiEos.mjs.map} +1 -1
  139. package/dist/version-CMD42IRC.mjs +7 -0
  140. package/dist/{version-DoxrVdYf.mjs.map → version-CMD42IRC.mjs.map} +1 -1
  141. package/dist/{zod-generator-CC0xNe_K.mjs → zod-generator-BNJDQBSZ.mjs} +8 -3
  142. package/dist/zod-generator-BNJDQBSZ.mjs.map +1 -0
  143. package/package.json +6 -6
  144. package/src/api/handlers/content.ts +11 -0
  145. package/src/api/handlers/dashboard.ts +29 -36
  146. package/src/api/handlers/menus.ts +256 -75
  147. package/src/api/handlers/taxonomies.ts +273 -97
  148. package/src/api/schemas/common.ts +7 -0
  149. package/src/api/schemas/menus.ts +23 -0
  150. package/src/api/schemas/taxonomies.ts +39 -0
  151. package/src/astro/integration/routes.ts +10 -0
  152. package/src/astro/routes/api/content/[collection]/[id]/permanent.ts +1 -1
  153. package/src/astro/routes/api/import/wordpress/rewrite-url-helpers.ts +196 -0
  154. package/src/astro/routes/api/import/wordpress/rewrite-urls.ts +9 -177
  155. package/src/astro/routes/api/menus/[name]/items.ts +16 -6
  156. package/src/astro/routes/api/menus/[name]/reorder.ts +8 -3
  157. package/src/astro/routes/api/menus/[name]/translations.ts +82 -0
  158. package/src/astro/routes/api/menus/[name].ts +19 -10
  159. package/src/astro/routes/api/menus/index.ts +9 -6
  160. package/src/astro/routes/api/taxonomies/[name]/terms/[slug]/translations.ts +89 -0
  161. package/src/astro/routes/api/taxonomies/[name]/terms/[slug].ts +22 -22
  162. package/src/astro/routes/api/taxonomies/[name]/terms/index.ts +11 -14
  163. package/src/astro/routes/api/taxonomies/index.ts +9 -6
  164. package/src/cli/commands/export-seed.ts +82 -21
  165. package/src/cli/commands/plugin-init.ts +216 -90
  166. package/src/database/migrations/036_i18n_menus_and_taxonomies.ts +477 -0
  167. package/src/database/migrations/runner.ts +2 -0
  168. package/src/database/repositories/content.ts +11 -0
  169. package/src/database/repositories/taxonomy.ts +193 -89
  170. package/src/database/types.ts +10 -2
  171. package/src/i18n/resolve.ts +37 -0
  172. package/src/loader.ts +49 -2
  173. package/src/mcp/server.ts +77 -18
  174. package/src/menus/index.ts +143 -124
  175. package/src/menus/types.ts +15 -1
  176. package/src/schema/zod-generator.ts +12 -2
  177. package/src/seed/apply.ts +140 -54
  178. package/src/seed/types.ts +14 -1
  179. package/src/seed/validate.ts +27 -13
  180. package/src/taxonomies/index.ts +230 -213
  181. package/src/taxonomies/types.ts +10 -0
  182. package/dist/apply-BzltprvY.mjs.map +0 -1
  183. package/dist/content-8lOYF0pr.mjs.map +0 -1
  184. package/dist/index-BFRaVcD6.d.mts.map +0 -1
  185. package/dist/loader-CKLbBnhK.mjs.map +0 -1
  186. package/dist/runner-C7ADox5q.mjs.map +0 -1
  187. package/dist/search-dOGEccMa.mjs.map +0 -1
  188. package/dist/taxonomies-ZlRtD6AG.mjs +0 -315
  189. package/dist/taxonomies-ZlRtD6AG.mjs.map +0 -1
  190. package/dist/types-BuBIptGk.d.mts.map +0 -1
  191. package/dist/types-CrtWgIvl.d.mts.map +0 -1
  192. package/dist/validate-Baqf0slj.mjs.map +0 -1
  193. package/dist/validate-BfQh_C_y.d.mts.map +0 -1
  194. package/dist/version-DoxrVdYf.mjs +0 -7
  195. package/dist/zod-generator-CC0xNe_K.mjs.map +0 -1
package/dist/taxonomies-CTtewrSQ.mjs.map
@@ -0,0 +1 @@
+ [single-line source map {"version":3,"file":"taxonomies-CTtewrSQ.mjs","sources":["../src/i18n/resolve.ts","../src/taxonomies/index.ts"],...}: its sourcesContent embeds the new locale-resolution helpers (resolveLocale, resolveLocaleChain) and the locale-aware taxonomy runtime (getTaxonomyDefs, getTaxonomyDef, getTaxonomyTerms, getTerm, getEntryTerms, getTermsForEntries, getAllTermsForEntries, getEntriesByTerm, plus the request-cache priming in primeEntryTermsCache), followed by the minified mappings]
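The locale-resolution helpers embedded in that map (src/i18n/resolve.ts) implement a simple precedence: an explicit locale wins, then the request-context locale, then the configured defaultLocale when i18n is enabled, and finally undefined, which means "do not filter by locale". A minimal self-contained TypeScript sketch of that precedence; the ctxLocale and cfg parameters stand in for emdash's internal getRequestContext() and getI18nConfig()/isI18nEnabled() and are not the package's real exports:

// Sketch of the precedence implemented by resolveLocale in src/i18n/resolve.ts.
// `ctxLocale` and `cfg` are illustrative stand-ins, not real emdash exports.
type I18nConfigSketch = { enabled: boolean; defaultLocale: string };

function resolveLocaleSketch(
  explicit: string | undefined,
  ctxLocale: string | undefined,
  cfg: I18nConfigSketch | undefined,
): string | undefined {
  if (explicit !== undefined) return explicit;   // 1. explicit argument wins
  if (ctxLocale !== undefined) return ctxLocale; // 2. then the request-context locale
  if (cfg?.enabled) return cfg.defaultLocale;    // 3. then the configured default
  return undefined;                              // 4. undefined = do not filter by locale
}

// resolveLocaleSketch("de", "fr", { enabled: true, defaultLocale: "en" })                -> "de"
// resolveLocaleSketch(undefined, "fr", { enabled: true, defaultLocale: "en" })           -> "fr"
// resolveLocaleSketch(undefined, undefined, { enabled: true, defaultLocale: "en" })      -> "en"
// resolveLocaleSketch(undefined, undefined, undefined)                                   -> undefined

resolveLocaleChain layers fallbacks on top of this: per the embedded source it returns [] when no locale resolves, [locale] when i18n is disabled, and getFallbackChain(locale) otherwise, so callers can iterate a uniform array.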
package/dist/taxonomy-DSxx2K2L.mjs
@@ -0,0 +1,218 @@
+ import { i as __exportAll } from "./runner-DMnlIkh4.mjs";
+ import { ulid } from "ulidx";
+
+ //#region src/database/repositories/taxonomy.ts
+ var taxonomy_exports = /* @__PURE__ */ __exportAll({ TaxonomyRepository: () => TaxonomyRepository });
+ /**
+  * Taxonomy repository for categories, tags, and other classification.
+  *
+  * Terms are per-locale. Translations of the same term share a `translation_group`
+  * ULID. `content_taxonomies.taxonomy_id` stores the translation_group so a single
+  * association spans every locale of a post.
+  *
+  * The repository does not resolve locale fallbacks on its own — callers supply
+  * the locale they want. Runtime helpers and handlers use `getFallbackChain()`
+  * from `i18n/config` when they need fallback behaviour.
+  */
+ var TaxonomyRepository = class {
+   constructor(db) {
+     this.db = db;
+   }
+   /**
+    * Create a new taxonomy term. When `translationOf` is set the new row joins
+    * the source term's translation_group; otherwise a fresh group is minted
+    * (matching the migration backfill pattern `translation_group = id`).
+    */
+   async create(input) {
+     const id = ulid();
+     const parentId = input.parentId === void 0 || input.parentId === "" ? null : input.parentId;
+     let translationGroup = id;
+     if (input.translationOf) {
+       const source = await this.findById(input.translationOf);
+       if (source?.translationGroup) translationGroup = source.translationGroup;
+     }
+     await this.db.insertInto("taxonomies").values({
+       id,
+       name: input.name,
+       slug: input.slug,
+       label: input.label,
+       parent_id: parentId,
+       data: input.data ? JSON.stringify(input.data) : null,
+       ...input.locale !== void 0 ? { locale: input.locale } : {},
+       translation_group: translationGroup
+     }).execute();
+     const taxonomy = await this.findById(id);
+     if (!taxonomy) throw new Error("Failed to create taxonomy");
+     return taxonomy;
+   }
+   async findById(id) {
+     const row = await this.db.selectFrom("taxonomies").selectAll().where("id", "=", id).executeTakeFirst();
+     return row ? this.rowToTaxonomy(row) : null;
+   }
+   /**
+    * Find a term by (name, slug). When `locale` is provided, filter by it.
+    * When omitted, returns the lowest-locale-code match (deterministic across
+    * calls). Mirrors `ContentRepository.findBySlug`.
+    */
+   async findBySlug(name, slug, locale) {
+     let query = this.db.selectFrom("taxonomies").selectAll().where("name", "=", name).where("slug", "=", slug);
+     if (locale !== void 0) query = query.where("locale", "=", locale);
+     const row = await query.orderBy("locale", "asc").executeTakeFirst();
+     return row ? this.rowToTaxonomy(row) : null;
+   }
+   /**
+    * Get all terms for a taxonomy (e.g., all categories).
+    *
+    * `id asc` is a stable tiebreaker for terms that share a label. Without it
+    * the SQL ordering is implementation-defined when labels match, which
+    * breaks keyset pagination over `(label, id)`.
+    */
+   async findByName(name, options = {}) {
+     let query = this.db.selectFrom("taxonomies").selectAll().where("name", "=", name).orderBy("label", "asc").orderBy("id", "asc");
+     if (options.locale !== void 0) query = query.where("locale", "=", options.locale);
+     if (options.parentId !== void 0) if (options.parentId === null) query = query.where("parent_id", "is", null);
+     else query = query.where("parent_id", "=", options.parentId);
+     return (await query.execute()).map((row) => this.rowToTaxonomy(row));
+   }
+   async findChildren(parentId) {
+     return (await this.db.selectFrom("taxonomies").selectAll().where("parent_id", "=", parentId).orderBy("label", "asc").orderBy("id", "asc").execute()).map((row) => this.rowToTaxonomy(row));
+   }
+   /**
+    * Every translation sibling of a term (including itself), identified by
+    * their shared `translation_group`.
+    */
+   async findTranslations(translationGroup) {
+     return (await this.db.selectFrom("taxonomies").selectAll().where("translation_group", "=", translationGroup).orderBy("locale", "asc").execute()).map((row) => this.rowToTaxonomy(row));
+   }
+   async update(id, input) {
+     if (!await this.findById(id)) return null;
+     const updates = {};
+     if (input.slug !== void 0) updates.slug = input.slug;
+     if (input.label !== void 0) updates.label = input.label;
+     if (input.parentId !== void 0) updates.parent_id = input.parentId === "" ? null : input.parentId;
+     if (input.data !== void 0) updates.data = JSON.stringify(input.data);
+     if (Object.keys(updates).length > 0) await this.db.updateTable("taxonomies").set(updates).where("id", "=", id).execute();
+     return this.findById(id);
+   }
+   async delete(id) {
+     const term = await this.findById(id);
+     if (!term) return false;
+     if (term.translationGroup) {
+       if ((await this.db.selectFrom("taxonomies").select("id").where("translation_group", "=", term.translationGroup).where("id", "!=", id).execute()).length === 0) await this.db.deleteFrom("content_taxonomies").where("taxonomy_id", "=", term.translationGroup).execute();
+     }
+     return ((await this.db.deleteFrom("taxonomies").where("id", "=", id).executeTakeFirst()).numDeletedRows ?? 0n) > 0n;
+   }
+   async attachToEntry(collection, entryId, taxonomyId) {
+     const group = await this.resolveTranslationGroup(taxonomyId);
+     if (!group) return;
+     const row = {
+       collection,
+       entry_id: entryId,
+       taxonomy_id: group
+     };
+     await this.db.insertInto("content_taxonomies").values(row).onConflict((oc) => oc.doNothing()).execute();
+   }
+   async detachFromEntry(collection, entryId, taxonomyId) {
+     const group = await this.resolveTranslationGroup(taxonomyId);
+     if (!group) return;
+     await this.db.deleteFrom("content_taxonomies").where("collection", "=", collection).where("entry_id", "=", entryId).where("taxonomy_id", "=", group).execute();
+   }
+   /**
+    * Taxonomy terms assigned to a content entry, resolved into a specific locale.
+    * Terms whose translation_group lacks a row in the requested locale are
+    * omitted — callers wanting fallback behaviour apply it themselves.
+    */
+   async getTermsForEntry(collection, entryId, taxonomyName, locale) {
+     let query = this.db.selectFrom("content_taxonomies").innerJoin("taxonomies", "taxonomies.translation_group", "content_taxonomies.taxonomy_id").selectAll("taxonomies").where("content_taxonomies.collection", "=", collection).where("content_taxonomies.entry_id", "=", entryId);
+     if (taxonomyName) query = query.where("taxonomies.name", "=", taxonomyName);
+     if (locale !== void 0) query = query.where("taxonomies.locale", "=", locale);
+     return (await query.orderBy("taxonomies.locale", "asc").execute()).map((row) => this.rowToTaxonomy(row));
+   }
+   /**
+    * Replace all assignments of a given taxonomy for one content entry.
+    * Term ids OR translation_groups are accepted and normalised to groups.
+    */
+   async setTermsForEntry(collection, entryId, taxonomyName, termIds) {
+     const groups = [];
+     for (const id of termIds) {
+       const group = await this.resolveTranslationGroup(id);
+       if (group) groups.push(group);
+     }
+     const newGroups = new Set(groups);
+     const current = await this.db.selectFrom("content_taxonomies").innerJoin("taxonomies", "taxonomies.translation_group", "content_taxonomies.taxonomy_id").select(["content_taxonomies.taxonomy_id as group"]).distinct().where("content_taxonomies.collection", "=", collection).where("content_taxonomies.entry_id", "=", entryId).where("taxonomies.name", "=", taxonomyName).execute();
+     const currentGroups = new Set(current.map((r) => r.group));
+     const toRemove = [...currentGroups].filter((g) => !newGroups.has(g));
+     if (toRemove.length > 0) await this.db.deleteFrom("content_taxonomies").where("collection", "=", collection).where("entry_id", "=", entryId).where("taxonomy_id", "in", toRemove).execute();
+     const toAdd = [...newGroups].filter((g) => !currentGroups.has(g));
+     if (toAdd.length > 0) await this.db.insertInto("content_taxonomies").values(toAdd.map((taxonomy_id) => ({
+       collection,
+       entry_id: entryId,
+       taxonomy_id
+     }))).onConflict((oc) => oc.doNothing()).execute();
+   }
+   async clearEntryTerms(collection, entryId) {
+     const result = await this.db.deleteFrom("content_taxonomies").where("collection", "=", collection).where("entry_id", "=", entryId).executeTakeFirst();
+     return Number(result.numDeletedRows ?? 0);
+   }
+   /**
+    * Copy every term assignment from one content entry to another. Used when
+    * creating a translation of a post so the new translation inherits the
+    * source's term assignments. Safe to call when the source has no terms.
+    */
+   async copyEntryTerms(collection, sourceEntryId, targetEntryId) {
+     const rows = await this.db.selectFrom("content_taxonomies").select(["taxonomy_id"]).where("collection", "=", collection).where("entry_id", "=", sourceEntryId).execute();
+     if (rows.length === 0) return;
+     await this.db.insertInto("content_taxonomies").values(rows.map((r) => ({
+       collection,
+       entry_id: targetEntryId,
+       taxonomy_id: r.taxonomy_id
+     }))).onConflict((oc) => oc.doNothing()).execute();
+   }
+   /**
+    * Count content entries that use any translation of this term. Accepts
+    * either a term id or a translation_group — we normalise to the group.
+    */
+   async countEntriesWithTerm(termIdOrGroup) {
+     const group = await this.resolveTranslationGroup(termIdOrGroup);
+     if (!group) return 0;
+     const result = await this.db.selectFrom("content_taxonomies").select((eb) => eb.fn.count("entry_id").as("count")).where("taxonomy_id", "=", group).executeTakeFirst();
+     return Number(result?.count ?? 0);
+   }
+   async resolveTranslationGroup(idOrGroup) {
+     return (await this.db.selectFrom("taxonomies").select(["translation_group"]).where((eb) => eb.or([eb("id", "=", idOrGroup), eb("translation_group", "=", idOrGroup)])).executeTakeFirst())?.translation_group ?? null;
+   }
+   /**
+    * Batch count entries for multiple taxonomy translation_groups.
+    * Chunks the query at SQL_BATCH_SIZE to stay below D1's bind-parameter limit.
+    * Returns a Map from translation_group to count.
+    *
+    * Pass translation_groups (not term ids) — `content_taxonomies.taxonomy_id`
+    * stores the translation_group so a single assignment spans every locale.
+    */
+   async countEntriesForTerms(translationGroups) {
+     if (translationGroups.length === 0) return /* @__PURE__ */ new Map();
+     const { chunks, SQL_BATCH_SIZE } = await import("./chunks-Da2-b-oA.mjs").then((n) => n.r);
+     const counts = /* @__PURE__ */ new Map();
+     for (const chunk of chunks(translationGroups, SQL_BATCH_SIZE)) {
+       const rows = await this.db.selectFrom("content_taxonomies").select(["taxonomy_id", (eb) => eb.fn.count("entry_id").as("count")]).where("taxonomy_id", "in", chunk).groupBy("taxonomy_id").execute();
+       for (const row of rows) counts.set(row.taxonomy_id, Number(row.count || 0));
+     }
+     return counts;
+   }
+   rowToTaxonomy(row) {
+     return {
+       id: row.id,
+       name: row.name,
+       slug: row.slug,
+       label: row.label,
+       parentId: row.parent_id,
+       data: row.data ? JSON.parse(row.data) : null,
+       locale: row.locale,
+       translationGroup: row.translation_group
+     };
+   }
+ };
+
+ //#endregion
+ export { taxonomy_exports as n, TaxonomyRepository as t };
+ //# sourceMappingURL=taxonomy-DSxx2K2L.mjs.map
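Taken together, the design splits responsibilities: taxonomy rows are per-locale, while assignments in content_taxonomies are keyed by translation_group and are therefore locale-agnostic. A hypothetical usage sketch of that behaviour; the import specifiers are intentionally omitted, db is assumed to be an already-configured Kysely instance, and every id and value is illustrative:

import type { Kysely } from "kysely";
// `Database` and `TaxonomyRepository` are the package's own exports shown in
// this diff; their exact import paths are omitted here on purpose.

async function translationGroupDemo(db: Kysely<Database>) {
  const repo = new TaxonomyRepository(db);

  // A fresh term mints its own translation_group (group === term id).
  const en = await repo.create({ name: "category", slug: "news", label: "News", locale: "en" });

  // A translation joins the source term's group instead of minting a new one.
  const de = await repo.create({
    name: "category",
    slug: "nachrichten",
    label: "Nachrichten",
    locale: "de",
    translationOf: en.id,
  });
  // en.translationGroup === de.translationGroup === en.id

  // attachToEntry stores the translation_group, not the term id, so one
  // pivot row covers every locale of the entry.
  await repo.attachToEntry("posts", "post-1", de.id);

  // Querying in English still finds the assignment, resolved to the English
  // term row; no locale fallback is applied at this layer.
  return repo.getTermsForEntry("posts", "post-1", "category", "en");
}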
package/dist/taxonomy-DSxx2K2L.mjs.map
@@ -0,0 +1 @@
+ [single-line source map {"version":3,"file":"taxonomy-DSxx2K2L.mjs","sources":["../src/database/repositories/taxonomy.ts"],...}: its sourcesContent embeds the TypeScript source of the repository compiled above, including the typed interfaces (Taxonomy, CreateTaxonomyInput, UpdateTaxonomyInput, FindOptions); the map is cut off mid-content in this diff view]
string[] = [];\n\t\tfor (const id of termIds) {\n\t\t\tconst group = await this.resolveTranslationGroup(id);\n\t\t\tif (group) groups.push(group);\n\t\t}\n\t\tconst newGroups = new Set(groups);\n\n\t\tconst current = await this.db\n\t\t\t.selectFrom(\"content_taxonomies\")\n\t\t\t.innerJoin(\"taxonomies\", \"taxonomies.translation_group\", \"content_taxonomies.taxonomy_id\")\n\t\t\t.select([\"content_taxonomies.taxonomy_id as group\"])\n\t\t\t.distinct()\n\t\t\t.where(\"content_taxonomies.collection\", \"=\", collection)\n\t\t\t.where(\"content_taxonomies.entry_id\", \"=\", entryId)\n\t\t\t.where(\"taxonomies.name\", \"=\", taxonomyName)\n\t\t\t.execute();\n\t\tconst currentGroups = new Set(current.map((r) => r.group));\n\n\t\tconst toRemove = [...currentGroups].filter((g) => !newGroups.has(g));\n\t\tif (toRemove.length > 0) {\n\t\t\tawait this.db\n\t\t\t\t.deleteFrom(\"content_taxonomies\")\n\t\t\t\t.where(\"collection\", \"=\", collection)\n\t\t\t\t.where(\"entry_id\", \"=\", entryId)\n\t\t\t\t.where(\"taxonomy_id\", \"in\", toRemove)\n\t\t\t\t.execute();\n\t\t}\n\n\t\tconst toAdd = [...newGroups].filter((g) => !currentGroups.has(g));\n\t\tif (toAdd.length > 0) {\n\t\t\tawait this.db\n\t\t\t\t.insertInto(\"content_taxonomies\")\n\t\t\t\t.values(\n\t\t\t\t\ttoAdd.map((taxonomy_id) => ({\n\t\t\t\t\t\tcollection,\n\t\t\t\t\t\tentry_id: entryId,\n\t\t\t\t\t\ttaxonomy_id,\n\t\t\t\t\t})),\n\t\t\t\t)\n\t\t\t\t.onConflict((oc) => oc.doNothing())\n\t\t\t\t.execute();\n\t\t}\n\t}\n\n\tasync clearEntryTerms(collection: string, entryId: string): Promise<number> {\n\t\tconst result = await this.db\n\t\t\t.deleteFrom(\"content_taxonomies\")\n\t\t\t.where(\"collection\", \"=\", collection)\n\t\t\t.where(\"entry_id\", \"=\", entryId)\n\t\t\t.executeTakeFirst();\n\t\treturn Number(result.numDeletedRows ?? 0);\n\t}\n\n\t/**\n\t * Copy every term assignment from one content entry to another. Used when\n\t * creating a translation of a post so the new translation inherits the\n\t * source's term assignments. Safe to call when the source has no terms.\n\t */\n\tasync copyEntryTerms(\n\t\tcollection: string,\n\t\tsourceEntryId: string,\n\t\ttargetEntryId: string,\n\t): Promise<void> {\n\t\tconst rows = await this.db\n\t\t\t.selectFrom(\"content_taxonomies\")\n\t\t\t.select([\"taxonomy_id\"])\n\t\t\t.where(\"collection\", \"=\", collection)\n\t\t\t.where(\"entry_id\", \"=\", sourceEntryId)\n\t\t\t.execute();\n\t\tif (rows.length === 0) return;\n\n\t\tawait this.db\n\t\t\t.insertInto(\"content_taxonomies\")\n\t\t\t.values(\n\t\t\t\trows.map((r) => ({\n\t\t\t\t\tcollection,\n\t\t\t\t\tentry_id: targetEntryId,\n\t\t\t\t\ttaxonomy_id: r.taxonomy_id,\n\t\t\t\t})),\n\t\t\t)\n\t\t\t.onConflict((oc) => oc.doNothing())\n\t\t\t.execute();\n\t}\n\n\t/**\n\t * Count content entries that use any translation of this term. Accepts\n\t * either a term id or a translation_group — we normalise to the group.\n\t */\n\tasync countEntriesWithTerm(termIdOrGroup: string): Promise<number> {\n\t\tconst group = await this.resolveTranslationGroup(termIdOrGroup);\n\t\tif (!group) return 0;\n\n\t\tconst result = await this.db\n\t\t\t.selectFrom(\"content_taxonomies\")\n\t\t\t.select((eb) => eb.fn.count(\"entry_id\").as(\"count\"))\n\t\t\t.where(\"taxonomy_id\", \"=\", group)\n\t\t\t.executeTakeFirst();\n\t\treturn Number(result?.count ?? 
0);\n\t}\n\n\tprivate async resolveTranslationGroup(idOrGroup: string): Promise<string | null> {\n\t\tconst row = await this.db\n\t\t\t.selectFrom(\"taxonomies\")\n\t\t\t.select([\"translation_group\"])\n\t\t\t.where((eb) => eb.or([eb(\"id\", \"=\", idOrGroup), eb(\"translation_group\", \"=\", idOrGroup)]))\n\t\t\t.executeTakeFirst();\n\t\treturn row?.translation_group ?? null;\n\t}\n\n\t/**\n\t * Batch count entries for multiple taxonomy translation_groups.\n\t * Chunks the query at SQL_BATCH_SIZE to stay below D1's bind-parameter limit.\n\t * Returns a Map from translation_group to count.\n\t *\n\t * Pass translation_groups (not term ids) — `content_taxonomies.taxonomy_id`\n\t * stores the translation_group so a single assignment spans every locale.\n\t */\n\tasync countEntriesForTerms(translationGroups: string[]): Promise<Map<string, number>> {\n\t\tif (translationGroups.length === 0) return new Map();\n\n\t\tconst { chunks, SQL_BATCH_SIZE } = await import(\"../../utils/chunks.js\");\n\n\t\tconst counts = new Map<string, number>();\n\t\tfor (const chunk of chunks(translationGroups, SQL_BATCH_SIZE)) {\n\t\t\tconst rows = await this.db\n\t\t\t\t.selectFrom(\"content_taxonomies\")\n\t\t\t\t.select([\"taxonomy_id\", (eb) => eb.fn.count(\"entry_id\").as(\"count\")])\n\t\t\t\t.where(\"taxonomy_id\", \"in\", chunk)\n\t\t\t\t.groupBy(\"taxonomy_id\")\n\t\t\t\t.execute();\n\n\t\t\tfor (const row of rows) {\n\t\t\t\tcounts.set(row.taxonomy_id, Number(row.count || 0));\n\t\t\t}\n\t\t}\n\t\treturn counts;\n\t}\n\n\tprivate rowToTaxonomy(row: Selectable<TaxonomyTable>): Taxonomy {\n\t\treturn {\n\t\t\tid: row.id,\n\t\t\tname: row.name,\n\t\t\tslug: row.slug,\n\t\t\tlabel: row.label,\n\t\t\tparentId: row.parent_id,\n\t\t\tdata: row.data ? JSON.parse(row.data) : null,\n\t\t\tlocale: row.locale,\n\t\t\ttranslationGroup: 
row.translation_group,\n\t\t};\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;AAoDA,IAAa,qBAAb,MAAgC;CAC/B,YAAY,AAAQ,IAAsB;EAAtB;;;;;;;CAOpB,MAAM,OAAO,OAA+C;EAC3D,MAAM,KAAK,MAAM;EAIjB,MAAM,WAAW,MAAM,aAAa,UAAa,MAAM,aAAa,KAAK,OAAO,MAAM;EAEtF,IAAI,mBAAmB;AACvB,MAAI,MAAM,eAAe;GACxB,MAAM,SAAS,MAAM,KAAK,SAAS,MAAM,cAAc;AACvD,OAAI,QAAQ,iBAAkB,oBAAmB,OAAO;;AAGzD,QAAM,KAAK,GACT,WAAW,aAAa,CACxB,OAAO;GACP;GACA,MAAM,MAAM;GACZ,MAAM,MAAM;GACZ,OAAO,MAAM;GACb,WAAW;GACX,MAAM,MAAM,OAAO,KAAK,UAAU,MAAM,KAAK,GAAG;GAIhD,GAAI,MAAM,WAAW,SAAY,EAAE,QAAQ,MAAM,QAAQ,GAAG,EAAE;GAC9D,mBAAmB;GACnB,CAAC,CACD,SAAS;EAEX,MAAM,WAAW,MAAM,KAAK,SAAS,GAAG;AACxC,MAAI,CAAC,SAAU,OAAM,IAAI,MAAM,4BAA4B;AAC3D,SAAO;;CAGR,MAAM,SAAS,IAAsC;EACpD,MAAM,MAAM,MAAM,KAAK,GACrB,WAAW,aAAa,CACxB,WAAW,CACX,MAAM,MAAM,KAAK,GAAG,CACpB,kBAAkB;AACpB,SAAO,MAAM,KAAK,cAAc,IAAI,GAAG;;;;;;;CAQxC,MAAM,WAAW,MAAc,MAAc,QAA2C;EACvF,IAAI,QAAQ,KAAK,GACf,WAAW,aAAa,CACxB,WAAW,CACX,MAAM,QAAQ,KAAK,KAAK,CACxB,MAAM,QAAQ,KAAK,KAAK;AAC1B,MAAI,WAAW,OAAW,SAAQ,MAAM,MAAM,UAAU,KAAK,OAAO;EACpE,MAAM,MAAM,MAAM,MAAM,QAAQ,UAAU,MAAM,CAAC,kBAAkB;AACnE,SAAO,MAAM,KAAK,cAAc,IAAI,GAAG;;;;;;;;;CAUxC,MAAM,WAAW,MAAc,UAAuB,EAAE,EAAuB;EAC9E,IAAI,QAAQ,KAAK,GACf,WAAW,aAAa,CACxB,WAAW,CACX,MAAM,QAAQ,KAAK,KAAK,CACxB,QAAQ,SAAS,MAAM,CACvB,QAAQ,MAAM,MAAM;AAEtB,MAAI,QAAQ,WAAW,OAAW,SAAQ,MAAM,MAAM,UAAU,KAAK,QAAQ,OAAO;AAEpF,MAAI,QAAQ,aAAa,OACxB,KAAI,QAAQ,aAAa,KACxB,SAAQ,MAAM,MAAM,aAAa,MAAM,KAAK;MAE5C,SAAQ,MAAM,MAAM,aAAa,KAAK,QAAQ,SAAS;AAKzD,UADa,MAAM,MAAM,SAAS,EACtB,KAAK,QAAQ,KAAK,cAAc,IAAI,CAAC;;CAGlD,MAAM,aAAa,UAAuC;AAQzD,UAPa,MAAM,KAAK,GACtB,WAAW,aAAa,CACxB,WAAW,CACX,MAAM,aAAa,KAAK,SAAS,CACjC,QAAQ,SAAS,MAAM,CACvB,QAAQ,MAAM,MAAM,CACpB,SAAS,EACC,KAAK,QAAQ,KAAK,cAAc,IAAI,CAAC;;;;;;CAOlD,MAAM,iBAAiB,kBAA+C;AAOrE,UANa,MAAM,KAAK,GACtB,WAAW,aAAa,CACxB,WAAW,CACX,MAAM,qBAAqB,KAAK,iBAAiB,CACjD,QAAQ,UAAU,MAAM,CACxB,SAAS,EACC,KAAK,QAAQ,KAAK,cAAc,IAAI,CAAC;;CAGlD,MAAM,OAAO,IAAY,OAAsD;AAE9E,MAAI,CADa,MAAM,KAAK,SAAS,GAAG,CACzB,QAAO;EAEtB,MAAM,UAAmC,EAAE;AAC3C,MAAI,MAAM,SAAS,OAAW,SAAQ,OAAO,MAAM;AACnD,MAAI,MAAM,UAAU,OAAW,SAAQ,QAAQ,MAAM;AACrD,MAAI,MAAM,aAAa,OAEtB,SAAQ,YAAY,MAAM,aAAa,KAAK,OAAO,MAAM;AAE1D,MAAI,MAAM,SAAS,OAAW,SAAQ,OAAO,KAAK,UAAU,MAAM,KAAK;AAEvE,MAAI,OAAO,KAAK,QAAQ,CAAC,SAAS,EACjC,OAAM,KAAK,GAAG,YAAY,aAAa,CAAC,IAAI,QAAQ,CAAC,MAAM,MAAM,KAAK,GAAG,CAAC,SAAS;AAGpF,SAAO,KAAK,SAAS,GAAG;;CAGzB,MAAM,OAAO,IAA8B;EAC1C,MAAM,OAAO,MAAM,KAAK,SAAS,GAAG;AACpC,MAAI,CAAC,KAAM,QAAO;AAIlB,MAAI,KAAK,kBAOR;QANiB,MAAM,KAAK,GAC1B,WAAW,aAAa,CACxB,OAAO,KAAK,CACZ,MAAM,qBAAqB,KAAK,KAAK,iBAAiB,CACtD,MAAM,MAAM,MAAM,GAAG,CACrB,SAAS,EACE,WAAW,EACvB,OAAM,KAAK,GACT,WAAW,qBAAqB,CAChC,MAAM,eAAe,KAAK,KAAK,iBAAiB,CAChD,SAAS;;AAKb,WADe,MAAM,KAAK,GAAG,WAAW,aAAa,CAAC,MAAM,MAAM,KAAK,GAAG,CAAC,kBAAkB,EAC9E,kBAAkB,MAAM;;CAKxC,MAAM,cAAc,YAAoB,SAAiB,YAAmC;EAC3F,MAAM,QAAQ,MAAM,KAAK,wBAAwB,WAAW;AAC5D,MAAI,CAAC,MAAO;EAEZ,MAAM,MAA4B;GACjC;GACA,UAAU;GACV,aAAa;GACb;AACD,QAAM,KAAK,GACT,WAAW,qBAAqB,CAChC,OAAO,IAAI,CACX,YAAY,OAAO,GAAG,WAAW,CAAC,CAClC,SAAS;;CAGZ,MAAM,gBAAgB,YAAoB,SAAiB,YAAmC;EAC7F,MAAM,QAAQ,MAAM,KAAK,wBAAwB,WAAW;AAC5D,MAAI,CAAC,MAAO;AAEZ,QAAM,KAAK,GACT,WAAW,qBAAqB,CAChC,MAAM,cAAc,KAAK,WAAW,CACpC,MAAM,YAAY,KAAK,QAAQ,CAC/B,MAAM,eAAe,KAAK,MAAM,CAChC,SAAS;;;;;;;CAQZ,MAAM,iBACL,YACA,SACA,cACA,QACsB;EACtB,IAAI,QAAQ,KAAK,GACf,WAAW,qBAAqB,CAChC,UAAU,cAAc,gCAAgC,iCAAiC,CACzF,UAAU,aAAa,CACvB,MAAM,iCAAiC,KAAK,WAAW,CACvD,MAAM,+BAA+B,KAAK,QAAQ;AAEpD,MAAI,aAAc,SAAQ,MAAM,MAAM,mBAAmB,KAAK,aAAa;AAC3E,MAAI,WAAW,OAAW,SAAQ,MAAM,MAAM,qBAAqB,KAAK,OAAO;AAG/E,UADa,MAAM,MAAM,QAAQ,qBAAqB,MAAM,CAAC,SAAS,EAC1D,KAAK,QAAQ,KAAK,cAAc,IAAI,CAAC;;;;;;CAOlD,MAAM,iBACL,YACA,SACA,cACA,SACgB;
EAChB,MAAM,SAAmB,EAAE;AAC3B,OAAK,MAAM,MAAM,SAAS;GACzB,MAAM,QAAQ,MAAM,KAAK,wBAAwB,GAAG;AACpD,OAAI,MAAO,QAAO,KAAK,MAAM;;EAE9B,MAAM,YAAY,IAAI,IAAI,OAAO;EAEjC,MAAM,UAAU,MAAM,KAAK,GACzB,WAAW,qBAAqB,CAChC,UAAU,cAAc,gCAAgC,iCAAiC,CACzF,OAAO,CAAC,0CAA0C,CAAC,CACnD,UAAU,CACV,MAAM,iCAAiC,KAAK,WAAW,CACvD,MAAM,+BAA+B,KAAK,QAAQ,CAClD,MAAM,mBAAmB,KAAK,aAAa,CAC3C,SAAS;EACX,MAAM,gBAAgB,IAAI,IAAI,QAAQ,KAAK,MAAM,EAAE,MAAM,CAAC;EAE1D,MAAM,WAAW,CAAC,GAAG,cAAc,CAAC,QAAQ,MAAM,CAAC,UAAU,IAAI,EAAE,CAAC;AACpE,MAAI,SAAS,SAAS,EACrB,OAAM,KAAK,GACT,WAAW,qBAAqB,CAChC,MAAM,cAAc,KAAK,WAAW,CACpC,MAAM,YAAY,KAAK,QAAQ,CAC/B,MAAM,eAAe,MAAM,SAAS,CACpC,SAAS;EAGZ,MAAM,QAAQ,CAAC,GAAG,UAAU,CAAC,QAAQ,MAAM,CAAC,cAAc,IAAI,EAAE,CAAC;AACjE,MAAI,MAAM,SAAS,EAClB,OAAM,KAAK,GACT,WAAW,qBAAqB,CAChC,OACA,MAAM,KAAK,iBAAiB;GAC3B;GACA,UAAU;GACV;GACA,EAAE,CACH,CACA,YAAY,OAAO,GAAG,WAAW,CAAC,CAClC,SAAS;;CAIb,MAAM,gBAAgB,YAAoB,SAAkC;EAC3E,MAAM,SAAS,MAAM,KAAK,GACxB,WAAW,qBAAqB,CAChC,MAAM,cAAc,KAAK,WAAW,CACpC,MAAM,YAAY,KAAK,QAAQ,CAC/B,kBAAkB;AACpB,SAAO,OAAO,OAAO,kBAAkB,EAAE;;;;;;;CAQ1C,MAAM,eACL,YACA,eACA,eACgB;EAChB,MAAM,OAAO,MAAM,KAAK,GACtB,WAAW,qBAAqB,CAChC,OAAO,CAAC,cAAc,CAAC,CACvB,MAAM,cAAc,KAAK,WAAW,CACpC,MAAM,YAAY,KAAK,cAAc,CACrC,SAAS;AACX,MAAI,KAAK,WAAW,EAAG;AAEvB,QAAM,KAAK,GACT,WAAW,qBAAqB,CAChC,OACA,KAAK,KAAK,OAAO;GAChB;GACA,UAAU;GACV,aAAa,EAAE;GACf,EAAE,CACH,CACA,YAAY,OAAO,GAAG,WAAW,CAAC,CAClC,SAAS;;;;;;CAOZ,MAAM,qBAAqB,eAAwC;EAClE,MAAM,QAAQ,MAAM,KAAK,wBAAwB,cAAc;AAC/D,MAAI,CAAC,MAAO,QAAO;EAEnB,MAAM,SAAS,MAAM,KAAK,GACxB,WAAW,qBAAqB,CAChC,QAAQ,OAAO,GAAG,GAAG,MAAM,WAAW,CAAC,GAAG,QAAQ,CAAC,CACnD,MAAM,eAAe,KAAK,MAAM,CAChC,kBAAkB;AACpB,SAAO,OAAO,QAAQ,SAAS,EAAE;;CAGlC,MAAc,wBAAwB,WAA2C;AAMhF,UALY,MAAM,KAAK,GACrB,WAAW,aAAa,CACxB,OAAO,CAAC,oBAAoB,CAAC,CAC7B,OAAO,OAAO,GAAG,GAAG,CAAC,GAAG,MAAM,KAAK,UAAU,EAAE,GAAG,qBAAqB,KAAK,UAAU,CAAC,CAAC,CAAC,CACzF,kBAAkB,GACR,qBAAqB;;;;;;;;;;CAWlC,MAAM,qBAAqB,mBAA2D;AACrF,MAAI,kBAAkB,WAAW,EAAG,wBAAO,IAAI,KAAK;EAEpD,MAAM,EAAE,QAAQ,mBAAmB,MAAM,OAAO;EAEhD,MAAM,yBAAS,IAAI,KAAqB;AACxC,OAAK,MAAM,SAAS,OAAO,mBAAmB,eAAe,EAAE;GAC9D,MAAM,OAAO,MAAM,KAAK,GACtB,WAAW,qBAAqB,CAChC,OAAO,CAAC,gBAAgB,OAAO,GAAG,GAAG,MAAM,WAAW,CAAC,GAAG,QAAQ,CAAC,CAAC,CACpE,MAAM,eAAe,MAAM,MAAM,CACjC,QAAQ,cAAc,CACtB,SAAS;AAEX,QAAK,MAAM,OAAO,KACjB,QAAO,IAAI,IAAI,aAAa,OAAO,IAAI,SAAS,EAAE,CAAC;;AAGrD,SAAO;;CAGR,AAAQ,cAAc,KAA0C;AAC/D,SAAO;GACN,IAAI,IAAI;GACR,MAAM,IAAI;GACV,MAAM,IAAI;GACV,OAAO,IAAI;GACX,UAAU,IAAI;GACd,MAAM,IAAI,OAAO,KAAK,MAAM,IAAI,KAAK,GAAG;GACxC,QAAQ,IAAI;GACZ,kBAAkB,IAAI;GACtB"}
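The taxonomy sourcemap above embeds the repository's contract: translations of a term share a `translation_group`, and `content_taxonomies.taxonomy_id` stores that group rather than a term id, so one assignment covers every locale. A minimal sketch of that flow; the import path, slugs, locales, and entry id are illustrative assumptions, only the class and method shapes come from the embedded source:

```ts
import { Kysely } from "kysely";
import { TaxonomyRepository } from "./database/repositories/taxonomy.js"; // hypothetical path

async function demo(db: Kysely<any>) {
	const repo = new TaxonomyRepository(db);

	// First term mints its own translation_group (group === id).
	const en = await repo.create({ name: "category", slug: "news", label: "News" });

	// translationOf links the German term into the same group.
	const de = await repo.create({
		name: "category",
		slug: "nachrichten",
		label: "Nachrichten",
		locale: "de",
		translationOf: en.id,
	});
	// en.translationGroup === de.translationGroup === en.id

	// content_taxonomies stores the group, so a single attach spans all locales.
	await repo.attachToEntry("posts", "entry-1", en.id);

	// Reading back in "de" resolves through the shared group to the German sibling.
	const deTerms = await repo.getTermsForEntry("posts", "entry-1", "category", "de");
	// deTerms -> [de]
}
```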
@@ -1,4 +1,4 @@
- import { i as encodeBase64url, n as decodeBase64url } from "./base64-BRICGH2l.mjs";
+ import { i as encodeBase64url, n as decodeBase64url } from "./base64-MBPo9ozB.mjs";
 
  //#region src/preview/tokens.ts
  /**
@@ -168,4 +168,4 @@ function parseContentId(contentId) {
 
  //#endregion
  export { parseContentId as n, verifyPreviewToken as r, generatePreviewToken as t };
- //# sourceMappingURL=tokens-D7zMmWi2.mjs.map
+ //# sourceMappingURL=tokens-CyRDPVW2.mjs.map
@@ -1 +1 @@
- {"version":3,"file":"tokens-D7zMmWi2.mjs","names":[],"sources":["../src/preview/tokens.ts"],"sourcesContent":["/**\n * Preview token generation and verification\n *\n * Tokens are compact, URL-safe, and HMAC-signed.\n * Format: base64url(JSON payload).base64url(HMAC signature)\n *\n * Payload: { cid: contentId, exp: expiryTimestamp, iat: issuedAt }\n */\n\nimport { encodeBase64url, decodeBase64url } from \"../utils/base64.js\";\n\n// Regex pattern for duration parsing\nconst DURATION_PATTERN = /^(\\d+)([smhdw])$/;\n\n/**\n * Preview token payload\n */\nexport interface PreviewTokenPayload {\n\t/** Content ID in format \"collection:id\" (e.g., \"posts:abc123\") */\n\tcid: string;\n\t/** Expiry timestamp (seconds since epoch) */\n\texp: number;\n\t/** Issued at timestamp (seconds since epoch) */\n\tiat: number;\n}\n\n/**\n * Options for generating a preview token\n */\nexport interface GeneratePreviewTokenOptions {\n\t/** Content ID in format \"collection:id\" */\n\tcontentId: string;\n\t/** How long the token is valid. Accepts \"1h\", \"30m\", \"1d\", or seconds as number. Default: \"1h\" */\n\texpiresIn?: string | number;\n\t/** Secret key for signing. Should be from environment variable. */\n\tsecret: string;\n}\n\n/**\n * Parse duration string to seconds\n * Supports: \"1h\", \"30m\", \"1d\", \"2w\", or raw seconds\n */\nfunction parseDuration(duration: string | number): number {\n\tif (typeof duration === \"number\") {\n\t\treturn duration;\n\t}\n\n\tconst match = duration.match(DURATION_PATTERN);\n\tif (!match) {\n\t\tthrow new Error(\n\t\t\t`Invalid duration format: \"${duration}\". Use \"1h\", \"30m\", \"1d\", \"2w\", or seconds.`,\n\t\t);\n\t}\n\n\tconst value = parseInt(match[1], 10);\n\tconst unit = match[2];\n\n\tswitch (unit) {\n\t\tcase \"s\":\n\t\t\treturn value;\n\t\tcase \"m\":\n\t\t\treturn value * 60;\n\t\tcase \"h\":\n\t\t\treturn value * 60 * 60;\n\t\tcase \"d\":\n\t\t\treturn value * 60 * 60 * 24;\n\t\tcase \"w\":\n\t\t\treturn value * 60 * 60 * 24 * 7;\n\t\tdefault:\n\t\t\tthrow new Error(`Unknown duration unit: ${unit}`);\n\t}\n}\n\n/**\n * Create HMAC-SHA256 signature using Web Crypto API\n */\nasync function createSignature(data: string, secret: string): Promise<Uint8Array> {\n\tconst encoder = new TextEncoder();\n\tconst key = await crypto.subtle.importKey(\n\t\t\"raw\",\n\t\tencoder.encode(secret),\n\t\t{ name: \"HMAC\", hash: \"SHA-256\" },\n\t\tfalse,\n\t\t[\"sign\"],\n\t);\n\tconst signature = await crypto.subtle.sign(\"HMAC\", key, encoder.encode(data));\n\treturn new Uint8Array(signature);\n}\n\n/**\n * Verify HMAC-SHA256 signature\n */\nasync function verifySignature(\n\tdata: string,\n\tsignature: Uint8Array,\n\tsecret: string,\n): Promise<boolean> {\n\tconst encoder = new TextEncoder();\n\tconst key = await crypto.subtle.importKey(\n\t\t\"raw\",\n\t\tencoder.encode(secret),\n\t\t{ name: \"HMAC\", hash: \"SHA-256\" },\n\t\tfalse,\n\t\t[\"verify\"],\n\t);\n\t// Create a new ArrayBuffer from the signature to satisfy BufferSource typing\n\t// (Uint8Array.buffer is ArrayBufferLike which includes SharedArrayBuffer)\n\tconst sigBuffer: ArrayBuffer = new ArrayBuffer(signature.byteLength);\n\tnew Uint8Array(sigBuffer).set(signature);\n\treturn crypto.subtle.verify(\"HMAC\", key, sigBuffer, encoder.encode(data));\n}\n\n/**\n * Generate a preview token for content\n *\n * @example\n * ```ts\n * const token = await generatePreviewToken({\n * contentId: \"posts:abc123\",\n * expiresIn: \"1h\",\n * secret: process.env.PREVIEW_SECRET!,\n * });\n * ```\n */\nexport 
async function generatePreviewToken(options: GeneratePreviewTokenOptions): Promise<string> {\n\tconst { contentId, expiresIn = \"1h\", secret } = options;\n\n\tif (!secret) {\n\t\tthrow new Error(\"Preview secret is required\");\n\t}\n\n\tif (!contentId || !contentId.includes(\":\")) {\n\t\tthrow new Error('Content ID must be in format \"collection:id\"');\n\t}\n\n\tconst now = Math.floor(Date.now() / 1000);\n\tconst duration = parseDuration(expiresIn);\n\n\tconst payload: PreviewTokenPayload = {\n\t\tcid: contentId,\n\t\texp: now + duration,\n\t\tiat: now,\n\t};\n\n\t// Encode payload\n\tconst payloadJson = JSON.stringify(payload);\n\tconst encodedPayload = encodeBase64url(new TextEncoder().encode(payloadJson));\n\n\t// Sign it\n\tconst signature = await createSignature(encodedPayload, secret);\n\tconst encodedSignature = encodeBase64url(signature);\n\n\treturn `${encodedPayload}.${encodedSignature}`;\n}\n\n/**\n * Result of verifying a preview token\n */\nexport type VerifyPreviewTokenResult =\n\t| { valid: true; payload: PreviewTokenPayload }\n\t| { valid: false; error: \"invalid\" | \"expired\" | \"malformed\" | \"none\" };\n\n/**\n * Options for verifyPreviewToken\n */\nexport type VerifyPreviewTokenOptions = {\n\t/** Secret key for verifying tokens */\n\tsecret: string;\n} & (\n\t| { /** URL to extract _preview token from */ url: URL }\n\t| {\n\t\t\t/** Preview token string (can be null) */ token: string | null | undefined;\n\t }\n);\n\n/**\n * Verify a preview token and return the payload\n *\n * @example\n * ```ts\n * // With URL (extracts _preview query param)\n * const result = await verifyPreviewToken({\n * url: Astro.url,\n * secret: import.meta.env.PREVIEW_SECRET,\n * });\n *\n * // With token directly\n * const result = await verifyPreviewToken({\n * token: someToken,\n * secret: import.meta.env.PREVIEW_SECRET,\n * });\n *\n * if (result.valid) {\n * console.log(result.payload.cid); // \"posts:abc123\"\n * }\n * ```\n */\nexport async function verifyPreviewToken(\n\toptions: VerifyPreviewTokenOptions,\n): Promise<VerifyPreviewTokenResult> {\n\tconst { secret } = options;\n\n\tif (!secret) {\n\t\tthrow new Error(\"Preview secret is required\");\n\t}\n\n\t// Extract token from URL or use provided token\n\tconst token = \"url\" in options ? 
options.url.searchParams.get(\"_preview\") : options.token;\n\n\t// Handle null/undefined token\n\tif (!token) {\n\t\treturn { valid: false, error: \"none\" };\n\t}\n\n\t// Split token into payload and signature\n\tconst parts = token.split(\".\");\n\tif (parts.length !== 2) {\n\t\treturn { valid: false, error: \"malformed\" };\n\t}\n\n\tconst [encodedPayload, encodedSignature] = parts;\n\n\t// Verify signature\n\tlet signature: Uint8Array;\n\ttry {\n\t\tsignature = decodeBase64url(encodedSignature);\n\t} catch {\n\t\treturn { valid: false, error: \"malformed\" };\n\t}\n\n\tconst isValid = await verifySignature(encodedPayload, signature, secret);\n\tif (!isValid) {\n\t\treturn { valid: false, error: \"invalid\" };\n\t}\n\n\t// Decode and parse payload\n\tlet payload: PreviewTokenPayload;\n\ttry {\n\t\tconst payloadBytes = decodeBase64url(encodedPayload);\n\t\tconst payloadJson = new TextDecoder().decode(payloadBytes);\n\t\tpayload = JSON.parse(payloadJson);\n\t} catch {\n\t\treturn { valid: false, error: \"malformed\" };\n\t}\n\n\t// Check required fields\n\tif (\n\t\ttypeof payload.cid !== \"string\" ||\n\t\ttypeof payload.exp !== \"number\" ||\n\t\ttypeof payload.iat !== \"number\"\n\t) {\n\t\treturn { valid: false, error: \"malformed\" };\n\t}\n\n\t// Check expiry\n\tconst now = Math.floor(Date.now() / 1000);\n\tif (payload.exp < now) {\n\t\treturn { valid: false, error: \"expired\" };\n\t}\n\n\treturn { valid: true, payload };\n}\n\n/**\n * Parse a content ID into collection and id\n */\nexport function parseContentId(contentId: string): {\n\tcollection: string;\n\tid: string;\n} {\n\tconst colonIndex = contentId.indexOf(\":\");\n\tif (colonIndex === -1) {\n\t\tthrow new Error('Content ID must be in format \"collection:id\"');\n\t}\n\treturn {\n\t\tcollection: contentId.slice(0, colonIndex),\n\t\tid: contentId.slice(colonIndex + 
1),\n\t};\n}\n"],"mappings":";;;;;;;;;;;AAYA,MAAM,mBAAmB;;;;;AA8BzB,SAAS,cAAc,UAAmC;AACzD,KAAI,OAAO,aAAa,SACvB,QAAO;CAGR,MAAM,QAAQ,SAAS,MAAM,iBAAiB;AAC9C,KAAI,CAAC,MACJ,OAAM,IAAI,MACT,6BAA6B,SAAS,6CACtC;CAGF,MAAM,QAAQ,SAAS,MAAM,IAAI,GAAG;CACpC,MAAM,OAAO,MAAM;AAEnB,SAAQ,MAAR;EACC,KAAK,IACJ,QAAO;EACR,KAAK,IACJ,QAAO,QAAQ;EAChB,KAAK,IACJ,QAAO,QAAQ,KAAK;EACrB,KAAK,IACJ,QAAO,QAAQ,KAAK,KAAK;EAC1B,KAAK,IACJ,QAAO,QAAQ,KAAK,KAAK,KAAK;EAC/B,QACC,OAAM,IAAI,MAAM,0BAA0B,OAAO;;;;;;AAOpD,eAAe,gBAAgB,MAAc,QAAqC;CACjF,MAAM,UAAU,IAAI,aAAa;CACjC,MAAM,MAAM,MAAM,OAAO,OAAO,UAC/B,OACA,QAAQ,OAAO,OAAO,EACtB;EAAE,MAAM;EAAQ,MAAM;EAAW,EACjC,OACA,CAAC,OAAO,CACR;CACD,MAAM,YAAY,MAAM,OAAO,OAAO,KAAK,QAAQ,KAAK,QAAQ,OAAO,KAAK,CAAC;AAC7E,QAAO,IAAI,WAAW,UAAU;;;;;AAMjC,eAAe,gBACd,MACA,WACA,QACmB;CACnB,MAAM,UAAU,IAAI,aAAa;CACjC,MAAM,MAAM,MAAM,OAAO,OAAO,UAC/B,OACA,QAAQ,OAAO,OAAO,EACtB;EAAE,MAAM;EAAQ,MAAM;EAAW,EACjC,OACA,CAAC,SAAS,CACV;CAGD,MAAM,YAAyB,IAAI,YAAY,UAAU,WAAW;AACpE,KAAI,WAAW,UAAU,CAAC,IAAI,UAAU;AACxC,QAAO,OAAO,OAAO,OAAO,QAAQ,KAAK,WAAW,QAAQ,OAAO,KAAK,CAAC;;;;;;;;;;;;;;AAe1E,eAAsB,qBAAqB,SAAuD;CACjG,MAAM,EAAE,WAAW,YAAY,MAAM,WAAW;AAEhD,KAAI,CAAC,OACJ,OAAM,IAAI,MAAM,6BAA6B;AAG9C,KAAI,CAAC,aAAa,CAAC,UAAU,SAAS,IAAI,CACzC,OAAM,IAAI,MAAM,iDAA+C;CAGhE,MAAM,MAAM,KAAK,MAAM,KAAK,KAAK,GAAG,IAAK;CAGzC,MAAM,UAA+B;EACpC,KAAK;EACL,KAAK,MAJW,cAAc,UAAU;EAKxC,KAAK;EACL;CAGD,MAAM,cAAc,KAAK,UAAU,QAAQ;CAC3C,MAAM,iBAAiB,gBAAgB,IAAI,aAAa,CAAC,OAAO,YAAY,CAAC;AAM7E,QAAO,GAAG,eAAe,GAFA,gBADP,MAAM,gBAAgB,gBAAgB,OAAO,CACZ;;;;;;;;;;;;;;;;;;;;;;;;AA+CpD,eAAsB,mBACrB,SACoC;CACpC,MAAM,EAAE,WAAW;AAEnB,KAAI,CAAC,OACJ,OAAM,IAAI,MAAM,6BAA6B;CAI9C,MAAM,QAAQ,SAAS,UAAU,QAAQ,IAAI,aAAa,IAAI,WAAW,GAAG,QAAQ;AAGpF,KAAI,CAAC,MACJ,QAAO;EAAE,OAAO;EAAO,OAAO;EAAQ;CAIvC,MAAM,QAAQ,MAAM,MAAM,IAAI;AAC9B,KAAI,MAAM,WAAW,EACpB,QAAO;EAAE,OAAO;EAAO,OAAO;EAAa;CAG5C,MAAM,CAAC,gBAAgB,oBAAoB;CAG3C,IAAI;AACJ,KAAI;AACH,cAAY,gBAAgB,iBAAiB;SACtC;AACP,SAAO;GAAE,OAAO;GAAO,OAAO;GAAa;;AAI5C,KAAI,CADY,MAAM,gBAAgB,gBAAgB,WAAW,OAAO,CAEvE,QAAO;EAAE,OAAO;EAAO,OAAO;EAAW;CAI1C,IAAI;AACJ,KAAI;EACH,MAAM,eAAe,gBAAgB,eAAe;EACpD,MAAM,cAAc,IAAI,aAAa,CAAC,OAAO,aAAa;AAC1D,YAAU,KAAK,MAAM,YAAY;SAC1B;AACP,SAAO;GAAE,OAAO;GAAO,OAAO;GAAa;;AAI5C,KACC,OAAO,QAAQ,QAAQ,YACvB,OAAO,QAAQ,QAAQ,YACvB,OAAO,QAAQ,QAAQ,SAEvB,QAAO;EAAE,OAAO;EAAO,OAAO;EAAa;CAI5C,MAAM,MAAM,KAAK,MAAM,KAAK,KAAK,GAAG,IAAK;AACzC,KAAI,QAAQ,MAAM,IACjB,QAAO;EAAE,OAAO;EAAO,OAAO;EAAW;AAG1C,QAAO;EAAE,OAAO;EAAM;EAAS;;;;;AAMhC,SAAgB,eAAe,WAG7B;CACD,MAAM,aAAa,UAAU,QAAQ,IAAI;AACzC,KAAI,eAAe,GAClB,OAAM,IAAI,MAAM,iDAA+C;AAEhE,QAAO;EACN,YAAY,UAAU,MAAM,GAAG,WAAW;EAC1C,IAAI,UAAU,MAAM,aAAa,EAAE;EACnC"}
+ {"version":3,"file":"tokens-CyRDPVW2.mjs","names":[],"sources":["../src/preview/tokens.ts"],"sourcesContent":["/**\n * Preview token generation and verification\n *\n * Tokens are compact, URL-safe, and HMAC-signed.\n * Format: base64url(JSON payload).base64url(HMAC signature)\n *\n * Payload: { cid: contentId, exp: expiryTimestamp, iat: issuedAt }\n */\n\nimport { encodeBase64url, decodeBase64url } from \"../utils/base64.js\";\n\n// Regex pattern for duration parsing\nconst DURATION_PATTERN = /^(\\d+)([smhdw])$/;\n\n/**\n * Preview token payload\n */\nexport interface PreviewTokenPayload {\n\t/** Content ID in format \"collection:id\" (e.g., \"posts:abc123\") */\n\tcid: string;\n\t/** Expiry timestamp (seconds since epoch) */\n\texp: number;\n\t/** Issued at timestamp (seconds since epoch) */\n\tiat: number;\n}\n\n/**\n * Options for generating a preview token\n */\nexport interface GeneratePreviewTokenOptions {\n\t/** Content ID in format \"collection:id\" */\n\tcontentId: string;\n\t/** How long the token is valid. Accepts \"1h\", \"30m\", \"1d\", or seconds as number. Default: \"1h\" */\n\texpiresIn?: string | number;\n\t/** Secret key for signing. Should be from environment variable. */\n\tsecret: string;\n}\n\n/**\n * Parse duration string to seconds\n * Supports: \"1h\", \"30m\", \"1d\", \"2w\", or raw seconds\n */\nfunction parseDuration(duration: string | number): number {\n\tif (typeof duration === \"number\") {\n\t\treturn duration;\n\t}\n\n\tconst match = duration.match(DURATION_PATTERN);\n\tif (!match) {\n\t\tthrow new Error(\n\t\t\t`Invalid duration format: \"${duration}\". Use \"1h\", \"30m\", \"1d\", \"2w\", or seconds.`,\n\t\t);\n\t}\n\n\tconst value = parseInt(match[1], 10);\n\tconst unit = match[2];\n\n\tswitch (unit) {\n\t\tcase \"s\":\n\t\t\treturn value;\n\t\tcase \"m\":\n\t\t\treturn value * 60;\n\t\tcase \"h\":\n\t\t\treturn value * 60 * 60;\n\t\tcase \"d\":\n\t\t\treturn value * 60 * 60 * 24;\n\t\tcase \"w\":\n\t\t\treturn value * 60 * 60 * 24 * 7;\n\t\tdefault:\n\t\t\tthrow new Error(`Unknown duration unit: ${unit}`);\n\t}\n}\n\n/**\n * Create HMAC-SHA256 signature using Web Crypto API\n */\nasync function createSignature(data: string, secret: string): Promise<Uint8Array> {\n\tconst encoder = new TextEncoder();\n\tconst key = await crypto.subtle.importKey(\n\t\t\"raw\",\n\t\tencoder.encode(secret),\n\t\t{ name: \"HMAC\", hash: \"SHA-256\" },\n\t\tfalse,\n\t\t[\"sign\"],\n\t);\n\tconst signature = await crypto.subtle.sign(\"HMAC\", key, encoder.encode(data));\n\treturn new Uint8Array(signature);\n}\n\n/**\n * Verify HMAC-SHA256 signature\n */\nasync function verifySignature(\n\tdata: string,\n\tsignature: Uint8Array,\n\tsecret: string,\n): Promise<boolean> {\n\tconst encoder = new TextEncoder();\n\tconst key = await crypto.subtle.importKey(\n\t\t\"raw\",\n\t\tencoder.encode(secret),\n\t\t{ name: \"HMAC\", hash: \"SHA-256\" },\n\t\tfalse,\n\t\t[\"verify\"],\n\t);\n\t// Create a new ArrayBuffer from the signature to satisfy BufferSource typing\n\t// (Uint8Array.buffer is ArrayBufferLike which includes SharedArrayBuffer)\n\tconst sigBuffer: ArrayBuffer = new ArrayBuffer(signature.byteLength);\n\tnew Uint8Array(sigBuffer).set(signature);\n\treturn crypto.subtle.verify(\"HMAC\", key, sigBuffer, encoder.encode(data));\n}\n\n/**\n * Generate a preview token for content\n *\n * @example\n * ```ts\n * const token = await generatePreviewToken({\n * contentId: \"posts:abc123\",\n * expiresIn: \"1h\",\n * secret: process.env.PREVIEW_SECRET!,\n * });\n * ```\n */\nexport 
async function generatePreviewToken(options: GeneratePreviewTokenOptions): Promise<string> {\n\tconst { contentId, expiresIn = \"1h\", secret } = options;\n\n\tif (!secret) {\n\t\tthrow new Error(\"Preview secret is required\");\n\t}\n\n\tif (!contentId || !contentId.includes(\":\")) {\n\t\tthrow new Error('Content ID must be in format \"collection:id\"');\n\t}\n\n\tconst now = Math.floor(Date.now() / 1000);\n\tconst duration = parseDuration(expiresIn);\n\n\tconst payload: PreviewTokenPayload = {\n\t\tcid: contentId,\n\t\texp: now + duration,\n\t\tiat: now,\n\t};\n\n\t// Encode payload\n\tconst payloadJson = JSON.stringify(payload);\n\tconst encodedPayload = encodeBase64url(new TextEncoder().encode(payloadJson));\n\n\t// Sign it\n\tconst signature = await createSignature(encodedPayload, secret);\n\tconst encodedSignature = encodeBase64url(signature);\n\n\treturn `${encodedPayload}.${encodedSignature}`;\n}\n\n/**\n * Result of verifying a preview token\n */\nexport type VerifyPreviewTokenResult =\n\t| { valid: true; payload: PreviewTokenPayload }\n\t| { valid: false; error: \"invalid\" | \"expired\" | \"malformed\" | \"none\" };\n\n/**\n * Options for verifyPreviewToken\n */\nexport type VerifyPreviewTokenOptions = {\n\t/** Secret key for verifying tokens */\n\tsecret: string;\n} & (\n\t| { /** URL to extract _preview token from */ url: URL }\n\t| {\n\t\t\t/** Preview token string (can be null) */ token: string | null | undefined;\n\t }\n);\n\n/**\n * Verify a preview token and return the payload\n *\n * @example\n * ```ts\n * // With URL (extracts _preview query param)\n * const result = await verifyPreviewToken({\n * url: Astro.url,\n * secret: import.meta.env.PREVIEW_SECRET,\n * });\n *\n * // With token directly\n * const result = await verifyPreviewToken({\n * token: someToken,\n * secret: import.meta.env.PREVIEW_SECRET,\n * });\n *\n * if (result.valid) {\n * console.log(result.payload.cid); // \"posts:abc123\"\n * }\n * ```\n */\nexport async function verifyPreviewToken(\n\toptions: VerifyPreviewTokenOptions,\n): Promise<VerifyPreviewTokenResult> {\n\tconst { secret } = options;\n\n\tif (!secret) {\n\t\tthrow new Error(\"Preview secret is required\");\n\t}\n\n\t// Extract token from URL or use provided token\n\tconst token = \"url\" in options ? 
options.url.searchParams.get(\"_preview\") : options.token;\n\n\t// Handle null/undefined token\n\tif (!token) {\n\t\treturn { valid: false, error: \"none\" };\n\t}\n\n\t// Split token into payload and signature\n\tconst parts = token.split(\".\");\n\tif (parts.length !== 2) {\n\t\treturn { valid: false, error: \"malformed\" };\n\t}\n\n\tconst [encodedPayload, encodedSignature] = parts;\n\n\t// Verify signature\n\tlet signature: Uint8Array;\n\ttry {\n\t\tsignature = decodeBase64url(encodedSignature);\n\t} catch {\n\t\treturn { valid: false, error: \"malformed\" };\n\t}\n\n\tconst isValid = await verifySignature(encodedPayload, signature, secret);\n\tif (!isValid) {\n\t\treturn { valid: false, error: \"invalid\" };\n\t}\n\n\t// Decode and parse payload\n\tlet payload: PreviewTokenPayload;\n\ttry {\n\t\tconst payloadBytes = decodeBase64url(encodedPayload);\n\t\tconst payloadJson = new TextDecoder().decode(payloadBytes);\n\t\tpayload = JSON.parse(payloadJson);\n\t} catch {\n\t\treturn { valid: false, error: \"malformed\" };\n\t}\n\n\t// Check required fields\n\tif (\n\t\ttypeof payload.cid !== \"string\" ||\n\t\ttypeof payload.exp !== \"number\" ||\n\t\ttypeof payload.iat !== \"number\"\n\t) {\n\t\treturn { valid: false, error: \"malformed\" };\n\t}\n\n\t// Check expiry\n\tconst now = Math.floor(Date.now() / 1000);\n\tif (payload.exp < now) {\n\t\treturn { valid: false, error: \"expired\" };\n\t}\n\n\treturn { valid: true, payload };\n}\n\n/**\n * Parse a content ID into collection and id\n */\nexport function parseContentId(contentId: string): {\n\tcollection: string;\n\tid: string;\n} {\n\tconst colonIndex = contentId.indexOf(\":\");\n\tif (colonIndex === -1) {\n\t\tthrow new Error('Content ID must be in format \"collection:id\"');\n\t}\n\treturn {\n\t\tcollection: contentId.slice(0, colonIndex),\n\t\tid: contentId.slice(colonIndex + 
1),\n\t};\n}\n"],"mappings":";;;;;;;;;;;AAYA,MAAM,mBAAmB;;;;;AA8BzB,SAAS,cAAc,UAAmC;AACzD,KAAI,OAAO,aAAa,SACvB,QAAO;CAGR,MAAM,QAAQ,SAAS,MAAM,iBAAiB;AAC9C,KAAI,CAAC,MACJ,OAAM,IAAI,MACT,6BAA6B,SAAS,6CACtC;CAGF,MAAM,QAAQ,SAAS,MAAM,IAAI,GAAG;CACpC,MAAM,OAAO,MAAM;AAEnB,SAAQ,MAAR;EACC,KAAK,IACJ,QAAO;EACR,KAAK,IACJ,QAAO,QAAQ;EAChB,KAAK,IACJ,QAAO,QAAQ,KAAK;EACrB,KAAK,IACJ,QAAO,QAAQ,KAAK,KAAK;EAC1B,KAAK,IACJ,QAAO,QAAQ,KAAK,KAAK,KAAK;EAC/B,QACC,OAAM,IAAI,MAAM,0BAA0B,OAAO;;;;;;AAOpD,eAAe,gBAAgB,MAAc,QAAqC;CACjF,MAAM,UAAU,IAAI,aAAa;CACjC,MAAM,MAAM,MAAM,OAAO,OAAO,UAC/B,OACA,QAAQ,OAAO,OAAO,EACtB;EAAE,MAAM;EAAQ,MAAM;EAAW,EACjC,OACA,CAAC,OAAO,CACR;CACD,MAAM,YAAY,MAAM,OAAO,OAAO,KAAK,QAAQ,KAAK,QAAQ,OAAO,KAAK,CAAC;AAC7E,QAAO,IAAI,WAAW,UAAU;;;;;AAMjC,eAAe,gBACd,MACA,WACA,QACmB;CACnB,MAAM,UAAU,IAAI,aAAa;CACjC,MAAM,MAAM,MAAM,OAAO,OAAO,UAC/B,OACA,QAAQ,OAAO,OAAO,EACtB;EAAE,MAAM;EAAQ,MAAM;EAAW,EACjC,OACA,CAAC,SAAS,CACV;CAGD,MAAM,YAAyB,IAAI,YAAY,UAAU,WAAW;AACpE,KAAI,WAAW,UAAU,CAAC,IAAI,UAAU;AACxC,QAAO,OAAO,OAAO,OAAO,QAAQ,KAAK,WAAW,QAAQ,OAAO,KAAK,CAAC;;;;;;;;;;;;;;AAe1E,eAAsB,qBAAqB,SAAuD;CACjG,MAAM,EAAE,WAAW,YAAY,MAAM,WAAW;AAEhD,KAAI,CAAC,OACJ,OAAM,IAAI,MAAM,6BAA6B;AAG9C,KAAI,CAAC,aAAa,CAAC,UAAU,SAAS,IAAI,CACzC,OAAM,IAAI,MAAM,iDAA+C;CAGhE,MAAM,MAAM,KAAK,MAAM,KAAK,KAAK,GAAG,IAAK;CAGzC,MAAM,UAA+B;EACpC,KAAK;EACL,KAAK,MAJW,cAAc,UAAU;EAKxC,KAAK;EACL;CAGD,MAAM,cAAc,KAAK,UAAU,QAAQ;CAC3C,MAAM,iBAAiB,gBAAgB,IAAI,aAAa,CAAC,OAAO,YAAY,CAAC;AAM7E,QAAO,GAAG,eAAe,GAFA,gBADP,MAAM,gBAAgB,gBAAgB,OAAO,CACZ;;;;;;;;;;;;;;;;;;;;;;;;AA+CpD,eAAsB,mBACrB,SACoC;CACpC,MAAM,EAAE,WAAW;AAEnB,KAAI,CAAC,OACJ,OAAM,IAAI,MAAM,6BAA6B;CAI9C,MAAM,QAAQ,SAAS,UAAU,QAAQ,IAAI,aAAa,IAAI,WAAW,GAAG,QAAQ;AAGpF,KAAI,CAAC,MACJ,QAAO;EAAE,OAAO;EAAO,OAAO;EAAQ;CAIvC,MAAM,QAAQ,MAAM,MAAM,IAAI;AAC9B,KAAI,MAAM,WAAW,EACpB,QAAO;EAAE,OAAO;EAAO,OAAO;EAAa;CAG5C,MAAM,CAAC,gBAAgB,oBAAoB;CAG3C,IAAI;AACJ,KAAI;AACH,cAAY,gBAAgB,iBAAiB;SACtC;AACP,SAAO;GAAE,OAAO;GAAO,OAAO;GAAa;;AAI5C,KAAI,CADY,MAAM,gBAAgB,gBAAgB,WAAW,OAAO,CAEvE,QAAO;EAAE,OAAO;EAAO,OAAO;EAAW;CAI1C,IAAI;AACJ,KAAI;EACH,MAAM,eAAe,gBAAgB,eAAe;EACpD,MAAM,cAAc,IAAI,aAAa,CAAC,OAAO,aAAa;AAC1D,YAAU,KAAK,MAAM,YAAY;SAC1B;AACP,SAAO;GAAE,OAAO;GAAO,OAAO;GAAa;;AAI5C,KACC,OAAO,QAAQ,QAAQ,YACvB,OAAO,QAAQ,QAAQ,YACvB,OAAO,QAAQ,QAAQ,SAEvB,QAAO;EAAE,OAAO;EAAO,OAAO;EAAa;CAI5C,MAAM,MAAM,KAAK,MAAM,KAAK,KAAK,GAAG,IAAK;AACzC,KAAI,QAAQ,MAAM,IACjB,QAAO;EAAE,OAAO;EAAO,OAAO;EAAW;AAG1C,QAAO;EAAE,OAAO;EAAM;EAAS;;;;;AAMhC,SAAgB,eAAe,WAG7B;CACD,MAAM,aAAa,UAAU,QAAQ,IAAI;AACzC,KAAI,eAAe,GAClB,OAAM,IAAI,MAAM,iDAA+C;AAEhE,QAAO;EACN,YAAY,UAAU,MAAM,GAAG,WAAW;EAC1C,IAAI,UAAU,MAAM,aAAa,EAAE;EACnC"}
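The embedded src/preview/tokens.ts above is self-describing: a token is base64url(JSON payload) + "." + base64url(HMAC-SHA256 signature), with payload `{ cid, exp, iat }` in epoch seconds. A round-trip sketch; the import path is an assumption, the function names and option shapes are exactly those in the source:

```ts
import { generatePreviewToken, verifyPreviewToken, parseContentId } from "./preview/tokens.js"; // hypothetical path

const secret = process.env.PREVIEW_SECRET!; // an empty secret makes both functions throw

const token = await generatePreviewToken({
	contentId: "posts:abc123", // must contain ":" (collection:id)
	expiresIn: "30m",          // "1h" | "30m" | "1d" | "2w" | raw seconds; default "1h"
	secret,
});

const result = await verifyPreviewToken({ token, secret });
if (result.valid) {
	const { collection, id } = parseContentId(result.payload.cid);
	// collection === "posts", id === "abc123"
} else {
	// result.error: "none" (no token) | "malformed" | "invalid" (bad signature) | "expired"
}
```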
@@ -25,4 +25,4 @@ async function withTransaction(db, fn) {
 
  //#endregion
  export { withTransaction as t };
- //# sourceMappingURL=transaction-Cn2rjY78.mjs.map
+ //# sourceMappingURL=transaction-D44LBXvU.mjs.map
@@ -1 +1 @@
- {"version":3,"file":"transaction-Cn2rjY78.mjs","names":[],"sources":["../src/database/transaction.ts"],"sourcesContent":["/**\n * Transaction utility for D1 compatibility\n *\n * D1 (via kysely-d1) does not support transactions. On workerd, the error\n * from beginTransaction() crosses request contexts and can hang the worker.\n *\n * This utility provides a drop-in replacement that runs the callback directly\n * against the db instance when transactions are unavailable. D1 is single-writer\n * so atomicity is not a concern for individual statements — multi-statement\n * atomicity is lost, but that's a known D1 limitation.\n *\n * Usage:\n * import { withTransaction } from \"../database/transaction.js\";\n * const result = await withTransaction(db, async (trx) => { ... });\n */\n\nimport type { Kysely, Transaction } from \"kysely\";\n\n/**\n * Run a callback inside a transaction if supported, or directly if not.\n *\n * Probes the database once on first call to determine if transactions work.\n * The result is cached for the lifetime of the process/worker.\n */\nlet transactionsSupported: boolean | null = null;\nconst TRANSACTIONS_NOT_SUPPORTED_RE = /transactions are not supported/i;\n\nexport async function withTransaction<DB, T>(\n\tdb: Kysely<DB>,\n\tfn: (trx: Kysely<DB> | Transaction<DB>) => Promise<T>,\n): Promise<T> {\n\t// Fast path: we already know transactions work\n\tif (transactionsSupported === true) {\n\t\treturn db.transaction().execute(fn);\n\t}\n\n\t// Fast path: we already know they don't\n\tif (transactionsSupported === false) {\n\t\treturn fn(db);\n\t}\n\n\t// First call: probe\n\ttry {\n\t\tconst result = await db.transaction().execute(fn);\n\t\ttransactionsSupported = true;\n\t\treturn result;\n\t} catch (error) {\n\t\tif (error instanceof Error && TRANSACTIONS_NOT_SUPPORTED_RE.test(error.message)) {\n\t\t\ttransactionsSupported = false;\n\t\t\treturn fn(db);\n\t\t}\n\t\tthrow error;\n\t}\n}\n"],"mappings":";;;;;;;AAwBA,IAAI,wBAAwC;AAC5C,MAAM,gCAAgC;AAEtC,eAAsB,gBACrB,IACA,IACa;AAEb,KAAI,0BAA0B,KAC7B,QAAO,GAAG,aAAa,CAAC,QAAQ,GAAG;AAIpC,KAAI,0BAA0B,MAC7B,QAAO,GAAG,GAAG;AAId,KAAI;EACH,MAAM,SAAS,MAAM,GAAG,aAAa,CAAC,QAAQ,GAAG;AACjD,0BAAwB;AACxB,SAAO;UACC,OAAO;AACf,MAAI,iBAAiB,SAAS,8BAA8B,KAAK,MAAM,QAAQ,EAAE;AAChF,2BAAwB;AACxB,UAAO,GAAG,GAAG;;AAEd,QAAM"}
+ {"version":3,"file":"transaction-D44LBXvU.mjs","names":[],"sources":["../src/database/transaction.ts"],"sourcesContent":["/**\n * Transaction utility for D1 compatibility\n *\n * D1 (via kysely-d1) does not support transactions. On workerd, the error\n * from beginTransaction() crosses request contexts and can hang the worker.\n *\n * This utility provides a drop-in replacement that runs the callback directly\n * against the db instance when transactions are unavailable. D1 is single-writer\n * so atomicity is not a concern for individual statements — multi-statement\n * atomicity is lost, but that's a known D1 limitation.\n *\n * Usage:\n * import { withTransaction } from \"../database/transaction.js\";\n * const result = await withTransaction(db, async (trx) => { ... });\n */\n\nimport type { Kysely, Transaction } from \"kysely\";\n\n/**\n * Run a callback inside a transaction if supported, or directly if not.\n *\n * Probes the database once on first call to determine if transactions work.\n * The result is cached for the lifetime of the process/worker.\n */\nlet transactionsSupported: boolean | null = null;\nconst TRANSACTIONS_NOT_SUPPORTED_RE = /transactions are not supported/i;\n\nexport async function withTransaction<DB, T>(\n\tdb: Kysely<DB>,\n\tfn: (trx: Kysely<DB> | Transaction<DB>) => Promise<T>,\n): Promise<T> {\n\t// Fast path: we already know transactions work\n\tif (transactionsSupported === true) {\n\t\treturn db.transaction().execute(fn);\n\t}\n\n\t// Fast path: we already know they don't\n\tif (transactionsSupported === false) {\n\t\treturn fn(db);\n\t}\n\n\t// First call: probe\n\ttry {\n\t\tconst result = await db.transaction().execute(fn);\n\t\ttransactionsSupported = true;\n\t\treturn result;\n\t} catch (error) {\n\t\tif (error instanceof Error && TRANSACTIONS_NOT_SUPPORTED_RE.test(error.message)) {\n\t\t\ttransactionsSupported = false;\n\t\t\treturn fn(db);\n\t\t}\n\t\tthrow error;\n\t}\n}\n"],"mappings":";;;;;;;AAwBA,IAAI,wBAAwC;AAC5C,MAAM,gCAAgC;AAEtC,eAAsB,gBACrB,IACA,IACa;AAEb,KAAI,0BAA0B,KAC7B,QAAO,GAAG,aAAa,CAAC,QAAQ,GAAG;AAIpC,KAAI,0BAA0B,MAC7B,QAAO,GAAG,GAAG;AAId,KAAI;EACH,MAAM,SAAS,MAAM,GAAG,aAAa,CAAC,QAAQ,GAAG;AACjD,0BAAwB;AACxB,SAAO;UACC,OAAO;AACf,MAAI,iBAAiB,SAAS,8BAA8B,KAAK,MAAM,QAAQ,EAAE;AAChF,2BAAwB;AACxB,UAAO,GAAG,GAAG;;AAEd,QAAM"}
@@ -39,4 +39,4 @@ declare function tokenInterceptor(token: string): Interceptor;
  declare function devBypassInterceptor(baseUrl: string): Interceptor;
  //#endregion
  export { tokenInterceptor as a, devBypassInterceptor as i, createTransport as n, csrfInterceptor as r, Interceptor as t };
- //# sourceMappingURL=transport-DNEfeMaU.d.mts.map
+ //# sourceMappingURL=transport-DX_5rpsq.d.mts.map
@@ -1 +1 @@
- {"version":3,"file":"transport-DNEfeMaU.d.mts","names":[],"sources":["../src/client/transport.ts"],"mappings":";;AAeA;;;;;;;;;;KAAY,WAAA,IACX,OAAA,EAAS,OAAA,EACT,IAAA,GAAO,OAAA,EAAS,OAAA,KAAY,OAAA,CAAQ,QAAA,MAChC,OAAA,CAAQ,QAAA;AAAA,UAEI,gBAAA;EAChB,YAAA,GAAe,WAAA;AAAA;;;;iBAUA,eAAA,CAAgB,OAAA,GAAS,gBAAA;EACxC,KAAA,GAAQ,OAAA,EAAS,OAAA,KAAY,OAAA,CAAQ,QAAA;AAAA;;;AAZtC;;;;iBAqCgB,eAAA,CAAA,GAAmB,WAAA;AA1BnC;;;AAAA,iBA8CgB,gBAAA,CAAiB,KAAA,WAAgB,WAAA;;;;;;iBAajC,oBAAA,CAAqB,OAAA,WAAkB,WAAA"}
+ {"version":3,"file":"transport-DX_5rpsq.d.mts","names":[],"sources":["../src/client/transport.ts"],"mappings":";;AAeA;;;;;;;;;;KAAY,WAAA,IACX,OAAA,EAAS,OAAA,EACT,IAAA,GAAO,OAAA,EAAS,OAAA,KAAY,OAAA,CAAQ,QAAA,MAChC,OAAA,CAAQ,QAAA;AAAA,UAEI,gBAAA;EAChB,YAAA,GAAe,WAAA;AAAA;;;;iBAUA,eAAA,CAAgB,OAAA,GAAS,gBAAA;EACxC,KAAA,GAAQ,OAAA,EAAS,OAAA,KAAY,OAAA,CAAQ,QAAA;AAAA;;;AAZtC;;;;iBAqCgB,eAAA,CAAA,GAAmB,WAAA;AA1BnC;;;AAAA,iBA8CgB,gBAAA,CAAiB,KAAA,WAAgB,WAAA;;;;;;iBAajC,oBAAA,CAAqB,OAAA,WAAkB,WAAA"}
@@ -416,4 +416,4 @@ function refreshInterceptor(options) {
 
  //#endregion
  export { tokenInterceptor as a, markdownToPortableText as c, refreshInterceptor as i, portableTextToMarkdown as l, csrfInterceptor as n, convertDataForRead as o, devBypassInterceptor as r, convertDataForWrite as s, createTransport as t };
- //# sourceMappingURL=transport-BeMCmin1.mjs.map
+ //# sourceMappingURL=transport-xpzIjCIB.mjs.map
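This hunk only renames the bundle's sourcemap, but its @@ header points at `refreshInterceptor`: on a 401 it exchanges the refresh token (one in-flight refresh is shared across concurrent requests) and retries once with the new access token. Ordering matters when combining it with `tokenInterceptor`, which sets Authorization unconditionally: if the static token interceptor sat inside the refresh interceptor, it would clobber the retried request's fresh token. A wiring sketch; the import path, tokens, and endpoint URL are illustrative:

```ts
import { createTransport, refreshInterceptor, tokenInterceptor } from "./client/transport.js"; // hypothetical path

const transport = createTransport({
	interceptors: [
		// Outermost: stamps the (possibly stale) access token on every request.
		tokenInterceptor("initial-access-token"),
		// Inner: sees the 401, refreshes, and retries toward the base fetch with
		// the new token, bypassing the static tokenInterceptor above.
		refreshInterceptor({
			refreshToken: "initial-refresh-token",
			tokenEndpoint: "https://example.com/oauth/token", // illustrative endpoint
			onTokenRefreshed(access, refresh, expiresAt) {
				// Persist the rotated pair so the next transport starts fresh.
			},
		}),
	],
});
```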
@@ -1 +1 @@
- {"version":3,"file":"transport-BeMCmin1.mjs","names":[],"sources":["../src/client/portable-text.ts","../src/client/transport.ts"],"sourcesContent":["/**\n * Portable Text <-> Markdown conversion layer.\n *\n * Three tiers of block handling:\n * Tier 1: Standard PT blocks <-> standard Markdown (headings, paragraphs, lists, etc.)\n * Tier 2: EmDash custom blocks <-> Markdown directives (future)\n * Tier 3: Unknown blocks <-> opaque HTML comment fences (preserved, not editable)\n */\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\n/** Minimal Portable Text block shape */\nexport interface PortableTextBlock {\n\t_type: string;\n\t_key?: string;\n\tstyle?: string;\n\tlevel?: number;\n\tlistItem?: string;\n\tmarkDefs?: MarkDef[];\n\tchildren?: PortableTextSpan[];\n\t[key: string]: unknown;\n}\n\ninterface PortableTextSpan {\n\t_type: string;\n\t_key?: string;\n\ttext?: string;\n\tmarks?: string[];\n\t[key: string]: unknown;\n}\n\ninterface MarkDef {\n\t_key: string;\n\t_type: string;\n\thref?: string;\n\t[key: string]: unknown;\n}\n\ninterface ParsedInline {\n\tspans: PortableTextSpan[];\n\tmarkDefs: MarkDef[];\n}\n\n// ---------------------------------------------------------------------------\n// PT -> Markdown\n// ---------------------------------------------------------------------------\n\n/**\n * Convert Portable Text blocks to Markdown.\n * Unknown block types are serialized as opaque fences.\n */\nexport function portableTextToMarkdown(blocks: PortableTextBlock[]): string {\n\tconst lines: string[] = [];\n\tlet prevWasList = false;\n\n\tfor (let i = 0; i < blocks.length; i++) {\n\t\tconst block = blocks[i];\n\n\t\tif (block._type === \"block\") {\n\t\t\tconst isList = !!block.listItem;\n\n\t\t\t// Blank line between non-contiguous block types\n\t\t\tif (i > 0 && (!isList || !prevWasList)) {\n\t\t\t\tlines.push(\"\");\n\t\t\t}\n\n\t\t\tlines.push(renderStandardBlock(block));\n\t\t\tprevWasList = isList;\n\t\t} else if (block._type === \"code\") {\n\t\t\tif (i > 0) lines.push(\"\");\n\t\t\tconst lang = (block.language as string) || \"\";\n\t\t\tconst code = (block.code as string) || \"\";\n\t\t\tlines.push(\"```\" + lang);\n\t\t\tlines.push(code);\n\t\t\tlines.push(\"```\");\n\t\t\tprevWasList = false;\n\t\t} else if (block._type === \"image\") {\n\t\t\tif (i > 0) lines.push(\"\");\n\t\t\tconst alt = (block.alt as string) || \"\";\n\t\t\tconst url = (block.asset as { url?: string })?.url || \"\";\n\t\t\tlines.push(`![${alt}](${url})`);\n\t\t\tprevWasList = false;\n\t\t} else {\n\t\t\t// Tier 3: Unknown block -> opaque fence\n\t\t\tif (i > 0) lines.push(\"\");\n\t\t\tlines.push(`<!--ec:block ${JSON.stringify(block)} -->`);\n\t\t\tprevWasList = false;\n\t\t}\n\t}\n\n\treturn lines.join(\"\\n\") + \"\\n\";\n}\n\nfunction renderStandardBlock(block: PortableTextBlock): string {\n\tconst text = renderSpans(block.children ?? [], block.markDefs ?? []);\n\n\t// List items\n\tif (block.listItem) {\n\t\tconst indent = \" \".repeat(Math.max(0, (block.level ?? 1) - 1));\n\t\tconst marker = block.listItem === \"number\" ? 
\"1.\" : \"-\";\n\t\treturn `${indent}${marker} ${text}`;\n\t}\n\n\t// Headings\n\tif (block.style && block.style.startsWith(\"h\")) {\n\t\tconst level = parseInt(block.style.substring(1), 10);\n\t\tif (level >= 1 && level <= 6) {\n\t\t\treturn `${\"#\".repeat(level)} ${text}`;\n\t\t}\n\t}\n\n\t// Blockquote\n\tif (block.style === \"blockquote\") {\n\t\treturn `> ${text}`;\n\t}\n\n\treturn text;\n}\n\nfunction renderSpans(spans: PortableTextSpan[], markDefs: MarkDef[]): string {\n\tlet result = \"\";\n\n\tfor (const span of spans) {\n\t\tif (span._type !== \"span\") continue;\n\n\t\tlet text = span.text ?? \"\";\n\t\tconst marks = span.marks ?? [];\n\n\t\tfor (const mark of marks) {\n\t\t\tconst def = markDefs.find((d) => d._key === mark);\n\t\t\tif (def) {\n\t\t\t\tif (def._type === \"link\") {\n\t\t\t\t\ttext = `[${text}](${def.href ?? \"\"})`;\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tswitch (mark) {\n\t\t\t\t\tcase \"strong\":\n\t\t\t\t\t\ttext = `**${text}**`;\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase \"em\":\n\t\t\t\t\t\ttext = `_${text}_`;\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase \"code\":\n\t\t\t\t\t\ttext = `\\`${text}\\``;\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase \"strike-through\":\n\t\t\t\t\tcase \"strikethrough\":\n\t\t\t\t\t\ttext = `~~${text}~~`;\n\t\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tresult += text;\n\t}\n\n\treturn result;\n}\n\n// ---------------------------------------------------------------------------\n// Markdown -> PT\n// ---------------------------------------------------------------------------\n\n// Regex patterns for markdown parsing\nconst OPAQUE_FENCE_PATTERN = /^<!--ec:block (.+) -->$/;\nconst HEADING_PATTERN = /^(#{1,6})\\s+(.+)$/;\nconst UNORDERED_LIST_PATTERN = /^(\\s*)[-*+]\\s+(.+)$/;\nconst ORDERED_LIST_PATTERN = /^(\\s*)\\d+\\.\\s+(.+)$/;\nconst IMAGE_PATTERN = /^!\\[([^\\]]*)\\]\\(([^)]+)\\)$/;\nconst INLINE_MARKDOWN_PATTERN =\n\t/(\\*\\*(.+?)\\*\\*)|(_(.+?)_)|(`(.+?)`)|(\\[(.+?)\\]\\((.+?)\\))|(~~(.+?)~~)/g;\n\n/**\n * Convert Markdown to Portable Text blocks.\n * Opaque fences (<!--ec:block ... 
-->) are deserialized and spliced back in.\n */\nexport function markdownToPortableText(markdown: string): PortableTextBlock[] {\n\tconst blocks: PortableTextBlock[] = [];\n\tconst lines = markdown.split(\"\\n\");\n\tlet i = 0;\n\n\twhile (i < lines.length) {\n\t\tconst line = lines[i];\n\n\t\t// Opaque fence\n\t\tconst opaqueMatch = line.match(OPAQUE_FENCE_PATTERN);\n\t\tif (opaqueMatch) {\n\t\t\ttry {\n\t\t\t\tblocks.push(JSON.parse(opaqueMatch[1]) as PortableTextBlock);\n\t\t\t} catch {\n\t\t\t\tblocks.push(makeBlock(line));\n\t\t\t}\n\t\t\ti++;\n\t\t\tcontinue;\n\t\t}\n\n\t\t// Code fence\n\t\tif (line.startsWith(\"```\")) {\n\t\t\tconst lang = line.slice(3).trim();\n\t\t\tconst codeLines: string[] = [];\n\t\t\ti++;\n\t\t\twhile (i < lines.length && !lines[i].startsWith(\"```\")) {\n\t\t\t\tcodeLines.push(lines[i]);\n\t\t\t\ti++;\n\t\t\t}\n\t\t\tblocks.push({\n\t\t\t\t_type: \"code\",\n\t\t\t\t_key: generateKey(),\n\t\t\t\tlanguage: lang || undefined,\n\t\t\t\tcode: codeLines.join(\"\\n\"),\n\t\t\t});\n\t\t\ti++; // skip closing ```\n\t\t\tcontinue;\n\t\t}\n\n\t\t// Blank line\n\t\tif (line.trim() === \"\") {\n\t\t\ti++;\n\t\t\tcontinue;\n\t\t}\n\n\t\t// Heading\n\t\tconst headingMatch = line.match(HEADING_PATTERN);\n\t\tif (headingMatch) {\n\t\t\tblocks.push(makeBlock(headingMatch[2], `h${headingMatch[1].length}`));\n\t\t\ti++;\n\t\t\tcontinue;\n\t\t}\n\n\t\t// Blockquote\n\t\tif (line.startsWith(\"> \")) {\n\t\t\tblocks.push(makeBlock(line.slice(2), \"blockquote\"));\n\t\t\ti++;\n\t\t\tcontinue;\n\t\t}\n\n\t\t// Unordered list\n\t\tconst ulMatch = line.match(UNORDERED_LIST_PATTERN);\n\t\tif (ulMatch) {\n\t\t\tconst level = Math.floor(ulMatch[1].length / 2) + 1;\n\t\t\tblocks.push(makeListBlock(ulMatch[2], \"bullet\", level));\n\t\t\ti++;\n\t\t\tcontinue;\n\t\t}\n\n\t\t// Ordered list\n\t\tconst olMatch = line.match(ORDERED_LIST_PATTERN);\n\t\tif (olMatch) {\n\t\t\tconst level = Math.floor(olMatch[1].length / 2) + 1;\n\t\t\tblocks.push(makeListBlock(olMatch[2], \"number\", level));\n\t\t\ti++;\n\t\t\tcontinue;\n\t\t}\n\n\t\t// Image\n\t\tconst imgMatch = line.match(IMAGE_PATTERN);\n\t\tif (imgMatch) {\n\t\t\tblocks.push({\n\t\t\t\t_type: \"image\",\n\t\t\t\t_key: generateKey(),\n\t\t\t\talt: imgMatch[1],\n\t\t\t\tasset: { url: imgMatch[2] },\n\t\t\t});\n\t\t\ti++;\n\t\t\tcontinue;\n\t\t}\n\n\t\t// Paragraph\n\t\tblocks.push(makeBlock(line));\n\t\ti++;\n\t}\n\n\treturn blocks;\n}\n\n// ---------------------------------------------------------------------------\n// Block builders\n// ---------------------------------------------------------------------------\n\nfunction makeBlock(text: string, style: string = \"normal\"): PortableTextBlock {\n\tconst { spans, markDefs } = parseInline(text);\n\treturn { _type: \"block\", _key: generateKey(), style, markDefs, children: spans };\n}\n\nfunction makeListBlock(text: string, listItem: string, level: number): PortableTextBlock {\n\tconst { spans, markDefs } = parseInline(text);\n\treturn {\n\t\t_type: \"block\",\n\t\t_key: generateKey(),\n\t\tstyle: \"normal\",\n\t\tlistItem,\n\t\tlevel,\n\t\tmarkDefs,\n\t\tchildren: spans,\n\t};\n}\n\n/**\n * Parse inline markdown (bold, italic, code, links, strikethrough) into PT spans + markDefs.\n */\nfunction parseInline(text: string): ParsedInline {\n\tconst spans: PortableTextSpan[] = [];\n\tconst markDefs: MarkDef[] = [];\n\tconst regex = INLINE_MARKDOWN_PATTERN;\n\n\tlet lastIndex = 0;\n\tlet match: RegExpExecArray | null;\n\n\twhile ((match = regex.exec(text)) !== null) {\n\t\tif (match.index > 
lastIndex) {\n\t\t\tspans.push({\n\t\t\t\t_type: \"span\",\n\t\t\t\t_key: generateKey(),\n\t\t\t\ttext: text.slice(lastIndex, match.index),\n\t\t\t\tmarks: [],\n\t\t\t});\n\t\t}\n\n\t\tif (match[2] != null) {\n\t\t\tspans.push({ _type: \"span\", _key: generateKey(), text: match[2], marks: [\"strong\"] });\n\t\t} else if (match[4] != null) {\n\t\t\tspans.push({ _type: \"span\", _key: generateKey(), text: match[4], marks: [\"em\"] });\n\t\t} else if (match[6] != null) {\n\t\t\tspans.push({ _type: \"span\", _key: generateKey(), text: match[6], marks: [\"code\"] });\n\t\t} else if (match[8] != null && match[9] != null) {\n\t\t\tconst key = generateKey();\n\t\t\tmarkDefs.push({ _key: key, _type: \"link\", href: match[9] });\n\t\t\tspans.push({ _type: \"span\", _key: generateKey(), text: match[8], marks: [key] });\n\t\t} else if (match[11] != null) {\n\t\t\tspans.push({\n\t\t\t\t_type: \"span\",\n\t\t\t\t_key: generateKey(),\n\t\t\t\ttext: match[11],\n\t\t\t\tmarks: [\"strike-through\"],\n\t\t\t});\n\t\t}\n\n\t\tlastIndex = match.index + match[0].length;\n\t}\n\n\tif (lastIndex < text.length) {\n\t\tspans.push({ _type: \"span\", _key: generateKey(), text: text.slice(lastIndex), marks: [] });\n\t}\n\n\tif (spans.length === 0) {\n\t\tspans.push({ _type: \"span\", _key: generateKey(), text, marks: [] });\n\t}\n\n\treturn { spans, markDefs };\n}\n\n// ---------------------------------------------------------------------------\n// Key generation\n// ---------------------------------------------------------------------------\n\nlet keyCounter = 0;\n\nfunction generateKey(): string {\n\treturn `k${(keyCounter++).toString(36)}`;\n}\n\n/** Reset key counter (useful for testing) */\nexport function resetKeyCounter(): void {\n\tkeyCounter = 0;\n}\n\n// ---------------------------------------------------------------------------\n// Schema-aware conversion helpers\n// ---------------------------------------------------------------------------\n\nexport interface FieldSchema {\n\tslug: string;\n\ttype: string;\n}\n\n/**\n * Convert content data for reading: PT fields -> markdown strings.\n * Only converts fields with type \"portableText\" that contain arrays.\n */\nexport function convertDataForRead(\n\tdata: Record<string, unknown>,\n\tfields: FieldSchema[],\n\traw: boolean = false,\n): Record<string, unknown> {\n\tif (raw) return data;\n\n\tconst result = { ...data };\n\tfor (const field of fields) {\n\t\tif (field.type === \"portableText\" && Array.isArray(result[field.slug])) {\n\t\t\tresult[field.slug] = portableTextToMarkdown(result[field.slug] as PortableTextBlock[]);\n\t\t}\n\t}\n\treturn result;\n}\n\n/**\n * Convert content data for writing: markdown strings -> PT arrays.\n * Only converts fields with type \"portableText\" that contain strings.\n */\nexport function convertDataForWrite(\n\tdata: Record<string, unknown>,\n\tfields: FieldSchema[],\n): Record<string, unknown> {\n\tconst result = { ...data };\n\tfor (const field of fields) {\n\t\tif (field.type === \"portableText\" && typeof result[field.slug] === \"string\") {\n\t\t\tresult[field.slug] = markdownToPortableText(result[field.slug] as string);\n\t\t}\n\t}\n\treturn result;\n}\n","/**\n * Transport layer for the EmDash client.\n *\n * Implements a composable interceptor pipeline that modifies requests\n * and responses. 
The client calls `transport.fetch(request)` — everything\n * else (auth, CSRF, retry) is handled by interceptors.\n */\n\n// Regex patterns for transport utilities\nconst COOKIE_NAME_VALUE_PATTERN = /^([^;]+)/;\n\n/**\n * An interceptor can modify the request, call next(), inspect\n * the response, and optionally retry.\n */\nexport type Interceptor = (\n\trequest: Request,\n\tnext: (request: Request) => Promise<Response>,\n) => Promise<Response>;\n\nexport interface TransportOptions {\n\tinterceptors?: Interceptor[];\n}\n\nfunction baseFetch(request: Request): Promise<Response> {\n\treturn globalThis.fetch(request);\n}\n\n/**\n * Creates a fetch function that runs requests through an interceptor pipeline.\n */\nexport function createTransport(options: TransportOptions = {}): {\n\tfetch: (request: Request) => Promise<Response>;\n} {\n\tconst interceptors = options.interceptors ?? [];\n\n\t// Build the chain once — interceptors don't change after construction\n\tlet chain: (request: Request) => Promise<Response> = baseFetch;\n\tfor (let i = interceptors.length - 1; i >= 0; i--) {\n\t\tconst interceptor = interceptors[i];\n\t\tconst next = chain;\n\t\tchain = (req) => interceptor(req, next);\n\t}\n\n\treturn { fetch: chain };\n}\n\n// ---------------------------------------------------------------------------\n// Built-in interceptors\n// ---------------------------------------------------------------------------\n\n/**\n * Adds X-EmDash-Request: 1 and Origin headers to mutation requests\n * (POST, PUT, DELETE). The custom header satisfies EmDash's CSRF check;\n * the Origin header satisfies Astro's built-in origin verification which\n * rejects server-side POST requests that lack a matching Origin.\n */\nexport function csrfInterceptor(): Interceptor {\n\tconst MUTATION_METHODS = new Set([\"POST\", \"PUT\", \"DELETE\", \"PATCH\"]);\n\n\treturn (request, next) => {\n\t\tif (MUTATION_METHODS.has(request.method)) {\n\t\t\tconst headers = new Headers(request.headers);\n\t\t\theaders.set(\"X-EmDash-Request\", \"1\");\n\t\t\tif (!headers.has(\"Origin\")) {\n\t\t\t\tconst url = new URL(request.url);\n\t\t\t\theaders.set(\"Origin\", url.origin);\n\t\t\t}\n\t\t\treturn next(new Request(request, { headers }));\n\t\t}\n\t\treturn next(request);\n\t};\n}\n\n/**\n * Adds Authorization: Bearer header from a static token.\n */\nexport function tokenInterceptor(token: string): Interceptor {\n\treturn (request, next) => {\n\t\tconst headers = new Headers(request.headers);\n\t\theaders.set(\"Authorization\", `Bearer ${token}`);\n\t\treturn next(new Request(request, { headers }));\n\t};\n}\n\n/**\n * Dev bypass interceptor. 
Calls the dev-bypass endpoint on first request\n * to establish a session, then forwards the session cookie on subsequent\n * requests.\n */\nexport function devBypassInterceptor(baseUrl: string): Interceptor {\n\tlet sessionCookie: string | null = null;\n\tlet initializing: Promise<void> | null = null;\n\n\tasync function init(): Promise<void> {\n\t\tconst bypassUrl = new URL(\"/_emdash/api/auth/dev-bypass\", baseUrl);\n\t\tconst res = await globalThis.fetch(bypassUrl, { redirect: \"manual\" });\n\n\t\t// Extract session cookie from Set-Cookie header\n\t\tconst setCookie = res.headers.get(\"set-cookie\");\n\t\tif (setCookie) {\n\t\t\t// Extract just the cookie name=value part\n\t\t\tconst match = setCookie.match(COOKIE_NAME_VALUE_PATTERN);\n\t\t\tif (match) {\n\t\t\t\tsessionCookie = match[1]!;\n\t\t\t}\n\t\t}\n\n\t\t// Consume the response body\n\t\tif (res.body) {\n\t\t\tawait res.text().catch(() => {});\n\t\t}\n\t}\n\n\treturn async (request, next) => {\n\t\t// Ensure we've initialized (only once, even with concurrent requests)\n\t\tif (!sessionCookie) {\n\t\t\tif (!initializing) {\n\t\t\t\tinitializing = init();\n\t\t\t}\n\t\t\tawait initializing;\n\t\t}\n\n\t\tif (sessionCookie) {\n\t\t\tconst headers = new Headers(request.headers);\n\t\t\tconst existing = headers.get(\"cookie\");\n\t\t\theaders.set(\"cookie\", existing ? `${existing}; ${sessionCookie}` : sessionCookie);\n\t\t\treturn next(new Request(request, { headers }));\n\t\t}\n\n\t\treturn next(request);\n\t};\n}\n\n/**\n * Auto-refreshes expired OAuth tokens on 401 responses.\n * Requires a refresh token and the token endpoint URL.\n */\nexport function refreshInterceptor(options: {\n\trefreshToken: string;\n\ttokenEndpoint: string;\n\tonTokenRefreshed?: (accessToken: string, refreshToken: string, expiresAt: string) => void;\n}): Interceptor {\n\tlet refreshing: Promise<string | null> | null = null;\n\n\tasync function refresh(): Promise<string | null> {\n\t\tconst res = await globalThis.fetch(options.tokenEndpoint, {\n\t\t\tmethod: \"POST\",\n\t\t\theaders: { \"Content-Type\": \"application/json\" },\n\t\t\tbody: JSON.stringify({\n\t\t\t\tgrant_type: \"refresh_token\",\n\t\t\t\trefresh_token: options.refreshToken,\n\t\t\t}),\n\t\t});\n\n\t\tif (!res.ok) return null;\n\n\t\tinterface TokenFields {\n\t\t\taccess_token: string;\n\t\t\trefresh_token?: string;\n\t\t\texpires_in?: number;\n\t\t}\n\n\t\tconst json = (await res.json()) as Record<string, unknown>;\n\n\t\t// The token endpoint wraps the response in { data: ... } via apiSuccess.\n\t\t// Handle both wrapped and bare shapes for robustness.\n\t\tconst tokenData: TokenFields =\n\t\t\tjson.data && typeof json.data === \"object\" && \"access_token\" in json.data\n\t\t\t\t? (json.data as TokenFields)\n\t\t\t\t: (json as unknown as TokenFields);\n\n\t\tconst expiresAt = tokenData.expires_in\n\t\t\t? new Date(Date.now() + tokenData.expires_in * 1000).toISOString()\n\t\t\t: new Date(Date.now() + 3600_000).toISOString();\n\n\t\tif (options.onTokenRefreshed) {\n\t\t\toptions.onTokenRefreshed(\n\t\t\t\ttokenData.access_token,\n\t\t\t\ttokenData.refresh_token ?? 
options.refreshToken,\n\t\t\t\texpiresAt,\n\t\t\t);\n\t\t}\n\n\t\treturn tokenData.access_token;\n\t}\n\n\treturn async (request, next) => {\n\t\tconst response = await next(request);\n\n\t\tif (response.status === 401) {\n\t\t\t// Try to refresh\n\t\t\tif (!refreshing) {\n\t\t\t\trefreshing = refresh().finally(() => {\n\t\t\t\t\trefreshing = null;\n\t\t\t\t});\n\t\t\t}\n\n\t\t\tconst newToken = await refreshing;\n\t\t\tif (newToken) {\n\t\t\t\t// Retry with new token\n\t\t\t\tconst headers = new Headers(request.headers);\n\t\t\t\theaders.set(\"Authorization\", `Bearer ${newToken}`);\n\t\t\t\treturn next(new Request(request, { headers }));\n\t\t\t}\n\t\t}\n\n\t\treturn response;\n\t};\n}\n"],"mappings":";;;;;AAqDA,SAAgB,uBAAuB,QAAqC;CAC3E,MAAM,QAAkB,EAAE;CAC1B,IAAI,cAAc;AAElB,MAAK,IAAI,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;EACvC,MAAM,QAAQ,OAAO;AAErB,MAAI,MAAM,UAAU,SAAS;GAC5B,MAAM,SAAS,CAAC,CAAC,MAAM;AAGvB,OAAI,IAAI,MAAM,CAAC,UAAU,CAAC,aACzB,OAAM,KAAK,GAAG;AAGf,SAAM,KAAK,oBAAoB,MAAM,CAAC;AACtC,iBAAc;aACJ,MAAM,UAAU,QAAQ;AAClC,OAAI,IAAI,EAAG,OAAM,KAAK,GAAG;GACzB,MAAM,OAAQ,MAAM,YAAuB;GAC3C,MAAM,OAAQ,MAAM,QAAmB;AACvC,SAAM,KAAK,QAAQ,KAAK;AACxB,SAAM,KAAK,KAAK;AAChB,SAAM,KAAK,MAAM;AACjB,iBAAc;aACJ,MAAM,UAAU,SAAS;AACnC,OAAI,IAAI,EAAG,OAAM,KAAK,GAAG;GACzB,MAAM,MAAO,MAAM,OAAkB;GACrC,MAAM,MAAO,MAAM,OAA4B,OAAO;AACtD,SAAM,KAAK,KAAK,IAAI,IAAI,IAAI,GAAG;AAC/B,iBAAc;SACR;AAEN,OAAI,IAAI,EAAG,OAAM,KAAK,GAAG;AACzB,SAAM,KAAK,gBAAgB,KAAK,UAAU,MAAM,CAAC,MAAM;AACvD,iBAAc;;;AAIhB,QAAO,MAAM,KAAK,KAAK,GAAG;;AAG3B,SAAS,oBAAoB,OAAkC;CAC9D,MAAM,OAAO,YAAY,MAAM,YAAY,EAAE,EAAE,MAAM,YAAY,EAAE,CAAC;AAGpE,KAAI,MAAM,SAGT,QAAO,GAFQ,KAAK,OAAO,KAAK,IAAI,IAAI,MAAM,SAAS,KAAK,EAAE,CAAC,GAChD,MAAM,aAAa,WAAW,OAAO,IAC1B,GAAG;AAI9B,KAAI,MAAM,SAAS,MAAM,MAAM,WAAW,IAAI,EAAE;EAC/C,MAAM,QAAQ,SAAS,MAAM,MAAM,UAAU,EAAE,EAAE,GAAG;AACpD,MAAI,SAAS,KAAK,SAAS,EAC1B,QAAO,GAAG,IAAI,OAAO,MAAM,CAAC,GAAG;;AAKjC,KAAI,MAAM,UAAU,aACnB,QAAO,KAAK;AAGb,QAAO;;AAGR,SAAS,YAAY,OAA2B,UAA6B;CAC5E,IAAI,SAAS;AAEb,MAAK,MAAM,QAAQ,OAAO;AACzB,MAAI,KAAK,UAAU,OAAQ;EAE3B,IAAI,OAAO,KAAK,QAAQ;EACxB,MAAM,QAAQ,KAAK,SAAS,EAAE;AAE9B,OAAK,MAAM,QAAQ,OAAO;GACzB,MAAM,MAAM,SAAS,MAAM,MAAM,EAAE,SAAS,KAAK;AACjD,OAAI,KACH;QAAI,IAAI,UAAU,OACjB,QAAO,IAAI,KAAK,IAAI,IAAI,QAAQ,GAAG;SAGpC,SAAQ,MAAR;IACC,KAAK;AACJ,YAAO,KAAK,KAAK;AACjB;IACD,KAAK;AACJ,YAAO,IAAI,KAAK;AAChB;IACD,KAAK;AACJ,YAAO,KAAK,KAAK;AACjB;IACD,KAAK;IACL,KAAK;AACJ,YAAO,KAAK,KAAK;AACjB;;;AAKJ,YAAU;;AAGX,QAAO;;AAQR,MAAM,uBAAuB;AAC7B,MAAM,kBAAkB;AACxB,MAAM,yBAAyB;AAC/B,MAAM,uBAAuB;AAC7B,MAAM,gBAAgB;AACtB,MAAM,0BACL;;;;;AAMD,SAAgB,uBAAuB,UAAuC;CAC7E,MAAM,SAA8B,EAAE;CACtC,MAAM,QAAQ,SAAS,MAAM,KAAK;CAClC,IAAI,IAAI;AAER,QAAO,IAAI,MAAM,QAAQ;EACxB,MAAM,OAAO,MAAM;EAGnB,MAAM,cAAc,KAAK,MAAM,qBAAqB;AACpD,MAAI,aAAa;AAChB,OAAI;AACH,WAAO,KAAK,KAAK,MAAM,YAAY,GAAG,CAAsB;WACrD;AACP,WAAO,KAAK,UAAU,KAAK,CAAC;;AAE7B;AACA;;AAID,MAAI,KAAK,WAAW,MAAM,EAAE;GAC3B,MAAM,OAAO,KAAK,MAAM,EAAE,CAAC,MAAM;GACjC,MAAM,YAAsB,EAAE;AAC9B;AACA,UAAO,IAAI,MAAM,UAAU,CAAC,MAAM,GAAG,WAAW,MAAM,EAAE;AACvD,cAAU,KAAK,MAAM,GAAG;AACxB;;AAED,UAAO,KAAK;IACX,OAAO;IACP,MAAM,aAAa;IACnB,UAAU,QAAQ;IAClB,MAAM,UAAU,KAAK,KAAK;IAC1B,CAAC;AACF;AACA;;AAID,MAAI,KAAK,MAAM,KAAK,IAAI;AACvB;AACA;;EAID,MAAM,eAAe,KAAK,MAAM,gBAAgB;AAChD,MAAI,cAAc;AACjB,UAAO,KAAK,UAAU,aAAa,IAAI,IAAI,aAAa,GAAG,SAAS,CAAC;AACrE;AACA;;AAID,MAAI,KAAK,WAAW,KAAK,EAAE;AAC1B,UAAO,KAAK,UAAU,KAAK,MAAM,EAAE,EAAE,aAAa,CAAC;AACnD;AACA;;EAID,MAAM,UAAU,KAAK,MAAM,uBAAuB;AAClD,MAAI,SAAS;GACZ,MAAM,QAAQ,KAAK,MAAM,QAAQ,GAAG,SAAS,EAAE,GAAG;AAClD,UAAO,KAAK,cAAc,QAAQ,IAAI,UAAU,MAAM,CAAC;AACvD;AACA;;EAID,MAAM,UAAU,KAAK,MAAM,qBAAqB;AAChD,MAAI,SAAS;GACZ,MA
AM,QAAQ,KAAK,MAAM,QAAQ,GAAG,SAAS,EAAE,GAAG;AAClD,UAAO,KAAK,cAAc,QAAQ,IAAI,UAAU,MAAM,CAAC;AACvD;AACA;;EAID,MAAM,WAAW,KAAK,MAAM,cAAc;AAC1C,MAAI,UAAU;AACb,UAAO,KAAK;IACX,OAAO;IACP,MAAM,aAAa;IACnB,KAAK,SAAS;IACd,OAAO,EAAE,KAAK,SAAS,IAAI;IAC3B,CAAC;AACF;AACA;;AAID,SAAO,KAAK,UAAU,KAAK,CAAC;AAC5B;;AAGD,QAAO;;AAOR,SAAS,UAAU,MAAc,QAAgB,UAA6B;CAC7E,MAAM,EAAE,OAAO,aAAa,YAAY,KAAK;AAC7C,QAAO;EAAE,OAAO;EAAS,MAAM,aAAa;EAAE;EAAO;EAAU,UAAU;EAAO;;AAGjF,SAAS,cAAc,MAAc,UAAkB,OAAkC;CACxF,MAAM,EAAE,OAAO,aAAa,YAAY,KAAK;AAC7C,QAAO;EACN,OAAO;EACP,MAAM,aAAa;EACnB,OAAO;EACP;EACA;EACA;EACA,UAAU;EACV;;;;;AAMF,SAAS,YAAY,MAA4B;CAChD,MAAM,QAA4B,EAAE;CACpC,MAAM,WAAsB,EAAE;CAC9B,MAAM,QAAQ;CAEd,IAAI,YAAY;CAChB,IAAI;AAEJ,SAAQ,QAAQ,MAAM,KAAK,KAAK,MAAM,MAAM;AAC3C,MAAI,MAAM,QAAQ,UACjB,OAAM,KAAK;GACV,OAAO;GACP,MAAM,aAAa;GACnB,MAAM,KAAK,MAAM,WAAW,MAAM,MAAM;GACxC,OAAO,EAAE;GACT,CAAC;AAGH,MAAI,MAAM,MAAM,KACf,OAAM,KAAK;GAAE,OAAO;GAAQ,MAAM,aAAa;GAAE,MAAM,MAAM;GAAI,OAAO,CAAC,SAAS;GAAE,CAAC;WAC3E,MAAM,MAAM,KACtB,OAAM,KAAK;GAAE,OAAO;GAAQ,MAAM,aAAa;GAAE,MAAM,MAAM;GAAI,OAAO,CAAC,KAAK;GAAE,CAAC;WACvE,MAAM,MAAM,KACtB,OAAM,KAAK;GAAE,OAAO;GAAQ,MAAM,aAAa;GAAE,MAAM,MAAM;GAAI,OAAO,CAAC,OAAO;GAAE,CAAC;WACzE,MAAM,MAAM,QAAQ,MAAM,MAAM,MAAM;GAChD,MAAM,MAAM,aAAa;AACzB,YAAS,KAAK;IAAE,MAAM;IAAK,OAAO;IAAQ,MAAM,MAAM;IAAI,CAAC;AAC3D,SAAM,KAAK;IAAE,OAAO;IAAQ,MAAM,aAAa;IAAE,MAAM,MAAM;IAAI,OAAO,CAAC,IAAI;IAAE,CAAC;aACtE,MAAM,OAAO,KACvB,OAAM,KAAK;GACV,OAAO;GACP,MAAM,aAAa;GACnB,MAAM,MAAM;GACZ,OAAO,CAAC,iBAAiB;GACzB,CAAC;AAGH,cAAY,MAAM,QAAQ,MAAM,GAAG;;AAGpC,KAAI,YAAY,KAAK,OACpB,OAAM,KAAK;EAAE,OAAO;EAAQ,MAAM,aAAa;EAAE,MAAM,KAAK,MAAM,UAAU;EAAE,OAAO,EAAE;EAAE,CAAC;AAG3F,KAAI,MAAM,WAAW,EACpB,OAAM,KAAK;EAAE,OAAO;EAAQ,MAAM,aAAa;EAAE;EAAM,OAAO,EAAE;EAAE,CAAC;AAGpE,QAAO;EAAE;EAAO;EAAU;;AAO3B,IAAI,aAAa;AAEjB,SAAS,cAAsB;AAC9B,QAAO,KAAK,cAAc,SAAS,GAAG;;;;;;AAqBvC,SAAgB,mBACf,MACA,QACA,MAAe,OACW;AAC1B,KAAI,IAAK,QAAO;CAEhB,MAAM,SAAS,EAAE,GAAG,MAAM;AAC1B,MAAK,MAAM,SAAS,OACnB,KAAI,MAAM,SAAS,kBAAkB,MAAM,QAAQ,OAAO,MAAM,MAAM,CACrE,QAAO,MAAM,QAAQ,uBAAuB,OAAO,MAAM,MAA6B;AAGxF,QAAO;;;;;;AAOR,SAAgB,oBACf,MACA,QAC0B;CAC1B,MAAM,SAAS,EAAE,GAAG,MAAM;AAC1B,MAAK,MAAM,SAAS,OACnB,KAAI,MAAM,SAAS,kBAAkB,OAAO,OAAO,MAAM,UAAU,SAClE,QAAO,MAAM,QAAQ,uBAAuB,OAAO,MAAM,MAAgB;AAG3E,QAAO;;;;;;;;;;;;AClZR,MAAM,4BAA4B;AAelC,SAAS,UAAU,SAAqC;AACvD,QAAO,WAAW,MAAM,QAAQ;;;;;AAMjC,SAAgB,gBAAgB,UAA4B,EAAE,EAE5D;CACD,MAAM,eAAe,QAAQ,gBAAgB,EAAE;CAG/C,IAAI,QAAiD;AACrD,MAAK,IAAI,IAAI,aAAa,SAAS,GAAG,KAAK,GAAG,KAAK;EAClD,MAAM,cAAc,aAAa;EACjC,MAAM,OAAO;AACb,WAAS,QAAQ,YAAY,KAAK,KAAK;;AAGxC,QAAO,EAAE,OAAO,OAAO;;;;;;;;AAaxB,SAAgB,kBAA+B;CAC9C,MAAM,mBAAmB,IAAI,IAAI;EAAC;EAAQ;EAAO;EAAU;EAAQ,CAAC;AAEpE,SAAQ,SAAS,SAAS;AACzB,MAAI,iBAAiB,IAAI,QAAQ,OAAO,EAAE;GACzC,MAAM,UAAU,IAAI,QAAQ,QAAQ,QAAQ;AAC5C,WAAQ,IAAI,oBAAoB,IAAI;AACpC,OAAI,CAAC,QAAQ,IAAI,SAAS,EAAE;IAC3B,MAAM,MAAM,IAAI,IAAI,QAAQ,IAAI;AAChC,YAAQ,IAAI,UAAU,IAAI,OAAO;;AAElC,UAAO,KAAK,IAAI,QAAQ,SAAS,EAAE,SAAS,CAAC,CAAC;;AAE/C,SAAO,KAAK,QAAQ;;;;;;AAOtB,SAAgB,iBAAiB,OAA4B;AAC5D,SAAQ,SAAS,SAAS;EACzB,MAAM,UAAU,IAAI,QAAQ,QAAQ,QAAQ;AAC5C,UAAQ,IAAI,iBAAiB,UAAU,QAAQ;AAC/C,SAAO,KAAK,IAAI,QAAQ,SAAS,EAAE,SAAS,CAAC,CAAC;;;;;;;;AAShD,SAAgB,qBAAqB,SAA8B;CAClE,IAAI,gBAA+B;CACnC,IAAI,eAAqC;CAEzC,eAAe,OAAsB;EACpC,MAAM,YAAY,IAAI,IAAI,gCAAgC,QAAQ;EAClE,MAAM,MAAM,MAAM,WAAW,MAAM,WAAW,EAAE,UAAU,UAAU,CAAC;EAGrE,MAAM,YAAY,IAAI,QAAQ,IAAI,aAAa;AAC/C,MAAI,WAAW;GAEd,MAAM,QAAQ,UAAU,MAAM,0BAA0B;AACxD,OAAI,MACH,iBAAgB,MAAM;;AAKxB,MAAI,IAAI,KACP,OAAM,IAAI,MAAM,CAAC,YAAY,GAAG;;AAIlC,QAAO,OAAO,SAAS,SAAS;AAE/B,MAAI,CAAC,eAAe;AACnB,OAAI,CAAC,aACJ,gBAAe,MAAM;AAEtB,SAAM;;AAGP,MAAI,eAAe;GAClB,MAAM,UAAU,IAAI,QAAQ,QA
AQ,QAAQ;GAC5C,MAAM,WAAW,QAAQ,IAAI,SAAS;AACtC,WAAQ,IAAI,UAAU,WAAW,GAAG,SAAS,IAAI,kBAAkB,cAAc;AACjF,UAAO,KAAK,IAAI,QAAQ,SAAS,EAAE,SAAS,CAAC,CAAC;;AAG/C,SAAO,KAAK,QAAQ;;;;;;;AAQtB,SAAgB,mBAAmB,SAInB;CACf,IAAI,aAA4C;CAEhD,eAAe,UAAkC;EAChD,MAAM,MAAM,MAAM,WAAW,MAAM,QAAQ,eAAe;GACzD,QAAQ;GACR,SAAS,EAAE,gBAAgB,oBAAoB;GAC/C,MAAM,KAAK,UAAU;IACpB,YAAY;IACZ,eAAe,QAAQ;IACvB,CAAC;GACF,CAAC;AAEF,MAAI,CAAC,IAAI,GAAI,QAAO;EAQpB,MAAM,OAAQ,MAAM,IAAI,MAAM;EAI9B,MAAM,YACL,KAAK,QAAQ,OAAO,KAAK,SAAS,YAAY,kBAAkB,KAAK,OACjE,KAAK,OACL;EAEL,MAAM,YAAY,UAAU,aACzB,IAAI,KAAK,KAAK,KAAK,GAAG,UAAU,aAAa,IAAK,CAAC,aAAa,GAChE,IAAI,KAAK,KAAK,KAAK,GAAG,KAAS,CAAC,aAAa;AAEhD,MAAI,QAAQ,iBACX,SAAQ,iBACP,UAAU,cACV,UAAU,iBAAiB,QAAQ,cACnC,UACA;AAGF,SAAO,UAAU;;AAGlB,QAAO,OAAO,SAAS,SAAS;EAC/B,MAAM,WAAW,MAAM,KAAK,QAAQ;AAEpC,MAAI,SAAS,WAAW,KAAK;AAE5B,OAAI,CAAC,WACJ,cAAa,SAAS,CAAC,cAAc;AACpC,iBAAa;KACZ;GAGH,MAAM,WAAW,MAAM;AACvB,OAAI,UAAU;IAEb,MAAM,UAAU,IAAI,QAAQ,QAAQ,QAAQ;AAC5C,YAAQ,IAAI,iBAAiB,UAAU,WAAW;AAClD,WAAO,KAAK,IAAI,QAAQ,SAAS,EAAE,SAAS,CAAC,CAAC;;;AAIhD,SAAO"}
+ {"version":3,"file":"transport-xpzIjCIB.mjs","names":[],"sources":["../src/client/portable-text.ts","../src/client/transport.ts"],"sourcesContent":["/**\n * Portable Text <-> Markdown conversion layer.\n *\n * Three tiers of block handling:\n * Tier 1: Standard PT blocks <-> standard Markdown (headings, paragraphs, lists, etc.)\n * Tier 2: EmDash custom blocks <-> Markdown directives (future)\n * Tier 3: Unknown blocks <-> opaque HTML comment fences (preserved, not editable)\n */\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\n/** Minimal Portable Text block shape */\nexport interface PortableTextBlock {\n\t_type: string;\n\t_key?: string;\n\tstyle?: string;\n\tlevel?: number;\n\tlistItem?: string;\n\tmarkDefs?: MarkDef[];\n\tchildren?: PortableTextSpan[];\n\t[key: string]: unknown;\n}\n\ninterface PortableTextSpan {\n\t_type: string;\n\t_key?: string;\n\ttext?: string;\n\tmarks?: string[];\n\t[key: string]: unknown;\n}\n\ninterface MarkDef {\n\t_key: string;\n\t_type: string;\n\thref?: string;\n\t[key: string]: unknown;\n}\n\ninterface ParsedInline {\n\tspans: PortableTextSpan[];\n\tmarkDefs: MarkDef[];\n}\n\n// ---------------------------------------------------------------------------\n// PT -> Markdown\n// ---------------------------------------------------------------------------\n\n/**\n * Convert Portable Text blocks to Markdown.\n * Unknown block types are serialized as opaque fences.\n */\nexport function portableTextToMarkdown(blocks: PortableTextBlock[]): string {\n\tconst lines: string[] = [];\n\tlet prevWasList = false;\n\n\tfor (let i = 0; i < blocks.length; i++) {\n\t\tconst block = blocks[i];\n\n\t\tif (block._type === \"block\") {\n\t\t\tconst isList = !!block.listItem;\n\n\t\t\t// Blank line between non-contiguous block types\n\t\t\tif (i > 0 && (!isList || !prevWasList)) {\n\t\t\t\tlines.push(\"\");\n\t\t\t}\n\n\t\t\tlines.push(renderStandardBlock(block));\n\t\t\tprevWasList = isList;\n\t\t} else if (block._type === \"code\") {\n\t\t\tif (i > 0) lines.push(\"\");\n\t\t\tconst lang = (block.language as string) || \"\";\n\t\t\tconst code = (block.code as string) || \"\";\n\t\t\tlines.push(\"```\" + lang);\n\t\t\tlines.push(code);\n\t\t\tlines.push(\"```\");\n\t\t\tprevWasList = false;\n\t\t} else if (block._type === \"image\") {\n\t\t\tif (i > 0) lines.push(\"\");\n\t\t\tconst alt = (block.alt as string) || \"\";\n\t\t\tconst url = (block.asset as { url?: string })?.url || \"\";\n\t\t\tlines.push(`![${alt}](${url})`);\n\t\t\tprevWasList = false;\n\t\t} else {\n\t\t\t// Tier 3: Unknown block -> opaque fence\n\t\t\tif (i > 0) lines.push(\"\");\n\t\t\tlines.push(`<!--ec:block ${JSON.stringify(block)} -->`);\n\t\t\tprevWasList = false;\n\t\t}\n\t}\n\n\treturn lines.join(\"\\n\") + \"\\n\";\n}\n\nfunction renderStandardBlock(block: PortableTextBlock): string {\n\tconst text = renderSpans(block.children ?? [], block.markDefs ?? []);\n\n\t// List items\n\tif (block.listItem) {\n\t\tconst indent = \" \".repeat(Math.max(0, (block.level ?? 1) - 1));\n\t\tconst marker = block.listItem === \"number\" ? 
\"1.\" : \"-\";\n\t\treturn `${indent}${marker} ${text}`;\n\t}\n\n\t// Headings\n\tif (block.style && block.style.startsWith(\"h\")) {\n\t\tconst level = parseInt(block.style.substring(1), 10);\n\t\tif (level >= 1 && level <= 6) {\n\t\t\treturn `${\"#\".repeat(level)} ${text}`;\n\t\t}\n\t}\n\n\t// Blockquote\n\tif (block.style === \"blockquote\") {\n\t\treturn `> ${text}`;\n\t}\n\n\treturn text;\n}\n\nfunction renderSpans(spans: PortableTextSpan[], markDefs: MarkDef[]): string {\n\tlet result = \"\";\n\n\tfor (const span of spans) {\n\t\tif (span._type !== \"span\") continue;\n\n\t\tlet text = span.text ?? \"\";\n\t\tconst marks = span.marks ?? [];\n\n\t\tfor (const mark of marks) {\n\t\t\tconst def = markDefs.find((d) => d._key === mark);\n\t\t\tif (def) {\n\t\t\t\tif (def._type === \"link\") {\n\t\t\t\t\ttext = `[${text}](${def.href ?? \"\"})`;\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tswitch (mark) {\n\t\t\t\t\tcase \"strong\":\n\t\t\t\t\t\ttext = `**${text}**`;\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase \"em\":\n\t\t\t\t\t\ttext = `_${text}_`;\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase \"code\":\n\t\t\t\t\t\ttext = `\\`${text}\\``;\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase \"strike-through\":\n\t\t\t\t\tcase \"strikethrough\":\n\t\t\t\t\t\ttext = `~~${text}~~`;\n\t\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tresult += text;\n\t}\n\n\treturn result;\n}\n\n// ---------------------------------------------------------------------------\n// Markdown -> PT\n// ---------------------------------------------------------------------------\n\n// Regex patterns for markdown parsing\nconst OPAQUE_FENCE_PATTERN = /^<!--ec:block (.+) -->$/;\nconst HEADING_PATTERN = /^(#{1,6})\\s+(.+)$/;\nconst UNORDERED_LIST_PATTERN = /^(\\s*)[-*+]\\s+(.+)$/;\nconst ORDERED_LIST_PATTERN = /^(\\s*)\\d+\\.\\s+(.+)$/;\nconst IMAGE_PATTERN = /^!\\[([^\\]]*)\\]\\(([^)]+)\\)$/;\nconst INLINE_MARKDOWN_PATTERN =\n\t/(\\*\\*(.+?)\\*\\*)|(_(.+?)_)|(`(.+?)`)|(\\[(.+?)\\]\\((.+?)\\))|(~~(.+?)~~)/g;\n\n/**\n * Convert Markdown to Portable Text blocks.\n * Opaque fences (<!--ec:block ... 
-->) are deserialized and spliced back in.\n */\nexport function markdownToPortableText(markdown: string): PortableTextBlock[] {\n\tconst blocks: PortableTextBlock[] = [];\n\tconst lines = markdown.split(\"\\n\");\n\tlet i = 0;\n\n\twhile (i < lines.length) {\n\t\tconst line = lines[i];\n\n\t\t// Opaque fence\n\t\tconst opaqueMatch = line.match(OPAQUE_FENCE_PATTERN);\n\t\tif (opaqueMatch) {\n\t\t\ttry {\n\t\t\t\tblocks.push(JSON.parse(opaqueMatch[1]) as PortableTextBlock);\n\t\t\t} catch {\n\t\t\t\tblocks.push(makeBlock(line));\n\t\t\t}\n\t\t\ti++;\n\t\t\tcontinue;\n\t\t}\n\n\t\t// Code fence\n\t\tif (line.startsWith(\"```\")) {\n\t\t\tconst lang = line.slice(3).trim();\n\t\t\tconst codeLines: string[] = [];\n\t\t\ti++;\n\t\t\twhile (i < lines.length && !lines[i].startsWith(\"```\")) {\n\t\t\t\tcodeLines.push(lines[i]);\n\t\t\t\ti++;\n\t\t\t}\n\t\t\tblocks.push({\n\t\t\t\t_type: \"code\",\n\t\t\t\t_key: generateKey(),\n\t\t\t\tlanguage: lang || undefined,\n\t\t\t\tcode: codeLines.join(\"\\n\"),\n\t\t\t});\n\t\t\ti++; // skip closing ```\n\t\t\tcontinue;\n\t\t}\n\n\t\t// Blank line\n\t\tif (line.trim() === \"\") {\n\t\t\ti++;\n\t\t\tcontinue;\n\t\t}\n\n\t\t// Heading\n\t\tconst headingMatch = line.match(HEADING_PATTERN);\n\t\tif (headingMatch) {\n\t\t\tblocks.push(makeBlock(headingMatch[2], `h${headingMatch[1].length}`));\n\t\t\ti++;\n\t\t\tcontinue;\n\t\t}\n\n\t\t// Blockquote\n\t\tif (line.startsWith(\"> \")) {\n\t\t\tblocks.push(makeBlock(line.slice(2), \"blockquote\"));\n\t\t\ti++;\n\t\t\tcontinue;\n\t\t}\n\n\t\t// Unordered list\n\t\tconst ulMatch = line.match(UNORDERED_LIST_PATTERN);\n\t\tif (ulMatch) {\n\t\t\tconst level = Math.floor(ulMatch[1].length / 2) + 1;\n\t\t\tblocks.push(makeListBlock(ulMatch[2], \"bullet\", level));\n\t\t\ti++;\n\t\t\tcontinue;\n\t\t}\n\n\t\t// Ordered list\n\t\tconst olMatch = line.match(ORDERED_LIST_PATTERN);\n\t\tif (olMatch) {\n\t\t\tconst level = Math.floor(olMatch[1].length / 2) + 1;\n\t\t\tblocks.push(makeListBlock(olMatch[2], \"number\", level));\n\t\t\ti++;\n\t\t\tcontinue;\n\t\t}\n\n\t\t// Image\n\t\tconst imgMatch = line.match(IMAGE_PATTERN);\n\t\tif (imgMatch) {\n\t\t\tblocks.push({\n\t\t\t\t_type: \"image\",\n\t\t\t\t_key: generateKey(),\n\t\t\t\talt: imgMatch[1],\n\t\t\t\tasset: { url: imgMatch[2] },\n\t\t\t});\n\t\t\ti++;\n\t\t\tcontinue;\n\t\t}\n\n\t\t// Paragraph\n\t\tblocks.push(makeBlock(line));\n\t\ti++;\n\t}\n\n\treturn blocks;\n}\n\n// ---------------------------------------------------------------------------\n// Block builders\n// ---------------------------------------------------------------------------\n\nfunction makeBlock(text: string, style: string = \"normal\"): PortableTextBlock {\n\tconst { spans, markDefs } = parseInline(text);\n\treturn { _type: \"block\", _key: generateKey(), style, markDefs, children: spans };\n}\n\nfunction makeListBlock(text: string, listItem: string, level: number): PortableTextBlock {\n\tconst { spans, markDefs } = parseInline(text);\n\treturn {\n\t\t_type: \"block\",\n\t\t_key: generateKey(),\n\t\tstyle: \"normal\",\n\t\tlistItem,\n\t\tlevel,\n\t\tmarkDefs,\n\t\tchildren: spans,\n\t};\n}\n\n/**\n * Parse inline markdown (bold, italic, code, links, strikethrough) into PT spans + markDefs.\n */\nfunction parseInline(text: string): ParsedInline {\n\tconst spans: PortableTextSpan[] = [];\n\tconst markDefs: MarkDef[] = [];\n\tconst regex = INLINE_MARKDOWN_PATTERN;\n\n\tlet lastIndex = 0;\n\tlet match: RegExpExecArray | null;\n\n\twhile ((match = regex.exec(text)) !== null) {\n\t\tif (match.index > 
lastIndex) {\n\t\t\tspans.push({\n\t\t\t\t_type: \"span\",\n\t\t\t\t_key: generateKey(),\n\t\t\t\ttext: text.slice(lastIndex, match.index),\n\t\t\t\tmarks: [],\n\t\t\t});\n\t\t}\n\n\t\tif (match[2] != null) {\n\t\t\tspans.push({ _type: \"span\", _key: generateKey(), text: match[2], marks: [\"strong\"] });\n\t\t} else if (match[4] != null) {\n\t\t\tspans.push({ _type: \"span\", _key: generateKey(), text: match[4], marks: [\"em\"] });\n\t\t} else if (match[6] != null) {\n\t\t\tspans.push({ _type: \"span\", _key: generateKey(), text: match[6], marks: [\"code\"] });\n\t\t} else if (match[8] != null && match[9] != null) {\n\t\t\tconst key = generateKey();\n\t\t\tmarkDefs.push({ _key: key, _type: \"link\", href: match[9] });\n\t\t\tspans.push({ _type: \"span\", _key: generateKey(), text: match[8], marks: [key] });\n\t\t} else if (match[11] != null) {\n\t\t\tspans.push({\n\t\t\t\t_type: \"span\",\n\t\t\t\t_key: generateKey(),\n\t\t\t\ttext: match[11],\n\t\t\t\tmarks: [\"strike-through\"],\n\t\t\t});\n\t\t}\n\n\t\tlastIndex = match.index + match[0].length;\n\t}\n\n\tif (lastIndex < text.length) {\n\t\tspans.push({ _type: \"span\", _key: generateKey(), text: text.slice(lastIndex), marks: [] });\n\t}\n\n\tif (spans.length === 0) {\n\t\tspans.push({ _type: \"span\", _key: generateKey(), text, marks: [] });\n\t}\n\n\treturn { spans, markDefs };\n}\n\n// ---------------------------------------------------------------------------\n// Key generation\n// ---------------------------------------------------------------------------\n\nlet keyCounter = 0;\n\nfunction generateKey(): string {\n\treturn `k${(keyCounter++).toString(36)}`;\n}\n\n/** Reset key counter (useful for testing) */\nexport function resetKeyCounter(): void {\n\tkeyCounter = 0;\n}\n\n// ---------------------------------------------------------------------------\n// Schema-aware conversion helpers\n// ---------------------------------------------------------------------------\n\nexport interface FieldSchema {\n\tslug: string;\n\ttype: string;\n}\n\n/**\n * Convert content data for reading: PT fields -> markdown strings.\n * Only converts fields with type \"portableText\" that contain arrays.\n */\nexport function convertDataForRead(\n\tdata: Record<string, unknown>,\n\tfields: FieldSchema[],\n\traw: boolean = false,\n): Record<string, unknown> {\n\tif (raw) return data;\n\n\tconst result = { ...data };\n\tfor (const field of fields) {\n\t\tif (field.type === \"portableText\" && Array.isArray(result[field.slug])) {\n\t\t\tresult[field.slug] = portableTextToMarkdown(result[field.slug] as PortableTextBlock[]);\n\t\t}\n\t}\n\treturn result;\n}\n\n/**\n * Convert content data for writing: markdown strings -> PT arrays.\n * Only converts fields with type \"portableText\" that contain strings.\n */\nexport function convertDataForWrite(\n\tdata: Record<string, unknown>,\n\tfields: FieldSchema[],\n): Record<string, unknown> {\n\tconst result = { ...data };\n\tfor (const field of fields) {\n\t\tif (field.type === \"portableText\" && typeof result[field.slug] === \"string\") {\n\t\t\tresult[field.slug] = markdownToPortableText(result[field.slug] as string);\n\t\t}\n\t}\n\treturn result;\n}\n","/**\n * Transport layer for the EmDash client.\n *\n * Implements a composable interceptor pipeline that modifies requests\n * and responses. 
The client calls `transport.fetch(request)` — everything\n * else (auth, CSRF, retry) is handled by interceptors.\n */\n\n// Regex patterns for transport utilities\nconst COOKIE_NAME_VALUE_PATTERN = /^([^;]+)/;\n\n/**\n * An interceptor can modify the request, call next(), inspect\n * the response, and optionally retry.\n */\nexport type Interceptor = (\n\trequest: Request,\n\tnext: (request: Request) => Promise<Response>,\n) => Promise<Response>;\n\nexport interface TransportOptions {\n\tinterceptors?: Interceptor[];\n}\n\nfunction baseFetch(request: Request): Promise<Response> {\n\treturn globalThis.fetch(request);\n}\n\n/**\n * Creates a fetch function that runs requests through an interceptor pipeline.\n */\nexport function createTransport(options: TransportOptions = {}): {\n\tfetch: (request: Request) => Promise<Response>;\n} {\n\tconst interceptors = options.interceptors ?? [];\n\n\t// Build the chain once — interceptors don't change after construction\n\tlet chain: (request: Request) => Promise<Response> = baseFetch;\n\tfor (let i = interceptors.length - 1; i >= 0; i--) {\n\t\tconst interceptor = interceptors[i];\n\t\tconst next = chain;\n\t\tchain = (req) => interceptor(req, next);\n\t}\n\n\treturn { fetch: chain };\n}\n\n// ---------------------------------------------------------------------------\n// Built-in interceptors\n// ---------------------------------------------------------------------------\n\n/**\n * Adds X-EmDash-Request: 1 and Origin headers to mutation requests\n * (POST, PUT, DELETE). The custom header satisfies EmDash's CSRF check;\n * the Origin header satisfies Astro's built-in origin verification which\n * rejects server-side POST requests that lack a matching Origin.\n */\nexport function csrfInterceptor(): Interceptor {\n\tconst MUTATION_METHODS = new Set([\"POST\", \"PUT\", \"DELETE\", \"PATCH\"]);\n\n\treturn (request, next) => {\n\t\tif (MUTATION_METHODS.has(request.method)) {\n\t\t\tconst headers = new Headers(request.headers);\n\t\t\theaders.set(\"X-EmDash-Request\", \"1\");\n\t\t\tif (!headers.has(\"Origin\")) {\n\t\t\t\tconst url = new URL(request.url);\n\t\t\t\theaders.set(\"Origin\", url.origin);\n\t\t\t}\n\t\t\treturn next(new Request(request, { headers }));\n\t\t}\n\t\treturn next(request);\n\t};\n}\n\n/**\n * Adds Authorization: Bearer header from a static token.\n */\nexport function tokenInterceptor(token: string): Interceptor {\n\treturn (request, next) => {\n\t\tconst headers = new Headers(request.headers);\n\t\theaders.set(\"Authorization\", `Bearer ${token}`);\n\t\treturn next(new Request(request, { headers }));\n\t};\n}\n\n/**\n * Dev bypass interceptor. 
Calls the dev-bypass endpoint on first request\n * to establish a session, then forwards the session cookie on subsequent\n * requests.\n */\nexport function devBypassInterceptor(baseUrl: string): Interceptor {\n\tlet sessionCookie: string | null = null;\n\tlet initializing: Promise<void> | null = null;\n\n\tasync function init(): Promise<void> {\n\t\tconst bypassUrl = new URL(\"/_emdash/api/auth/dev-bypass\", baseUrl);\n\t\tconst res = await globalThis.fetch(bypassUrl, { redirect: \"manual\" });\n\n\t\t// Extract session cookie from Set-Cookie header\n\t\tconst setCookie = res.headers.get(\"set-cookie\");\n\t\tif (setCookie) {\n\t\t\t// Extract just the cookie name=value part\n\t\t\tconst match = setCookie.match(COOKIE_NAME_VALUE_PATTERN);\n\t\t\tif (match) {\n\t\t\t\tsessionCookie = match[1]!;\n\t\t\t}\n\t\t}\n\n\t\t// Consume the response body\n\t\tif (res.body) {\n\t\t\tawait res.text().catch(() => {});\n\t\t}\n\t}\n\n\treturn async (request, next) => {\n\t\t// Ensure we've initialized (only once, even with concurrent requests)\n\t\tif (!sessionCookie) {\n\t\t\tif (!initializing) {\n\t\t\t\tinitializing = init();\n\t\t\t}\n\t\t\tawait initializing;\n\t\t}\n\n\t\tif (sessionCookie) {\n\t\t\tconst headers = new Headers(request.headers);\n\t\t\tconst existing = headers.get(\"cookie\");\n\t\t\theaders.set(\"cookie\", existing ? `${existing}; ${sessionCookie}` : sessionCookie);\n\t\t\treturn next(new Request(request, { headers }));\n\t\t}\n\n\t\treturn next(request);\n\t};\n}\n\n/**\n * Auto-refreshes expired OAuth tokens on 401 responses.\n * Requires a refresh token and the token endpoint URL.\n */\nexport function refreshInterceptor(options: {\n\trefreshToken: string;\n\ttokenEndpoint: string;\n\tonTokenRefreshed?: (accessToken: string, refreshToken: string, expiresAt: string) => void;\n}): Interceptor {\n\tlet refreshing: Promise<string | null> | null = null;\n\n\tasync function refresh(): Promise<string | null> {\n\t\tconst res = await globalThis.fetch(options.tokenEndpoint, {\n\t\t\tmethod: \"POST\",\n\t\t\theaders: { \"Content-Type\": \"application/json\" },\n\t\t\tbody: JSON.stringify({\n\t\t\t\tgrant_type: \"refresh_token\",\n\t\t\t\trefresh_token: options.refreshToken,\n\t\t\t}),\n\t\t});\n\n\t\tif (!res.ok) return null;\n\n\t\tinterface TokenFields {\n\t\t\taccess_token: string;\n\t\t\trefresh_token?: string;\n\t\t\texpires_in?: number;\n\t\t}\n\n\t\tconst json = (await res.json()) as Record<string, unknown>;\n\n\t\t// The token endpoint wraps the response in { data: ... } via apiSuccess.\n\t\t// Handle both wrapped and bare shapes for robustness.\n\t\tconst tokenData: TokenFields =\n\t\t\tjson.data && typeof json.data === \"object\" && \"access_token\" in json.data\n\t\t\t\t? (json.data as TokenFields)\n\t\t\t\t: (json as unknown as TokenFields);\n\n\t\tconst expiresAt = tokenData.expires_in\n\t\t\t? new Date(Date.now() + tokenData.expires_in * 1000).toISOString()\n\t\t\t: new Date(Date.now() + 3600_000).toISOString();\n\n\t\tif (options.onTokenRefreshed) {\n\t\t\toptions.onTokenRefreshed(\n\t\t\t\ttokenData.access_token,\n\t\t\t\ttokenData.refresh_token ?? 
options.refreshToken,\n\t\t\t\texpiresAt,\n\t\t\t);\n\t\t}\n\n\t\treturn tokenData.access_token;\n\t}\n\n\treturn async (request, next) => {\n\t\tconst response = await next(request);\n\n\t\tif (response.status === 401) {\n\t\t\t// Try to refresh\n\t\t\tif (!refreshing) {\n\t\t\t\trefreshing = refresh().finally(() => {\n\t\t\t\t\trefreshing = null;\n\t\t\t\t});\n\t\t\t}\n\n\t\t\tconst newToken = await refreshing;\n\t\t\tif (newToken) {\n\t\t\t\t// Retry with new token\n\t\t\t\tconst headers = new Headers(request.headers);\n\t\t\t\theaders.set(\"Authorization\", `Bearer ${newToken}`);\n\t\t\t\treturn next(new Request(request, { headers }));\n\t\t\t}\n\t\t}\n\n\t\treturn response;\n\t};\n}\n"],"mappings":";;;;;AAqDA,SAAgB,uBAAuB,QAAqC;CAC3E,MAAM,QAAkB,EAAE;CAC1B,IAAI,cAAc;AAElB,MAAK,IAAI,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;EACvC,MAAM,QAAQ,OAAO;AAErB,MAAI,MAAM,UAAU,SAAS;GAC5B,MAAM,SAAS,CAAC,CAAC,MAAM;AAGvB,OAAI,IAAI,MAAM,CAAC,UAAU,CAAC,aACzB,OAAM,KAAK,GAAG;AAGf,SAAM,KAAK,oBAAoB,MAAM,CAAC;AACtC,iBAAc;aACJ,MAAM,UAAU,QAAQ;AAClC,OAAI,IAAI,EAAG,OAAM,KAAK,GAAG;GACzB,MAAM,OAAQ,MAAM,YAAuB;GAC3C,MAAM,OAAQ,MAAM,QAAmB;AACvC,SAAM,KAAK,QAAQ,KAAK;AACxB,SAAM,KAAK,KAAK;AAChB,SAAM,KAAK,MAAM;AACjB,iBAAc;aACJ,MAAM,UAAU,SAAS;AACnC,OAAI,IAAI,EAAG,OAAM,KAAK,GAAG;GACzB,MAAM,MAAO,MAAM,OAAkB;GACrC,MAAM,MAAO,MAAM,OAA4B,OAAO;AACtD,SAAM,KAAK,KAAK,IAAI,IAAI,IAAI,GAAG;AAC/B,iBAAc;SACR;AAEN,OAAI,IAAI,EAAG,OAAM,KAAK,GAAG;AACzB,SAAM,KAAK,gBAAgB,KAAK,UAAU,MAAM,CAAC,MAAM;AACvD,iBAAc;;;AAIhB,QAAO,MAAM,KAAK,KAAK,GAAG;;AAG3B,SAAS,oBAAoB,OAAkC;CAC9D,MAAM,OAAO,YAAY,MAAM,YAAY,EAAE,EAAE,MAAM,YAAY,EAAE,CAAC;AAGpE,KAAI,MAAM,SAGT,QAAO,GAFQ,KAAK,OAAO,KAAK,IAAI,IAAI,MAAM,SAAS,KAAK,EAAE,CAAC,GAChD,MAAM,aAAa,WAAW,OAAO,IAC1B,GAAG;AAI9B,KAAI,MAAM,SAAS,MAAM,MAAM,WAAW,IAAI,EAAE;EAC/C,MAAM,QAAQ,SAAS,MAAM,MAAM,UAAU,EAAE,EAAE,GAAG;AACpD,MAAI,SAAS,KAAK,SAAS,EAC1B,QAAO,GAAG,IAAI,OAAO,MAAM,CAAC,GAAG;;AAKjC,KAAI,MAAM,UAAU,aACnB,QAAO,KAAK;AAGb,QAAO;;AAGR,SAAS,YAAY,OAA2B,UAA6B;CAC5E,IAAI,SAAS;AAEb,MAAK,MAAM,QAAQ,OAAO;AACzB,MAAI,KAAK,UAAU,OAAQ;EAE3B,IAAI,OAAO,KAAK,QAAQ;EACxB,MAAM,QAAQ,KAAK,SAAS,EAAE;AAE9B,OAAK,MAAM,QAAQ,OAAO;GACzB,MAAM,MAAM,SAAS,MAAM,MAAM,EAAE,SAAS,KAAK;AACjD,OAAI,KACH;QAAI,IAAI,UAAU,OACjB,QAAO,IAAI,KAAK,IAAI,IAAI,QAAQ,GAAG;SAGpC,SAAQ,MAAR;IACC,KAAK;AACJ,YAAO,KAAK,KAAK;AACjB;IACD,KAAK;AACJ,YAAO,IAAI,KAAK;AAChB;IACD,KAAK;AACJ,YAAO,KAAK,KAAK;AACjB;IACD,KAAK;IACL,KAAK;AACJ,YAAO,KAAK,KAAK;AACjB;;;AAKJ,YAAU;;AAGX,QAAO;;AAQR,MAAM,uBAAuB;AAC7B,MAAM,kBAAkB;AACxB,MAAM,yBAAyB;AAC/B,MAAM,uBAAuB;AAC7B,MAAM,gBAAgB;AACtB,MAAM,0BACL;;;;;AAMD,SAAgB,uBAAuB,UAAuC;CAC7E,MAAM,SAA8B,EAAE;CACtC,MAAM,QAAQ,SAAS,MAAM,KAAK;CAClC,IAAI,IAAI;AAER,QAAO,IAAI,MAAM,QAAQ;EACxB,MAAM,OAAO,MAAM;EAGnB,MAAM,cAAc,KAAK,MAAM,qBAAqB;AACpD,MAAI,aAAa;AAChB,OAAI;AACH,WAAO,KAAK,KAAK,MAAM,YAAY,GAAG,CAAsB;WACrD;AACP,WAAO,KAAK,UAAU,KAAK,CAAC;;AAE7B;AACA;;AAID,MAAI,KAAK,WAAW,MAAM,EAAE;GAC3B,MAAM,OAAO,KAAK,MAAM,EAAE,CAAC,MAAM;GACjC,MAAM,YAAsB,EAAE;AAC9B;AACA,UAAO,IAAI,MAAM,UAAU,CAAC,MAAM,GAAG,WAAW,MAAM,EAAE;AACvD,cAAU,KAAK,MAAM,GAAG;AACxB;;AAED,UAAO,KAAK;IACX,OAAO;IACP,MAAM,aAAa;IACnB,UAAU,QAAQ;IAClB,MAAM,UAAU,KAAK,KAAK;IAC1B,CAAC;AACF;AACA;;AAID,MAAI,KAAK,MAAM,KAAK,IAAI;AACvB;AACA;;EAID,MAAM,eAAe,KAAK,MAAM,gBAAgB;AAChD,MAAI,cAAc;AACjB,UAAO,KAAK,UAAU,aAAa,IAAI,IAAI,aAAa,GAAG,SAAS,CAAC;AACrE;AACA;;AAID,MAAI,KAAK,WAAW,KAAK,EAAE;AAC1B,UAAO,KAAK,UAAU,KAAK,MAAM,EAAE,EAAE,aAAa,CAAC;AACnD;AACA;;EAID,MAAM,UAAU,KAAK,MAAM,uBAAuB;AAClD,MAAI,SAAS;GACZ,MAAM,QAAQ,KAAK,MAAM,QAAQ,GAAG,SAAS,EAAE,GAAG;AAClD,UAAO,KAAK,cAAc,QAAQ,IAAI,UAAU,MAAM,CAAC;AACvD;AACA;;EAID,MAAM,UAAU,KAAK,MAAM,qBAAqB;AAChD,MAAI,SAAS;GACZ,MA
AM,QAAQ,KAAK,MAAM,QAAQ,GAAG,SAAS,EAAE,GAAG;AAClD,UAAO,KAAK,cAAc,QAAQ,IAAI,UAAU,MAAM,CAAC;AACvD;AACA;;EAID,MAAM,WAAW,KAAK,MAAM,cAAc;AAC1C,MAAI,UAAU;AACb,UAAO,KAAK;IACX,OAAO;IACP,MAAM,aAAa;IACnB,KAAK,SAAS;IACd,OAAO,EAAE,KAAK,SAAS,IAAI;IAC3B,CAAC;AACF;AACA;;AAID,SAAO,KAAK,UAAU,KAAK,CAAC;AAC5B;;AAGD,QAAO;;AAOR,SAAS,UAAU,MAAc,QAAgB,UAA6B;CAC7E,MAAM,EAAE,OAAO,aAAa,YAAY,KAAK;AAC7C,QAAO;EAAE,OAAO;EAAS,MAAM,aAAa;EAAE;EAAO;EAAU,UAAU;EAAO;;AAGjF,SAAS,cAAc,MAAc,UAAkB,OAAkC;CACxF,MAAM,EAAE,OAAO,aAAa,YAAY,KAAK;AAC7C,QAAO;EACN,OAAO;EACP,MAAM,aAAa;EACnB,OAAO;EACP;EACA;EACA;EACA,UAAU;EACV;;;;;AAMF,SAAS,YAAY,MAA4B;CAChD,MAAM,QAA4B,EAAE;CACpC,MAAM,WAAsB,EAAE;CAC9B,MAAM,QAAQ;CAEd,IAAI,YAAY;CAChB,IAAI;AAEJ,SAAQ,QAAQ,MAAM,KAAK,KAAK,MAAM,MAAM;AAC3C,MAAI,MAAM,QAAQ,UACjB,OAAM,KAAK;GACV,OAAO;GACP,MAAM,aAAa;GACnB,MAAM,KAAK,MAAM,WAAW,MAAM,MAAM;GACxC,OAAO,EAAE;GACT,CAAC;AAGH,MAAI,MAAM,MAAM,KACf,OAAM,KAAK;GAAE,OAAO;GAAQ,MAAM,aAAa;GAAE,MAAM,MAAM;GAAI,OAAO,CAAC,SAAS;GAAE,CAAC;WAC3E,MAAM,MAAM,KACtB,OAAM,KAAK;GAAE,OAAO;GAAQ,MAAM,aAAa;GAAE,MAAM,MAAM;GAAI,OAAO,CAAC,KAAK;GAAE,CAAC;WACvE,MAAM,MAAM,KACtB,OAAM,KAAK;GAAE,OAAO;GAAQ,MAAM,aAAa;GAAE,MAAM,MAAM;GAAI,OAAO,CAAC,OAAO;GAAE,CAAC;WACzE,MAAM,MAAM,QAAQ,MAAM,MAAM,MAAM;GAChD,MAAM,MAAM,aAAa;AACzB,YAAS,KAAK;IAAE,MAAM;IAAK,OAAO;IAAQ,MAAM,MAAM;IAAI,CAAC;AAC3D,SAAM,KAAK;IAAE,OAAO;IAAQ,MAAM,aAAa;IAAE,MAAM,MAAM;IAAI,OAAO,CAAC,IAAI;IAAE,CAAC;aACtE,MAAM,OAAO,KACvB,OAAM,KAAK;GACV,OAAO;GACP,MAAM,aAAa;GACnB,MAAM,MAAM;GACZ,OAAO,CAAC,iBAAiB;GACzB,CAAC;AAGH,cAAY,MAAM,QAAQ,MAAM,GAAG;;AAGpC,KAAI,YAAY,KAAK,OACpB,OAAM,KAAK;EAAE,OAAO;EAAQ,MAAM,aAAa;EAAE,MAAM,KAAK,MAAM,UAAU;EAAE,OAAO,EAAE;EAAE,CAAC;AAG3F,KAAI,MAAM,WAAW,EACpB,OAAM,KAAK;EAAE,OAAO;EAAQ,MAAM,aAAa;EAAE;EAAM,OAAO,EAAE;EAAE,CAAC;AAGpE,QAAO;EAAE;EAAO;EAAU;;AAO3B,IAAI,aAAa;AAEjB,SAAS,cAAsB;AAC9B,QAAO,KAAK,cAAc,SAAS,GAAG;;;;;;AAqBvC,SAAgB,mBACf,MACA,QACA,MAAe,OACW;AAC1B,KAAI,IAAK,QAAO;CAEhB,MAAM,SAAS,EAAE,GAAG,MAAM;AAC1B,MAAK,MAAM,SAAS,OACnB,KAAI,MAAM,SAAS,kBAAkB,MAAM,QAAQ,OAAO,MAAM,MAAM,CACrE,QAAO,MAAM,QAAQ,uBAAuB,OAAO,MAAM,MAA6B;AAGxF,QAAO;;;;;;AAOR,SAAgB,oBACf,MACA,QAC0B;CAC1B,MAAM,SAAS,EAAE,GAAG,MAAM;AAC1B,MAAK,MAAM,SAAS,OACnB,KAAI,MAAM,SAAS,kBAAkB,OAAO,OAAO,MAAM,UAAU,SAClE,QAAO,MAAM,QAAQ,uBAAuB,OAAO,MAAM,MAAgB;AAG3E,QAAO;;;;;;;;;;;;AClZR,MAAM,4BAA4B;AAelC,SAAS,UAAU,SAAqC;AACvD,QAAO,WAAW,MAAM,QAAQ;;;;;AAMjC,SAAgB,gBAAgB,UAA4B,EAAE,EAE5D;CACD,MAAM,eAAe,QAAQ,gBAAgB,EAAE;CAG/C,IAAI,QAAiD;AACrD,MAAK,IAAI,IAAI,aAAa,SAAS,GAAG,KAAK,GAAG,KAAK;EAClD,MAAM,cAAc,aAAa;EACjC,MAAM,OAAO;AACb,WAAS,QAAQ,YAAY,KAAK,KAAK;;AAGxC,QAAO,EAAE,OAAO,OAAO;;;;;;;;AAaxB,SAAgB,kBAA+B;CAC9C,MAAM,mBAAmB,IAAI,IAAI;EAAC;EAAQ;EAAO;EAAU;EAAQ,CAAC;AAEpE,SAAQ,SAAS,SAAS;AACzB,MAAI,iBAAiB,IAAI,QAAQ,OAAO,EAAE;GACzC,MAAM,UAAU,IAAI,QAAQ,QAAQ,QAAQ;AAC5C,WAAQ,IAAI,oBAAoB,IAAI;AACpC,OAAI,CAAC,QAAQ,IAAI,SAAS,EAAE;IAC3B,MAAM,MAAM,IAAI,IAAI,QAAQ,IAAI;AAChC,YAAQ,IAAI,UAAU,IAAI,OAAO;;AAElC,UAAO,KAAK,IAAI,QAAQ,SAAS,EAAE,SAAS,CAAC,CAAC;;AAE/C,SAAO,KAAK,QAAQ;;;;;;AAOtB,SAAgB,iBAAiB,OAA4B;AAC5D,SAAQ,SAAS,SAAS;EACzB,MAAM,UAAU,IAAI,QAAQ,QAAQ,QAAQ;AAC5C,UAAQ,IAAI,iBAAiB,UAAU,QAAQ;AAC/C,SAAO,KAAK,IAAI,QAAQ,SAAS,EAAE,SAAS,CAAC,CAAC;;;;;;;;AAShD,SAAgB,qBAAqB,SAA8B;CAClE,IAAI,gBAA+B;CACnC,IAAI,eAAqC;CAEzC,eAAe,OAAsB;EACpC,MAAM,YAAY,IAAI,IAAI,gCAAgC,QAAQ;EAClE,MAAM,MAAM,MAAM,WAAW,MAAM,WAAW,EAAE,UAAU,UAAU,CAAC;EAGrE,MAAM,YAAY,IAAI,QAAQ,IAAI,aAAa;AAC/C,MAAI,WAAW;GAEd,MAAM,QAAQ,UAAU,MAAM,0BAA0B;AACxD,OAAI,MACH,iBAAgB,MAAM;;AAKxB,MAAI,IAAI,KACP,OAAM,IAAI,MAAM,CAAC,YAAY,GAAG;;AAIlC,QAAO,OAAO,SAAS,SAAS;AAE/B,MAAI,CAAC,eAAe;AACnB,OAAI,CAAC,aACJ,gBAAe,MAAM;AAEtB,SAAM;;AAGP,MAAI,eAAe;GAClB,MAAM,UAAU,IAAI,QAAQ,QA
AQ,QAAQ;GAC5C,MAAM,WAAW,QAAQ,IAAI,SAAS;AACtC,WAAQ,IAAI,UAAU,WAAW,GAAG,SAAS,IAAI,kBAAkB,cAAc;AACjF,UAAO,KAAK,IAAI,QAAQ,SAAS,EAAE,SAAS,CAAC,CAAC;;AAG/C,SAAO,KAAK,QAAQ;;;;;;;AAQtB,SAAgB,mBAAmB,SAInB;CACf,IAAI,aAA4C;CAEhD,eAAe,UAAkC;EAChD,MAAM,MAAM,MAAM,WAAW,MAAM,QAAQ,eAAe;GACzD,QAAQ;GACR,SAAS,EAAE,gBAAgB,oBAAoB;GAC/C,MAAM,KAAK,UAAU;IACpB,YAAY;IACZ,eAAe,QAAQ;IACvB,CAAC;GACF,CAAC;AAEF,MAAI,CAAC,IAAI,GAAI,QAAO;EAQpB,MAAM,OAAQ,MAAM,IAAI,MAAM;EAI9B,MAAM,YACL,KAAK,QAAQ,OAAO,KAAK,SAAS,YAAY,kBAAkB,KAAK,OACjE,KAAK,OACL;EAEL,MAAM,YAAY,UAAU,aACzB,IAAI,KAAK,KAAK,KAAK,GAAG,UAAU,aAAa,IAAK,CAAC,aAAa,GAChE,IAAI,KAAK,KAAK,KAAK,GAAG,KAAS,CAAC,aAAa;AAEhD,MAAI,QAAQ,iBACX,SAAQ,iBACP,UAAU,cACV,UAAU,iBAAiB,QAAQ,cACnC,UACA;AAGF,SAAO,UAAU;;AAGlB,QAAO,OAAO,SAAS,SAAS;EAC/B,MAAM,WAAW,MAAM,KAAK,QAAQ;AAEpC,MAAI,SAAS,WAAW,KAAK;AAE5B,OAAI,CAAC,WACJ,cAAa,SAAS,CAAC,cAAc;AACpC,iBAAa;KACZ;GAGH,MAAM,WAAW,MAAM;AACvB,OAAI,UAAU;IAEb,MAAM,UAAU,IAAI,QAAQ,QAAQ,QAAQ;AAC5C,YAAQ,IAAI,iBAAiB,UAAU,WAAW;AAClD,WAAO,KAAK,IAAI,QAAQ,SAAS,EAAE,SAAS,CAAC,CAAC;;;AAIhD,SAAO"}
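The `sourcesContent` embedded in the new map documents the client modules this chunk was built from. A minimal sketch of how the transport pipeline composes, assuming the exports shown in the embedded `src/client/transport.ts` (`createTransport`, `csrfInterceptor`, `tokenInterceptor`); the import path is hypothetical:

```ts
// Sketch only — names taken from the sourcesContent above; "./transport"
// is a hypothetical import path, not a published entry point.
import { createTransport, csrfInterceptor, tokenInterceptor } from "./transport";

const transport = createTransport({
	// The chain is built back-to-front, so the first interceptor in the
	// array is outermost: it sees the request first and the response last.
	interceptors: [csrfInterceptor(), tokenInterceptor("my-token")],
});

// A mutation request passes through csrfInterceptor (adds X-EmDash-Request
// and, if missing, Origin) and tokenInterceptor (adds Authorization: Bearer)
// before reaching globalThis.fetch.
const res = await transport.fetch(
	new Request("https://example.com/_emdash/api/content", { method: "POST" }),
);
```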
@@ -1,4 +1,4 @@
- import { r as encodeBase64, t as decodeBase64 } from "./base64-BRICGH2l.mjs";
+ import { r as encodeBase64, t as decodeBase64 } from "./base64-MBPo9ozB.mjs";
 
  //#region src/database/repositories/types.ts
  /**
@@ -65,4 +65,4 @@ var EmDashValidationError = class extends Error {
 
  //#endregion
  export { encodeCursor as i, InvalidCursorError as n, decodeCursor as r, EmDashValidationError as t };
- //# sourceMappingURL=types-CRxNbK-Z.mjs.map
+ //# sourceMappingURL=types-BIgulNsW.mjs.map
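For the portable-text module embedded in the same map, a round-trip sketch assuming its exports (`markdownToPortableText`, `portableTextToMarkdown`); the import path is again hypothetical:

```ts
// Sketch only — exports taken from the embedded src/client/portable-text.ts.
import { markdownToPortableText, portableTextToMarkdown } from "./portable-text";

const blocks = markdownToPortableText("# Title\n\nSome **bold** text.\n");
// blocks[0] -> { _type: "block", style: "h1", ... }
// blocks[1] -> a "normal" block whose middle span carries marks: ["strong"]

// Converting back yields equivalent markdown; _key values are regenerated,
// so the round trip is semantic rather than byte-for-byte in general.
const md = portableTextToMarkdown(blocks);
```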