emdash 0.8.0 → 0.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{adapters-BKSf3T9R.d.mts → adapters-DoNJiveC.d.mts} +1 -1
- package/dist/{adapters-BKSf3T9R.d.mts.map → adapters-DoNJiveC.d.mts.map} +1 -1
- package/dist/{apply-x0eMK1lX.mjs → apply-BzltprvY.mjs} +85 -135
- package/dist/apply-BzltprvY.mjs.map +1 -0
- package/dist/astro/index.d.mts +6 -6
- package/dist/astro/index.d.mts.map +1 -1
- package/dist/astro/index.mjs +110 -4
- package/dist/astro/index.mjs.map +1 -1
- package/dist/astro/middleware/auth.d.mts +6 -7
- package/dist/astro/middleware/auth.d.mts.map +1 -1
- package/dist/astro/middleware/auth.mjs +16 -59
- package/dist/astro/middleware/auth.mjs.map +1 -1
- package/dist/astro/middleware/redirect.d.mts.map +1 -1
- package/dist/astro/middleware/redirect.mjs +17 -12
- package/dist/astro/middleware/redirect.mjs.map +1 -1
- package/dist/astro/middleware/request-context.d.mts.map +1 -1
- package/dist/astro/middleware/request-context.mjs +9 -6
- package/dist/astro/middleware/request-context.mjs.map +1 -1
- package/dist/astro/middleware/setup.mjs +1 -1
- package/dist/astro/middleware.d.mts.map +1 -1
- package/dist/astro/middleware.mjs +72 -124
- package/dist/astro/middleware.mjs.map +1 -1
- package/dist/astro/types.d.mts +26 -10
- package/dist/astro/types.d.mts.map +1 -1
- package/dist/{base64-MBPo9ozB.mjs → base64-BRICGH2l.mjs} +1 -1
- package/dist/{base64-MBPo9ozB.mjs.map → base64-BRICGH2l.mjs.map} +1 -1
- package/dist/{byline-Chbr2GoP.mjs → byline-BSaNL1w7.mjs} +4 -4
- package/dist/{byline-Chbr2GoP.mjs.map → byline-BSaNL1w7.mjs.map} +1 -1
- package/dist/bylines-CvJ3PYz2.mjs +113 -0
- package/dist/bylines-CvJ3PYz2.mjs.map +1 -0
- package/dist/cache-C6N_hhN7.mjs +65 -0
- package/dist/cache-C6N_hhN7.mjs.map +1 -0
- package/dist/{chunks-HGz06Soa.mjs → chunks-NBQVDOci.mjs} +8 -2
- package/dist/{chunks-HGz06Soa.mjs.map → chunks-NBQVDOci.mjs.map} +1 -1
- package/dist/cli/index.mjs +224 -30
- package/dist/cli/index.mjs.map +1 -1
- package/dist/client/cf-access.d.mts +1 -1
- package/dist/client/index.d.mts +1 -1
- package/dist/client/index.mjs +3 -3
- package/dist/client/index.mjs.map +1 -1
- package/dist/{config-BXwuX8Bx.mjs → config-BI0V3ICQ.mjs} +1 -1
- package/dist/{config-BXwuX8Bx.mjs.map → config-BI0V3ICQ.mjs.map} +1 -1
- package/dist/{content-BcQPYxdV.mjs → content-8lOYF0pr.mjs} +32 -15
- package/dist/{content-BcQPYxdV.mjs.map → content-8lOYF0pr.mjs.map} +1 -1
- package/dist/db/index.d.mts +3 -3
- package/dist/db/index.mjs +2 -2
- package/dist/db/libsql.d.mts +1 -1
- package/dist/db/libsql.d.mts.map +1 -1
- package/dist/db/libsql.mjs +7 -2
- package/dist/db/libsql.mjs.map +1 -1
- package/dist/db/postgres.d.mts +1 -1
- package/dist/db/sqlite.d.mts +1 -1
- package/dist/db/sqlite.d.mts.map +1 -1
- package/dist/db/sqlite.mjs +8 -3
- package/dist/db/sqlite.mjs.map +1 -1
- package/dist/{db-errors-l1Qh2RPR.mjs → db-errors-WRezodiz.mjs} +1 -1
- package/dist/{db-errors-l1Qh2RPR.mjs.map → db-errors-WRezodiz.mjs.map} +1 -1
- package/dist/{default-DCVqE5ib.mjs → default-D8ksjWhO.mjs} +1 -1
- package/dist/{default-DCVqE5ib.mjs.map → default-D8ksjWhO.mjs.map} +1 -1
- package/dist/{dialect-helpers-DhTzaUxP.mjs → dialect-helpers-BKCvISIQ.mjs} +19 -2
- package/dist/dialect-helpers-BKCvISIQ.mjs.map +1 -0
- package/dist/{error-zG5T1UGA.mjs → error-D_-tqP-I.mjs} +1 -1
- package/dist/{error-zG5T1UGA.mjs.map → error-D_-tqP-I.mjs.map} +1 -1
- package/dist/{index-DIb-CzNx.d.mts → index-BFRaVcD6.d.mts} +94 -34
- package/dist/index-BFRaVcD6.d.mts.map +1 -0
- package/dist/index.d.mts +11 -11
- package/dist/index.mjs +29 -27
- package/dist/{load-CyEoextb.mjs → load-DDqMMvZL.mjs} +2 -2
- package/dist/{load-CyEoextb.mjs.map → load-DDqMMvZL.mjs.map} +1 -1
- package/dist/{loader-CndGj8kM.mjs → loader-CKLbBnhK.mjs} +27 -7
- package/dist/loader-CKLbBnhK.mjs.map +1 -0
- package/dist/{manifest-schema-DH9xhc6t.mjs → manifest-schema-DqWNC3lM.mjs} +33 -3
- package/dist/manifest-schema-DqWNC3lM.mjs.map +1 -0
- package/dist/media/index.d.mts +1 -1
- package/dist/media/index.mjs +1 -1
- package/dist/media/local-runtime.d.mts +7 -7
- package/dist/media/local-runtime.mjs +3 -3
- package/dist/{media-D8FbNsl0.mjs → media-BW32b4gi.mjs} +2 -2
- package/dist/{media-D8FbNsl0.mjs.map → media-BW32b4gi.mjs.map} +1 -1
- package/dist/{mode-BnAOqItE.mjs → mode-ier8jbBk.mjs} +1 -1
- package/dist/{mode-BnAOqItE.mjs.map → mode-ier8jbBk.mjs.map} +1 -1
- package/dist/options-BVp3UsTS.mjs +117 -0
- package/dist/options-BVp3UsTS.mjs.map +1 -0
- package/dist/page/index.d.mts +2 -2
- package/dist/{placeholder-D29tWZ7o.d.mts → placeholder-BE4o_2dc.d.mts} +1 -1
- package/dist/{placeholder-D29tWZ7o.d.mts.map → placeholder-BE4o_2dc.d.mts.map} +1 -1
- package/dist/{placeholder-C-fk5hYI.mjs → placeholder-CIJejMlK.mjs} +1 -1
- package/dist/{placeholder-C-fk5hYI.mjs.map → placeholder-CIJejMlK.mjs.map} +1 -1
- package/dist/plugins/adapt-sandbox-entry.d.mts +5 -5
- package/dist/plugins/adapt-sandbox-entry.d.mts.map +1 -1
- package/dist/plugins/adapt-sandbox-entry.mjs +6 -5
- package/dist/plugins/adapt-sandbox-entry.mjs.map +1 -1
- package/dist/public-url-DByxYjUw.mjs +51 -0
- package/dist/public-url-DByxYjUw.mjs.map +1 -0
- package/dist/{query-fqEdLFms.mjs → query-Cg9ZKRQ0.mjs} +114 -16
- package/dist/query-Cg9ZKRQ0.mjs.map +1 -0
- package/dist/{redirect-D_pshWdf.mjs → redirect-BhUBKRc1.mjs} +11 -6
- package/dist/redirect-BhUBKRc1.mjs.map +1 -0
- package/dist/{registry-C3Mr0ODu.mjs → registry-Dw70ChxB.mjs} +38 -4
- package/dist/registry-Dw70ChxB.mjs.map +1 -0
- package/dist/{request-cache-Ci7f5pBb.mjs → request-cache-B-bmkipQ.mjs} +1 -1
- package/dist/{request-cache-Ci7f5pBb.mjs.map → request-cache-B-bmkipQ.mjs.map} +1 -1
- package/dist/runner-Bnoj7vjK.d.mts +44 -0
- package/dist/runner-Bnoj7vjK.d.mts.map +1 -0
- package/dist/{runner-tQ7BJ4T7.mjs → runner-C7ADox5q.mjs} +185 -55
- package/dist/{runner-tQ7BJ4T7.mjs.map → runner-C7ADox5q.mjs.map} +1 -1
- package/dist/runtime.d.mts +6 -6
- package/dist/runtime.mjs +4 -4
- package/dist/{search-BoZYFuUk.mjs → search-dOGEccMa.mjs} +129 -83
- package/dist/search-dOGEccMa.mjs.map +1 -0
- package/dist/secrets-CW3reAnU.mjs +314 -0
- package/dist/secrets-CW3reAnU.mjs.map +1 -0
- package/dist/seed/index.d.mts +2 -2
- package/dist/seed/index.mjs +15 -14
- package/dist/seo/index.d.mts +1 -1
- package/dist/storage/local.d.mts +1 -1
- package/dist/storage/local.mjs +1 -1
- package/dist/storage/s3.d.mts +1 -1
- package/dist/storage/s3.mjs +1 -1
- package/dist/{taxonomies-B4IAshV8.mjs → taxonomies-ZlRtD6AG.mjs} +14 -7
- package/dist/taxonomies-ZlRtD6AG.mjs.map +1 -0
- package/dist/{tokens-D9vnZqYS.mjs → tokens-D7zMmWi2.mjs} +2 -2
- package/dist/{tokens-D9vnZqYS.mjs.map → tokens-D7zMmWi2.mjs.map} +1 -1
- package/dist/{transport-C9ugt2Nr.mjs → transport-BeMCmin1.mjs} +6 -5
- package/dist/{transport-C9ugt2Nr.mjs.map → transport-BeMCmin1.mjs.map} +1 -1
- package/dist/{transport-CUnEL3Vs.d.mts → transport-DNEfeMaU.d.mts} +1 -1
- package/dist/{transport-CUnEL3Vs.d.mts.map → transport-DNEfeMaU.d.mts.map} +1 -1
- package/dist/types-4fVtCIm0.mjs +68 -0
- package/dist/types-4fVtCIm0.mjs.map +1 -0
- package/dist/{types-BmPPSUEx.d.mts → types-BSyXeCFW.d.mts} +24 -2
- package/dist/{types-BmPPSUEx.d.mts.map → types-BSyXeCFW.d.mts.map} +1 -1
- package/dist/{types-i36XcA_X.d.mts → types-BuBIptGk.d.mts} +65 -134
- package/dist/types-BuBIptGk.d.mts.map +1 -0
- package/dist/{types-CgqmmMJB.mjs → types-CDbKp7ND.mjs} +1 -1
- package/dist/{types-CgqmmMJB.mjs.map → types-CDbKp7ND.mjs.map} +1 -1
- package/dist/{types-Bm1dn-q3.mjs → types-CIOg5AR8.mjs} +1 -1
- package/dist/{types-Bm1dn-q3.mjs.map → types-CIOg5AR8.mjs.map} +1 -1
- package/dist/{types-BrA0xf5I.d.mts → types-CJsYGpco.d.mts} +1 -1
- package/dist/{types-BrA0xf5I.d.mts.map → types-CJsYGpco.d.mts.map} +1 -1
- package/dist/{types-BIgulNsW.mjs → types-CRxNbK-Z.mjs} +2 -2
- package/dist/{types-BIgulNsW.mjs.map → types-CRxNbK-Z.mjs.map} +1 -1
- package/dist/{types-CS8FIX7L.d.mts → types-CrtWgIvl.d.mts} +1 -1
- package/dist/{types-CS8FIX7L.d.mts.map → types-CrtWgIvl.d.mts.map} +1 -1
- package/dist/{types-DIMwPFub.d.mts → types-M78DQ1lx.d.mts} +1 -1
- package/dist/{types-DIMwPFub.d.mts.map → types-M78DQ1lx.d.mts.map} +1 -1
- package/dist/{validate-CxVsLehf.mjs → validate-Baqf0slj.mjs} +3 -3
- package/dist/{validate-CxVsLehf.mjs.map → validate-Baqf0slj.mjs.map} +1 -1
- package/dist/{validate-DHxmpFJt.d.mts → validate-BfQh_C_y.d.mts} +4 -4
- package/dist/{validate-DHxmpFJt.d.mts.map → validate-BfQh_C_y.d.mts.map} +1 -1
- package/dist/{validation-C-ZpN2GI.mjs → validation-BfEI7tNe.mjs} +6 -6
- package/dist/{validation-C-ZpN2GI.mjs.map → validation-BfEI7tNe.mjs.map} +1 -1
- package/dist/version-DoxrVdYf.mjs +7 -0
- package/dist/{version-Bbq8TCrz.mjs.map → version-DoxrVdYf.mjs.map} +1 -1
- package/dist/{zod-generator-CpwccCIv.mjs → zod-generator-CC0xNe_K.mjs} +4 -4
- package/dist/zod-generator-CC0xNe_K.mjs.map +1 -0
- package/locals.d.ts +1 -6
- package/package.json +9 -8
- package/src/api/handlers/comments.ts +6 -4
- package/src/api/handlers/content.ts +29 -1
- package/src/api/handlers/device-flow.ts +5 -0
- package/src/api/handlers/marketplace.ts +11 -4
- package/src/api/handlers/oauth-authorization.ts +72 -33
- package/src/api/handlers/revision.ts +23 -14
- package/src/api/handlers/taxonomies.ts +3 -6
- package/src/api/public-url.ts +48 -2
- package/src/api/schemas/comments.ts +2 -2
- package/src/api/schemas/content.ts +17 -0
- package/src/api/schemas/sections.ts +3 -3
- package/src/api/schemas/users.ts +1 -1
- package/src/api/types.ts +5 -1
- package/src/astro/integration/index.ts +17 -0
- package/src/astro/integration/runtime.ts +30 -0
- package/src/astro/integration/virtual-modules.ts +32 -2
- package/src/astro/integration/vite-config.ts +6 -1
- package/src/astro/middleware/auth.ts +13 -6
- package/src/astro/middleware/redirect.ts +29 -16
- package/src/astro/middleware/request-context.ts +15 -5
- package/src/astro/middleware.ts +23 -9
- package/src/astro/routes/api/auth/invite/complete.ts +6 -1
- package/src/astro/routes/api/auth/passkey/register/verify.ts +6 -1
- package/src/astro/routes/api/auth/passkey/verify.ts +6 -1
- package/src/astro/routes/api/auth/signup/complete.ts +6 -1
- package/src/astro/routes/api/comments/[collection]/[contentId]/index.ts +2 -2
- package/src/astro/routes/api/content/[collection]/[id]/discard-draft.ts +4 -2
- package/src/astro/routes/api/content/[collection]/[id]/preview-url.ts +34 -12
- package/src/astro/routes/api/content/[collection]/[id]/publish.ts +32 -2
- package/src/astro/routes/api/content/[collection]/[id]/restore.ts +4 -2
- package/src/astro/routes/api/content/[collection]/[id]/revisions.ts +3 -2
- package/src/astro/routes/api/content/[collection]/[id]/terms/[taxonomy].ts +8 -4
- package/src/astro/routes/api/content/[collection]/[id].ts +12 -0
- package/src/astro/routes/api/import/wordpress/execute.ts +3 -1
- package/src/astro/routes/api/import/wordpress/prepare.ts +7 -8
- package/src/astro/routes/api/import/wordpress-plugin/execute.ts +3 -1
- package/src/astro/routes/api/manifest.ts +62 -45
- package/src/astro/routes/api/media/[id]/confirm.ts +10 -1
- package/src/astro/routes/api/media/providers/[providerId]/index.ts +12 -3
- package/src/astro/routes/api/openapi.json.ts +27 -10
- package/src/astro/routes/api/redirects/404s/index.ts +10 -4
- package/src/astro/routes/api/redirects/404s/summary.ts +4 -2
- package/src/astro/routes/api/redirects/[id].ts +10 -4
- package/src/astro/routes/api/redirects/index.ts +7 -3
- package/src/astro/routes/api/revisions/[revisionId]/index.ts +1 -1
- package/src/astro/routes/api/schema/collections/[slug]/fields/[fieldSlug].ts +0 -2
- package/src/astro/routes/api/schema/collections/[slug]/fields/index.ts +0 -1
- package/src/astro/routes/api/schema/collections/[slug]/fields/reorder.ts +0 -1
- package/src/astro/routes/api/schema/collections/[slug]/index.ts +2 -2
- package/src/astro/routes/api/schema/collections/index.ts +1 -1
- package/src/astro/routes/api/search/index.ts +10 -2
- package/src/astro/routes/api/sections/[slug].ts +10 -4
- package/src/astro/routes/api/sections/index.ts +7 -3
- package/src/astro/routes/api/setup/admin-verify.ts +6 -1
- package/src/astro/routes/api/snapshot.ts +44 -18
- package/src/astro/routes/api/taxonomies/index.ts +0 -1
- package/src/astro/routes/api/themes/preview.ts +11 -5
- package/src/astro/types.ts +23 -3
- package/src/auth/allowed-origins.ts +168 -0
- package/src/auth/passkey-config.ts +35 -13
- package/src/bylines/index.ts +37 -88
- package/src/cli/commands/auth.ts +28 -6
- package/src/cli/commands/bundle-utils.ts +11 -2
- package/src/cli/commands/bundle.ts +28 -8
- package/src/cli/commands/content.ts +13 -0
- package/src/cli/commands/login.ts +8 -1
- package/src/cli/commands/publish.ts +24 -0
- package/src/cli/commands/secrets.ts +183 -0
- package/src/cli/credentials.ts +1 -1
- package/src/cli/index.ts +5 -1
- package/src/client/index.ts +4 -4
- package/src/client/transport.ts +17 -7
- package/src/components/Break.astro +2 -2
- package/src/components/EmDashHead.astro +18 -13
- package/src/components/Embed.astro +1 -1
- package/src/components/Gallery.astro +1 -1
- package/src/components/Image.astro +1 -1
- package/src/components/InlinePortableTextEditor.tsx +104 -18
- package/src/config/secrets.ts +528 -0
- package/src/database/dialect-helpers.ts +50 -0
- package/src/database/migrations/034_published_at_index.ts +1 -1
- package/src/database/migrations/035_bounded_404_log.ts +56 -39
- package/src/database/migrations/runner.ts +156 -23
- package/src/database/repositories/content.ts +36 -12
- package/src/database/repositories/redirect.ts +14 -3
- package/src/database/repositories/taxonomy.ts +26 -0
- package/src/db/libsql.ts +1 -3
- package/src/db/sqlite.ts +2 -5
- package/src/emdash-runtime.ts +84 -159
- package/src/index.ts +9 -0
- package/src/loader.ts +24 -1
- package/src/mcp/server.ts +103 -36
- package/src/page/site-identity.ts +58 -0
- package/src/plugins/adapt-sandbox-entry.ts +22 -10
- package/src/plugins/context.ts +13 -10
- package/src/plugins/define-plugin.ts +40 -12
- package/src/plugins/hooks.ts +23 -19
- package/src/plugins/index.ts +9 -0
- package/src/plugins/manifest-schema.ts +37 -2
- package/src/plugins/types.ts +151 -11
- package/src/preview/urls.ts +23 -3
- package/src/query.ts +148 -5
- package/src/redirects/cache.ts +38 -18
- package/src/schema/registry.ts +56 -0
- package/src/schema/zod-generator.ts +27 -5
- package/src/seed/apply.ts +2 -0
- package/src/settings/index.ts +80 -6
- package/src/settings/types.ts +23 -1
- package/src/taxonomies/index.ts +11 -1
- package/dist/apply-x0eMK1lX.mjs.map +0 -1
- package/dist/bylines-CRNsVG88.mjs +0 -157
- package/dist/bylines-CRNsVG88.mjs.map +0 -1
- package/dist/cache-BkKBuIvS.mjs +0 -56
- package/dist/cache-BkKBuIvS.mjs.map +0 -1
- package/dist/chunk-ClPoSABd.mjs +0 -21
- package/dist/dialect-helpers-DhTzaUxP.mjs.map +0 -1
- package/dist/index-DIb-CzNx.d.mts.map +0 -1
- package/dist/loader-CndGj8kM.mjs.map +0 -1
- package/dist/manifest-schema-DH9xhc6t.mjs.map +0 -1
- package/dist/query-fqEdLFms.mjs.map +0 -1
- package/dist/redirect-D_pshWdf.mjs.map +0 -1
- package/dist/registry-C3Mr0ODu.mjs.map +0 -1
- package/dist/runner-OURCaApa.d.mts +0 -34
- package/dist/runner-OURCaApa.d.mts.map +0 -1
- package/dist/search-BoZYFuUk.mjs.map +0 -1
- package/dist/taxonomies-B4IAshV8.mjs.map +0 -1
- package/dist/types-i36XcA_X.d.mts.map +0 -1
- package/dist/version-Bbq8TCrz.mjs +0 -7
- package/dist/zod-generator-CpwccCIv.mjs.map +0 -1
|
@@ -0,0 +1,314 @@
|
|
|
1
|
+
import { i as encodeBase64url, n as decodeBase64url } from "./base64-BRICGH2l.mjs";
|
|
2
|
+
import { t as OptionsRepository } from "./options-BVp3UsTS.mjs";
|
|
3
|
+
import { sha256 } from "@oslojs/crypto/sha2";
|
|
4
|
+
import { encodeHexLowerCase } from "@oslojs/encoding";
|
|
5
|
+
|
|
6
|
+
//#region src/config/secrets.ts
|
|
7
|
+
/**
|
|
8
|
+
* Centralized secrets module
|
|
9
|
+
*
|
|
10
|
+
* Single source of truth for site-level cryptographic secrets:
|
|
11
|
+
*
|
|
12
|
+
* - `EMDASH_ENCRYPTION_KEY` — primary key for encrypting plugin secrets at
|
|
13
|
+
* rest. Multi-key (comma-separated) for rotation forward-compat. v1 ships
|
|
14
|
+
* single-key. Format: `emdash_enc_v1_<43 base64url chars>` representing
|
|
15
|
+
* 32 random bytes. **Operator-provided; never stored in the database.**
|
|
16
|
+
* Losing the key means losing every secret encrypted with it. Validated
|
|
17
|
+
* at runtime startup via `validateEncryptionKeyAtStartup` — request-time
|
|
18
|
+
* resolution does not depend on it, so a malformed key can't 500 the
|
|
19
|
+
* preview/comment hot paths for unrelated visitors.
|
|
20
|
+
* - `EMDASH_IP_SALT` (optional) / DB-stored `emdash:ip_salt` — site-specific
|
|
21
|
+
* salt for hashing commenter IPs. Generated and persisted on first need
|
|
22
|
+
* if no env override is set. Replaces the previous hardcoded
|
|
23
|
+
* `"emdash-ip-salt"` constant which was correlatable across installs.
|
|
24
|
+
* - `EMDASH_PREVIEW_SECRET` (optional) / DB-stored `emdash:preview_secret` —
|
|
25
|
+
* HMAC secret for signing preview URLs. Generated and persisted on first
|
|
26
|
+
* need if no env override is set. Replaces the previous empty-string
|
|
27
|
+
* fallback which silently disabled preview-token verification.
|
|
28
|
+
*
|
|
29
|
+
* The `EMDASH_AUTH_SECRET` env var is consulted only as a legacy fallback
|
|
30
|
+
* source for the IP salt — that's the only path the prior code actually
|
|
31
|
+
* read it from. New deployments don't need to set it.
|
|
32
|
+
*
|
|
33
|
+
* Modeled on `resolveS3Config` in `../storage/s3.ts`.
|
|
34
|
+
*/
|
|
35
|
+
/** v1 encryption key prefix. Bumping requires a separate KDF version. */
const ENCRYPTION_KEY_PREFIX = "emdash_enc_v1_";
/** 32 random bytes encoded as unpadded base64url = 43 chars. */
const ENCRYPTION_KEY_BODY_LENGTH = 43;
/**
 * Anchored shape check for a full key: prefix + exactly 43 base64url chars.
 *
 * Built from the prefix constant via interpolation. The prefix has no regex
 * metacharacters today (`emdash_enc_v1_`), but escaping is cheap defense
 * against anyone changing the prefix in a future bump without remembering.
 */
const ENCRYPTION_KEY_PATTERN = new RegExp(`^${ENCRYPTION_KEY_PREFIX.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")}[A-Za-z0-9_-]{${ENCRYPTION_KEY_BODY_LENGTH}}$`);
/** Options-table key for the persisted commenter-IP salt. */
const IP_SALT_OPTION_KEY = "emdash:ip_salt";
/** Options-table key for the persisted preview HMAC secret. */
const PREVIEW_SECRET_OPTION_KEY = "emdash:preview_secret";
/** Length in bytes of generated values. 32 bytes = 256 bits. */
const GENERATED_SECRET_BYTES = 32;
|
|
51
|
+
/**
 * Operator-facing configuration error raised by this module (malformed
 * encryption key, failed secret persistence, and similar).
 *
 * Errors here are thrown rather than soft-skipped so misconfiguration fails
 * loudly at startup instead of silently degrading at request time. Carries a
 * stable machine-readable `code` alongside the human-readable message.
 */
var EmDashSecretsError = class extends Error {
	constructor(message, code) {
		super(message);
		this.name = "EmDashSecretsError";
		this.code = code;
	}
};
|
|
66
|
+
/**
 * Parse the `EMDASH_ENCRYPTION_KEY` env var.
 *
 * Accepts a single key or a comma-separated list. The first entry is the
 * primary (used for new writes); all entries are tried for decryption,
 * matched by `kid`. Whitespace around commas is tolerated. Empty entries
 * (e.g. trailing comma) are ignored; duplicate keys (same bytes) collapse
 * to their first occurrence.
 *
 * Returns `null` for an unset/empty input. Throws `EmDashSecretsError` on
 * any malformed entry — silent skipping would mask deployment mistakes.
 *
 * @param {string | undefined} raw - raw env-var value
 * @returns {Promise<Array<{kid: string, key: Uint8Array, raw: string}> | null>}
 * @throws {EmDashSecretsError} code `INVALID_ENCRYPTION_KEY` on any bad entry
 */
async function parseEncryptionKeys(raw) {
	if (!raw) return null;
	const entries = raw.split(",").map((entry) => entry.trim()).filter((entry) => entry.length > 0);
	if (entries.length === 0) return null;
	const parsed = [];
	const seenKids = /* @__PURE__ */ new Set();
	for (const entry of entries) {
		if (!ENCRYPTION_KEY_PATTERN.test(entry)) throw new EmDashSecretsError(`EMDASH_ENCRYPTION_KEY entry is malformed (expected "${ENCRYPTION_KEY_PREFIX}" followed by ${ENCRYPTION_KEY_BODY_LENGTH} base64url chars). Generate one with \`emdash secrets generate\`.`, "INVALID_ENCRYPTION_KEY");
		// Derive the body offset from the prefix constant rather than a magic
		// `14`, so a future prefix bump can't silently desynchronize the slice.
		const body = entry.slice(ENCRYPTION_KEY_PREFIX.length);
		const key = decodeBase64urlStrict(body);
		if (!key) throw new EmDashSecretsError("EMDASH_ENCRYPTION_KEY body is not valid base64url", "INVALID_ENCRYPTION_KEY");
		if (key.length !== GENERATED_SECRET_BYTES) throw new EmDashSecretsError(`EMDASH_ENCRYPTION_KEY must decode to ${GENERATED_SECRET_BYTES} bytes, got ${key.length}`, "INVALID_ENCRYPTION_KEY");
		// Round-trip check rejects non-canonical encodings that would alias
		// the same bytes under a different spelling.
		if (encodeBase64url(key) !== body) throw new EmDashSecretsError("EMDASH_ENCRYPTION_KEY body is not canonical base64url. Generate one with `emdash secrets generate`.", "INVALID_ENCRYPTION_KEY");
		const kid = fingerprintKeyBytes(key);
		if (seenKids.has(kid)) continue;
		seenKids.add(kid);
		parsed.push({
			kid,
			key,
			raw: entry
		});
	}
	return parsed;
}
|
|
101
|
+
/**
 * Compute the kid for a raw key string (the env-var form including the
 * `emdash_enc_v1_` prefix). Public so the CLI's `fingerprint` subcommand
 * and admin endpoints can show kids without exposing raw keys.
 *
 * The kid is derived from the decoded key **bytes**, not the raw string,
 * so admin endpoints / future rotation flows can match envelope kids
 * against bytes regardless of how the env var was originally spelled.
 *
 * Validates the same shape as `parseEncryptionKeys` — including canonical
 * base64url — so the CLI can't print a kid for a key the runtime would
 * later refuse to load.
 *
 * @param {string} raw - full key string, prefix included
 * @returns {Promise<string>} 8-hex-char kid
 * @throws {EmDashSecretsError} for malformed or non-canonical input
 */
async function fingerprintKey(raw) {
	if (!ENCRYPTION_KEY_PATTERN.test(raw)) throw new EmDashSecretsError(`Key must match "${ENCRYPTION_KEY_PREFIX}" followed by ${ENCRYPTION_KEY_BODY_LENGTH} base64url chars`, "INVALID_ENCRYPTION_KEY");
	// Prefix length, not a magic `14`, so this stays in lockstep with
	// parseEncryptionKeys if the prefix ever changes.
	const body = raw.slice(ENCRYPTION_KEY_PREFIX.length);
	const bytes = decodeBase64urlStrict(body);
	if (!bytes || bytes.length !== GENERATED_SECRET_BYTES || encodeBase64url(bytes) !== body) throw new EmDashSecretsError(`Key body must decode to ${GENERATED_SECRET_BYTES} canonical base64url bytes`, "INVALID_ENCRYPTION_KEY");
	return fingerprintKeyBytes(bytes);
}
|
|
123
|
+
/**
 * Internal: kid derivation from raw key bytes. The single source of truth
 * for what makes two keys "the same key" — used by both `parseEncryptionKeys`
 * and `fingerprintKey`. The kid is the first 8 lowercase-hex chars of the
 * SHA-256 digest of the key bytes.
 */
function fingerprintKeyBytes(key) {
	const digest = sha256(key);
	const hex = encodeHexLowerCase(digest);
	return hex.slice(0, 8);
}
|
|
131
|
+
/**
 * Generate a fresh `EMDASH_ENCRYPTION_KEY` value. Used by the CLI's
 * `secrets generate` subcommand and by `create-emdash` scaffolding.
 * Randomness comes from the platform CSPRNG via `crypto.getRandomValues`.
 */
function generateEncryptionKey() {
	const material = crypto.getRandomValues(new Uint8Array(GENERATED_SECRET_BYTES));
	return ENCRYPTION_KEY_PREFIX + encodeBase64url(material);
}
|
|
140
|
+
/**
 * Resolve site secrets. Reads env vars; for IP salt and preview secret,
 * falls back to a DB-stored value, generating one atomically on first need.
 *
 * Idempotent. Concurrent callers race on the atomic `setIfAbsent`; whichever
 * wins, all callers converge on the same stored value.
 *
 * Note: `EMDASH_ENCRYPTION_KEY` is **not** consumed here. It's validated
 * separately at runtime startup (see `validateEncryptionKeyAtStartup`) so a
 * malformed key can't take down preview-token verification or comment
 * submission for unrelated visitors. Future plugin-secret encryption code
 * will read it via its own dedicated helper.
 */
async function resolveSecrets(options) {
	const env = options.env ?? readDefaultEnv();
	const repo = options._repo ?? new OptionsRepository(options.db);
	// An env override wins outright; otherwise read-or-generate via the options table.
	const resolveOne = (override, optionKey) => override !== null
		? Promise.resolve({ value: override, source: "env" })
		: ensureGeneratedOption(repo, optionKey);
	const previewOverride = pickFirstNonEmpty(env.EMDASH_PREVIEW_SECRET, env.PREVIEW_SECRET);
	// EMDASH_AUTH_SECRET / AUTH_SECRET act only as legacy fallbacks for the IP salt.
	const saltOverride = pickFirstNonEmpty(env.EMDASH_IP_SALT, env.EMDASH_AUTH_SECRET, env.AUTH_SECRET);
	const [preview, salt] = await Promise.all([
		resolveOne(previewOverride, PREVIEW_SECRET_OPTION_KEY),
		resolveOne(saltOverride, IP_SALT_OPTION_KEY)
	]);
	return {
		previewSecret: preview.value,
		previewSecretSource: preview.source,
		ipSalt: salt.value,
		ipSaltSource: salt.source
	};
}
|
|
172
|
+
/**
 * Validate `EMDASH_ENCRYPTION_KEY` once at runtime startup. Logs an
 * operator-facing error if the value is malformed but does **not** throw —
 * the key is currently inert (no consumers), and the follow-up PR that
 * actually uses it will throw at point of use. This way, deployment
 * mistakes surface immediately in startup logs without wedging unrelated
 * request paths in the meantime.
 *
 * Returns `true` if the key is unset or valid, `false` if it was malformed.
 */
async function validateEncryptionKeyAtStartup(env) {
	const source = env ?? readDefaultEnv();
	try {
		await parseEncryptionKeys(source.EMDASH_ENCRYPTION_KEY);
	} catch (error) {
		// Only config errors are downgraded to a log line; anything else is a bug.
		if (!(error instanceof EmDashSecretsError)) throw error;
		console.error(`[emdash] EMDASH_ENCRYPTION_KEY is invalid: ${error.message} Plugin-secret encryption will fail once it ships. Generate a fresh key with \`emdash secrets generate\`.`);
		return false;
	}
	return true;
}
|
|
195
|
+
/**
 * Per-DB cache of resolved secrets, keyed by Kysely instance identity.
 *
 * The resolved values are stable for the lifetime of the deployment (env
 * vars don't change without a restart, and DB-stored values are written
 * once via `setIfAbsent`). Caching avoids one options-table read per
 * request on the hot paths (preview verification, comment hashing).
 *
 * Lives on `globalThis` so module-duplication during SSR bundling can't
 * fragment the cache. See `request-context.ts` for the same pattern.
 */
const SECRETS_CACHE_KEY = Symbol.for("@emdash-cms/core/secrets-cache@1");
function getSecretsCache() {
	const holder = globalThis;
	// Lazily install the holder object the first time any copy of this module runs.
	if (!holder[SECRETS_CACHE_KEY]) holder[SECRETS_CACHE_KEY] = { cache: /* @__PURE__ */ new WeakMap() };
	return holder[SECRETS_CACHE_KEY].cache;
}
|
|
216
|
+
/**
 * Memoized wrapper around `resolveSecrets`. Use this from request-time hot
 * paths (preview verification, comment IP hashing) so they don't reread
 * env / re-query options on every request.
 *
 * The cache is keyed by `Kysely` instance, so playground / per-DO / per-test
 * databases each get their own resolution.
 */
function resolveSecretsCached(db) {
	const cache = getSecretsCache();
	const hit = cache.get(db);
	if (hit) return hit;
	// Store the in-flight promise so concurrent callers share one resolution;
	// evict on failure so the next caller retries instead of seeing a stale rejection.
	const pending = resolveSecrets({ db }).catch((error) => {
		cache.delete(db);
		throw error;
	});
	cache.set(db, pending);
	return pending;
}
|
|
235
|
+
/**
 * Read or generate-and-persist a random base64url secret stored in the
 * options table.
 *
 * Concurrency: `setIfAbsent` is an atomic INSERT...ON CONFLICT DO NOTHING.
 * On race, the loser re-reads to converge on the winner's value.
 */
async function ensureGeneratedOption(repo, optionKey) {
	// A stored value is usable only if it is a non-empty string.
	const isUsable = (value) => typeof value === "string" && value.length > 0;
	const existing = await repo.get(optionKey);
	if (isUsable(existing)) return { value: existing, source: "db" };
	const candidate = generateRandomSecret();
	if (await repo.setIfAbsent(optionKey, candidate)) return { value: candidate, source: "db" };
	// Lost the insert race — adopt whatever the winner persisted.
	const winner = await repo.get(optionKey);
	if (!isUsable(winner)) throw new EmDashSecretsError(`Failed to persist generated secret for "${optionKey}"`, "SECRET_PERSIST_FAILED");
	return { value: winner, source: "db" };
}
|
|
260
|
+
/** Generate 32 CSPRNG bytes and return them as unpadded base64url. */
function generateRandomSecret() {
	return encodeBase64url(crypto.getRandomValues(new Uint8Array(GENERATED_SECRET_BYTES)));
}
|
|
266
|
+
/** Return the first non-empty string from `values`, or `null` if all are empty. */
function pickFirstNonEmpty(...values) {
	const found = values.find((value) => typeof value === "string" && value.length > 0);
	return found ?? null;
}
|
|
271
|
+
/** Unpadded base64url alphabet; one-or-more chars, so empty input is rejected. */
const BASE64URL_CHARSET_PATTERN = /^[A-Za-z0-9_-]+$/;
/**
 * Validate base64url shape and decode. Returns `null` on malformed input
 * (rather than throwing) so the caller can produce a config-specific error.
 */
function decodeBase64urlStrict(input) {
	if (!BASE64URL_CHARSET_PATTERN.test(input)) return null;
	let decoded;
	try {
		decoded = decodeBase64url(input);
	} catch {
		// Decoder rejected the payload despite passing the charset check.
		decoded = null;
	}
	return decoded;
}
|
|
284
|
+
/**
 * Default env reader.
 *
 * Note: this is the **only** code path in core that reads both
 * `import.meta.env` and `process.env`. Route handlers should not — they
 * always run inside the Astro/Vite bundle where `import.meta.env` is
 * the correct source. This resolver is shared with the CLI surface (via
 * `cli/commands/secrets.ts`) which runs outside the bundle, so we
 * deliberately consult both. `import.meta.env` wins so build-time
 * substitutions are honored when present.
 *
 * The convention documented in AGENTS.md ("import.meta.env.EMDASH_X ||
 * import.meta.env.X") is the route-handler convention; this is the
 * shared-with-CLI exception.
 */
function readDefaultEnv() {
	const meta = import.meta.env ?? {};
	const proc = typeof process !== "undefined" && process.env ? process.env : {};
	const names = [
		"EMDASH_ENCRYPTION_KEY",
		"EMDASH_PREVIEW_SECRET",
		"PREVIEW_SECRET",
		"EMDASH_IP_SALT",
		"EMDASH_AUTH_SECRET",
		"AUTH_SECRET"
	];
	return Object.fromEntries(names.map((name) => [name, meta[name] ?? proc[name]]));
}
|
|
311
|
+
|
|
312
|
+
//#endregion
|
|
313
|
+
export { validateEncryptionKeyAtStartup as a, resolveSecretsCached as i, fingerprintKey as n, generateEncryptionKey as r, EmDashSecretsError as t };
|
|
314
|
+
//# sourceMappingURL=secrets-CW3reAnU.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"secrets-CW3reAnU.mjs","names":[],"sources":["../src/config/secrets.ts"],"sourcesContent":["/**\n * Centralized secrets module\n *\n * Single source of truth for site-level cryptographic secrets:\n *\n * - `EMDASH_ENCRYPTION_KEY` — primary key for encrypting plugin secrets at\n * rest. Multi-key (comma-separated) for rotation forward-compat. v1 ships\n * single-key. Format: `emdash_enc_v1_<43 base64url chars>` representing\n * 32 random bytes. **Operator-provided; never stored in the database.**\n * Losing the key means losing every secret encrypted with it. Validated\n * at runtime startup via `validateEncryptionKeyAtStartup` — request-time\n * resolution does not depend on it, so a malformed key can't 500 the\n * preview/comment hot paths for unrelated visitors.\n * - `EMDASH_IP_SALT` (optional) / DB-stored `emdash:ip_salt` — site-specific\n * salt for hashing commenter IPs. Generated and persisted on first need\n * if no env override is set. Replaces the previous hardcoded\n * `\"emdash-ip-salt\"` constant which was correlatable across installs.\n * - `EMDASH_PREVIEW_SECRET` (optional) / DB-stored `emdash:preview_secret` —\n * HMAC secret for signing preview URLs. Generated and persisted on first\n * need if no env override is set. Replaces the previous empty-string\n * fallback which silently disabled preview-token verification.\n *\n * The `EMDASH_AUTH_SECRET` env var is consulted only as a legacy fallback\n * source for the IP salt — that's the only path the prior code actually\n * read it from. 
New deployments don't need to set it.\n *\n * Modeled on `resolveS3Config` in `../storage/s3.ts`.\n */\n\nimport { sha256 } from \"@oslojs/crypto/sha2\";\nimport { encodeHexLowerCase } from \"@oslojs/encoding\";\nimport type { Kysely } from \"kysely\";\n\nimport { OptionsRepository } from \"../database/repositories/options.js\";\nimport type { Database } from \"../database/types.js\";\nimport { decodeBase64url, encodeBase64url } from \"../utils/base64.js\";\n\n/** v1 encryption key prefix. Bumping requires a separate KDF version. */\nexport const ENCRYPTION_KEY_PREFIX = \"emdash_enc_v1_\";\n\n/** 32 random bytes encoded as unpadded base64url = 43 chars. */\nconst ENCRYPTION_KEY_BODY_LENGTH = 43;\n\nconst REGEX_META_PATTERN = /[.*+?^${}()|[\\]\\\\]/g;\n\n/**\n * Built from the prefix constant via interpolation. The prefix has no regex\n * metacharacters today (`emdash_enc_v1_`), but escaping is cheap defense\n * against anyone changing the prefix in a future bump without remembering.\n */\nconst ENCRYPTION_KEY_PATTERN = new RegExp(\n\t`^${ENCRYPTION_KEY_PREFIX.replace(REGEX_META_PATTERN, \"\\\\$&\")}[A-Za-z0-9_-]{${ENCRYPTION_KEY_BODY_LENGTH}}$`,\n);\n\n/** Options-table key for the persisted commenter-IP salt. */\nexport const IP_SALT_OPTION_KEY = \"emdash:ip_salt\";\n\n/** Options-table key for the persisted preview HMAC secret. */\nexport const PREVIEW_SECRET_OPTION_KEY = \"emdash:preview_secret\";\n\n/** Length in bytes of generated values. 32 bytes = 256 bits. */\nconst GENERATED_SECRET_BYTES = 32;\n\n/**\n * A parsed encryption key with its kid (key id) fingerprint.\n *\n * `kid` is the first 8 chars of the SHA-256 hash of the decoded key bytes\n * (lowercase hex), used to tag envelopes so the decryptor can pick the right\n * key during rotation.\n */\nexport interface ParsedEncryptionKey {\n\t/** 8-char lowercase hex fingerprint derived from the decoded key bytes. */\n\tkid: string;\n\t/** The 32 raw key bytes, ready for `crypto.subtle.importKey`. 
*/\n\tkey: Uint8Array;\n\t/** The original env-var-formatted string (kept for re-emit; never log). */\n\traw: string;\n}\n\n/** Resolved site secrets. */\nexport interface ResolvedSecrets {\n\t/** HMAC secret for preview URLs. Always non-empty after resolution. */\n\tpreviewSecret: string;\n\t/**\n\t * Source of `previewSecret`. Useful for diagnostics; never expose the\n\t * value itself, only the source.\n\t */\n\tpreviewSecretSource: \"env\" | \"db\";\n\t/** Salt for hashing commenter IPs. Always non-empty after resolution. */\n\tipSalt: string;\n\t/** Source of `ipSalt`. */\n\tipSaltSource: \"env\" | \"db\";\n}\n\n/** Inputs for `resolveSecrets`. */\nexport interface ResolveSecretsOptions {\n\t/**\n\t * The Kysely DB used to persist (and read back) generated salt/preview\n\t * secret values. Required — these values must be stable across requests\n\t * within a deployment.\n\t */\n\tdb: Kysely<Database>;\n\t/**\n\t * Optional explicit env override map. When omitted, falls back to\n\t * `import.meta.env` via the global accessor below. Tests pass an\n\t * explicit map to avoid leaking process state.\n\t */\n\tenv?: SecretsEnv;\n\t/**\n\t * @internal Test seam: inject a custom OptionsRepository to exercise\n\t * the lost-race re-read branch. Production callers never set this.\n\t */\n\t_repo?: OptionsRepository;\n}\n\n/** Environment-variable shape consulted by the resolver. */\nexport interface SecretsEnv {\n\t/**\n\t * Read by `validateEncryptionKeyAtStartup` and (in a follow-up PR) by the\n\t * plugin-secret encryption layer. **Not** consulted by `resolveSecrets`,\n\t * so a malformed value can't 500 the preview/comment hot paths.\n\t */\n\tEMDASH_ENCRYPTION_KEY?: string;\n\tEMDASH_PREVIEW_SECRET?: string;\n\t/** Legacy alias; new docs point at EMDASH_PREVIEW_SECRET. */\n\tPREVIEW_SECRET?: string;\n\tEMDASH_IP_SALT?: string;\n\t/**\n\t * Legacy fallback. Prior code derived the IP salt from\n\t * `EMDASH_AUTH_SECRET || AUTH_SECRET || \"emdash-ip-salt\"`. 
We preserve\n\t * the env-var fallback (so existing installs keep their stable salt)\n\t * but no longer read it from `import.meta.env` in route handlers.\n\t */\n\tEMDASH_AUTH_SECRET?: string;\n\t/** Legacy alias. */\n\tAUTH_SECRET?: string;\n}\n\n/**\n * Class of validation failures raised by this module.\n *\n * Errors here are operator-facing config problems (malformed key, etc.).\n * They are thrown rather than soft-skipped so misconfiguration fails loudly\n * at startup instead of silently degrading at request time.\n */\nexport class EmDashSecretsError extends Error {\n\toverride readonly name = \"EmDashSecretsError\";\n\treadonly code: string;\n\n\tconstructor(message: string, code: string) {\n\t\tsuper(message);\n\t\tthis.code = code;\n\t}\n}\n\n// ---------------------------------------------------------------------------\n// Encryption key parsing\n// ---------------------------------------------------------------------------\n\n/**\n * Parse the `EMDASH_ENCRYPTION_KEY` env var.\n *\n * Accepts a single key or a comma-separated list. The first entry is the\n * primary (used for new writes); all entries are tried for decryption,\n * matched by `kid`. Whitespace around commas is tolerated. Empty entries\n * (e.g. trailing comma) are ignored.\n *\n * Returns `null` for an unset/empty input. 
Throws `EmDashSecretsError` on\n * any malformed entry — silent skipping would mask deployment mistakes.\n */\nexport async function parseEncryptionKeys(\n\traw: string | undefined,\n): Promise<ParsedEncryptionKey[] | null> {\n\tif (!raw) return null;\n\n\tconst entries = raw\n\t\t.split(\",\")\n\t\t.map((entry) => entry.trim())\n\t\t.filter((entry) => entry.length > 0);\n\n\tif (entries.length === 0) return null;\n\n\tconst parsed: ParsedEncryptionKey[] = [];\n\tconst seenKids = new Set<string>();\n\n\tfor (const entry of entries) {\n\t\tif (!ENCRYPTION_KEY_PATTERN.test(entry)) {\n\t\t\tthrow new EmDashSecretsError(\n\t\t\t\t`EMDASH_ENCRYPTION_KEY entry is malformed (expected \"${ENCRYPTION_KEY_PREFIX}\" followed by ${ENCRYPTION_KEY_BODY_LENGTH} base64url chars). Generate one with \\`emdash secrets generate\\`.`,\n\t\t\t\t\"INVALID_ENCRYPTION_KEY\",\n\t\t\t);\n\t\t}\n\n\t\tconst body = entry.slice(ENCRYPTION_KEY_PREFIX.length);\n\t\tconst key = decodeBase64urlStrict(body);\n\t\tif (!key) {\n\t\t\tthrow new EmDashSecretsError(\n\t\t\t\t\"EMDASH_ENCRYPTION_KEY body is not valid base64url\",\n\t\t\t\t\"INVALID_ENCRYPTION_KEY\",\n\t\t\t);\n\t\t}\n\t\tif (key.length !== GENERATED_SECRET_BYTES) {\n\t\t\tthrow new EmDashSecretsError(\n\t\t\t\t`EMDASH_ENCRYPTION_KEY must decode to ${GENERATED_SECRET_BYTES} bytes, got ${key.length}`,\n\t\t\t\t\"INVALID_ENCRYPTION_KEY\",\n\t\t\t);\n\t\t}\n\n\t\t// Reject non-canonical base64url. 43 chars decode to 32 bytes but\n\t\t// the last char only carries 2 information bits — multiple raw\n\t\t// strings can decode to the same bytes. Forcing canonical form\n\t\t// guarantees `kid` (derived from bytes) is stable per key\n\t\t// material, regardless of how the operator pasted it.\n\t\tconst canonical = encodeBase64url(key);\n\t\tif (canonical !== body) {\n\t\t\tthrow new EmDashSecretsError(\n\t\t\t\t\"EMDASH_ENCRYPTION_KEY body is not canonical base64url. 
Generate one with `emdash secrets generate`.\",\n\t\t\t\t\"INVALID_ENCRYPTION_KEY\",\n\t\t\t);\n\t\t}\n\n\t\tconst kid = fingerprintKeyBytes(key);\n\t\tif (seenKids.has(kid)) {\n\t\t\t// Duplicate keys are user error (paste mistake during rotation).\n\t\t\t// We dedupe rather than throw — the rotation flow is forgiving.\n\t\t\tcontinue;\n\t\t}\n\t\tseenKids.add(kid);\n\t\tparsed.push({ kid, key, raw: entry });\n\t}\n\n\t// `parsed` always has at least one entry here: `entries` was non-empty\n\t// after filtering, the loop runs at least once, the first iteration\n\t// always passes the empty-`seenKids` check.\n\treturn parsed;\n}\n\n/**\n * Compute the kid for a raw key string (the env-var form including the\n * `emdash_enc_v1_` prefix). Public so the CLI's `fingerprint` subcommand\n * and admin endpoints can show kids without exposing raw keys.\n *\n * The kid is derived from the decoded key **bytes**, not the raw string,\n * so admin endpoints / future rotation flows can match envelope kids\n * against bytes regardless of how the env var was originally spelled.\n *\n * Validates the same shape as `parseEncryptionKeys` — including canonical\n * base64url — so the CLI can't print a kid for a key the runtime would\n * later refuse to load.\n *\n * Throws `EmDashSecretsError` for malformed or non-canonical input.\n */\nexport async function fingerprintKey(raw: string): Promise<string> {\n\tif (!ENCRYPTION_KEY_PATTERN.test(raw)) {\n\t\tthrow new EmDashSecretsError(\n\t\t\t`Key must match \"${ENCRYPTION_KEY_PREFIX}\" followed by ${ENCRYPTION_KEY_BODY_LENGTH} base64url chars`,\n\t\t\t\"INVALID_ENCRYPTION_KEY\",\n\t\t);\n\t}\n\tconst body = raw.slice(ENCRYPTION_KEY_PREFIX.length);\n\tconst bytes = decodeBase64urlStrict(body);\n\tif (!bytes || bytes.length !== GENERATED_SECRET_BYTES || encodeBase64url(bytes) !== body) {\n\t\tthrow new EmDashSecretsError(\n\t\t\t`Key body must decode to ${GENERATED_SECRET_BYTES} canonical base64url 
bytes`,\n\t\t\t\"INVALID_ENCRYPTION_KEY\",\n\t\t);\n\t}\n\treturn fingerprintKeyBytes(bytes);\n}\n\n/**\n * Internal: kid derivation from raw key bytes. The single source of truth\n * for what makes two keys \"the same key\" — used by both `parseEncryptionKeys`\n * and `fingerprintKey`.\n */\nfunction fingerprintKeyBytes(key: Uint8Array): string {\n\treturn encodeHexLowerCase(sha256(key)).slice(0, 8);\n}\n\n/**\n * Generate a fresh `EMDASH_ENCRYPTION_KEY` value. Used by the CLI's\n * `secrets generate` subcommand and by `create-emdash` scaffolding.\n */\nexport function generateEncryptionKey(): string {\n\tconst bytes = new Uint8Array(GENERATED_SECRET_BYTES);\n\tcrypto.getRandomValues(bytes);\n\treturn `${ENCRYPTION_KEY_PREFIX}${encodeBase64url(bytes)}`;\n}\n\n// ---------------------------------------------------------------------------\n// Site-secret resolution (DB-backed with env override)\n// ---------------------------------------------------------------------------\n\n/**\n * Resolve site secrets. Reads env vars; for IP salt and preview secret,\n * falls back to a DB-stored value, generating one atomically on first need.\n *\n * Idempotent. Concurrent callers race on the atomic `setIfAbsent`; whichever\n * wins, all callers converge on the same stored value.\n *\n * Note: `EMDASH_ENCRYPTION_KEY` is **not** consumed here. It's validated\n * separately at runtime startup (see `validateEncryptionKeyAtStartup`) so a\n * malformed key can't take down preview-token verification or comment\n * submission for unrelated visitors. Future plugin-secret encryption code\n * will read it via its own dedicated helper.\n */\nexport async function resolveSecrets(options: ResolveSecretsOptions): Promise<ResolvedSecrets> {\n\tconst env = options.env ?? readDefaultEnv();\n\tconst repo = options._repo ?? 
new OptionsRepository(options.db);\n\n\tconst previewEnvOverride = pickFirstNonEmpty(env.EMDASH_PREVIEW_SECRET, env.PREVIEW_SECRET);\n\tconst ipSaltEnvOverride = pickFirstNonEmpty(\n\t\tenv.EMDASH_IP_SALT,\n\t\tenv.EMDASH_AUTH_SECRET,\n\t\tenv.AUTH_SECRET,\n\t);\n\n\tconst [previewSecret, ipSalt] = await Promise.all([\n\t\tpreviewEnvOverride !== null\n\t\t\t? Promise.resolve({ value: previewEnvOverride, source: \"env\" as const })\n\t\t\t: ensureGeneratedOption(repo, PREVIEW_SECRET_OPTION_KEY),\n\t\tipSaltEnvOverride !== null\n\t\t\t? Promise.resolve({ value: ipSaltEnvOverride, source: \"env\" as const })\n\t\t\t: ensureGeneratedOption(repo, IP_SALT_OPTION_KEY),\n\t]);\n\n\treturn {\n\t\tpreviewSecret: previewSecret.value,\n\t\tpreviewSecretSource: previewSecret.source,\n\t\tipSalt: ipSalt.value,\n\t\tipSaltSource: ipSalt.source,\n\t};\n}\n\n/**\n * Validate `EMDASH_ENCRYPTION_KEY` once at runtime startup. Logs an\n * operator-facing error if the value is malformed but does **not** throw —\n * the key is currently inert (no consumers), and the follow-up PR that\n * actually uses it will throw at point of use. This way, deployment\n * mistakes surface immediately in startup logs without wedging unrelated\n * request paths in the meantime.\n *\n * Returns `true` if the key is unset or valid, `false` if it was malformed.\n */\nexport async function validateEncryptionKeyAtStartup(env?: SecretsEnv): Promise<boolean> {\n\tconst resolved = env ?? readDefaultEnv();\n\ttry {\n\t\tawait parseEncryptionKeys(resolved.EMDASH_ENCRYPTION_KEY);\n\t\treturn true;\n\t} catch (error) {\n\t\tif (error instanceof EmDashSecretsError) {\n\t\t\tconsole.error(\n\t\t\t\t`[emdash] EMDASH_ENCRYPTION_KEY is invalid: ${error.message} ` +\n\t\t\t\t\t\"Plugin-secret encryption will fail once it ships. 
\" +\n\t\t\t\t\t\"Generate a fresh key with `emdash secrets generate`.\",\n\t\t\t);\n\t\t\treturn false;\n\t\t}\n\t\tthrow error;\n\t}\n}\n\n/**\n * Per-DB cache of resolved secrets, keyed by Kysely instance identity.\n *\n * The resolved values are stable for the lifetime of the deployment (env\n * vars don't change without a restart, and DB-stored values are written\n * once via `setIfAbsent`). Caching avoids one options-table read per\n * request on the hot paths (preview verification, comment hashing).\n *\n * Lives on `globalThis` so module-duplication during SSR bundling can't\n * fragment the cache. See `request-context.ts` for the same pattern.\n */\n// Versioned to prevent cache fragmentation if `ResolvedSecrets`'s shape\n// ever changes. Bump the suffix on incompatible changes so a co-resident\n// older build doesn't read a newer-shape value.\nconst SECRETS_CACHE_KEY = Symbol.for(\"@emdash-cms/core/secrets-cache@1\");\n\ninterface SecretsCacheHolder {\n\tcache: WeakMap<Kysely<Database>, Promise<ResolvedSecrets>>;\n}\n\nfunction getSecretsCache(): WeakMap<Kysely<Database>, Promise<ResolvedSecrets>> {\n\t// eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- globalThis singleton pattern\n\tconst holder = globalThis as Record<symbol, SecretsCacheHolder | undefined>;\n\tlet entry = holder[SECRETS_CACHE_KEY];\n\tif (!entry) {\n\t\tentry = { cache: new WeakMap() };\n\t\tholder[SECRETS_CACHE_KEY] = entry;\n\t}\n\treturn entry.cache;\n}\n\n/**\n * Memoized wrapper around `resolveSecrets`. 
Use this from request-time hot\n * paths (preview verification, comment IP hashing) so they don't reread\n * env / re-query options on every request.\n *\n * The cache is keyed by `Kysely` instance, so playground / per-DO / per-test\n * databases each get their own resolution.\n */\nexport function resolveSecretsCached(db: Kysely<Database>): Promise<ResolvedSecrets> {\n\tconst cache = getSecretsCache();\n\tconst cached = cache.get(db);\n\tif (cached) return cached;\n\tconst promise = resolveSecrets({ db }).catch((error) => {\n\t\t// Don't poison the cache on transient failure; next caller retries.\n\t\tcache.delete(db);\n\t\tthrow error;\n\t});\n\tcache.set(db, promise);\n\treturn promise;\n}\n\n/**\n * Test-only helper: clear the secrets cache. Tests that mutate env between\n * cases need this so a stale resolution doesn't leak across cases.\n *\n * @internal\n */\nexport function _clearSecretsCacheForTesting(): void {\n\t// eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- globalThis singleton pattern\n\tconst holder = globalThis as Record<symbol, SecretsCacheHolder | undefined>;\n\tholder[SECRETS_CACHE_KEY] = undefined;\n}\n\n// ---------------------------------------------------------------------------\n// Internals\n// ---------------------------------------------------------------------------\n\n/**\n * Read or generate-and-persist a random base64url secret stored in the\n * options table.\n *\n * Concurrency: `setIfAbsent` is an atomic INSERT...ON CONFLICT DO NOTHING.\n * On race, the loser re-reads to converge on the winner's value.\n */\nasync function ensureGeneratedOption(\n\trepo: OptionsRepository,\n\toptionKey: string,\n): Promise<{ value: string; source: \"db\" }> {\n\tconst existing = await repo.get<string>(optionKey);\n\tif (typeof existing === \"string\" && existing.length > 0) {\n\t\treturn { value: existing, source: \"db\" };\n\t}\n\n\tconst generated = generateRandomSecret();\n\tconst inserted = await 
repo.setIfAbsent(optionKey, generated);\n\tif (inserted) {\n\t\treturn { value: generated, source: \"db\" };\n\t}\n\n\t// Lost the race — another process inserted first. Re-read to pick up\n\t// the winner. If the row is somehow still missing or empty, treat that\n\t// as a real error rather than looping.\n\tconst winner = await repo.get<string>(optionKey);\n\tif (typeof winner !== \"string\" || winner.length === 0) {\n\t\tthrow new EmDashSecretsError(\n\t\t\t`Failed to persist generated secret for \"${optionKey}\"`,\n\t\t\t\"SECRET_PERSIST_FAILED\",\n\t\t);\n\t}\n\treturn { value: winner, source: \"db\" };\n}\n\n/** Generate 32 random bytes encoded as unpadded base64url. */\nfunction generateRandomSecret(): string {\n\tconst bytes = new Uint8Array(GENERATED_SECRET_BYTES);\n\tcrypto.getRandomValues(bytes);\n\treturn encodeBase64url(bytes);\n}\n\n/** Return the first non-empty string from `values`, or `null` if all are empty. */\nfunction pickFirstNonEmpty(...values: (string | undefined)[]): string | null {\n\tfor (const value of values) {\n\t\tif (typeof value === \"string\" && value.length > 0) {\n\t\t\treturn value;\n\t\t}\n\t}\n\treturn null;\n}\n\nconst BASE64URL_CHARSET_PATTERN = /^[A-Za-z0-9_-]+$/;\n\n/**\n * Validate base64url shape and decode. Returns `null` on malformed input\n * (rather than throwing) so the caller can produce a config-specific error.\n */\nfunction decodeBase64urlStrict(input: string): Uint8Array | null {\n\t// `decodeBase64url` accepts padded input too; the env-var format is\n\t// strictly unpadded base64url, so we do a charset check first.\n\tif (!BASE64URL_CHARSET_PATTERN.test(input)) return null;\n\ttry {\n\t\treturn decodeBase64url(input);\n\t} catch {\n\t\treturn null;\n\t}\n}\n\n/**\n * Default env reader.\n *\n * Note: this is the **only** code path in core that reads both\n * `import.meta.env` and `process.env`. 
Route handlers should not — they\n * always run inside the Astro/Vite bundle where `import.meta.env` is\n * the correct source. This resolver is shared with the CLI surface (via\n * `cli/commands/secrets.ts`) which runs outside the bundle, so we\n * deliberately consult both. `import.meta.env` wins so build-time\n * substitutions are honored when present.\n *\n * The convention documented in AGENTS.md (\"import.meta.env.EMDASH_X ||\n * import.meta.env.X\") is the route-handler convention; this is the\n * shared-with-CLI exception.\n */\nfunction readDefaultEnv(): SecretsEnv {\n\t// eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- import.meta.env is loose by design\n\tconst meta = (import.meta.env ?? {}) as Record<string, string | undefined>;\n\tconst proc = typeof process !== \"undefined\" && process.env ? process.env : {};\n\n\treturn {\n\t\tEMDASH_ENCRYPTION_KEY: meta.EMDASH_ENCRYPTION_KEY ?? proc.EMDASH_ENCRYPTION_KEY,\n\t\tEMDASH_PREVIEW_SECRET: meta.EMDASH_PREVIEW_SECRET ?? proc.EMDASH_PREVIEW_SECRET,\n\t\tPREVIEW_SECRET: meta.PREVIEW_SECRET ?? proc.PREVIEW_SECRET,\n\t\tEMDASH_IP_SALT: meta.EMDASH_IP_SALT ?? proc.EMDASH_IP_SALT,\n\t\tEMDASH_AUTH_SECRET: meta.EMDASH_AUTH_SECRET ?? proc.EMDASH_AUTH_SECRET,\n\t\tAUTH_SECRET: meta.AUTH_SECRET ?? 
proc.AUTH_SECRET,\n\t};\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAsCA,MAAa,wBAAwB;;AAGrC,MAAM,6BAA6B;;;;;;AASnC,MAAM,yBAAyB,IAAI,OAClC,IAAI,sBAAsB,QARA,uBAQ4B,OAAO,CAAC,gBAAgB,2BAA2B,IACzG;;AAGD,MAAa,qBAAqB;;AAGlC,MAAa,4BAA4B;;AAGzC,MAAM,yBAAyB;;;;;;;;AAoF/B,IAAa,qBAAb,cAAwC,MAAM;CAC7C,AAAkB,OAAO;CACzB,AAAS;CAET,YAAY,SAAiB,MAAc;AAC1C,QAAM,QAAQ;AACd,OAAK,OAAO;;;;;;;;;;;;;;AAmBd,eAAsB,oBACrB,KACwC;AACxC,KAAI,CAAC,IAAK,QAAO;CAEjB,MAAM,UAAU,IACd,MAAM,IAAI,CACV,KAAK,UAAU,MAAM,MAAM,CAAC,CAC5B,QAAQ,UAAU,MAAM,SAAS,EAAE;AAErC,KAAI,QAAQ,WAAW,EAAG,QAAO;CAEjC,MAAM,SAAgC,EAAE;CACxC,MAAM,2BAAW,IAAI,KAAa;AAElC,MAAK,MAAM,SAAS,SAAS;AAC5B,MAAI,CAAC,uBAAuB,KAAK,MAAM,CACtC,OAAM,IAAI,mBACT,uDAAuD,sBAAsB,gBAAgB,2BAA2B,oEACxH,yBACA;EAGF,MAAM,OAAO,MAAM,MAAM,GAA6B;EACtD,MAAM,MAAM,sBAAsB,KAAK;AACvC,MAAI,CAAC,IACJ,OAAM,IAAI,mBACT,qDACA,yBACA;AAEF,MAAI,IAAI,WAAW,uBAClB,OAAM,IAAI,mBACT,wCAAwC,uBAAuB,cAAc,IAAI,UACjF,yBACA;AASF,MADkB,gBAAgB,IAAI,KACpB,KACjB,OAAM,IAAI,mBACT,uGACA,yBACA;EAGF,MAAM,MAAM,oBAAoB,IAAI;AACpC,MAAI,SAAS,IAAI,IAAI,CAGpB;AAED,WAAS,IAAI,IAAI;AACjB,SAAO,KAAK;GAAE;GAAK;GAAK,KAAK;GAAO,CAAC;;AAMtC,QAAO;;;;;;;;;;;;;;;;;AAkBR,eAAsB,eAAe,KAA8B;AAClE,KAAI,CAAC,uBAAuB,KAAK,IAAI,CACpC,OAAM,IAAI,mBACT,mBAAmB,sBAAsB,gBAAgB,2BAA2B,mBACpF,yBACA;CAEF,MAAM,OAAO,IAAI,MAAM,GAA6B;CACpD,MAAM,QAAQ,sBAAsB,KAAK;AACzC,KAAI,CAAC,SAAS,MAAM,WAAW,0BAA0B,gBAAgB,MAAM,KAAK,KACnF,OAAM,IAAI,mBACT,2BAA2B,uBAAuB,6BAClD,yBACA;AAEF,QAAO,oBAAoB,MAAM;;;;;;;AAQlC,SAAS,oBAAoB,KAAyB;AACrD,QAAO,mBAAmB,OAAO,IAAI,CAAC,CAAC,MAAM,GAAG,EAAE;;;;;;AAOnD,SAAgB,wBAAgC;CAC/C,MAAM,QAAQ,IAAI,WAAW,uBAAuB;AACpD,QAAO,gBAAgB,MAAM;AAC7B,QAAO,GAAG,wBAAwB,gBAAgB,MAAM;;;;;;;;;;;;;;;AAoBzD,eAAsB,eAAe,SAA0D;CAC9F,MAAM,MAAM,QAAQ,OAAO,gBAAgB;CAC3C,MAAM,OAAO,QAAQ,SAAS,IAAI,kBAAkB,QAAQ,GAAG;CAE/D,MAAM,qBAAqB,kBAAkB,IAAI,uBAAuB,IAAI,eAAe;CAC3F,MAAM,oBAAoB,kBACzB,IAAI,gBACJ,IAAI,oBACJ,IAAI,YACJ;CAED,MAAM,CAAC,eAAe,UAAU,MAAM,QAAQ,IAAI,CACjD,uBAAuB,OACpB,QAAQ,QAAQ;EAAE,OAAO;EAAoB,QAAQ;EAAgB,CAAC,GACtE,sBAAsB,MAAM,0BAA0B,EACzD,sBAAsB,
OACnB,QAAQ,QAAQ;EAAE,OAAO;EAAmB,QAAQ;EAAgB,CAAC,GACrE,sBAAsB,MAAM,mBAAmB,CAClD,CAAC;AAEF,QAAO;EACN,eAAe,cAAc;EAC7B,qBAAqB,cAAc;EACnC,QAAQ,OAAO;EACf,cAAc,OAAO;EACrB;;;;;;;;;;;;AAaF,eAAsB,+BAA+B,KAAoC;CACxF,MAAM,WAAW,OAAO,gBAAgB;AACxC,KAAI;AACH,QAAM,oBAAoB,SAAS,sBAAsB;AACzD,SAAO;UACC,OAAO;AACf,MAAI,iBAAiB,oBAAoB;AACxC,WAAQ,MACP,8CAA8C,MAAM,QAAQ,2GAG5D;AACD,UAAO;;AAER,QAAM;;;;;;;;;;;;;;AAkBR,MAAM,oBAAoB,OAAO,IAAI,mCAAmC;AAMxE,SAAS,kBAAuE;CAE/E,MAAM,SAAS;CACf,IAAI,QAAQ,OAAO;AACnB,KAAI,CAAC,OAAO;AACX,UAAQ,EAAE,uBAAO,IAAI,SAAS,EAAE;AAChC,SAAO,qBAAqB;;AAE7B,QAAO,MAAM;;;;;;;;;;AAWd,SAAgB,qBAAqB,IAAgD;CACpF,MAAM,QAAQ,iBAAiB;CAC/B,MAAM,SAAS,MAAM,IAAI,GAAG;AAC5B,KAAI,OAAQ,QAAO;CACnB,MAAM,UAAU,eAAe,EAAE,IAAI,CAAC,CAAC,OAAO,UAAU;AAEvD,QAAM,OAAO,GAAG;AAChB,QAAM;GACL;AACF,OAAM,IAAI,IAAI,QAAQ;AACtB,QAAO;;;;;;;;;AA0BR,eAAe,sBACd,MACA,WAC2C;CAC3C,MAAM,WAAW,MAAM,KAAK,IAAY,UAAU;AAClD,KAAI,OAAO,aAAa,YAAY,SAAS,SAAS,EACrD,QAAO;EAAE,OAAO;EAAU,QAAQ;EAAM;CAGzC,MAAM,YAAY,sBAAsB;AAExC,KADiB,MAAM,KAAK,YAAY,WAAW,UAAU,CAE5D,QAAO;EAAE,OAAO;EAAW,QAAQ;EAAM;CAM1C,MAAM,SAAS,MAAM,KAAK,IAAY,UAAU;AAChD,KAAI,OAAO,WAAW,YAAY,OAAO,WAAW,EACnD,OAAM,IAAI,mBACT,2CAA2C,UAAU,IACrD,wBACA;AAEF,QAAO;EAAE,OAAO;EAAQ,QAAQ;EAAM;;;AAIvC,SAAS,uBAA+B;CACvC,MAAM,QAAQ,IAAI,WAAW,uBAAuB;AACpD,QAAO,gBAAgB,MAAM;AAC7B,QAAO,gBAAgB,MAAM;;;AAI9B,SAAS,kBAAkB,GAAG,QAA+C;AAC5E,MAAK,MAAM,SAAS,OACnB,KAAI,OAAO,UAAU,YAAY,MAAM,SAAS,EAC/C,QAAO;AAGT,QAAO;;AAGR,MAAM,4BAA4B;;;;;AAMlC,SAAS,sBAAsB,OAAkC;AAGhE,KAAI,CAAC,0BAA0B,KAAK,MAAM,CAAE,QAAO;AACnD,KAAI;AACH,SAAO,gBAAgB,MAAM;SACtB;AACP,SAAO;;;;;;;;;;;;;;;;;;AAmBT,SAAS,iBAA6B;CAErC,MAAM,OAAQ,OAAO,KAAK,OAAO,EAAE;CACnC,MAAM,OAAO,OAAO,YAAY,eAAe,QAAQ,MAAM,QAAQ,MAAM,EAAE;AAE7E,QAAO;EACN,uBAAuB,KAAK,yBAAyB,KAAK;EAC1D,uBAAuB,KAAK,yBAAyB,KAAK;EAC1D,gBAAgB,KAAK,kBAAkB,KAAK;EAC5C,gBAAgB,KAAK,kBAAkB,KAAK;EAC5C,oBAAoB,KAAK,sBAAsB,KAAK;EACpD,aAAa,KAAK,eAAe,KAAK;EACtC"}
|
package/dist/seed/index.d.mts
CHANGED
|
@@ -1,3 +1,3 @@
|
|
|
1
|
-
import "../types-
|
|
2
|
-
import { _ as SeedTaxonomyTerm, a as applySeed, b as ValidationResult, c as SeedCollection, d as SeedFile, f as SeedMenu, g as SeedTaxonomy, h as SeedSection, i as defaultSeed, l as SeedContentEntry, m as SeedRedirect, n as loadSeed, o as SeedApplyOptions, p as SeedMenuItem, r as loadUserSeed, s as SeedApplyResult, t as validateSeed, u as SeedField, v as SeedWidget, y as SeedWidgetArea } from "../validate-
|
|
1
|
+
import "../types-CrtWgIvl.mjs";
|
|
2
|
+
import { _ as SeedTaxonomyTerm, a as applySeed, b as ValidationResult, c as SeedCollection, d as SeedFile, f as SeedMenu, g as SeedTaxonomy, h as SeedSection, i as defaultSeed, l as SeedContentEntry, m as SeedRedirect, n as loadSeed, o as SeedApplyOptions, p as SeedMenuItem, r as loadUserSeed, s as SeedApplyResult, t as validateSeed, u as SeedField, v as SeedWidget, y as SeedWidgetArea } from "../validate-BfQh_C_y.mjs";
|
|
3
3
|
export { type SeedApplyOptions, type SeedApplyResult, type SeedCollection, type SeedContentEntry, type SeedField, type SeedFile, type SeedMenu, type SeedMenuItem, type SeedRedirect, type SeedSection, type SeedTaxonomy, type SeedTaxonomyTerm, type SeedWidget, type SeedWidgetArea, type ValidationResult, applySeed, defaultSeed, loadSeed, loadUserSeed, validateSeed };
|
package/dist/seed/index.mjs
CHANGED
|
@@ -1,16 +1,17 @@
|
|
|
1
|
-
import "../dialect-helpers-
|
|
2
|
-
import "../content-
|
|
3
|
-
import "../base64-
|
|
4
|
-
import "../types-
|
|
5
|
-
import "../media-
|
|
6
|
-
import { t as applySeed } from "../apply-
|
|
7
|
-
import "../
|
|
8
|
-
import "../
|
|
9
|
-
import "../
|
|
10
|
-
import "../
|
|
11
|
-
import "../
|
|
12
|
-
import
|
|
13
|
-
import { t as
|
|
14
|
-
import {
|
|
1
|
+
import "../dialect-helpers-BKCvISIQ.mjs";
|
|
2
|
+
import "../content-8lOYF0pr.mjs";
|
|
3
|
+
import "../base64-BRICGH2l.mjs";
|
|
4
|
+
import "../types-CRxNbK-Z.mjs";
|
|
5
|
+
import "../media-BW32b4gi.mjs";
|
|
6
|
+
import { t as applySeed } from "../apply-BzltprvY.mjs";
|
|
7
|
+
import "../options-BVp3UsTS.mjs";
|
|
8
|
+
import "../redirect-BhUBKRc1.mjs";
|
|
9
|
+
import "../byline-BSaNL1w7.mjs";
|
|
10
|
+
import "../registry-Dw70ChxB.mjs";
|
|
11
|
+
import "../loader-CKLbBnhK.mjs";
|
|
12
|
+
import "../request-cache-B-bmkipQ.mjs";
|
|
13
|
+
import { t as validateSeed } from "../validate-Baqf0slj.mjs";
|
|
14
|
+
import { t as defaultSeed } from "../default-D8ksjWhO.mjs";
|
|
15
|
+
import { n as loadUserSeed, t as loadSeed } from "../load-DDqMMvZL.mjs";
|
|
15
16
|
|
|
16
17
|
export { applySeed, defaultSeed, loadSeed, loadUserSeed, validateSeed };
|
package/dist/seo/index.d.mts
CHANGED
package/dist/storage/local.d.mts
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import { a as ListOptions, d as Storage, l as SignedUploadOptions, n as DownloadResult, o as ListResult, p as UploadResult, s as LocalStorageConfig, u as SignedUploadUrl } from "../types-
|
|
1
|
+
import { a as ListOptions, d as Storage, l as SignedUploadOptions, n as DownloadResult, o as ListResult, p as UploadResult, s as LocalStorageConfig, u as SignedUploadUrl } from "../types-M78DQ1lx.mjs";
|
|
2
2
|
|
|
3
3
|
//#region src/storage/local.d.ts
|
|
4
4
|
/**
|
package/dist/storage/local.mjs
CHANGED
package/dist/storage/s3.d.mts
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import { a as ListOptions, c as S3StorageConfig, d as Storage, l as SignedUploadOptions, n as DownloadResult, o as ListResult, p as UploadResult, u as SignedUploadUrl } from "../types-
|
|
1
|
+
import { a as ListOptions, c as S3StorageConfig, d as Storage, l as SignedUploadOptions, n as DownloadResult, o as ListResult, p as UploadResult, u as SignedUploadUrl } from "../types-M78DQ1lx.mjs";
|
|
2
2
|
|
|
3
3
|
//#region src/storage/s3.d.ts
|
|
4
4
|
/**
|
package/dist/storage/s3.mjs
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import { t as EmDashStorageError } from "../types-
|
|
1
|
+
import { t as EmDashStorageError } from "../types-CDbKp7ND.mjs";
|
|
2
2
|
import { z } from "zod";
|
|
3
3
|
import { DeleteObjectCommand, GetObjectCommand, HeadObjectCommand, ListObjectsV2Command, PutObjectCommand, S3Client } from "@aws-sdk/client-s3";
|
|
4
4
|
import { getSignedUrl } from "@aws-sdk/s3-request-presigner";
|
|
@@ -1,8 +1,8 @@
|
|
|
1
|
-
import {
|
|
2
|
-
import { n as chunks, t as SQL_BATCH_SIZE } from "./chunks-
|
|
3
|
-
import { t as isMissingTableError } from "./db-errors-
|
|
4
|
-
import {
|
|
5
|
-
import { n as requestCached, r as setRequestCacheEntry } from "./request-cache-
|
|
1
|
+
import { i as __exportAll } from "./runner-C7ADox5q.mjs";
|
|
2
|
+
import { n as chunks, t as SQL_BATCH_SIZE } from "./chunks-NBQVDOci.mjs";
|
|
3
|
+
import { t as isMissingTableError } from "./db-errors-WRezodiz.mjs";
|
|
4
|
+
import { r as getDb } from "./loader-CKLbBnhK.mjs";
|
|
5
|
+
import { n as requestCached, r as setRequestCacheEntry, t as peekRequestCache } from "./request-cache-B-bmkipQ.mjs";
|
|
6
6
|
|
|
7
7
|
//#region src/taxonomies/index.ts
|
|
8
8
|
/**
|
|
@@ -48,8 +48,15 @@ async function getTaxonomyDefs() {
|
|
|
48
48
|
}
|
|
49
49
|
/**
|
|
50
50
|
* Get a single taxonomy definition by name
|
|
51
|
+
*
|
|
52
|
+
* If `getTaxonomyDefs()` has already loaded the full list in this request
|
|
53
|
+
* (which happens during entry-term hydration on every page that renders a
|
|
54
|
+
* collection), find the matching def in memory rather than running a
|
|
55
|
+
* second `WHERE name=?` query against `_emdash_taxonomy_defs`.
|
|
51
56
|
*/
|
|
52
57
|
async function getTaxonomyDef(name) {
|
|
58
|
+
const allDefs = peekRequestCache("taxonomy-defs:all");
|
|
59
|
+
if (allDefs) return (await allDefs).find((d) => d.name === name) ?? null;
|
|
53
60
|
return requestCached(`taxonomy-def:${name}`, async () => {
|
|
54
61
|
const row = await (await getDb()).selectFrom("_emdash_taxonomy_defs").selectAll().where("name", "=", name).executeTakeFirst();
|
|
55
62
|
if (!row) return null;
|
|
@@ -278,7 +285,7 @@ function primeEntryTermsCache(collection, entryId, byTaxonomy, applicableTaxonom
|
|
|
278
285
|
* Get entries by term (wraps getEmDashCollection)
|
|
279
286
|
*/
|
|
280
287
|
async function getEntriesByTerm(collection, taxonomyName, termSlug) {
|
|
281
|
-
const { getEmDashCollection } = await import("./query-
|
|
288
|
+
const { getEmDashCollection } = await import("./query-Cg9ZKRQ0.mjs").then((n) => n.o);
|
|
282
289
|
const { entries } = await getEmDashCollection(collection, { where: { [taxonomyName]: termSlug } });
|
|
283
290
|
return entries;
|
|
284
291
|
}
|
|
@@ -305,4 +312,4 @@ function buildTree(flatTerms, counts) {
|
|
|
305
312
|
|
|
306
313
|
//#endregion
|
|
307
314
|
export { getTaxonomyDefs as a, getTermsForEntries as c, getTaxonomyDef as i, invalidateTermCache as l, getEntriesByTerm as n, getTaxonomyTerms as o, getEntryTerms as r, getTerm as s, getAllTermsForEntries as t, taxonomies_exports as u };
|
|
308
|
-
//# sourceMappingURL=taxonomies-
|
|
315
|
+
//# sourceMappingURL=taxonomies-ZlRtD6AG.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"taxonomies-ZlRtD6AG.mjs","names":[],"sources":["../src/taxonomies/index.ts"],"sourcesContent":["/**\n * Runtime API for taxonomies\n *\n * Provides functions to query taxonomy definitions and terms.\n */\n\nimport { getDb } from \"../loader.js\";\nimport { peekRequestCache, requestCached, setRequestCacheEntry } from \"../request-cache.js\";\nimport { chunks, SQL_BATCH_SIZE } from \"../utils/chunks.js\";\nimport { isMissingTableError } from \"../utils/db-errors.js\";\nimport type { TaxonomyDef, TaxonomyTerm, TaxonomyTermRow } from \"./types.js\";\n\n/**\n * No-op — kept for API compatibility.\n *\n * Used to invalidate a worker-lifetime \"has any term assignments?\" probe.\n * That probe added a query on every cold isolate to save one query on\n * sites with zero term assignments (i.e. the wrong tradeoff), so we\n * dropped it. The batch term join below returns an empty map for empty\n * sites at the same cost as the probe, without the pre-check.\n */\nexport function invalidateTermCache(): void {\n\t// Intentionally empty.\n}\n\n/**\n * Get all taxonomy definitions\n */\nexport async function getTaxonomyDefs(): Promise<TaxonomyDef[]> {\n\treturn requestCached(\"taxonomy-defs:all\", async () => {\n\t\tconst db = await getDb();\n\n\t\tconst rows = await db.selectFrom(\"_emdash_taxonomy_defs\").selectAll().execute();\n\n\t\treturn rows.map((row) => ({\n\t\t\tid: row.id,\n\t\t\tname: row.name,\n\t\t\tlabel: row.label,\n\t\t\tlabelSingular: row.label_singular ?? undefined,\n\t\t\thierarchical: row.hierarchical === 1,\n\t\t\tcollections: row.collections ? 
JSON.parse(row.collections) : [],\n\t\t}));\n\t});\n}\n\n/**\n * Get a single taxonomy definition by name\n *\n * If `getTaxonomyDefs()` has already loaded the full list in this request\n * (which happens during entry-term hydration on every page that renders a\n * collection), find the matching def in memory rather than running a\n * second `WHERE name=?` query against `_emdash_taxonomy_defs`.\n */\nexport async function getTaxonomyDef(name: string): Promise<TaxonomyDef | null> {\n\tconst allDefs = peekRequestCache<TaxonomyDef[]>(\"taxonomy-defs:all\");\n\tif (allDefs) {\n\t\treturn (await allDefs).find((d) => d.name === name) ?? null;\n\t}\n\n\treturn requestCached(`taxonomy-def:${name}`, async () => {\n\t\tconst db = await getDb();\n\n\t\tconst row = await db\n\t\t\t.selectFrom(\"_emdash_taxonomy_defs\")\n\t\t\t.selectAll()\n\t\t\t.where(\"name\", \"=\", name)\n\t\t\t.executeTakeFirst();\n\n\t\tif (!row) return null;\n\n\t\treturn {\n\t\t\tid: row.id,\n\t\t\tname: row.name,\n\t\t\tlabel: row.label,\n\t\t\tlabelSingular: row.label_singular ?? undefined,\n\t\t\thierarchical: row.hierarchical === 1,\n\t\t\tcollections: row.collections ? 
JSON.parse(row.collections) : [],\n\t\t};\n\t});\n}\n\n/**\n * Get all terms for a taxonomy (as tree for hierarchical, flat for tags)\n */\nexport async function getTaxonomyTerms(taxonomyName: string): Promise<TaxonomyTerm[]> {\n\treturn requestCached(`taxonomy-terms:${taxonomyName}`, async () => {\n\t\tconst db = await getDb();\n\n\t\t// Get taxonomy definition to check if hierarchical\n\t\tconst def = await getTaxonomyDef(taxonomyName);\n\t\tif (!def) return [];\n\n\t\t// Get all terms for this taxonomy\n\t\tconst rows = await db\n\t\t\t.selectFrom(\"taxonomies\")\n\t\t\t.selectAll()\n\t\t\t.where(\"name\", \"=\", taxonomyName)\n\t\t\t.orderBy(\"label\", \"asc\")\n\t\t\t.execute();\n\n\t\t// Count entries for each term\n\t\tconst countsResult = await db\n\t\t\t.selectFrom(\"content_taxonomies\")\n\t\t\t.select([\"taxonomy_id\"])\n\t\t\t.select((eb) => eb.fn.count<number>(\"entry_id\").as(\"count\"))\n\t\t\t.groupBy(\"taxonomy_id\")\n\t\t\t.execute();\n\n\t\tconst counts = new Map<string, number>();\n\t\tfor (const row of countsResult) {\n\t\t\tcounts.set(row.taxonomy_id, row.count);\n\t\t}\n\n\t\tconst flatTerms: TaxonomyTermRow[] = rows.map((row) => ({\n\t\t\tid: row.id,\n\t\t\tname: row.name,\n\t\t\tslug: row.slug,\n\t\t\tlabel: row.label,\n\t\t\tparent_id: row.parent_id,\n\t\t\tdata: row.data,\n\t\t}));\n\n\t\t// If hierarchical, build tree. Otherwise return flat\n\t\tif (def.hierarchical) {\n\t\t\treturn buildTree(flatTerms, counts);\n\t\t}\n\n\t\treturn flatTerms.map((term) => ({\n\t\t\tid: term.id,\n\t\t\tname: term.name,\n\t\t\tslug: term.slug,\n\t\t\tlabel: term.label,\n\t\t\tchildren: [],\n\t\t\tcount: counts.get(term.id) ?? 
0,\n\t\t}));\n\t});\n}\n\n/**\n * Get a single term by taxonomy and slug\n */\nexport async function getTerm(taxonomyName: string, slug: string): Promise<TaxonomyTerm | null> {\n\tconst db = await getDb();\n\n\tconst row = await db\n\t\t.selectFrom(\"taxonomies\")\n\t\t.selectAll()\n\t\t.where(\"name\", \"=\", taxonomyName)\n\t\t.where(\"slug\", \"=\", slug)\n\t\t.executeTakeFirst();\n\n\tif (!row) return null;\n\n\t// Get entry count\n\tconst countResult = await db\n\t\t.selectFrom(\"content_taxonomies\")\n\t\t.select((eb) => eb.fn.count<number>(\"entry_id\").as(\"count\"))\n\t\t.where(\"taxonomy_id\", \"=\", row.id)\n\t\t.executeTakeFirst();\n\n\tconst count = countResult?.count ?? 0;\n\n\t// Get children if hierarchical\n\tconst childRows = await db\n\t\t.selectFrom(\"taxonomies\")\n\t\t.selectAll()\n\t\t.where(\"parent_id\", \"=\", row.id)\n\t\t.orderBy(\"label\", \"asc\")\n\t\t.execute();\n\n\tconst children = childRows.map((child) => ({\n\t\tid: child.id,\n\t\tname: child.name,\n\t\tslug: child.slug,\n\t\tlabel: child.label,\n\t\tparentId: child.parent_id ?? undefined,\n\t\tchildren: [],\n\t}));\n\n\treturn {\n\t\tid: row.id,\n\t\tname: row.name,\n\t\tslug: row.slug,\n\t\tlabel: row.label,\n\t\tparentId: row.parent_id ?? undefined,\n\t\tdescription: row.data ? JSON.parse(row.data).description : undefined,\n\t\tchildren,\n\t\tcount,\n\t};\n}\n\n/**\n * Get terms assigned to an entry\n */\nexport function getEntryTerms(\n\tcollection: string,\n\tentryId: string,\n\ttaxonomyName?: string,\n): Promise<TaxonomyTerm[]> {\n\treturn requestCached(`terms:${collection}:${entryId}:${taxonomyName ?? 
\"*\"}`, async () => {\n\t\tconst db = await getDb();\n\n\t\tlet query = db\n\t\t\t.selectFrom(\"content_taxonomies\")\n\t\t\t.innerJoin(\"taxonomies\", \"taxonomies.id\", \"content_taxonomies.taxonomy_id\")\n\t\t\t.selectAll(\"taxonomies\")\n\t\t\t.where(\"content_taxonomies.collection\", \"=\", collection)\n\t\t\t.where(\"content_taxonomies.entry_id\", \"=\", entryId);\n\n\t\tif (taxonomyName) {\n\t\t\tquery = query.where(\"taxonomies.name\", \"=\", taxonomyName);\n\t\t}\n\n\t\tconst rows = await query.execute();\n\n\t\treturn rows.map((row) => ({\n\t\t\tid: row.id,\n\t\t\tname: row.name,\n\t\t\tslug: row.slug,\n\t\t\tlabel: row.label,\n\t\t\tparentId: row.parent_id ?? undefined,\n\t\t\tchildren: [],\n\t\t}));\n\t});\n}\n\n/**\n * Get terms for multiple entries in a single query (batched API)\n *\n * This is more efficient than calling getEntryTerms for each entry\n * when you need terms for a list of entries.\n *\n * @param collection - The collection type (e.g., \"posts\")\n * @param entryIds - Array of entry IDs\n * @param taxonomyName - The taxonomy name (e.g., \"categories\")\n * @returns Map from entry ID to array of terms\n */\nexport async function getTermsForEntries(\n\tcollection: string,\n\tentryIds: string[],\n\ttaxonomyName: string,\n): Promise<Map<string, TaxonomyTerm[]>> {\n\tconst result = new Map<string, TaxonomyTerm[]>();\n\n\t// Initialize all entry IDs with empty arrays so callers can always\n\t// expect the key to be present.\n\tconst uniqueIds = [...new Set(entryIds)];\n\tfor (const id of uniqueIds) {\n\t\tresult.set(id, []);\n\t}\n\n\tif (uniqueIds.length === 0) {\n\t\treturn result;\n\t}\n\n\tconst db = await getDb();\n\n\t// Chunk the IN clause so we stay below D1's ~100 bound-parameter limit\n\t// (and equivalent limits on other dialects). 
Matches getContentBylinesMany.\n\t//\n\t// Sites with no term assignments get back empty rows for one query —\n\t// the previous \"has any term assignments\" probe spent a round-trip on\n\t// every request to save that single query on empty sites, which is\n\t// backwards. Pre-migration databases (content_taxonomies missing) fall\n\t// through to the `isMissingTableError` catch and return empties.\n\tfor (const chunk of chunks(uniqueIds, SQL_BATCH_SIZE)) {\n\t\tlet rows;\n\t\ttry {\n\t\t\trows = await db\n\t\t\t\t.selectFrom(\"content_taxonomies\")\n\t\t\t\t.innerJoin(\"taxonomies\", \"taxonomies.id\", \"content_taxonomies.taxonomy_id\")\n\t\t\t\t.select([\n\t\t\t\t\t\"content_taxonomies.entry_id\",\n\t\t\t\t\t\"taxonomies.id\",\n\t\t\t\t\t\"taxonomies.name\",\n\t\t\t\t\t\"taxonomies.slug\",\n\t\t\t\t\t\"taxonomies.label\",\n\t\t\t\t\t\"taxonomies.parent_id\",\n\t\t\t\t])\n\t\t\t\t.where(\"content_taxonomies.collection\", \"=\", collection)\n\t\t\t\t.where(\"content_taxonomies.entry_id\", \"in\", chunk)\n\t\t\t\t.where(\"taxonomies.name\", \"=\", taxonomyName)\n\t\t\t\t.execute();\n\t\t} catch (error) {\n\t\t\tif (isMissingTableError(error)) return result;\n\t\t\tthrow error;\n\t\t}\n\n\t\tfor (const row of rows) {\n\t\t\tconst entryId = row.entry_id;\n\t\t\tconst term: TaxonomyTerm = {\n\t\t\t\tid: row.id,\n\t\t\t\tname: row.name,\n\t\t\t\tslug: row.slug,\n\t\t\t\tlabel: row.label,\n\t\t\t\tparentId: row.parent_id ?? undefined,\n\t\t\t\tchildren: [],\n\t\t\t};\n\n\t\t\tconst terms = result.get(entryId);\n\t\t\tif (terms) {\n\t\t\t\tterms.push(term);\n\t\t\t}\n\t\t}\n\t}\n\n\treturn result;\n}\n\n/**\n * Batch-fetch terms for multiple entries across ALL taxonomies in a single query.\n *\n * Returns a Map keyed by entry ID, where each value is a Record keyed by\n * taxonomy name with the matching terms as an array. 
Used by\n * getEmDashCollection to eagerly hydrate `entry.data.terms` and avoid\n * the N+1 pattern that callers hit when they loop and call getEntryTerms.\n *\n * Pre-migration databases (content_taxonomies missing) return an empty\n * Map — the join falls through to the `isMissingTableError` branch.\n */\nexport async function getAllTermsForEntries(\n\tcollection: string,\n\tentryIds: string[],\n): Promise<Map<string, Record<string, TaxonomyTerm[]>>> {\n\tconst result = new Map<string, Record<string, TaxonomyTerm[]>>();\n\n\t// Initialize unique entry IDs with empty objects so callers can always\n\t// expect the key to be present. Deduping also reduces wasted bound\n\t// parameters when a caller accidentally passes duplicates.\n\tconst uniqueIds = [...new Set(entryIds)];\n\tfor (const id of uniqueIds) {\n\t\tresult.set(id, {});\n\t}\n\n\tif (uniqueIds.length === 0) {\n\t\treturn result;\n\t}\n\n\tconst db = await getDb();\n\n\t// Look up which taxonomies apply to this collection. Used below to\n\t// seed empty arrays for taxonomies the entry has no terms in — so\n\t// callers (including the pre-populated getEntryTerms cache) get a\n\t// deterministic `[]` back rather than a cache miss that triggers a DB\n\t// round-trip just to confirm \"no terms\".\n\tconst applicableTaxonomyNames = await getCollectionTaxonomyNames(collection);\n\n\t// Chunk the IN clause to stay below D1's ~100 bound-parameter limit\n\t// (and equivalent limits on other dialects). Matches getContentBylinesMany.\n\t//\n\t// Previously we did a separate \"has any assignments\" probe to skip the\n\t// join on empty sites. That traded one query per request for a query\n\t// saved only on empty sites — backwards. 
Now the join runs directly\n\t// (returning zero rows cheaply) and pre-migration databases are caught\n\t// by the `isMissingTableError` branch below.\n\tfor (const chunk of chunks(uniqueIds, SQL_BATCH_SIZE)) {\n\t\tlet rows;\n\t\ttry {\n\t\t\trows = await db\n\t\t\t\t.selectFrom(\"content_taxonomies\")\n\t\t\t\t.innerJoin(\"taxonomies\", \"taxonomies.id\", \"content_taxonomies.taxonomy_id\")\n\t\t\t\t.select([\n\t\t\t\t\t\"content_taxonomies.entry_id\",\n\t\t\t\t\t\"taxonomies.id\",\n\t\t\t\t\t\"taxonomies.name\",\n\t\t\t\t\t\"taxonomies.slug\",\n\t\t\t\t\t\"taxonomies.label\",\n\t\t\t\t\t\"taxonomies.parent_id\",\n\t\t\t\t])\n\t\t\t\t.where(\"content_taxonomies.collection\", \"=\", collection)\n\t\t\t\t.where(\"content_taxonomies.entry_id\", \"in\", chunk)\n\t\t\t\t.orderBy(\"taxonomies.label\", \"asc\")\n\t\t\t\t.execute();\n\t\t} catch (error) {\n\t\t\tif (isMissingTableError(error)) {\n\t\t\t\tfor (const id of uniqueIds) {\n\t\t\t\t\tprimeEntryTermsCache(collection, id, {}, applicableTaxonomyNames);\n\t\t\t\t}\n\t\t\t\treturn result;\n\t\t\t}\n\t\t\tthrow error;\n\t\t}\n\n\t\tfor (const row of rows) {\n\t\t\tconst entryId = row.entry_id;\n\t\t\tconst term: TaxonomyTerm = {\n\t\t\t\tid: row.id,\n\t\t\t\tname: row.name,\n\t\t\t\tslug: row.slug,\n\t\t\t\tlabel: row.label,\n\t\t\t\tparentId: row.parent_id ?? undefined,\n\t\t\t\tchildren: [],\n\t\t\t};\n\n\t\t\tconst byTaxonomy = result.get(entryId);\n\t\t\tif (!byTaxonomy) continue;\n\t\t\tconst existing = byTaxonomy[row.name];\n\t\t\tif (existing) {\n\t\t\t\texisting.push(term);\n\t\t\t} else {\n\t\t\t\tbyTaxonomy[row.name] = [term];\n\t\t\t}\n\t\t}\n\t}\n\n\t// Prime the request-scoped cache so legacy callers of getEntryTerms\n\t// (which still work per-entry) hit the in-memory cache instead of\n\t// re-querying. 
This is what gives us the N+1 win in existing templates\n\t// without requiring them to be rewritten.\n\tfor (const [entryId, byTaxonomy] of result) {\n\t\tprimeEntryTermsCache(collection, entryId, byTaxonomy, applicableTaxonomyNames);\n\t}\n\n\treturn result;\n}\n\n/**\n * Return the list of taxonomy names applicable to a collection, request-\n * cached so a page render only pays for it once.\n *\n * Returns an empty list when taxonomies haven't been defined yet.\n */\nasync function getCollectionTaxonomyNames(collection: string): Promise<string[]> {\n\ttry {\n\t\tconst defs = await getTaxonomyDefs();\n\t\treturn defs.filter((d) => d.collections.includes(collection)).map((d) => d.name);\n\t} catch (error) {\n\t\tif (isMissingTableError(error)) return [];\n\t\tthrow error;\n\t}\n}\n\n/**\n * Pre-populate the request-cache for every getEntryTerms call-shape that\n * could hit this entry:\n *\n * getEntryTerms(collection, entryId) -> key `terms:C:E:*`\n * getEntryTerms(collection, entryId, \"tag\") -> key `terms:C:E:tag`\n * getEntryTerms(collection, entryId, \"category\") -> key `terms:C:E:category`\n * ...one per taxonomy that applies to this collection\n *\n * Taxonomies with no rows on this entry are seeded with `[]` so legacy\n * callers short-circuit to the cached empty array instead of re-querying.\n */\nfunction primeEntryTermsCache(\n\tcollection: string,\n\tentryId: string,\n\tbyTaxonomy: Record<string, TaxonomyTerm[]>,\n\tapplicableTaxonomyNames: string[],\n): void {\n\t// Seed every applicable taxonomy with at least [] so\n\t// getEntryTerms(collection, id, \"tag\") doesn't miss the cache when an\n\t// entry has no tags.\n\tfor (const name of applicableTaxonomyNames) {\n\t\tsetRequestCacheEntry(`terms:${collection}:${entryId}:${name}`, byTaxonomy[name] ?? []);\n\t}\n\t// Also seed individual names that show up in data but aren't listed\n\t// as applicable (e.g. 
taxonomy reassigned to a different collection\n\t// since the terms were written).\n\tfor (const [name, terms] of Object.entries(byTaxonomy)) {\n\t\tsetRequestCacheEntry(`terms:${collection}:${entryId}:${name}`, terms);\n\t}\n\t// Flattened `*` view — all terms across all taxonomies in one array.\n\tconst allTerms = Object.values(byTaxonomy).flat();\n\tsetRequestCacheEntry(`terms:${collection}:${entryId}:*`, allTerms);\n}\n\n/**\n * Get entries by term (wraps getEmDashCollection)\n */\nexport async function getEntriesByTerm(\n\tcollection: string,\n\ttaxonomyName: string,\n\ttermSlug: string,\n): Promise<Array<{ id: string; data: Record<string, unknown> }>> {\n\tconst { getEmDashCollection } = await import(\"../query.js\");\n\n\t// Build options as the expected type — getEmDashCollection accepts\n\t// a generic options object with `where` for filtering by taxonomy\n\tconst options: Record<string, unknown> = {\n\t\twhere: { [taxonomyName]: termSlug },\n\t};\n\tconst { entries } = await getEmDashCollection(collection, options);\n\n\treturn entries;\n}\n\n/**\n * Build tree structure from flat terms\n */\nfunction buildTree(flatTerms: TaxonomyTermRow[], counts: Map<string, number>): TaxonomyTerm[] {\n\tconst map = new Map<string, TaxonomyTerm>();\n\tconst roots: TaxonomyTerm[] = [];\n\n\t// First pass: create nodes\n\tfor (const term of flatTerms) {\n\t\tmap.set(term.id, {\n\t\t\tid: term.id,\n\t\t\tname: term.name,\n\t\t\tslug: term.slug,\n\t\t\tlabel: term.label,\n\t\t\tparentId: term.parent_id ?? undefined,\n\t\t\tdescription: term.data ? JSON.parse(term.data).description : undefined,\n\t\t\tchildren: [],\n\t\t\tcount: counts.get(term.id) ?? 
0,\n\t\t});\n\t}\n\n\t// Second pass: build tree\n\tfor (const term of map.values()) {\n\t\tif (term.parentId && map.has(term.parentId)) {\n\t\t\tmap.get(term.parentId)!.children.push(term);\n\t\t} else {\n\t\t\troots.push(term);\n\t\t}\n\t}\n\n\treturn roots;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAqBA,SAAgB,sBAA4B;;;;AAO5C,eAAsB,kBAA0C;AAC/D,QAAO,cAAc,qBAAqB,YAAY;AAKrD,UAFa,OAFF,MAAM,OAAO,EAEF,WAAW,wBAAwB,CAAC,WAAW,CAAC,SAAS,EAEnE,KAAK,SAAS;GACzB,IAAI,IAAI;GACR,MAAM,IAAI;GACV,OAAO,IAAI;GACX,eAAe,IAAI,kBAAkB;GACrC,cAAc,IAAI,iBAAiB;GACnC,aAAa,IAAI,cAAc,KAAK,MAAM,IAAI,YAAY,GAAG,EAAE;GAC/D,EAAE;GACF;;;;;;;;;;AAWH,eAAsB,eAAe,MAA2C;CAC/E,MAAM,UAAU,iBAAgC,oBAAoB;AACpE,KAAI,QACH,SAAQ,MAAM,SAAS,MAAM,MAAM,EAAE,SAAS,KAAK,IAAI;AAGxD,QAAO,cAAc,gBAAgB,QAAQ,YAAY;EAGxD,MAAM,MAAM,OAFD,MAAM,OAAO,EAGtB,WAAW,wBAAwB,CACnC,WAAW,CACX,MAAM,QAAQ,KAAK,KAAK,CACxB,kBAAkB;AAEpB,MAAI,CAAC,IAAK,QAAO;AAEjB,SAAO;GACN,IAAI,IAAI;GACR,MAAM,IAAI;GACV,OAAO,IAAI;GACX,eAAe,IAAI,kBAAkB;GACrC,cAAc,IAAI,iBAAiB;GACnC,aAAa,IAAI,cAAc,KAAK,MAAM,IAAI,YAAY,GAAG,EAAE;GAC/D;GACA;;;;;AAMH,eAAsB,iBAAiB,cAA+C;AACrF,QAAO,cAAc,kBAAkB,gBAAgB,YAAY;EAClE,MAAM,KAAK,MAAM,OAAO;EAGxB,MAAM,MAAM,MAAM,eAAe,aAAa;AAC9C,MAAI,CAAC,IAAK,QAAO,EAAE;EAGnB,MAAM,OAAO,MAAM,GACjB,WAAW,aAAa,CACxB,WAAW,CACX,MAAM,QAAQ,KAAK,aAAa,CAChC,QAAQ,SAAS,MAAM,CACvB,SAAS;EAGX,MAAM,eAAe,MAAM,GACzB,WAAW,qBAAqB,CAChC,OAAO,CAAC,cAAc,CAAC,CACvB,QAAQ,OAAO,GAAG,GAAG,MAAc,WAAW,CAAC,GAAG,QAAQ,CAAC,CAC3D,QAAQ,cAAc,CACtB,SAAS;EAEX,MAAM,yBAAS,IAAI,KAAqB;AACxC,OAAK,MAAM,OAAO,aACjB,QAAO,IAAI,IAAI,aAAa,IAAI,MAAM;EAGvC,MAAM,YAA+B,KAAK,KAAK,SAAS;GACvD,IAAI,IAAI;GACR,MAAM,IAAI;GACV,MAAM,IAAI;GACV,OAAO,IAAI;GACX,WAAW,IAAI;GACf,MAAM,IAAI;GACV,EAAE;AAGH,MAAI,IAAI,aACP,QAAO,UAAU,WAAW,OAAO;AAGpC,SAAO,UAAU,KAAK,UAAU;GAC/B,IAAI,KAAK;GACT,MAAM,KAAK;GACX,MAAM,KAAK;GACX,OAAO,KAAK;GACZ,UAAU,EAAE;GACZ,OAAO,OAAO,IAAI,KAAK,GAAG,IAAI;GAC9B,EAAE;GACF;;;;;AAMH,eAAsB,QAAQ,cAAsB,MAA4C;CAC/F,MAAM,KAAK,MAAM,OAAO;CAExB,MAAM,MAAM,MAAM,GAChB,WAAW,aAAa,CACxB,WAAW,CACX,MAAM,QAAQ,KAAK,a
AAa,CAChC,MAAM,QAAQ,KAAK,KAAK,CACxB,kBAAkB;AAEpB,KAAI,CAAC,IAAK,QAAO;CASjB,MAAM,SANc,MAAM,GACxB,WAAW,qBAAqB,CAChC,QAAQ,OAAO,GAAG,GAAG,MAAc,WAAW,CAAC,GAAG,QAAQ,CAAC,CAC3D,MAAM,eAAe,KAAK,IAAI,GAAG,CACjC,kBAAkB,GAEO,SAAS;CAUpC,MAAM,YAPY,MAAM,GACtB,WAAW,aAAa,CACxB,WAAW,CACX,MAAM,aAAa,KAAK,IAAI,GAAG,CAC/B,QAAQ,SAAS,MAAM,CACvB,SAAS,EAEgB,KAAK,WAAW;EAC1C,IAAI,MAAM;EACV,MAAM,MAAM;EACZ,MAAM,MAAM;EACZ,OAAO,MAAM;EACb,UAAU,MAAM,aAAa;EAC7B,UAAU,EAAE;EACZ,EAAE;AAEH,QAAO;EACN,IAAI,IAAI;EACR,MAAM,IAAI;EACV,MAAM,IAAI;EACV,OAAO,IAAI;EACX,UAAU,IAAI,aAAa;EAC3B,aAAa,IAAI,OAAO,KAAK,MAAM,IAAI,KAAK,CAAC,cAAc;EAC3D;EACA;EACA;;;;;AAMF,SAAgB,cACf,YACA,SACA,cAC0B;AAC1B,QAAO,cAAc,SAAS,WAAW,GAAG,QAAQ,GAAG,gBAAgB,OAAO,YAAY;EAGzF,IAAI,SAFO,MAAM,OAAO,EAGtB,WAAW,qBAAqB,CAChC,UAAU,cAAc,iBAAiB,iCAAiC,CAC1E,UAAU,aAAa,CACvB,MAAM,iCAAiC,KAAK,WAAW,CACvD,MAAM,+BAA+B,KAAK,QAAQ;AAEpD,MAAI,aACH,SAAQ,MAAM,MAAM,mBAAmB,KAAK,aAAa;AAK1D,UAFa,MAAM,MAAM,SAAS,EAEtB,KAAK,SAAS;GACzB,IAAI,IAAI;GACR,MAAM,IAAI;GACV,MAAM,IAAI;GACV,OAAO,IAAI;GACX,UAAU,IAAI,aAAa;GAC3B,UAAU,EAAE;GACZ,EAAE;GACF;;;;;;;;;;;;;AAcH,eAAsB,mBACrB,YACA,UACA,cACuC;CACvC,MAAM,yBAAS,IAAI,KAA6B;CAIhD,MAAM,YAAY,CAAC,GAAG,IAAI,IAAI,SAAS,CAAC;AACxC,MAAK,MAAM,MAAM,UAChB,QAAO,IAAI,IAAI,EAAE,CAAC;AAGnB,KAAI,UAAU,WAAW,EACxB,QAAO;CAGR,MAAM,KAAK,MAAM,OAAO;AAUxB,MAAK,MAAM,SAAS,OAAO,WAAW,eAAe,EAAE;EACtD,IAAI;AACJ,MAAI;AACH,UAAO,MAAM,GACX,WAAW,qBAAqB,CAChC,UAAU,cAAc,iBAAiB,iCAAiC,CAC1E,OAAO;IACP;IACA;IACA;IACA;IACA;IACA;IACA,CAAC,CACD,MAAM,iCAAiC,KAAK,WAAW,CACvD,MAAM,+BAA+B,MAAM,MAAM,CACjD,MAAM,mBAAmB,KAAK,aAAa,CAC3C,SAAS;WACH,OAAO;AACf,OAAI,oBAAoB,MAAM,CAAE,QAAO;AACvC,SAAM;;AAGP,OAAK,MAAM,OAAO,MAAM;GACvB,MAAM,UAAU,IAAI;GACpB,MAAM,OAAqB;IAC1B,IAAI,IAAI;IACR,MAAM,IAAI;IACV,MAAM,IAAI;IACV,OAAO,IAAI;IACX,UAAU,IAAI,aAAa;IAC3B,UAAU,EAAE;IACZ;GAED,MAAM,QAAQ,OAAO,IAAI,QAAQ;AACjC,OAAI,MACH,OAAM,KAAK,KAAK;;;AAKnB,QAAO;;;;;;;;;;;;;AAcR,eAAsB,sBACrB,YACA,UACuD;CACvD,MAAM,yBAAS,IAAI,KAA6C;CAKhE,MAAM,YAAY,CAAC,GAAG,IAAI,IAAI,SAAS,CAAC;AACxC,MAAK,MAAM,MAAM,UAChB,QAAO,IAAI,IAAI,EA
AE,CAAC;AAGnB,KAAI,UAAU,WAAW,EACxB,QAAO;CAGR,MAAM,KAAK,MAAM,OAAO;CAOxB,MAAM,0BAA0B,MAAM,2BAA2B,WAAW;AAU5E,MAAK,MAAM,SAAS,OAAO,WAAW,eAAe,EAAE;EACtD,IAAI;AACJ,MAAI;AACH,UAAO,MAAM,GACX,WAAW,qBAAqB,CAChC,UAAU,cAAc,iBAAiB,iCAAiC,CAC1E,OAAO;IACP;IACA;IACA;IACA;IACA;IACA;IACA,CAAC,CACD,MAAM,iCAAiC,KAAK,WAAW,CACvD,MAAM,+BAA+B,MAAM,MAAM,CACjD,QAAQ,oBAAoB,MAAM,CAClC,SAAS;WACH,OAAO;AACf,OAAI,oBAAoB,MAAM,EAAE;AAC/B,SAAK,MAAM,MAAM,UAChB,sBAAqB,YAAY,IAAI,EAAE,EAAE,wBAAwB;AAElE,WAAO;;AAER,SAAM;;AAGP,OAAK,MAAM,OAAO,MAAM;GACvB,MAAM,UAAU,IAAI;GACpB,MAAM,OAAqB;IAC1B,IAAI,IAAI;IACR,MAAM,IAAI;IACV,MAAM,IAAI;IACV,OAAO,IAAI;IACX,UAAU,IAAI,aAAa;IAC3B,UAAU,EAAE;IACZ;GAED,MAAM,aAAa,OAAO,IAAI,QAAQ;AACtC,OAAI,CAAC,WAAY;GACjB,MAAM,WAAW,WAAW,IAAI;AAChC,OAAI,SACH,UAAS,KAAK,KAAK;OAEnB,YAAW,IAAI,QAAQ,CAAC,KAAK;;;AAShC,MAAK,MAAM,CAAC,SAAS,eAAe,OACnC,sBAAqB,YAAY,SAAS,YAAY,wBAAwB;AAG/E,QAAO;;;;;;;;AASR,eAAe,2BAA2B,YAAuC;AAChF,KAAI;AAEH,UADa,MAAM,iBAAiB,EACxB,QAAQ,MAAM,EAAE,YAAY,SAAS,WAAW,CAAC,CAAC,KAAK,MAAM,EAAE,KAAK;UACxE,OAAO;AACf,MAAI,oBAAoB,MAAM,CAAE,QAAO,EAAE;AACzC,QAAM;;;;;;;;;;;;;;;AAgBR,SAAS,qBACR,YACA,SACA,YACA,yBACO;AAIP,MAAK,MAAM,QAAQ,wBAClB,sBAAqB,SAAS,WAAW,GAAG,QAAQ,GAAG,QAAQ,WAAW,SAAS,EAAE,CAAC;AAKvF,MAAK,MAAM,CAAC,MAAM,UAAU,OAAO,QAAQ,WAAW,CACrD,sBAAqB,SAAS,WAAW,GAAG,QAAQ,GAAG,QAAQ,MAAM;CAGtE,MAAM,WAAW,OAAO,OAAO,WAAW,CAAC,MAAM;AACjD,sBAAqB,SAAS,WAAW,GAAG,QAAQ,KAAK,SAAS;;;;;AAMnE,eAAsB,iBACrB,YACA,cACA,UACgE;CAChE,MAAM,EAAE,wBAAwB,MAAM,OAAO;CAO7C,MAAM,EAAE,YAAY,MAAM,oBAAoB,YAHL,EACxC,OAAO,GAAG,eAAe,UAAU,EACnC,CACiE;AAElE,QAAO;;;;;AAMR,SAAS,UAAU,WAA8B,QAA6C;CAC7F,MAAM,sBAAM,IAAI,KAA2B;CAC3C,MAAM,QAAwB,EAAE;AAGhC,MAAK,MAAM,QAAQ,UAClB,KAAI,IAAI,KAAK,IAAI;EAChB,IAAI,KAAK;EACT,MAAM,KAAK;EACX,MAAM,KAAK;EACX,OAAO,KAAK;EACZ,UAAU,KAAK,aAAa;EAC5B,aAAa,KAAK,OAAO,KAAK,MAAM,KAAK,KAAK,CAAC,cAAc;EAC7D,UAAU,EAAE;EACZ,OAAO,OAAO,IAAI,KAAK,GAAG,IAAI;EAC9B,CAAC;AAIH,MAAK,MAAM,QAAQ,IAAI,QAAQ,CAC9B,KAAI,KAAK,YAAY,IAAI,IAAI,KAAK,SAAS,CAC1C,KAAI,IAAI,KAAK,SAAS,CAAE,SAAS,KAAK,KAAK;KAE3C,OAAM,KAAK,KAAK;AAIlB,QAAO"
}
|
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import { i as encodeBase64url, n as decodeBase64url } from "./base64-
|
|
1
|
+
import { i as encodeBase64url, n as decodeBase64url } from "./base64-BRICGH2l.mjs";
|
|
2
2
|
|
|
3
3
|
//#region src/preview/tokens.ts
|
|
4
4
|
/**
|
|
@@ -168,4 +168,4 @@ function parseContentId(contentId) {
|
|
|
168
168
|
|
|
169
169
|
//#endregion
|
|
170
170
|
export { parseContentId as n, verifyPreviewToken as r, generatePreviewToken as t };
|
|
171
|
-
//# sourceMappingURL=tokens-
|
|
171
|
+
//# sourceMappingURL=tokens-D7zMmWi2.mjs.map
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"tokens-D9vnZqYS.mjs","names":[],"sources":["../src/preview/tokens.ts"],"sourcesContent":["/**\n * Preview token generation and verification\n *\n * Tokens are compact, URL-safe, and HMAC-signed.\n * Format: base64url(JSON payload).base64url(HMAC signature)\n *\n * Payload: { cid: contentId, exp: expiryTimestamp, iat: issuedAt }\n */\n\nimport { encodeBase64url, decodeBase64url } from \"../utils/base64.js\";\n\n// Regex pattern for duration parsing\nconst DURATION_PATTERN = /^(\\d+)([smhdw])$/;\n\n/**\n * Preview token payload\n */\nexport interface PreviewTokenPayload {\n\t/** Content ID in format \"collection:id\" (e.g., \"posts:abc123\") */\n\tcid: string;\n\t/** Expiry timestamp (seconds since epoch) */\n\texp: number;\n\t/** Issued at timestamp (seconds since epoch) */\n\tiat: number;\n}\n\n/**\n * Options for generating a preview token\n */\nexport interface GeneratePreviewTokenOptions {\n\t/** Content ID in format \"collection:id\" */\n\tcontentId: string;\n\t/** How long the token is valid. Accepts \"1h\", \"30m\", \"1d\", or seconds as number. Default: \"1h\" */\n\texpiresIn?: string | number;\n\t/** Secret key for signing. Should be from environment variable. */\n\tsecret: string;\n}\n\n/**\n * Parse duration string to seconds\n * Supports: \"1h\", \"30m\", \"1d\", \"2w\", or raw seconds\n */\nfunction parseDuration(duration: string | number): number {\n\tif (typeof duration === \"number\") {\n\t\treturn duration;\n\t}\n\n\tconst match = duration.match(DURATION_PATTERN);\n\tif (!match) {\n\t\tthrow new Error(\n\t\t\t`Invalid duration format: \"${duration}\". 
Use \"1h\", \"30m\", \"1d\", \"2w\", or seconds.`,\n\t\t);\n\t}\n\n\tconst value = parseInt(match[1], 10);\n\tconst unit = match[2];\n\n\tswitch (unit) {\n\t\tcase \"s\":\n\t\t\treturn value;\n\t\tcase \"m\":\n\t\t\treturn value * 60;\n\t\tcase \"h\":\n\t\t\treturn value * 60 * 60;\n\t\tcase \"d\":\n\t\t\treturn value * 60 * 60 * 24;\n\t\tcase \"w\":\n\t\t\treturn value * 60 * 60 * 24 * 7;\n\t\tdefault:\n\t\t\tthrow new Error(`Unknown duration unit: ${unit}`);\n\t}\n}\n\n/**\n * Create HMAC-SHA256 signature using Web Crypto API\n */\nasync function createSignature(data: string, secret: string): Promise<Uint8Array> {\n\tconst encoder = new TextEncoder();\n\tconst key = await crypto.subtle.importKey(\n\t\t\"raw\",\n\t\tencoder.encode(secret),\n\t\t{ name: \"HMAC\", hash: \"SHA-256\" },\n\t\tfalse,\n\t\t[\"sign\"],\n\t);\n\tconst signature = await crypto.subtle.sign(\"HMAC\", key, encoder.encode(data));\n\treturn new Uint8Array(signature);\n}\n\n/**\n * Verify HMAC-SHA256 signature\n */\nasync function verifySignature(\n\tdata: string,\n\tsignature: Uint8Array,\n\tsecret: string,\n): Promise<boolean> {\n\tconst encoder = new TextEncoder();\n\tconst key = await crypto.subtle.importKey(\n\t\t\"raw\",\n\t\tencoder.encode(secret),\n\t\t{ name: \"HMAC\", hash: \"SHA-256\" },\n\t\tfalse,\n\t\t[\"verify\"],\n\t);\n\t// Create a new ArrayBuffer from the signature to satisfy BufferSource typing\n\t// (Uint8Array.buffer is ArrayBufferLike which includes SharedArrayBuffer)\n\tconst sigBuffer: ArrayBuffer = new ArrayBuffer(signature.byteLength);\n\tnew Uint8Array(sigBuffer).set(signature);\n\treturn crypto.subtle.verify(\"HMAC\", key, sigBuffer, encoder.encode(data));\n}\n\n/**\n * Generate a preview token for content\n *\n * @example\n * ```ts\n * const token = await generatePreviewToken({\n * contentId: \"posts:abc123\",\n * expiresIn: \"1h\",\n * secret: process.env.PREVIEW_SECRET!,\n * });\n * ```\n */\nexport async function generatePreviewToken(options: 
GeneratePreviewTokenOptions): Promise<string> {\n\tconst { contentId, expiresIn = \"1h\", secret } = options;\n\n\tif (!secret) {\n\t\tthrow new Error(\"Preview secret is required\");\n\t}\n\n\tif (!contentId || !contentId.includes(\":\")) {\n\t\tthrow new Error('Content ID must be in format \"collection:id\"');\n\t}\n\n\tconst now = Math.floor(Date.now() / 1000);\n\tconst duration = parseDuration(expiresIn);\n\n\tconst payload: PreviewTokenPayload = {\n\t\tcid: contentId,\n\t\texp: now + duration,\n\t\tiat: now,\n\t};\n\n\t// Encode payload\n\tconst payloadJson = JSON.stringify(payload);\n\tconst encodedPayload = encodeBase64url(new TextEncoder().encode(payloadJson));\n\n\t// Sign it\n\tconst signature = await createSignature(encodedPayload, secret);\n\tconst encodedSignature = encodeBase64url(signature);\n\n\treturn `${encodedPayload}.${encodedSignature}`;\n}\n\n/**\n * Result of verifying a preview token\n */\nexport type VerifyPreviewTokenResult =\n\t| { valid: true; payload: PreviewTokenPayload }\n\t| { valid: false; error: \"invalid\" | \"expired\" | \"malformed\" | \"none\" };\n\n/**\n * Options for verifyPreviewToken\n */\nexport type VerifyPreviewTokenOptions = {\n\t/** Secret key for verifying tokens */\n\tsecret: string;\n} & (\n\t| { /** URL to extract _preview token from */ url: URL }\n\t| {\n\t\t\t/** Preview token string (can be null) */ token: string | null | undefined;\n\t }\n);\n\n/**\n * Verify a preview token and return the payload\n *\n * @example\n * ```ts\n * // With URL (extracts _preview query param)\n * const result = await verifyPreviewToken({\n * url: Astro.url,\n * secret: import.meta.env.PREVIEW_SECRET,\n * });\n *\n * // With token directly\n * const result = await verifyPreviewToken({\n * token: someToken,\n * secret: import.meta.env.PREVIEW_SECRET,\n * });\n *\n * if (result.valid) {\n * console.log(result.payload.cid); // \"posts:abc123\"\n * }\n * ```\n */\nexport async function verifyPreviewToken(\n\toptions: 
VerifyPreviewTokenOptions,\n): Promise<VerifyPreviewTokenResult> {\n\tconst { secret } = options;\n\n\tif (!secret) {\n\t\tthrow new Error(\"Preview secret is required\");\n\t}\n\n\t// Extract token from URL or use provided token\n\tconst token = \"url\" in options ? options.url.searchParams.get(\"_preview\") : options.token;\n\n\t// Handle null/undefined token\n\tif (!token) {\n\t\treturn { valid: false, error: \"none\" };\n\t}\n\n\t// Split token into payload and signature\n\tconst parts = token.split(\".\");\n\tif (parts.length !== 2) {\n\t\treturn { valid: false, error: \"malformed\" };\n\t}\n\n\tconst [encodedPayload, encodedSignature] = parts;\n\n\t// Verify signature\n\tlet signature: Uint8Array;\n\ttry {\n\t\tsignature = decodeBase64url(encodedSignature);\n\t} catch {\n\t\treturn { valid: false, error: \"malformed\" };\n\t}\n\n\tconst isValid = await verifySignature(encodedPayload, signature, secret);\n\tif (!isValid) {\n\t\treturn { valid: false, error: \"invalid\" };\n\t}\n\n\t// Decode and parse payload\n\tlet payload: PreviewTokenPayload;\n\ttry {\n\t\tconst payloadBytes = decodeBase64url(encodedPayload);\n\t\tconst payloadJson = new TextDecoder().decode(payloadBytes);\n\t\tpayload = JSON.parse(payloadJson);\n\t} catch {\n\t\treturn { valid: false, error: \"malformed\" };\n\t}\n\n\t// Check required fields\n\tif (\n\t\ttypeof payload.cid !== \"string\" ||\n\t\ttypeof payload.exp !== \"number\" ||\n\t\ttypeof payload.iat !== \"number\"\n\t) {\n\t\treturn { valid: false, error: \"malformed\" };\n\t}\n\n\t// Check expiry\n\tconst now = Math.floor(Date.now() / 1000);\n\tif (payload.exp < now) {\n\t\treturn { valid: false, error: \"expired\" };\n\t}\n\n\treturn { valid: true, payload };\n}\n\n/**\n * Parse a content ID into collection and id\n */\nexport function parseContentId(contentId: string): {\n\tcollection: string;\n\tid: string;\n} {\n\tconst colonIndex = contentId.indexOf(\":\");\n\tif (colonIndex === -1) {\n\t\tthrow new Error('Content ID must be 
in format \"collection:id\"');\n\t}\n\treturn {\n\t\tcollection: contentId.slice(0, colonIndex),\n\t\tid: contentId.slice(colonIndex + 1),\n\t};\n}\n"],"mappings":";;;;;;;;;;;AAYA,MAAM,mBAAmB;;;;;AA8BzB,SAAS,cAAc,UAAmC;AACzD,KAAI,OAAO,aAAa,SACvB,QAAO;CAGR,MAAM,QAAQ,SAAS,MAAM,iBAAiB;AAC9C,KAAI,CAAC,MACJ,OAAM,IAAI,MACT,6BAA6B,SAAS,6CACtC;CAGF,MAAM,QAAQ,SAAS,MAAM,IAAI,GAAG;CACpC,MAAM,OAAO,MAAM;AAEnB,SAAQ,MAAR;EACC,KAAK,IACJ,QAAO;EACR,KAAK,IACJ,QAAO,QAAQ;EAChB,KAAK,IACJ,QAAO,QAAQ,KAAK;EACrB,KAAK,IACJ,QAAO,QAAQ,KAAK,KAAK;EAC1B,KAAK,IACJ,QAAO,QAAQ,KAAK,KAAK,KAAK;EAC/B,QACC,OAAM,IAAI,MAAM,0BAA0B,OAAO;;;;;;AAOpD,eAAe,gBAAgB,MAAc,QAAqC;CACjF,MAAM,UAAU,IAAI,aAAa;CACjC,MAAM,MAAM,MAAM,OAAO,OAAO,UAC/B,OACA,QAAQ,OAAO,OAAO,EACtB;EAAE,MAAM;EAAQ,MAAM;EAAW,EACjC,OACA,CAAC,OAAO,CACR;CACD,MAAM,YAAY,MAAM,OAAO,OAAO,KAAK,QAAQ,KAAK,QAAQ,OAAO,KAAK,CAAC;AAC7E,QAAO,IAAI,WAAW,UAAU;;;;;AAMjC,eAAe,gBACd,MACA,WACA,QACmB;CACnB,MAAM,UAAU,IAAI,aAAa;CACjC,MAAM,MAAM,MAAM,OAAO,OAAO,UAC/B,OACA,QAAQ,OAAO,OAAO,EACtB;EAAE,MAAM;EAAQ,MAAM;EAAW,EACjC,OACA,CAAC,SAAS,CACV;CAGD,MAAM,YAAyB,IAAI,YAAY,UAAU,WAAW;AACpE,KAAI,WAAW,UAAU,CAAC,IAAI,UAAU;AACxC,QAAO,OAAO,OAAO,OAAO,QAAQ,KAAK,WAAW,QAAQ,OAAO,KAAK,CAAC;;;;;;;;;;;;;;AAe1E,eAAsB,qBAAqB,SAAuD;CACjG,MAAM,EAAE,WAAW,YAAY,MAAM,WAAW;AAEhD,KAAI,CAAC,OACJ,OAAM,IAAI,MAAM,6BAA6B;AAG9C,KAAI,CAAC,aAAa,CAAC,UAAU,SAAS,IAAI,CACzC,OAAM,IAAI,MAAM,iDAA+C;CAGhE,MAAM,MAAM,KAAK,MAAM,KAAK,KAAK,GAAG,IAAK;CAGzC,MAAM,UAA+B;EACpC,KAAK;EACL,KAAK,MAJW,cAAc,UAAU;EAKxC,KAAK;EACL;CAGD,MAAM,cAAc,KAAK,UAAU,QAAQ;CAC3C,MAAM,iBAAiB,gBAAgB,IAAI,aAAa,CAAC,OAAO,YAAY,CAAC;AAM7E,QAAO,GAAG,eAAe,GAFA,gBADP,MAAM,gBAAgB,gBAAgB,OAAO,CACZ;;;;;;;;;;;;;;;;;;;;;;;;AA+CpD,eAAsB,mBACrB,SACoC;CACpC,MAAM,EAAE,WAAW;AAEnB,KAAI,CAAC,OACJ,OAAM,IAAI,MAAM,6BAA6B;CAI9C,MAAM,QAAQ,SAAS,UAAU,QAAQ,IAAI,aAAa,IAAI,WAAW,GAAG,QAAQ;AAGpF,KAAI,CAAC,MACJ,QAAO;EAAE,OAAO;EAAO,OAAO;EAAQ;CAIvC,MAAM,QAAQ,MAAM,MAAM,IAAI;AAC9B,KAAI,MAAM,WAAW,EACpB,QAAO;EAAE,OAAO;EAAO,OAAO;EAAa;CAG5C,MAAM,CAAC,gBAAgB,oBAAoB;CAG3C,IAAI;AACJ,KAAI;AACH,cAAY,gBA
AgB,iBAAiB;SACtC;AACP,SAAO;GAAE,OAAO;GAAO,OAAO;GAAa;;AAI5C,KAAI,CADY,MAAM,gBAAgB,gBAAgB,WAAW,OAAO,CAEvE,QAAO;EAAE,OAAO;EAAO,OAAO;EAAW;CAI1C,IAAI;AACJ,KAAI;EACH,MAAM,eAAe,gBAAgB,eAAe;EACpD,MAAM,cAAc,IAAI,aAAa,CAAC,OAAO,aAAa;AAC1D,YAAU,KAAK,MAAM,YAAY;SAC1B;AACP,SAAO;GAAE,OAAO;GAAO,OAAO;GAAa;;AAI5C,KACC,OAAO,QAAQ,QAAQ,YACvB,OAAO,QAAQ,QAAQ,YACvB,OAAO,QAAQ,QAAQ,SAEvB,QAAO;EAAE,OAAO;EAAO,OAAO;EAAa;CAI5C,MAAM,MAAM,KAAK,MAAM,KAAK,KAAK,GAAG,IAAK;AACzC,KAAI,QAAQ,MAAM,IACjB,QAAO;EAAE,OAAO;EAAO,OAAO;EAAW;AAG1C,QAAO;EAAE,OAAO;EAAM;EAAS;;;;;AAMhC,SAAgB,eAAe,WAG7B;CACD,MAAM,aAAa,UAAU,QAAQ,IAAI;AACzC,KAAI,eAAe,GAClB,OAAM,IAAI,MAAM,iDAA+C;AAEhE,QAAO;EACN,YAAY,UAAU,MAAM,GAAG,WAAW;EAC1C,IAAI,UAAU,MAAM,aAAa,EAAE;EACnC"}
|
|
1
|
+
{"version":3,"file":"tokens-D7zMmWi2.mjs","names":[],"sources":["../src/preview/tokens.ts"],"sourcesContent":["/**\n * Preview token generation and verification\n *\n * Tokens are compact, URL-safe, and HMAC-signed.\n * Format: base64url(JSON payload).base64url(HMAC signature)\n *\n * Payload: { cid: contentId, exp: expiryTimestamp, iat: issuedAt }\n */\n\nimport { encodeBase64url, decodeBase64url } from \"../utils/base64.js\";\n\n// Regex pattern for duration parsing\nconst DURATION_PATTERN = /^(\\d+)([smhdw])$/;\n\n/**\n * Preview token payload\n */\nexport interface PreviewTokenPayload {\n\t/** Content ID in format \"collection:id\" (e.g., \"posts:abc123\") */\n\tcid: string;\n\t/** Expiry timestamp (seconds since epoch) */\n\texp: number;\n\t/** Issued at timestamp (seconds since epoch) */\n\tiat: number;\n}\n\n/**\n * Options for generating a preview token\n */\nexport interface GeneratePreviewTokenOptions {\n\t/** Content ID in format \"collection:id\" */\n\tcontentId: string;\n\t/** How long the token is valid. Accepts \"1h\", \"30m\", \"1d\", or seconds as number. Default: \"1h\" */\n\texpiresIn?: string | number;\n\t/** Secret key for signing. Should be from environment variable. */\n\tsecret: string;\n}\n\n/**\n * Parse duration string to seconds\n * Supports: \"1h\", \"30m\", \"1d\", \"2w\", or raw seconds\n */\nfunction parseDuration(duration: string | number): number {\n\tif (typeof duration === \"number\") {\n\t\treturn duration;\n\t}\n\n\tconst match = duration.match(DURATION_PATTERN);\n\tif (!match) {\n\t\tthrow new Error(\n\t\t\t`Invalid duration format: \"${duration}\". 
Use \"1h\", \"30m\", \"1d\", \"2w\", or seconds.`,\n\t\t);\n\t}\n\n\tconst value = parseInt(match[1], 10);\n\tconst unit = match[2];\n\n\tswitch (unit) {\n\t\tcase \"s\":\n\t\t\treturn value;\n\t\tcase \"m\":\n\t\t\treturn value * 60;\n\t\tcase \"h\":\n\t\t\treturn value * 60 * 60;\n\t\tcase \"d\":\n\t\t\treturn value * 60 * 60 * 24;\n\t\tcase \"w\":\n\t\t\treturn value * 60 * 60 * 24 * 7;\n\t\tdefault:\n\t\t\tthrow new Error(`Unknown duration unit: ${unit}`);\n\t}\n}\n\n/**\n * Create HMAC-SHA256 signature using Web Crypto API\n */\nasync function createSignature(data: string, secret: string): Promise<Uint8Array> {\n\tconst encoder = new TextEncoder();\n\tconst key = await crypto.subtle.importKey(\n\t\t\"raw\",\n\t\tencoder.encode(secret),\n\t\t{ name: \"HMAC\", hash: \"SHA-256\" },\n\t\tfalse,\n\t\t[\"sign\"],\n\t);\n\tconst signature = await crypto.subtle.sign(\"HMAC\", key, encoder.encode(data));\n\treturn new Uint8Array(signature);\n}\n\n/**\n * Verify HMAC-SHA256 signature\n */\nasync function verifySignature(\n\tdata: string,\n\tsignature: Uint8Array,\n\tsecret: string,\n): Promise<boolean> {\n\tconst encoder = new TextEncoder();\n\tconst key = await crypto.subtle.importKey(\n\t\t\"raw\",\n\t\tencoder.encode(secret),\n\t\t{ name: \"HMAC\", hash: \"SHA-256\" },\n\t\tfalse,\n\t\t[\"verify\"],\n\t);\n\t// Create a new ArrayBuffer from the signature to satisfy BufferSource typing\n\t// (Uint8Array.buffer is ArrayBufferLike which includes SharedArrayBuffer)\n\tconst sigBuffer: ArrayBuffer = new ArrayBuffer(signature.byteLength);\n\tnew Uint8Array(sigBuffer).set(signature);\n\treturn crypto.subtle.verify(\"HMAC\", key, sigBuffer, encoder.encode(data));\n}\n\n/**\n * Generate a preview token for content\n *\n * @example\n * ```ts\n * const token = await generatePreviewToken({\n * contentId: \"posts:abc123\",\n * expiresIn: \"1h\",\n * secret: process.env.PREVIEW_SECRET!,\n * });\n * ```\n */\nexport async function generatePreviewToken(options: 
GeneratePreviewTokenOptions): Promise<string> {\n\tconst { contentId, expiresIn = \"1h\", secret } = options;\n\n\tif (!secret) {\n\t\tthrow new Error(\"Preview secret is required\");\n\t}\n\n\tif (!contentId || !contentId.includes(\":\")) {\n\t\tthrow new Error('Content ID must be in format \"collection:id\"');\n\t}\n\n\tconst now = Math.floor(Date.now() / 1000);\n\tconst duration = parseDuration(expiresIn);\n\n\tconst payload: PreviewTokenPayload = {\n\t\tcid: contentId,\n\t\texp: now + duration,\n\t\tiat: now,\n\t};\n\n\t// Encode payload\n\tconst payloadJson = JSON.stringify(payload);\n\tconst encodedPayload = encodeBase64url(new TextEncoder().encode(payloadJson));\n\n\t// Sign it\n\tconst signature = await createSignature(encodedPayload, secret);\n\tconst encodedSignature = encodeBase64url(signature);\n\n\treturn `${encodedPayload}.${encodedSignature}`;\n}\n\n/**\n * Result of verifying a preview token\n */\nexport type VerifyPreviewTokenResult =\n\t| { valid: true; payload: PreviewTokenPayload }\n\t| { valid: false; error: \"invalid\" | \"expired\" | \"malformed\" | \"none\" };\n\n/**\n * Options for verifyPreviewToken\n */\nexport type VerifyPreviewTokenOptions = {\n\t/** Secret key for verifying tokens */\n\tsecret: string;\n} & (\n\t| { /** URL to extract _preview token from */ url: URL }\n\t| {\n\t\t\t/** Preview token string (can be null) */ token: string | null | undefined;\n\t }\n);\n\n/**\n * Verify a preview token and return the payload\n *\n * @example\n * ```ts\n * // With URL (extracts _preview query param)\n * const result = await verifyPreviewToken({\n * url: Astro.url,\n * secret: import.meta.env.PREVIEW_SECRET,\n * });\n *\n * // With token directly\n * const result = await verifyPreviewToken({\n * token: someToken,\n * secret: import.meta.env.PREVIEW_SECRET,\n * });\n *\n * if (result.valid) {\n * console.log(result.payload.cid); // \"posts:abc123\"\n * }\n * ```\n */\nexport async function verifyPreviewToken(\n\toptions: 
VerifyPreviewTokenOptions,\n): Promise<VerifyPreviewTokenResult> {\n\tconst { secret } = options;\n\n\tif (!secret) {\n\t\tthrow new Error(\"Preview secret is required\");\n\t}\n\n\t// Extract token from URL or use provided token\n\tconst token = \"url\" in options ? options.url.searchParams.get(\"_preview\") : options.token;\n\n\t// Handle null/undefined token\n\tif (!token) {\n\t\treturn { valid: false, error: \"none\" };\n\t}\n\n\t// Split token into payload and signature\n\tconst parts = token.split(\".\");\n\tif (parts.length !== 2) {\n\t\treturn { valid: false, error: \"malformed\" };\n\t}\n\n\tconst [encodedPayload, encodedSignature] = parts;\n\n\t// Verify signature\n\tlet signature: Uint8Array;\n\ttry {\n\t\tsignature = decodeBase64url(encodedSignature);\n\t} catch {\n\t\treturn { valid: false, error: \"malformed\" };\n\t}\n\n\tconst isValid = await verifySignature(encodedPayload, signature, secret);\n\tif (!isValid) {\n\t\treturn { valid: false, error: \"invalid\" };\n\t}\n\n\t// Decode and parse payload\n\tlet payload: PreviewTokenPayload;\n\ttry {\n\t\tconst payloadBytes = decodeBase64url(encodedPayload);\n\t\tconst payloadJson = new TextDecoder().decode(payloadBytes);\n\t\tpayload = JSON.parse(payloadJson);\n\t} catch {\n\t\treturn { valid: false, error: \"malformed\" };\n\t}\n\n\t// Check required fields\n\tif (\n\t\ttypeof payload.cid !== \"string\" ||\n\t\ttypeof payload.exp !== \"number\" ||\n\t\ttypeof payload.iat !== \"number\"\n\t) {\n\t\treturn { valid: false, error: \"malformed\" };\n\t}\n\n\t// Check expiry\n\tconst now = Math.floor(Date.now() / 1000);\n\tif (payload.exp < now) {\n\t\treturn { valid: false, error: \"expired\" };\n\t}\n\n\treturn { valid: true, payload };\n}\n\n/**\n * Parse a content ID into collection and id\n */\nexport function parseContentId(contentId: string): {\n\tcollection: string;\n\tid: string;\n} {\n\tconst colonIndex = contentId.indexOf(\":\");\n\tif (colonIndex === -1) {\n\t\tthrow new Error('Content ID must be 
in format \"collection:id\"');\n\t}\n\treturn {\n\t\tcollection: contentId.slice(0, colonIndex),\n\t\tid: contentId.slice(colonIndex + 1),\n\t};\n}\n"],"mappings":";;;;;;;;;;;AAYA,MAAM,mBAAmB;;;;;AA8BzB,SAAS,cAAc,UAAmC;AACzD,KAAI,OAAO,aAAa,SACvB,QAAO;CAGR,MAAM,QAAQ,SAAS,MAAM,iBAAiB;AAC9C,KAAI,CAAC,MACJ,OAAM,IAAI,MACT,6BAA6B,SAAS,6CACtC;CAGF,MAAM,QAAQ,SAAS,MAAM,IAAI,GAAG;CACpC,MAAM,OAAO,MAAM;AAEnB,SAAQ,MAAR;EACC,KAAK,IACJ,QAAO;EACR,KAAK,IACJ,QAAO,QAAQ;EAChB,KAAK,IACJ,QAAO,QAAQ,KAAK;EACrB,KAAK,IACJ,QAAO,QAAQ,KAAK,KAAK;EAC1B,KAAK,IACJ,QAAO,QAAQ,KAAK,KAAK,KAAK;EAC/B,QACC,OAAM,IAAI,MAAM,0BAA0B,OAAO;;;;;;AAOpD,eAAe,gBAAgB,MAAc,QAAqC;CACjF,MAAM,UAAU,IAAI,aAAa;CACjC,MAAM,MAAM,MAAM,OAAO,OAAO,UAC/B,OACA,QAAQ,OAAO,OAAO,EACtB;EAAE,MAAM;EAAQ,MAAM;EAAW,EACjC,OACA,CAAC,OAAO,CACR;CACD,MAAM,YAAY,MAAM,OAAO,OAAO,KAAK,QAAQ,KAAK,QAAQ,OAAO,KAAK,CAAC;AAC7E,QAAO,IAAI,WAAW,UAAU;;;;;AAMjC,eAAe,gBACd,MACA,WACA,QACmB;CACnB,MAAM,UAAU,IAAI,aAAa;CACjC,MAAM,MAAM,MAAM,OAAO,OAAO,UAC/B,OACA,QAAQ,OAAO,OAAO,EACtB;EAAE,MAAM;EAAQ,MAAM;EAAW,EACjC,OACA,CAAC,SAAS,CACV;CAGD,MAAM,YAAyB,IAAI,YAAY,UAAU,WAAW;AACpE,KAAI,WAAW,UAAU,CAAC,IAAI,UAAU;AACxC,QAAO,OAAO,OAAO,OAAO,QAAQ,KAAK,WAAW,QAAQ,OAAO,KAAK,CAAC;;;;;;;;;;;;;;AAe1E,eAAsB,qBAAqB,SAAuD;CACjG,MAAM,EAAE,WAAW,YAAY,MAAM,WAAW;AAEhD,KAAI,CAAC,OACJ,OAAM,IAAI,MAAM,6BAA6B;AAG9C,KAAI,CAAC,aAAa,CAAC,UAAU,SAAS,IAAI,CACzC,OAAM,IAAI,MAAM,iDAA+C;CAGhE,MAAM,MAAM,KAAK,MAAM,KAAK,KAAK,GAAG,IAAK;CAGzC,MAAM,UAA+B;EACpC,KAAK;EACL,KAAK,MAJW,cAAc,UAAU;EAKxC,KAAK;EACL;CAGD,MAAM,cAAc,KAAK,UAAU,QAAQ;CAC3C,MAAM,iBAAiB,gBAAgB,IAAI,aAAa,CAAC,OAAO,YAAY,CAAC;AAM7E,QAAO,GAAG,eAAe,GAFA,gBADP,MAAM,gBAAgB,gBAAgB,OAAO,CACZ;;;;;;;;;;;;;;;;;;;;;;;;AA+CpD,eAAsB,mBACrB,SACoC;CACpC,MAAM,EAAE,WAAW;AAEnB,KAAI,CAAC,OACJ,OAAM,IAAI,MAAM,6BAA6B;CAI9C,MAAM,QAAQ,SAAS,UAAU,QAAQ,IAAI,aAAa,IAAI,WAAW,GAAG,QAAQ;AAGpF,KAAI,CAAC,MACJ,QAAO;EAAE,OAAO;EAAO,OAAO;EAAQ;CAIvC,MAAM,QAAQ,MAAM,MAAM,IAAI;AAC9B,KAAI,MAAM,WAAW,EACpB,QAAO;EAAE,OAAO;EAAO,OAAO;EAAa;CAG5C,MAAM,CAAC,gBAAgB,oBAAoB;CAG3C,IAAI;AACJ,KAAI;AACH,cAAY,gBA
AgB,iBAAiB;SACtC;AACP,SAAO;GAAE,OAAO;GAAO,OAAO;GAAa;;AAI5C,KAAI,CADY,MAAM,gBAAgB,gBAAgB,WAAW,OAAO,CAEvE,QAAO;EAAE,OAAO;EAAO,OAAO;EAAW;CAI1C,IAAI;AACJ,KAAI;EACH,MAAM,eAAe,gBAAgB,eAAe;EACpD,MAAM,cAAc,IAAI,aAAa,CAAC,OAAO,aAAa;AAC1D,YAAU,KAAK,MAAM,YAAY;SAC1B;AACP,SAAO;GAAE,OAAO;GAAO,OAAO;GAAa;;AAI5C,KACC,OAAO,QAAQ,QAAQ,YACvB,OAAO,QAAQ,QAAQ,YACvB,OAAO,QAAQ,QAAQ,SAEvB,QAAO;EAAE,OAAO;EAAO,OAAO;EAAa;CAI5C,MAAM,MAAM,KAAK,MAAM,KAAK,KAAK,GAAG,IAAK;AACzC,KAAI,QAAQ,MAAM,IACjB,QAAO;EAAE,OAAO;EAAO,OAAO;EAAW;AAG1C,QAAO;EAAE,OAAO;EAAM;EAAS;;;;;AAMhC,SAAgB,eAAe,WAG7B;CACD,MAAM,aAAa,UAAU,QAAQ,IAAI;AACzC,KAAI,eAAe,GAClB,OAAM,IAAI,MAAM,iDAA+C;AAEhE,QAAO;EACN,YAAY,UAAU,MAAM,GAAG,WAAW;EAC1C,IAAI,UAAU,MAAM,aAAa,EAAE;EACnC"}
|