emdash 0.9.0 → 0.10.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{adapters-DoNJiveC.d.mts → adapters-BktHA7EO.d.mts} +1 -1
- package/dist/{adapters-DoNJiveC.d.mts.map → adapters-BktHA7EO.d.mts.map} +1 -1
- package/dist/{apply-BzltprvY.mjs → apply-UsrFuO7l.mjs} +156 -254
- package/dist/apply-UsrFuO7l.mjs.map +1 -0
- package/dist/astro/index.d.mts +6 -6
- package/dist/astro/index.mjs +10 -2
- package/dist/astro/index.mjs.map +1 -1
- package/dist/astro/middleware/auth.d.mts +5 -5
- package/dist/astro/middleware/auth.mjs +5 -5
- package/dist/astro/middleware/redirect.mjs +5 -5
- package/dist/astro/middleware/request-context.mjs +4 -4
- package/dist/astro/middleware/setup.mjs +1 -1
- package/dist/astro/middleware.mjs +35 -34
- package/dist/astro/middleware.mjs.map +1 -1
- package/dist/astro/types.d.mts +8 -9
- package/dist/astro/types.d.mts.map +1 -1
- package/dist/{base64-BRICGH2l.mjs → base64-MBPo9ozB.mjs} +1 -1
- package/dist/{base64-BRICGH2l.mjs.map → base64-MBPo9ozB.mjs.map} +1 -1
- package/dist/{byline-BSaNL1w7.mjs → byline-C3vnhIpU.mjs} +4 -4
- package/dist/{byline-BSaNL1w7.mjs.map → byline-C3vnhIpU.mjs.map} +1 -1
- package/dist/{bylines-CvJ3PYz2.mjs → bylines-esI7ioa9.mjs} +5 -5
- package/dist/{bylines-CvJ3PYz2.mjs.map → bylines-esI7ioa9.mjs.map} +1 -1
- package/dist/{cache-C6N_hhN7.mjs → cache-fTzxgMFJ.mjs} +3 -3
- package/dist/{cache-C6N_hhN7.mjs.map → cache-fTzxgMFJ.mjs.map} +1 -1
- package/dist/{chunks-NBQVDOci.mjs → chunks-Da2-b-oA.mjs} +2 -2
- package/dist/{chunks-NBQVDOci.mjs.map → chunks-Da2-b-oA.mjs.map} +1 -1
- package/dist/cli/index.mjs +251 -79
- package/dist/cli/index.mjs.map +1 -1
- package/dist/client/cf-access.d.mts +1 -1
- package/dist/client/index.d.mts +1 -1
- package/dist/client/index.mjs +1 -1
- package/dist/{config-BI0V3ICQ.mjs → config-CVssduLe.mjs} +1 -1
- package/dist/{config-BI0V3ICQ.mjs.map → config-CVssduLe.mjs.map} +1 -1
- package/dist/{content-8lOYF0pr.mjs → content-C7G4QXkK.mjs} +14 -3
- package/dist/content-C7G4QXkK.mjs.map +1 -0
- package/dist/db/index.d.mts +3 -3
- package/dist/db/index.mjs +1 -1
- package/dist/db/libsql.d.mts +1 -1
- package/dist/db/postgres.d.mts +1 -1
- package/dist/db/sqlite.d.mts +1 -1
- package/dist/{db-errors-WRezodiz.mjs → db-errors-B7P2pSCn.mjs} +1 -1
- package/dist/{db-errors-WRezodiz.mjs.map → db-errors-B7P2pSCn.mjs.map} +1 -1
- package/dist/{default-D8ksjWhO.mjs → default-pHuz9WF6.mjs} +1 -1
- package/dist/{default-D8ksjWhO.mjs.map → default-pHuz9WF6.mjs.map} +1 -1
- package/dist/{error-D_-tqP-I.mjs → error-DqnRMM5z.mjs} +1 -1
- package/dist/{error-D_-tqP-I.mjs.map → error-DqnRMM5z.mjs.map} +1 -1
- package/dist/{index-BFRaVcD6.d.mts → index-DjPMOfO0.d.mts} +82 -67
- package/dist/index-DjPMOfO0.d.mts.map +1 -0
- package/dist/index.d.mts +10 -10
- package/dist/index.mjs +28 -27
- package/dist/{load-DDqMMvZL.mjs → load-sXRuM7Us.mjs} +2 -2
- package/dist/{load-DDqMMvZL.mjs.map → load-sXRuM7Us.mjs.map} +1 -1
- package/dist/{loader-CKLbBnhK.mjs → loader-Bx2_9-5e.mjs} +31 -6
- package/dist/loader-Bx2_9-5e.mjs.map +1 -0
- package/dist/{manifest-schema-DqWNC3lM.mjs → manifest-schema-CXAbd1vH.mjs} +1 -1
- package/dist/{manifest-schema-DqWNC3lM.mjs.map → manifest-schema-CXAbd1vH.mjs.map} +1 -1
- package/dist/media/index.d.mts +1 -1
- package/dist/media/index.mjs +1 -1
- package/dist/media/local-runtime.d.mts +7 -7
- package/dist/media/local-runtime.mjs +3 -3
- package/dist/{media-BW32b4gi.mjs → media-D8FbNsl0.mjs} +2 -2
- package/dist/{media-BW32b4gi.mjs.map → media-D8FbNsl0.mjs.map} +1 -1
- package/dist/{mode-ier8jbBk.mjs → mode-YhqNVef_.mjs} +1 -1
- package/dist/{mode-ier8jbBk.mjs.map → mode-YhqNVef_.mjs.map} +1 -1
- package/dist/{options-BVp3UsTS.mjs → options-nPxWnrya.mjs} +1 -1
- package/dist/{options-BVp3UsTS.mjs.map → options-nPxWnrya.mjs.map} +1 -1
- package/dist/page/index.d.mts +2 -2
- package/dist/{patterns-CrCYkMBb.mjs → patterns-DsUZ4uxI.mjs} +1 -1
- package/dist/{patterns-CrCYkMBb.mjs.map → patterns-DsUZ4uxI.mjs.map} +1 -1
- package/dist/{placeholder-BE4o_2dc.d.mts → placeholder-CDPtkelt.d.mts} +1 -1
- package/dist/{placeholder-BE4o_2dc.d.mts.map → placeholder-CDPtkelt.d.mts.map} +1 -1
- package/dist/{placeholder-CIJejMlK.mjs → placeholder-Ci0RLeCk.mjs} +1 -1
- package/dist/{placeholder-CIJejMlK.mjs.map → placeholder-Ci0RLeCk.mjs.map} +1 -1
- package/dist/plugins/adapt-sandbox-entry.d.mts +5 -5
- package/dist/plugins/adapt-sandbox-entry.mjs +2 -2
- package/dist/{public-url-DByxYjUw.mjs → public-url-B1AxbbbQ.mjs} +1 -1
- package/dist/{public-url-DByxYjUw.mjs.map → public-url-B1AxbbbQ.mjs.map} +1 -1
- package/dist/{query-Cg9ZKRQ0.mjs → query-Bo-msrmu.mjs} +13 -13
- package/dist/{query-Cg9ZKRQ0.mjs.map → query-Bo-msrmu.mjs.map} +1 -1
- package/dist/{redirect-BhUBKRc1.mjs → redirect-C5H7VGIX.mjs} +3 -3
- package/dist/{redirect-BhUBKRc1.mjs.map → redirect-C5H7VGIX.mjs.map} +1 -1
- package/dist/{registry-Dw70ChxB.mjs → registry-Beb7wxFc.mjs} +5 -5
- package/dist/{registry-Dw70ChxB.mjs.map → registry-Beb7wxFc.mjs.map} +1 -1
- package/dist/{request-cache-B-bmkipQ.mjs → request-cache-C-tIpYIw.mjs} +1 -1
- package/dist/{request-cache-B-bmkipQ.mjs.map → request-cache-C-tIpYIw.mjs.map} +1 -1
- package/dist/{runner-Bnoj7vjK.d.mts → runner-Clwe4Mme.d.mts} +2 -2
- package/dist/{runner-Bnoj7vjK.d.mts.map → runner-Clwe4Mme.d.mts.map} +1 -1
- package/dist/{runner-C7ADox5q.mjs → runner-DMnlIkh4.mjs} +433 -138
- package/dist/runner-DMnlIkh4.mjs.map +1 -0
- package/dist/runtime.d.mts +6 -6
- package/dist/runtime.mjs +3 -3
- package/dist/{search-dOGEccMa.mjs → search-DkN-BqsS.mjs} +164 -92
- package/dist/search-DkN-BqsS.mjs.map +1 -0
- package/dist/{secrets-CW3reAnU.mjs → secrets-CZ8rxLX3.mjs} +3 -3
- package/dist/{secrets-CW3reAnU.mjs.map → secrets-CZ8rxLX3.mjs.map} +1 -1
- package/dist/seed/index.d.mts +2 -2
- package/dist/seed/index.mjs +15 -14
- package/dist/seo/index.d.mts +1 -1
- package/dist/storage/local.d.mts +1 -1
- package/dist/storage/local.mjs +1 -1
- package/dist/storage/s3.d.mts +1 -1
- package/dist/storage/s3.mjs +1 -1
- package/dist/taxonomies-CTtewrSQ.mjs +407 -0
- package/dist/taxonomies-CTtewrSQ.mjs.map +1 -0
- package/dist/taxonomy-DSxx2K2L.mjs +218 -0
- package/dist/taxonomy-DSxx2K2L.mjs.map +1 -0
- package/dist/{tokens-D7zMmWi2.mjs → tokens-CyRDPVW2.mjs} +2 -2
- package/dist/{tokens-D7zMmWi2.mjs.map → tokens-CyRDPVW2.mjs.map} +1 -1
- package/dist/{transaction-Cn2rjY78.mjs → transaction-D44LBXvU.mjs} +1 -1
- package/dist/{transaction-Cn2rjY78.mjs.map → transaction-D44LBXvU.mjs.map} +1 -1
- package/dist/{transport-DNEfeMaU.d.mts → transport-DX_5rpsq.d.mts} +1 -1
- package/dist/{transport-DNEfeMaU.d.mts.map → transport-DX_5rpsq.d.mts.map} +1 -1
- package/dist/{transport-BeMCmin1.mjs → transport-xpzIjCIB.mjs} +1 -1
- package/dist/{transport-BeMCmin1.mjs.map → transport-xpzIjCIB.mjs.map} +1 -1
- package/dist/{types-CRxNbK-Z.mjs → types-BIgulNsW.mjs} +2 -2
- package/dist/{types-CRxNbK-Z.mjs.map → types-BIgulNsW.mjs.map} +1 -1
- package/dist/{types-CJsYGpco.d.mts → types-B_CXXnzh.d.mts} +1 -1
- package/dist/{types-CJsYGpco.d.mts.map → types-B_CXXnzh.d.mts.map} +1 -1
- package/dist/{types-M78DQ1lx.d.mts → types-C-aFbqmA.d.mts} +1 -1
- package/dist/{types-M78DQ1lx.d.mts.map → types-C-aFbqmA.d.mts.map} +1 -1
- package/dist/{types-4fVtCIm0.mjs → types-CoO6mpV3.mjs} +1 -1
- package/dist/{types-4fVtCIm0.mjs.map → types-CoO6mpV3.mjs.map} +1 -1
- package/dist/{types-BuBIptGk.d.mts → types-D19uBYWn.d.mts} +149 -4
- package/dist/types-D19uBYWn.d.mts.map +1 -0
- package/dist/{types-BSyXeCFW.d.mts → types-Dl1fgFjn.d.mts} +1 -1
- package/dist/{types-BSyXeCFW.d.mts.map → types-Dl1fgFjn.d.mts.map} +1 -1
- package/dist/{types-CrtWgIvl.d.mts → types-Dtx1mSMX.d.mts} +9 -1
- package/dist/types-Dtx1mSMX.d.mts.map +1 -0
- package/dist/{types-CIOg5AR8.mjs → types-Eg829jj9.mjs} +1 -1
- package/dist/{types-CIOg5AR8.mjs.map → types-Eg829jj9.mjs.map} +1 -1
- package/dist/{types-CDbKp7ND.mjs → types-K-EkEQCI.mjs} +1 -1
- package/dist/{types-CDbKp7ND.mjs.map → types-K-EkEQCI.mjs.map} +1 -1
- package/dist/{validate-Baqf0slj.mjs → validate-CBIbxM3L.mjs} +14 -10
- package/dist/validate-CBIbxM3L.mjs.map +1 -0
- package/dist/{validate-BfQh_C_y.d.mts → validate-DHGwADqO.d.mts} +18 -5
- package/dist/validate-DHGwADqO.d.mts.map +1 -0
- package/dist/{validation-BfEI7tNe.mjs → validation-B1NYiEos.mjs} +5 -5
- package/dist/{validation-BfEI7tNe.mjs.map → validation-B1NYiEos.mjs.map} +1 -1
- package/dist/version-CMD42IRC.mjs +7 -0
- package/dist/{version-DoxrVdYf.mjs.map → version-CMD42IRC.mjs.map} +1 -1
- package/dist/{zod-generator-CC0xNe_K.mjs → zod-generator-BNJDQBSZ.mjs} +8 -3
- package/dist/zod-generator-BNJDQBSZ.mjs.map +1 -0
- package/package.json +6 -6
- package/src/api/handlers/content.ts +11 -0
- package/src/api/handlers/dashboard.ts +29 -36
- package/src/api/handlers/menus.ts +256 -75
- package/src/api/handlers/taxonomies.ts +273 -97
- package/src/api/schemas/common.ts +7 -0
- package/src/api/schemas/menus.ts +23 -0
- package/src/api/schemas/taxonomies.ts +39 -0
- package/src/astro/integration/routes.ts +10 -0
- package/src/astro/routes/api/content/[collection]/[id]/permanent.ts +1 -1
- package/src/astro/routes/api/import/wordpress/rewrite-url-helpers.ts +196 -0
- package/src/astro/routes/api/import/wordpress/rewrite-urls.ts +9 -177
- package/src/astro/routes/api/menus/[name]/items.ts +16 -6
- package/src/astro/routes/api/menus/[name]/reorder.ts +8 -3
- package/src/astro/routes/api/menus/[name]/translations.ts +82 -0
- package/src/astro/routes/api/menus/[name].ts +19 -10
- package/src/astro/routes/api/menus/index.ts +9 -6
- package/src/astro/routes/api/taxonomies/[name]/terms/[slug]/translations.ts +89 -0
- package/src/astro/routes/api/taxonomies/[name]/terms/[slug].ts +22 -22
- package/src/astro/routes/api/taxonomies/[name]/terms/index.ts +11 -14
- package/src/astro/routes/api/taxonomies/index.ts +9 -6
- package/src/cli/commands/export-seed.ts +82 -21
- package/src/cli/commands/plugin-init.ts +216 -90
- package/src/database/migrations/036_i18n_menus_and_taxonomies.ts +477 -0
- package/src/database/migrations/runner.ts +2 -0
- package/src/database/repositories/content.ts +11 -0
- package/src/database/repositories/taxonomy.ts +193 -89
- package/src/database/types.ts +10 -2
- package/src/i18n/resolve.ts +37 -0
- package/src/loader.ts +49 -2
- package/src/mcp/server.ts +77 -18
- package/src/menus/index.ts +143 -124
- package/src/menus/types.ts +15 -1
- package/src/schema/zod-generator.ts +12 -2
- package/src/seed/apply.ts +140 -54
- package/src/seed/types.ts +14 -1
- package/src/seed/validate.ts +27 -13
- package/src/taxonomies/index.ts +230 -213
- package/src/taxonomies/types.ts +10 -0
- package/dist/apply-BzltprvY.mjs.map +0 -1
- package/dist/content-8lOYF0pr.mjs.map +0 -1
- package/dist/index-BFRaVcD6.d.mts.map +0 -1
- package/dist/loader-CKLbBnhK.mjs.map +0 -1
- package/dist/runner-C7ADox5q.mjs.map +0 -1
- package/dist/search-dOGEccMa.mjs.map +0 -1
- package/dist/taxonomies-ZlRtD6AG.mjs +0 -315
- package/dist/taxonomies-ZlRtD6AG.mjs.map +0 -1
- package/dist/types-BuBIptGk.d.mts.map +0 -1
- package/dist/types-CrtWgIvl.d.mts.map +0 -1
- package/dist/validate-Baqf0slj.mjs.map +0 -1
- package/dist/validate-BfQh_C_y.d.mts.map +0 -1
- package/dist/version-DoxrVdYf.mjs +0 -7
- package/dist/zod-generator-CC0xNe_K.mjs.map +0 -1
|
@@ -1,5 +1,5 @@
|
|
|
1
|
-
import { i as encodeBase64url, n as decodeBase64url } from "./base64-
|
|
2
|
-
import { t as OptionsRepository } from "./options-
|
|
1
|
+
import { i as encodeBase64url, n as decodeBase64url } from "./base64-MBPo9ozB.mjs";
|
|
2
|
+
import { t as OptionsRepository } from "./options-nPxWnrya.mjs";
|
|
3
3
|
import { sha256 } from "@oslojs/crypto/sha2";
|
|
4
4
|
import { encodeHexLowerCase } from "@oslojs/encoding";
|
|
5
5
|
|
|
@@ -311,4 +311,4 @@ function readDefaultEnv() {
|
|
|
311
311
|
|
|
312
312
|
//#endregion
|
|
313
313
|
export { validateEncryptionKeyAtStartup as a, resolveSecretsCached as i, fingerprintKey as n, generateEncryptionKey as r, EmDashSecretsError as t };
|
|
314
|
-
//# sourceMappingURL=secrets-
|
|
314
|
+
//# sourceMappingURL=secrets-CZ8rxLX3.mjs.map
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"secrets-CW3reAnU.mjs","names":[],"sources":["../src/config/secrets.ts"],"sourcesContent":["/**\n * Centralized secrets module\n *\n * Single source of truth for site-level cryptographic secrets:\n *\n * - `EMDASH_ENCRYPTION_KEY` — primary key for encrypting plugin secrets at\n * rest. Multi-key (comma-separated) for rotation forward-compat. v1 ships\n * single-key. Format: `emdash_enc_v1_<43 base64url chars>` representing\n * 32 random bytes. **Operator-provided; never stored in the database.**\n * Losing the key means losing every secret encrypted with it. Validated\n * at runtime startup via `validateEncryptionKeyAtStartup` — request-time\n * resolution does not depend on it, so a malformed key can't 500 the\n * preview/comment hot paths for unrelated visitors.\n * - `EMDASH_IP_SALT` (optional) / DB-stored `emdash:ip_salt` — site-specific\n * salt for hashing commenter IPs. Generated and persisted on first need\n * if no env override is set. Replaces the previous hardcoded\n * `\"emdash-ip-salt\"` constant which was correlatable across installs.\n * - `EMDASH_PREVIEW_SECRET` (optional) / DB-stored `emdash:preview_secret` —\n * HMAC secret for signing preview URLs. Generated and persisted on first\n * need if no env override is set. Replaces the previous empty-string\n * fallback which silently disabled preview-token verification.\n *\n * The `EMDASH_AUTH_SECRET` env var is consulted only as a legacy fallback\n * source for the IP salt — that's the only path the prior code actually\n * read it from. 
New deployments don't need to set it.\n *\n * Modeled on `resolveS3Config` in `../storage/s3.ts`.\n */\n\nimport { sha256 } from \"@oslojs/crypto/sha2\";\nimport { encodeHexLowerCase } from \"@oslojs/encoding\";\nimport type { Kysely } from \"kysely\";\n\nimport { OptionsRepository } from \"../database/repositories/options.js\";\nimport type { Database } from \"../database/types.js\";\nimport { decodeBase64url, encodeBase64url } from \"../utils/base64.js\";\n\n/** v1 encryption key prefix. Bumping requires a separate KDF version. */\nexport const ENCRYPTION_KEY_PREFIX = \"emdash_enc_v1_\";\n\n/** 32 random bytes encoded as unpadded base64url = 43 chars. */\nconst ENCRYPTION_KEY_BODY_LENGTH = 43;\n\nconst REGEX_META_PATTERN = /[.*+?^${}()|[\\]\\\\]/g;\n\n/**\n * Built from the prefix constant via interpolation. The prefix has no regex\n * metacharacters today (`emdash_enc_v1_`), but escaping is cheap defense\n * against anyone changing the prefix in a future bump without remembering.\n */\nconst ENCRYPTION_KEY_PATTERN = new RegExp(\n\t`^${ENCRYPTION_KEY_PREFIX.replace(REGEX_META_PATTERN, \"\\\\$&\")}[A-Za-z0-9_-]{${ENCRYPTION_KEY_BODY_LENGTH}}$`,\n);\n\n/** Options-table key for the persisted commenter-IP salt. */\nexport const IP_SALT_OPTION_KEY = \"emdash:ip_salt\";\n\n/** Options-table key for the persisted preview HMAC secret. */\nexport const PREVIEW_SECRET_OPTION_KEY = \"emdash:preview_secret\";\n\n/** Length in bytes of generated values. 32 bytes = 256 bits. */\nconst GENERATED_SECRET_BYTES = 32;\n\n/**\n * A parsed encryption key with its kid (key id) fingerprint.\n *\n * `kid` is the first 8 chars of the SHA-256 hash of the decoded key bytes\n * (lowercase hex), used to tag envelopes so the decryptor can pick the right\n * key during rotation.\n */\nexport interface ParsedEncryptionKey {\n\t/** 8-char lowercase hex fingerprint derived from the decoded key bytes. */\n\tkid: string;\n\t/** The 32 raw key bytes, ready for `crypto.subtle.importKey`. 
*/\n\tkey: Uint8Array;\n\t/** The original env-var-formatted string (kept for re-emit; never log). */\n\traw: string;\n}\n\n/** Resolved site secrets. */\nexport interface ResolvedSecrets {\n\t/** HMAC secret for preview URLs. Always non-empty after resolution. */\n\tpreviewSecret: string;\n\t/**\n\t * Source of `previewSecret`. Useful for diagnostics; never expose the\n\t * value itself, only the source.\n\t */\n\tpreviewSecretSource: \"env\" | \"db\";\n\t/** Salt for hashing commenter IPs. Always non-empty after resolution. */\n\tipSalt: string;\n\t/** Source of `ipSalt`. */\n\tipSaltSource: \"env\" | \"db\";\n}\n\n/** Inputs for `resolveSecrets`. */\nexport interface ResolveSecretsOptions {\n\t/**\n\t * The Kysely DB used to persist (and read back) generated salt/preview\n\t * secret values. Required — these values must be stable across requests\n\t * within a deployment.\n\t */\n\tdb: Kysely<Database>;\n\t/**\n\t * Optional explicit env override map. When omitted, falls back to\n\t * `import.meta.env` via the global accessor below. Tests pass an\n\t * explicit map to avoid leaking process state.\n\t */\n\tenv?: SecretsEnv;\n\t/**\n\t * @internal Test seam: inject a custom OptionsRepository to exercise\n\t * the lost-race re-read branch. Production callers never set this.\n\t */\n\t_repo?: OptionsRepository;\n}\n\n/** Environment-variable shape consulted by the resolver. */\nexport interface SecretsEnv {\n\t/**\n\t * Read by `validateEncryptionKeyAtStartup` and (in a follow-up PR) by the\n\t * plugin-secret encryption layer. **Not** consulted by `resolveSecrets`,\n\t * so a malformed value can't 500 the preview/comment hot paths.\n\t */\n\tEMDASH_ENCRYPTION_KEY?: string;\n\tEMDASH_PREVIEW_SECRET?: string;\n\t/** Legacy alias; new docs point at EMDASH_PREVIEW_SECRET. */\n\tPREVIEW_SECRET?: string;\n\tEMDASH_IP_SALT?: string;\n\t/**\n\t * Legacy fallback. Prior code derived the IP salt from\n\t * `EMDASH_AUTH_SECRET || AUTH_SECRET || \"emdash-ip-salt\"`. 
We preserve\n\t * the env-var fallback (so existing installs keep their stable salt)\n\t * but no longer read it from `import.meta.env` in route handlers.\n\t */\n\tEMDASH_AUTH_SECRET?: string;\n\t/** Legacy alias. */\n\tAUTH_SECRET?: string;\n}\n\n/**\n * Class of validation failures raised by this module.\n *\n * Errors here are operator-facing config problems (malformed key, etc.).\n * They are thrown rather than soft-skipped so misconfiguration fails loudly\n * at startup instead of silently degrading at request time.\n */\nexport class EmDashSecretsError extends Error {\n\toverride readonly name = \"EmDashSecretsError\";\n\treadonly code: string;\n\n\tconstructor(message: string, code: string) {\n\t\tsuper(message);\n\t\tthis.code = code;\n\t}\n}\n\n// ---------------------------------------------------------------------------\n// Encryption key parsing\n// ---------------------------------------------------------------------------\n\n/**\n * Parse the `EMDASH_ENCRYPTION_KEY` env var.\n *\n * Accepts a single key or a comma-separated list. The first entry is the\n * primary (used for new writes); all entries are tried for decryption,\n * matched by `kid`. Whitespace around commas is tolerated. Empty entries\n * (e.g. trailing comma) are ignored.\n *\n * Returns `null` for an unset/empty input. 
Throws `EmDashSecretsError` on\n * any malformed entry — silent skipping would mask deployment mistakes.\n */\nexport async function parseEncryptionKeys(\n\traw: string | undefined,\n): Promise<ParsedEncryptionKey[] | null> {\n\tif (!raw) return null;\n\n\tconst entries = raw\n\t\t.split(\",\")\n\t\t.map((entry) => entry.trim())\n\t\t.filter((entry) => entry.length > 0);\n\n\tif (entries.length === 0) return null;\n\n\tconst parsed: ParsedEncryptionKey[] = [];\n\tconst seenKids = new Set<string>();\n\n\tfor (const entry of entries) {\n\t\tif (!ENCRYPTION_KEY_PATTERN.test(entry)) {\n\t\t\tthrow new EmDashSecretsError(\n\t\t\t\t`EMDASH_ENCRYPTION_KEY entry is malformed (expected \"${ENCRYPTION_KEY_PREFIX}\" followed by ${ENCRYPTION_KEY_BODY_LENGTH} base64url chars). Generate one with \\`emdash secrets generate\\`.`,\n\t\t\t\t\"INVALID_ENCRYPTION_KEY\",\n\t\t\t);\n\t\t}\n\n\t\tconst body = entry.slice(ENCRYPTION_KEY_PREFIX.length);\n\t\tconst key = decodeBase64urlStrict(body);\n\t\tif (!key) {\n\t\t\tthrow new EmDashSecretsError(\n\t\t\t\t\"EMDASH_ENCRYPTION_KEY body is not valid base64url\",\n\t\t\t\t\"INVALID_ENCRYPTION_KEY\",\n\t\t\t);\n\t\t}\n\t\tif (key.length !== GENERATED_SECRET_BYTES) {\n\t\t\tthrow new EmDashSecretsError(\n\t\t\t\t`EMDASH_ENCRYPTION_KEY must decode to ${GENERATED_SECRET_BYTES} bytes, got ${key.length}`,\n\t\t\t\t\"INVALID_ENCRYPTION_KEY\",\n\t\t\t);\n\t\t}\n\n\t\t// Reject non-canonical base64url. 43 chars decode to 32 bytes but\n\t\t// the last char only carries 2 information bits — multiple raw\n\t\t// strings can decode to the same bytes. Forcing canonical form\n\t\t// guarantees `kid` (derived from bytes) is stable per key\n\t\t// material, regardless of how the operator pasted it.\n\t\tconst canonical = encodeBase64url(key);\n\t\tif (canonical !== body) {\n\t\t\tthrow new EmDashSecretsError(\n\t\t\t\t\"EMDASH_ENCRYPTION_KEY body is not canonical base64url. 
Generate one with `emdash secrets generate`.\",\n\t\t\t\t\"INVALID_ENCRYPTION_KEY\",\n\t\t\t);\n\t\t}\n\n\t\tconst kid = fingerprintKeyBytes(key);\n\t\tif (seenKids.has(kid)) {\n\t\t\t// Duplicate keys are user error (paste mistake during rotation).\n\t\t\t// We dedupe rather than throw — the rotation flow is forgiving.\n\t\t\tcontinue;\n\t\t}\n\t\tseenKids.add(kid);\n\t\tparsed.push({ kid, key, raw: entry });\n\t}\n\n\t// `parsed` always has at least one entry here: `entries` was non-empty\n\t// after filtering, the loop runs at least once, the first iteration\n\t// always passes the empty-`seenKids` check.\n\treturn parsed;\n}\n\n/**\n * Compute the kid for a raw key string (the env-var form including the\n * `emdash_enc_v1_` prefix). Public so the CLI's `fingerprint` subcommand\n * and admin endpoints can show kids without exposing raw keys.\n *\n * The kid is derived from the decoded key **bytes**, not the raw string,\n * so admin endpoints / future rotation flows can match envelope kids\n * against bytes regardless of how the env var was originally spelled.\n *\n * Validates the same shape as `parseEncryptionKeys` — including canonical\n * base64url — so the CLI can't print a kid for a key the runtime would\n * later refuse to load.\n *\n * Throws `EmDashSecretsError` for malformed or non-canonical input.\n */\nexport async function fingerprintKey(raw: string): Promise<string> {\n\tif (!ENCRYPTION_KEY_PATTERN.test(raw)) {\n\t\tthrow new EmDashSecretsError(\n\t\t\t`Key must match \"${ENCRYPTION_KEY_PREFIX}\" followed by ${ENCRYPTION_KEY_BODY_LENGTH} base64url chars`,\n\t\t\t\"INVALID_ENCRYPTION_KEY\",\n\t\t);\n\t}\n\tconst body = raw.slice(ENCRYPTION_KEY_PREFIX.length);\n\tconst bytes = decodeBase64urlStrict(body);\n\tif (!bytes || bytes.length !== GENERATED_SECRET_BYTES || encodeBase64url(bytes) !== body) {\n\t\tthrow new EmDashSecretsError(\n\t\t\t`Key body must decode to ${GENERATED_SECRET_BYTES} canonical base64url 
bytes`,\n\t\t\t\"INVALID_ENCRYPTION_KEY\",\n\t\t);\n\t}\n\treturn fingerprintKeyBytes(bytes);\n}\n\n/**\n * Internal: kid derivation from raw key bytes. The single source of truth\n * for what makes two keys \"the same key\" — used by both `parseEncryptionKeys`\n * and `fingerprintKey`.\n */\nfunction fingerprintKeyBytes(key: Uint8Array): string {\n\treturn encodeHexLowerCase(sha256(key)).slice(0, 8);\n}\n\n/**\n * Generate a fresh `EMDASH_ENCRYPTION_KEY` value. Used by the CLI's\n * `secrets generate` subcommand and by `create-emdash` scaffolding.\n */\nexport function generateEncryptionKey(): string {\n\tconst bytes = new Uint8Array(GENERATED_SECRET_BYTES);\n\tcrypto.getRandomValues(bytes);\n\treturn `${ENCRYPTION_KEY_PREFIX}${encodeBase64url(bytes)}`;\n}\n\n// ---------------------------------------------------------------------------\n// Site-secret resolution (DB-backed with env override)\n// ---------------------------------------------------------------------------\n\n/**\n * Resolve site secrets. Reads env vars; for IP salt and preview secret,\n * falls back to a DB-stored value, generating one atomically on first need.\n *\n * Idempotent. Concurrent callers race on the atomic `setIfAbsent`; whichever\n * wins, all callers converge on the same stored value.\n *\n * Note: `EMDASH_ENCRYPTION_KEY` is **not** consumed here. It's validated\n * separately at runtime startup (see `validateEncryptionKeyAtStartup`) so a\n * malformed key can't take down preview-token verification or comment\n * submission for unrelated visitors. Future plugin-secret encryption code\n * will read it via its own dedicated helper.\n */\nexport async function resolveSecrets(options: ResolveSecretsOptions): Promise<ResolvedSecrets> {\n\tconst env = options.env ?? readDefaultEnv();\n\tconst repo = options._repo ?? 
new OptionsRepository(options.db);\n\n\tconst previewEnvOverride = pickFirstNonEmpty(env.EMDASH_PREVIEW_SECRET, env.PREVIEW_SECRET);\n\tconst ipSaltEnvOverride = pickFirstNonEmpty(\n\t\tenv.EMDASH_IP_SALT,\n\t\tenv.EMDASH_AUTH_SECRET,\n\t\tenv.AUTH_SECRET,\n\t);\n\n\tconst [previewSecret, ipSalt] = await Promise.all([\n\t\tpreviewEnvOverride !== null\n\t\t\t? Promise.resolve({ value: previewEnvOverride, source: \"env\" as const })\n\t\t\t: ensureGeneratedOption(repo, PREVIEW_SECRET_OPTION_KEY),\n\t\tipSaltEnvOverride !== null\n\t\t\t? Promise.resolve({ value: ipSaltEnvOverride, source: \"env\" as const })\n\t\t\t: ensureGeneratedOption(repo, IP_SALT_OPTION_KEY),\n\t]);\n\n\treturn {\n\t\tpreviewSecret: previewSecret.value,\n\t\tpreviewSecretSource: previewSecret.source,\n\t\tipSalt: ipSalt.value,\n\t\tipSaltSource: ipSalt.source,\n\t};\n}\n\n/**\n * Validate `EMDASH_ENCRYPTION_KEY` once at runtime startup. Logs an\n * operator-facing error if the value is malformed but does **not** throw —\n * the key is currently inert (no consumers), and the follow-up PR that\n * actually uses it will throw at point of use. This way, deployment\n * mistakes surface immediately in startup logs without wedging unrelated\n * request paths in the meantime.\n *\n * Returns `true` if the key is unset or valid, `false` if it was malformed.\n */\nexport async function validateEncryptionKeyAtStartup(env?: SecretsEnv): Promise<boolean> {\n\tconst resolved = env ?? readDefaultEnv();\n\ttry {\n\t\tawait parseEncryptionKeys(resolved.EMDASH_ENCRYPTION_KEY);\n\t\treturn true;\n\t} catch (error) {\n\t\tif (error instanceof EmDashSecretsError) {\n\t\t\tconsole.error(\n\t\t\t\t`[emdash] EMDASH_ENCRYPTION_KEY is invalid: ${error.message} ` +\n\t\t\t\t\t\"Plugin-secret encryption will fail once it ships. 
\" +\n\t\t\t\t\t\"Generate a fresh key with `emdash secrets generate`.\",\n\t\t\t);\n\t\t\treturn false;\n\t\t}\n\t\tthrow error;\n\t}\n}\n\n/**\n * Per-DB cache of resolved secrets, keyed by Kysely instance identity.\n *\n * The resolved values are stable for the lifetime of the deployment (env\n * vars don't change without a restart, and DB-stored values are written\n * once via `setIfAbsent`). Caching avoids one options-table read per\n * request on the hot paths (preview verification, comment hashing).\n *\n * Lives on `globalThis` so module-duplication during SSR bundling can't\n * fragment the cache. See `request-context.ts` for the same pattern.\n */\n// Versioned to prevent cache fragmentation if `ResolvedSecrets`'s shape\n// ever changes. Bump the suffix on incompatible changes so a co-resident\n// older build doesn't read a newer-shape value.\nconst SECRETS_CACHE_KEY = Symbol.for(\"@emdash-cms/core/secrets-cache@1\");\n\ninterface SecretsCacheHolder {\n\tcache: WeakMap<Kysely<Database>, Promise<ResolvedSecrets>>;\n}\n\nfunction getSecretsCache(): WeakMap<Kysely<Database>, Promise<ResolvedSecrets>> {\n\t// eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- globalThis singleton pattern\n\tconst holder = globalThis as Record<symbol, SecretsCacheHolder | undefined>;\n\tlet entry = holder[SECRETS_CACHE_KEY];\n\tif (!entry) {\n\t\tentry = { cache: new WeakMap() };\n\t\tholder[SECRETS_CACHE_KEY] = entry;\n\t}\n\treturn entry.cache;\n}\n\n/**\n * Memoized wrapper around `resolveSecrets`. 
Use this from request-time hot\n * paths (preview verification, comment IP hashing) so they don't reread\n * env / re-query options on every request.\n *\n * The cache is keyed by `Kysely` instance, so playground / per-DO / per-test\n * databases each get their own resolution.\n */\nexport function resolveSecretsCached(db: Kysely<Database>): Promise<ResolvedSecrets> {\n\tconst cache = getSecretsCache();\n\tconst cached = cache.get(db);\n\tif (cached) return cached;\n\tconst promise = resolveSecrets({ db }).catch((error) => {\n\t\t// Don't poison the cache on transient failure; next caller retries.\n\t\tcache.delete(db);\n\t\tthrow error;\n\t});\n\tcache.set(db, promise);\n\treturn promise;\n}\n\n/**\n * Test-only helper: clear the secrets cache. Tests that mutate env between\n * cases need this so a stale resolution doesn't leak across cases.\n *\n * @internal\n */\nexport function _clearSecretsCacheForTesting(): void {\n\t// eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- globalThis singleton pattern\n\tconst holder = globalThis as Record<symbol, SecretsCacheHolder | undefined>;\n\tholder[SECRETS_CACHE_KEY] = undefined;\n}\n\n// ---------------------------------------------------------------------------\n// Internals\n// ---------------------------------------------------------------------------\n\n/**\n * Read or generate-and-persist a random base64url secret stored in the\n * options table.\n *\n * Concurrency: `setIfAbsent` is an atomic INSERT...ON CONFLICT DO NOTHING.\n * On race, the loser re-reads to converge on the winner's value.\n */\nasync function ensureGeneratedOption(\n\trepo: OptionsRepository,\n\toptionKey: string,\n): Promise<{ value: string; source: \"db\" }> {\n\tconst existing = await repo.get<string>(optionKey);\n\tif (typeof existing === \"string\" && existing.length > 0) {\n\t\treturn { value: existing, source: \"db\" };\n\t}\n\n\tconst generated = generateRandomSecret();\n\tconst inserted = await 
repo.setIfAbsent(optionKey, generated);\n\tif (inserted) {\n\t\treturn { value: generated, source: \"db\" };\n\t}\n\n\t// Lost the race — another process inserted first. Re-read to pick up\n\t// the winner. If the row is somehow still missing or empty, treat that\n\t// as a real error rather than looping.\n\tconst winner = await repo.get<string>(optionKey);\n\tif (typeof winner !== \"string\" || winner.length === 0) {\n\t\tthrow new EmDashSecretsError(\n\t\t\t`Failed to persist generated secret for \"${optionKey}\"`,\n\t\t\t\"SECRET_PERSIST_FAILED\",\n\t\t);\n\t}\n\treturn { value: winner, source: \"db\" };\n}\n\n/** Generate 32 random bytes encoded as unpadded base64url. */\nfunction generateRandomSecret(): string {\n\tconst bytes = new Uint8Array(GENERATED_SECRET_BYTES);\n\tcrypto.getRandomValues(bytes);\n\treturn encodeBase64url(bytes);\n}\n\n/** Return the first non-empty string from `values`, or `null` if all are empty. */\nfunction pickFirstNonEmpty(...values: (string | undefined)[]): string | null {\n\tfor (const value of values) {\n\t\tif (typeof value === \"string\" && value.length > 0) {\n\t\t\treturn value;\n\t\t}\n\t}\n\treturn null;\n}\n\nconst BASE64URL_CHARSET_PATTERN = /^[A-Za-z0-9_-]+$/;\n\n/**\n * Validate base64url shape and decode. Returns `null` on malformed input\n * (rather than throwing) so the caller can produce a config-specific error.\n */\nfunction decodeBase64urlStrict(input: string): Uint8Array | null {\n\t// `decodeBase64url` accepts padded input too; the env-var format is\n\t// strictly unpadded base64url, so we do a charset check first.\n\tif (!BASE64URL_CHARSET_PATTERN.test(input)) return null;\n\ttry {\n\t\treturn decodeBase64url(input);\n\t} catch {\n\t\treturn null;\n\t}\n}\n\n/**\n * Default env reader.\n *\n * Note: this is the **only** code path in core that reads both\n * `import.meta.env` and `process.env`. 
Route handlers should not — they\n * always run inside the Astro/Vite bundle where `import.meta.env` is\n * the correct source. This resolver is shared with the CLI surface (via\n * `cli/commands/secrets.ts`) which runs outside the bundle, so we\n * deliberately consult both. `import.meta.env` wins so build-time\n * substitutions are honored when present.\n *\n * The convention documented in AGENTS.md (\"import.meta.env.EMDASH_X ||\n * import.meta.env.X\") is the route-handler convention; this is the\n * shared-with-CLI exception.\n */\nfunction readDefaultEnv(): SecretsEnv {\n\t// eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- import.meta.env is loose by design\n\tconst meta = (import.meta.env ?? {}) as Record<string, string | undefined>;\n\tconst proc = typeof process !== \"undefined\" && process.env ? process.env : {};\n\n\treturn {\n\t\tEMDASH_ENCRYPTION_KEY: meta.EMDASH_ENCRYPTION_KEY ?? proc.EMDASH_ENCRYPTION_KEY,\n\t\tEMDASH_PREVIEW_SECRET: meta.EMDASH_PREVIEW_SECRET ?? proc.EMDASH_PREVIEW_SECRET,\n\t\tPREVIEW_SECRET: meta.PREVIEW_SECRET ?? proc.PREVIEW_SECRET,\n\t\tEMDASH_IP_SALT: meta.EMDASH_IP_SALT ?? proc.EMDASH_IP_SALT,\n\t\tEMDASH_AUTH_SECRET: meta.EMDASH_AUTH_SECRET ?? proc.EMDASH_AUTH_SECRET,\n\t\tAUTH_SECRET: meta.AUTH_SECRET ?? 
proc.AUTH_SECRET,\n\t};\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAsCA,MAAa,wBAAwB;;AAGrC,MAAM,6BAA6B;;;;;;AASnC,MAAM,yBAAyB,IAAI,OAClC,IAAI,sBAAsB,QARA,uBAQ4B,OAAO,CAAC,gBAAgB,2BAA2B,IACzG;;AAGD,MAAa,qBAAqB;;AAGlC,MAAa,4BAA4B;;AAGzC,MAAM,yBAAyB;;;;;;;;AAoF/B,IAAa,qBAAb,cAAwC,MAAM;CAC7C,AAAkB,OAAO;CACzB,AAAS;CAET,YAAY,SAAiB,MAAc;AAC1C,QAAM,QAAQ;AACd,OAAK,OAAO;;;;;;;;;;;;;;AAmBd,eAAsB,oBACrB,KACwC;AACxC,KAAI,CAAC,IAAK,QAAO;CAEjB,MAAM,UAAU,IACd,MAAM,IAAI,CACV,KAAK,UAAU,MAAM,MAAM,CAAC,CAC5B,QAAQ,UAAU,MAAM,SAAS,EAAE;AAErC,KAAI,QAAQ,WAAW,EAAG,QAAO;CAEjC,MAAM,SAAgC,EAAE;CACxC,MAAM,2BAAW,IAAI,KAAa;AAElC,MAAK,MAAM,SAAS,SAAS;AAC5B,MAAI,CAAC,uBAAuB,KAAK,MAAM,CACtC,OAAM,IAAI,mBACT,uDAAuD,sBAAsB,gBAAgB,2BAA2B,oEACxH,yBACA;EAGF,MAAM,OAAO,MAAM,MAAM,GAA6B;EACtD,MAAM,MAAM,sBAAsB,KAAK;AACvC,MAAI,CAAC,IACJ,OAAM,IAAI,mBACT,qDACA,yBACA;AAEF,MAAI,IAAI,WAAW,uBAClB,OAAM,IAAI,mBACT,wCAAwC,uBAAuB,cAAc,IAAI,UACjF,yBACA;AASF,MADkB,gBAAgB,IAAI,KACpB,KACjB,OAAM,IAAI,mBACT,uGACA,yBACA;EAGF,MAAM,MAAM,oBAAoB,IAAI;AACpC,MAAI,SAAS,IAAI,IAAI,CAGpB;AAED,WAAS,IAAI,IAAI;AACjB,SAAO,KAAK;GAAE;GAAK;GAAK,KAAK;GAAO,CAAC;;AAMtC,QAAO;;;;;;;;;;;;;;;;;AAkBR,eAAsB,eAAe,KAA8B;AAClE,KAAI,CAAC,uBAAuB,KAAK,IAAI,CACpC,OAAM,IAAI,mBACT,mBAAmB,sBAAsB,gBAAgB,2BAA2B,mBACpF,yBACA;CAEF,MAAM,OAAO,IAAI,MAAM,GAA6B;CACpD,MAAM,QAAQ,sBAAsB,KAAK;AACzC,KAAI,CAAC,SAAS,MAAM,WAAW,0BAA0B,gBAAgB,MAAM,KAAK,KACnF,OAAM,IAAI,mBACT,2BAA2B,uBAAuB,6BAClD,yBACA;AAEF,QAAO,oBAAoB,MAAM;;;;;;;AAQlC,SAAS,oBAAoB,KAAyB;AACrD,QAAO,mBAAmB,OAAO,IAAI,CAAC,CAAC,MAAM,GAAG,EAAE;;;;;;AAOnD,SAAgB,wBAAgC;CAC/C,MAAM,QAAQ,IAAI,WAAW,uBAAuB;AACpD,QAAO,gBAAgB,MAAM;AAC7B,QAAO,GAAG,wBAAwB,gBAAgB,MAAM;;;;;;;;;;;;;;;AAoBzD,eAAsB,eAAe,SAA0D;CAC9F,MAAM,MAAM,QAAQ,OAAO,gBAAgB;CAC3C,MAAM,OAAO,QAAQ,SAAS,IAAI,kBAAkB,QAAQ,GAAG;CAE/D,MAAM,qBAAqB,kBAAkB,IAAI,uBAAuB,IAAI,eAAe;CAC3F,MAAM,oBAAoB,kBACzB,IAAI,gBACJ,IAAI,oBACJ,IAAI,YACJ;CAED,MAAM,CAAC,eAAe,UAAU,MAAM,QAAQ,IAAI,CACjD,uBAAuB,OACpB,QAAQ,QAAQ;EAAE,OAAO;EAAoB,QAAQ;EAAgB,CAAC,GACtE,sBAAsB,MAAM,0BAA0B,EACzD,sBAAsB,
OACnB,QAAQ,QAAQ;EAAE,OAAO;EAAmB,QAAQ;EAAgB,CAAC,GACrE,sBAAsB,MAAM,mBAAmB,CAClD,CAAC;AAEF,QAAO;EACN,eAAe,cAAc;EAC7B,qBAAqB,cAAc;EACnC,QAAQ,OAAO;EACf,cAAc,OAAO;EACrB;;;;;;;;;;;;AAaF,eAAsB,+BAA+B,KAAoC;CACxF,MAAM,WAAW,OAAO,gBAAgB;AACxC,KAAI;AACH,QAAM,oBAAoB,SAAS,sBAAsB;AACzD,SAAO;UACC,OAAO;AACf,MAAI,iBAAiB,oBAAoB;AACxC,WAAQ,MACP,8CAA8C,MAAM,QAAQ,2GAG5D;AACD,UAAO;;AAER,QAAM;;;;;;;;;;;;;;AAkBR,MAAM,oBAAoB,OAAO,IAAI,mCAAmC;AAMxE,SAAS,kBAAuE;CAE/E,MAAM,SAAS;CACf,IAAI,QAAQ,OAAO;AACnB,KAAI,CAAC,OAAO;AACX,UAAQ,EAAE,uBAAO,IAAI,SAAS,EAAE;AAChC,SAAO,qBAAqB;;AAE7B,QAAO,MAAM;;;;;;;;;;AAWd,SAAgB,qBAAqB,IAAgD;CACpF,MAAM,QAAQ,iBAAiB;CAC/B,MAAM,SAAS,MAAM,IAAI,GAAG;AAC5B,KAAI,OAAQ,QAAO;CACnB,MAAM,UAAU,eAAe,EAAE,IAAI,CAAC,CAAC,OAAO,UAAU;AAEvD,QAAM,OAAO,GAAG;AAChB,QAAM;GACL;AACF,OAAM,IAAI,IAAI,QAAQ;AACtB,QAAO;;;;;;;;;AA0BR,eAAe,sBACd,MACA,WAC2C;CAC3C,MAAM,WAAW,MAAM,KAAK,IAAY,UAAU;AAClD,KAAI,OAAO,aAAa,YAAY,SAAS,SAAS,EACrD,QAAO;EAAE,OAAO;EAAU,QAAQ;EAAM;CAGzC,MAAM,YAAY,sBAAsB;AAExC,KADiB,MAAM,KAAK,YAAY,WAAW,UAAU,CAE5D,QAAO;EAAE,OAAO;EAAW,QAAQ;EAAM;CAM1C,MAAM,SAAS,MAAM,KAAK,IAAY,UAAU;AAChD,KAAI,OAAO,WAAW,YAAY,OAAO,WAAW,EACnD,OAAM,IAAI,mBACT,2CAA2C,UAAU,IACrD,wBACA;AAEF,QAAO;EAAE,OAAO;EAAQ,QAAQ;EAAM;;;AAIvC,SAAS,uBAA+B;CACvC,MAAM,QAAQ,IAAI,WAAW,uBAAuB;AACpD,QAAO,gBAAgB,MAAM;AAC7B,QAAO,gBAAgB,MAAM;;;AAI9B,SAAS,kBAAkB,GAAG,QAA+C;AAC5E,MAAK,MAAM,SAAS,OACnB,KAAI,OAAO,UAAU,YAAY,MAAM,SAAS,EAC/C,QAAO;AAGT,QAAO;;AAGR,MAAM,4BAA4B;;;;;AAMlC,SAAS,sBAAsB,OAAkC;AAGhE,KAAI,CAAC,0BAA0B,KAAK,MAAM,CAAE,QAAO;AACnD,KAAI;AACH,SAAO,gBAAgB,MAAM;SACtB;AACP,SAAO;;;;;;;;;;;;;;;;;;AAmBT,SAAS,iBAA6B;CAErC,MAAM,OAAQ,OAAO,KAAK,OAAO,EAAE;CACnC,MAAM,OAAO,OAAO,YAAY,eAAe,QAAQ,MAAM,QAAQ,MAAM,EAAE;AAE7E,QAAO;EACN,uBAAuB,KAAK,yBAAyB,KAAK;EAC1D,uBAAuB,KAAK,yBAAyB,KAAK;EAC1D,gBAAgB,KAAK,kBAAkB,KAAK;EAC5C,gBAAgB,KAAK,kBAAkB,KAAK;EAC5C,oBAAoB,KAAK,sBAAsB,KAAK;EACpD,aAAa,KAAK,eAAe,KAAK;EACtC"}
|
|
1
|
+
{"version":3,"file":"secrets-CZ8rxLX3.mjs","names":[],"sources":["../src/config/secrets.ts"],"sourcesContent":["/**\n * Centralized secrets module\n *\n * Single source of truth for site-level cryptographic secrets:\n *\n * - `EMDASH_ENCRYPTION_KEY` — primary key for encrypting plugin secrets at\n * rest. Multi-key (comma-separated) for rotation forward-compat. v1 ships\n * single-key. Format: `emdash_enc_v1_<43 base64url chars>` representing\n * 32 random bytes. **Operator-provided; never stored in the database.**\n * Losing the key means losing every secret encrypted with it. Validated\n * at runtime startup via `validateEncryptionKeyAtStartup` — request-time\n * resolution does not depend on it, so a malformed key can't 500 the\n * preview/comment hot paths for unrelated visitors.\n * - `EMDASH_IP_SALT` (optional) / DB-stored `emdash:ip_salt` — site-specific\n * salt for hashing commenter IPs. Generated and persisted on first need\n * if no env override is set. Replaces the previous hardcoded\n * `\"emdash-ip-salt\"` constant which was correlatable across installs.\n * - `EMDASH_PREVIEW_SECRET` (optional) / DB-stored `emdash:preview_secret` —\n * HMAC secret for signing preview URLs. Generated and persisted on first\n * need if no env override is set. Replaces the previous empty-string\n * fallback which silently disabled preview-token verification.\n *\n * The `EMDASH_AUTH_SECRET` env var is consulted only as a legacy fallback\n * source for the IP salt — that's the only path the prior code actually\n * read it from. 
New deployments don't need to set it.\n *\n * Modeled on `resolveS3Config` in `../storage/s3.ts`.\n */\n\nimport { sha256 } from \"@oslojs/crypto/sha2\";\nimport { encodeHexLowerCase } from \"@oslojs/encoding\";\nimport type { Kysely } from \"kysely\";\n\nimport { OptionsRepository } from \"../database/repositories/options.js\";\nimport type { Database } from \"../database/types.js\";\nimport { decodeBase64url, encodeBase64url } from \"../utils/base64.js\";\n\n/** v1 encryption key prefix. Bumping requires a separate KDF version. */\nexport const ENCRYPTION_KEY_PREFIX = \"emdash_enc_v1_\";\n\n/** 32 random bytes encoded as unpadded base64url = 43 chars. */\nconst ENCRYPTION_KEY_BODY_LENGTH = 43;\n\nconst REGEX_META_PATTERN = /[.*+?^${}()|[\\]\\\\]/g;\n\n/**\n * Built from the prefix constant via interpolation. The prefix has no regex\n * metacharacters today (`emdash_enc_v1_`), but escaping is cheap defense\n * against anyone changing the prefix in a future bump without remembering.\n */\nconst ENCRYPTION_KEY_PATTERN = new RegExp(\n\t`^${ENCRYPTION_KEY_PREFIX.replace(REGEX_META_PATTERN, \"\\\\$&\")}[A-Za-z0-9_-]{${ENCRYPTION_KEY_BODY_LENGTH}}$`,\n);\n\n/** Options-table key for the persisted commenter-IP salt. */\nexport const IP_SALT_OPTION_KEY = \"emdash:ip_salt\";\n\n/** Options-table key for the persisted preview HMAC secret. */\nexport const PREVIEW_SECRET_OPTION_KEY = \"emdash:preview_secret\";\n\n/** Length in bytes of generated values. 32 bytes = 256 bits. */\nconst GENERATED_SECRET_BYTES = 32;\n\n/**\n * A parsed encryption key with its kid (key id) fingerprint.\n *\n * `kid` is the first 8 chars of the SHA-256 hash of the decoded key bytes\n * (lowercase hex), used to tag envelopes so the decryptor can pick the right\n * key during rotation.\n */\nexport interface ParsedEncryptionKey {\n\t/** 8-char lowercase hex fingerprint derived from the decoded key bytes. */\n\tkid: string;\n\t/** The 32 raw key bytes, ready for `crypto.subtle.importKey`. 
*/\n\tkey: Uint8Array;\n\t/** The original env-var-formatted string (kept for re-emit; never log). */\n\traw: string;\n}\n\n/** Resolved site secrets. */\nexport interface ResolvedSecrets {\n\t/** HMAC secret for preview URLs. Always non-empty after resolution. */\n\tpreviewSecret: string;\n\t/**\n\t * Source of `previewSecret`. Useful for diagnostics; never expose the\n\t * value itself, only the source.\n\t */\n\tpreviewSecretSource: \"env\" | \"db\";\n\t/** Salt for hashing commenter IPs. Always non-empty after resolution. */\n\tipSalt: string;\n\t/** Source of `ipSalt`. */\n\tipSaltSource: \"env\" | \"db\";\n}\n\n/** Inputs for `resolveSecrets`. */\nexport interface ResolveSecretsOptions {\n\t/**\n\t * The Kysely DB used to persist (and read back) generated salt/preview\n\t * secret values. Required — these values must be stable across requests\n\t * within a deployment.\n\t */\n\tdb: Kysely<Database>;\n\t/**\n\t * Optional explicit env override map. When omitted, falls back to\n\t * `import.meta.env` via the global accessor below. Tests pass an\n\t * explicit map to avoid leaking process state.\n\t */\n\tenv?: SecretsEnv;\n\t/**\n\t * @internal Test seam: inject a custom OptionsRepository to exercise\n\t * the lost-race re-read branch. Production callers never set this.\n\t */\n\t_repo?: OptionsRepository;\n}\n\n/** Environment-variable shape consulted by the resolver. */\nexport interface SecretsEnv {\n\t/**\n\t * Read by `validateEncryptionKeyAtStartup` and (in a follow-up PR) by the\n\t * plugin-secret encryption layer. **Not** consulted by `resolveSecrets`,\n\t * so a malformed value can't 500 the preview/comment hot paths.\n\t */\n\tEMDASH_ENCRYPTION_KEY?: string;\n\tEMDASH_PREVIEW_SECRET?: string;\n\t/** Legacy alias; new docs point at EMDASH_PREVIEW_SECRET. */\n\tPREVIEW_SECRET?: string;\n\tEMDASH_IP_SALT?: string;\n\t/**\n\t * Legacy fallback. Prior code derived the IP salt from\n\t * `EMDASH_AUTH_SECRET || AUTH_SECRET || \"emdash-ip-salt\"`. 
We preserve\n\t * the env-var fallback (so existing installs keep their stable salt)\n\t * but no longer read it from `import.meta.env` in route handlers.\n\t */\n\tEMDASH_AUTH_SECRET?: string;\n\t/** Legacy alias. */\n\tAUTH_SECRET?: string;\n}\n\n/**\n * Class of validation failures raised by this module.\n *\n * Errors here are operator-facing config problems (malformed key, etc.).\n * They are thrown rather than soft-skipped so misconfiguration fails loudly\n * at startup instead of silently degrading at request time.\n */\nexport class EmDashSecretsError extends Error {\n\toverride readonly name = \"EmDashSecretsError\";\n\treadonly code: string;\n\n\tconstructor(message: string, code: string) {\n\t\tsuper(message);\n\t\tthis.code = code;\n\t}\n}\n\n// ---------------------------------------------------------------------------\n// Encryption key parsing\n// ---------------------------------------------------------------------------\n\n/**\n * Parse the `EMDASH_ENCRYPTION_KEY` env var.\n *\n * Accepts a single key or a comma-separated list. The first entry is the\n * primary (used for new writes); all entries are tried for decryption,\n * matched by `kid`. Whitespace around commas is tolerated. Empty entries\n * (e.g. trailing comma) are ignored.\n *\n * Returns `null` for an unset/empty input. 
Throws `EmDashSecretsError` on\n * any malformed entry — silent skipping would mask deployment mistakes.\n */\nexport async function parseEncryptionKeys(\n\traw: string | undefined,\n): Promise<ParsedEncryptionKey[] | null> {\n\tif (!raw) return null;\n\n\tconst entries = raw\n\t\t.split(\",\")\n\t\t.map((entry) => entry.trim())\n\t\t.filter((entry) => entry.length > 0);\n\n\tif (entries.length === 0) return null;\n\n\tconst parsed: ParsedEncryptionKey[] = [];\n\tconst seenKids = new Set<string>();\n\n\tfor (const entry of entries) {\n\t\tif (!ENCRYPTION_KEY_PATTERN.test(entry)) {\n\t\t\tthrow new EmDashSecretsError(\n\t\t\t\t`EMDASH_ENCRYPTION_KEY entry is malformed (expected \"${ENCRYPTION_KEY_PREFIX}\" followed by ${ENCRYPTION_KEY_BODY_LENGTH} base64url chars). Generate one with \\`emdash secrets generate\\`.`,\n\t\t\t\t\"INVALID_ENCRYPTION_KEY\",\n\t\t\t);\n\t\t}\n\n\t\tconst body = entry.slice(ENCRYPTION_KEY_PREFIX.length);\n\t\tconst key = decodeBase64urlStrict(body);\n\t\tif (!key) {\n\t\t\tthrow new EmDashSecretsError(\n\t\t\t\t\"EMDASH_ENCRYPTION_KEY body is not valid base64url\",\n\t\t\t\t\"INVALID_ENCRYPTION_KEY\",\n\t\t\t);\n\t\t}\n\t\tif (key.length !== GENERATED_SECRET_BYTES) {\n\t\t\tthrow new EmDashSecretsError(\n\t\t\t\t`EMDASH_ENCRYPTION_KEY must decode to ${GENERATED_SECRET_BYTES} bytes, got ${key.length}`,\n\t\t\t\t\"INVALID_ENCRYPTION_KEY\",\n\t\t\t);\n\t\t}\n\n\t\t// Reject non-canonical base64url. 43 chars decode to 32 bytes but\n\t\t// the last char only carries 2 information bits — multiple raw\n\t\t// strings can decode to the same bytes. Forcing canonical form\n\t\t// guarantees `kid` (derived from bytes) is stable per key\n\t\t// material, regardless of how the operator pasted it.\n\t\tconst canonical = encodeBase64url(key);\n\t\tif (canonical !== body) {\n\t\t\tthrow new EmDashSecretsError(\n\t\t\t\t\"EMDASH_ENCRYPTION_KEY body is not canonical base64url. 
Generate one with `emdash secrets generate`.\",\n\t\t\t\t\"INVALID_ENCRYPTION_KEY\",\n\t\t\t);\n\t\t}\n\n\t\tconst kid = fingerprintKeyBytes(key);\n\t\tif (seenKids.has(kid)) {\n\t\t\t// Duplicate keys are user error (paste mistake during rotation).\n\t\t\t// We dedupe rather than throw — the rotation flow is forgiving.\n\t\t\tcontinue;\n\t\t}\n\t\tseenKids.add(kid);\n\t\tparsed.push({ kid, key, raw: entry });\n\t}\n\n\t// `parsed` always has at least one entry here: `entries` was non-empty\n\t// after filtering, the loop runs at least once, the first iteration\n\t// always passes the empty-`seenKids` check.\n\treturn parsed;\n}\n\n/**\n * Compute the kid for a raw key string (the env-var form including the\n * `emdash_enc_v1_` prefix). Public so the CLI's `fingerprint` subcommand\n * and admin endpoints can show kids without exposing raw keys.\n *\n * The kid is derived from the decoded key **bytes**, not the raw string,\n * so admin endpoints / future rotation flows can match envelope kids\n * against bytes regardless of how the env var was originally spelled.\n *\n * Validates the same shape as `parseEncryptionKeys` — including canonical\n * base64url — so the CLI can't print a kid for a key the runtime would\n * later refuse to load.\n *\n * Throws `EmDashSecretsError` for malformed or non-canonical input.\n */\nexport async function fingerprintKey(raw: string): Promise<string> {\n\tif (!ENCRYPTION_KEY_PATTERN.test(raw)) {\n\t\tthrow new EmDashSecretsError(\n\t\t\t`Key must match \"${ENCRYPTION_KEY_PREFIX}\" followed by ${ENCRYPTION_KEY_BODY_LENGTH} base64url chars`,\n\t\t\t\"INVALID_ENCRYPTION_KEY\",\n\t\t);\n\t}\n\tconst body = raw.slice(ENCRYPTION_KEY_PREFIX.length);\n\tconst bytes = decodeBase64urlStrict(body);\n\tif (!bytes || bytes.length !== GENERATED_SECRET_BYTES || encodeBase64url(bytes) !== body) {\n\t\tthrow new EmDashSecretsError(\n\t\t\t`Key body must decode to ${GENERATED_SECRET_BYTES} canonical base64url 
bytes`,\n\t\t\t\"INVALID_ENCRYPTION_KEY\",\n\t\t);\n\t}\n\treturn fingerprintKeyBytes(bytes);\n}\n\n/**\n * Internal: kid derivation from raw key bytes. The single source of truth\n * for what makes two keys \"the same key\" — used by both `parseEncryptionKeys`\n * and `fingerprintKey`.\n */\nfunction fingerprintKeyBytes(key: Uint8Array): string {\n\treturn encodeHexLowerCase(sha256(key)).slice(0, 8);\n}\n\n/**\n * Generate a fresh `EMDASH_ENCRYPTION_KEY` value. Used by the CLI's\n * `secrets generate` subcommand and by `create-emdash` scaffolding.\n */\nexport function generateEncryptionKey(): string {\n\tconst bytes = new Uint8Array(GENERATED_SECRET_BYTES);\n\tcrypto.getRandomValues(bytes);\n\treturn `${ENCRYPTION_KEY_PREFIX}${encodeBase64url(bytes)}`;\n}\n\n// ---------------------------------------------------------------------------\n// Site-secret resolution (DB-backed with env override)\n// ---------------------------------------------------------------------------\n\n/**\n * Resolve site secrets. Reads env vars; for IP salt and preview secret,\n * falls back to a DB-stored value, generating one atomically on first need.\n *\n * Idempotent. Concurrent callers race on the atomic `setIfAbsent`; whichever\n * wins, all callers converge on the same stored value.\n *\n * Note: `EMDASH_ENCRYPTION_KEY` is **not** consumed here. It's validated\n * separately at runtime startup (see `validateEncryptionKeyAtStartup`) so a\n * malformed key can't take down preview-token verification or comment\n * submission for unrelated visitors. Future plugin-secret encryption code\n * will read it via its own dedicated helper.\n */\nexport async function resolveSecrets(options: ResolveSecretsOptions): Promise<ResolvedSecrets> {\n\tconst env = options.env ?? readDefaultEnv();\n\tconst repo = options._repo ?? 
new OptionsRepository(options.db);\n\n\tconst previewEnvOverride = pickFirstNonEmpty(env.EMDASH_PREVIEW_SECRET, env.PREVIEW_SECRET);\n\tconst ipSaltEnvOverride = pickFirstNonEmpty(\n\t\tenv.EMDASH_IP_SALT,\n\t\tenv.EMDASH_AUTH_SECRET,\n\t\tenv.AUTH_SECRET,\n\t);\n\n\tconst [previewSecret, ipSalt] = await Promise.all([\n\t\tpreviewEnvOverride !== null\n\t\t\t? Promise.resolve({ value: previewEnvOverride, source: \"env\" as const })\n\t\t\t: ensureGeneratedOption(repo, PREVIEW_SECRET_OPTION_KEY),\n\t\tipSaltEnvOverride !== null\n\t\t\t? Promise.resolve({ value: ipSaltEnvOverride, source: \"env\" as const })\n\t\t\t: ensureGeneratedOption(repo, IP_SALT_OPTION_KEY),\n\t]);\n\n\treturn {\n\t\tpreviewSecret: previewSecret.value,\n\t\tpreviewSecretSource: previewSecret.source,\n\t\tipSalt: ipSalt.value,\n\t\tipSaltSource: ipSalt.source,\n\t};\n}\n\n/**\n * Validate `EMDASH_ENCRYPTION_KEY` once at runtime startup. Logs an\n * operator-facing error if the value is malformed but does **not** throw —\n * the key is currently inert (no consumers), and the follow-up PR that\n * actually uses it will throw at point of use. This way, deployment\n * mistakes surface immediately in startup logs without wedging unrelated\n * request paths in the meantime.\n *\n * Returns `true` if the key is unset or valid, `false` if it was malformed.\n */\nexport async function validateEncryptionKeyAtStartup(env?: SecretsEnv): Promise<boolean> {\n\tconst resolved = env ?? readDefaultEnv();\n\ttry {\n\t\tawait parseEncryptionKeys(resolved.EMDASH_ENCRYPTION_KEY);\n\t\treturn true;\n\t} catch (error) {\n\t\tif (error instanceof EmDashSecretsError) {\n\t\t\tconsole.error(\n\t\t\t\t`[emdash] EMDASH_ENCRYPTION_KEY is invalid: ${error.message} ` +\n\t\t\t\t\t\"Plugin-secret encryption will fail once it ships. 
\" +\n\t\t\t\t\t\"Generate a fresh key with `emdash secrets generate`.\",\n\t\t\t);\n\t\t\treturn false;\n\t\t}\n\t\tthrow error;\n\t}\n}\n\n/**\n * Per-DB cache of resolved secrets, keyed by Kysely instance identity.\n *\n * The resolved values are stable for the lifetime of the deployment (env\n * vars don't change without a restart, and DB-stored values are written\n * once via `setIfAbsent`). Caching avoids one options-table read per\n * request on the hot paths (preview verification, comment hashing).\n *\n * Lives on `globalThis` so module-duplication during SSR bundling can't\n * fragment the cache. See `request-context.ts` for the same pattern.\n */\n// Versioned to prevent cache fragmentation if `ResolvedSecrets`'s shape\n// ever changes. Bump the suffix on incompatible changes so a co-resident\n// older build doesn't read a newer-shape value.\nconst SECRETS_CACHE_KEY = Symbol.for(\"@emdash-cms/core/secrets-cache@1\");\n\ninterface SecretsCacheHolder {\n\tcache: WeakMap<Kysely<Database>, Promise<ResolvedSecrets>>;\n}\n\nfunction getSecretsCache(): WeakMap<Kysely<Database>, Promise<ResolvedSecrets>> {\n\t// eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- globalThis singleton pattern\n\tconst holder = globalThis as Record<symbol, SecretsCacheHolder | undefined>;\n\tlet entry = holder[SECRETS_CACHE_KEY];\n\tif (!entry) {\n\t\tentry = { cache: new WeakMap() };\n\t\tholder[SECRETS_CACHE_KEY] = entry;\n\t}\n\treturn entry.cache;\n}\n\n/**\n * Memoized wrapper around `resolveSecrets`. 
Use this from request-time hot\n * paths (preview verification, comment IP hashing) so they don't reread\n * env / re-query options on every request.\n *\n * The cache is keyed by `Kysely` instance, so playground / per-DO / per-test\n * databases each get their own resolution.\n */\nexport function resolveSecretsCached(db: Kysely<Database>): Promise<ResolvedSecrets> {\n\tconst cache = getSecretsCache();\n\tconst cached = cache.get(db);\n\tif (cached) return cached;\n\tconst promise = resolveSecrets({ db }).catch((error) => {\n\t\t// Don't poison the cache on transient failure; next caller retries.\n\t\tcache.delete(db);\n\t\tthrow error;\n\t});\n\tcache.set(db, promise);\n\treturn promise;\n}\n\n/**\n * Test-only helper: clear the secrets cache. Tests that mutate env between\n * cases need this so a stale resolution doesn't leak across cases.\n *\n * @internal\n */\nexport function _clearSecretsCacheForTesting(): void {\n\t// eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- globalThis singleton pattern\n\tconst holder = globalThis as Record<symbol, SecretsCacheHolder | undefined>;\n\tholder[SECRETS_CACHE_KEY] = undefined;\n}\n\n// ---------------------------------------------------------------------------\n// Internals\n// ---------------------------------------------------------------------------\n\n/**\n * Read or generate-and-persist a random base64url secret stored in the\n * options table.\n *\n * Concurrency: `setIfAbsent` is an atomic INSERT...ON CONFLICT DO NOTHING.\n * On race, the loser re-reads to converge on the winner's value.\n */\nasync function ensureGeneratedOption(\n\trepo: OptionsRepository,\n\toptionKey: string,\n): Promise<{ value: string; source: \"db\" }> {\n\tconst existing = await repo.get<string>(optionKey);\n\tif (typeof existing === \"string\" && existing.length > 0) {\n\t\treturn { value: existing, source: \"db\" };\n\t}\n\n\tconst generated = generateRandomSecret();\n\tconst inserted = await 
repo.setIfAbsent(optionKey, generated);\n\tif (inserted) {\n\t\treturn { value: generated, source: \"db\" };\n\t}\n\n\t// Lost the race — another process inserted first. Re-read to pick up\n\t// the winner. If the row is somehow still missing or empty, treat that\n\t// as a real error rather than looping.\n\tconst winner = await repo.get<string>(optionKey);\n\tif (typeof winner !== \"string\" || winner.length === 0) {\n\t\tthrow new EmDashSecretsError(\n\t\t\t`Failed to persist generated secret for \"${optionKey}\"`,\n\t\t\t\"SECRET_PERSIST_FAILED\",\n\t\t);\n\t}\n\treturn { value: winner, source: \"db\" };\n}\n\n/** Generate 32 random bytes encoded as unpadded base64url. */\nfunction generateRandomSecret(): string {\n\tconst bytes = new Uint8Array(GENERATED_SECRET_BYTES);\n\tcrypto.getRandomValues(bytes);\n\treturn encodeBase64url(bytes);\n}\n\n/** Return the first non-empty string from `values`, or `null` if all are empty. */\nfunction pickFirstNonEmpty(...values: (string | undefined)[]): string | null {\n\tfor (const value of values) {\n\t\tif (typeof value === \"string\" && value.length > 0) {\n\t\t\treturn value;\n\t\t}\n\t}\n\treturn null;\n}\n\nconst BASE64URL_CHARSET_PATTERN = /^[A-Za-z0-9_-]+$/;\n\n/**\n * Validate base64url shape and decode. Returns `null` on malformed input\n * (rather than throwing) so the caller can produce a config-specific error.\n */\nfunction decodeBase64urlStrict(input: string): Uint8Array | null {\n\t// `decodeBase64url` accepts padded input too; the env-var format is\n\t// strictly unpadded base64url, so we do a charset check first.\n\tif (!BASE64URL_CHARSET_PATTERN.test(input)) return null;\n\ttry {\n\t\treturn decodeBase64url(input);\n\t} catch {\n\t\treturn null;\n\t}\n}\n\n/**\n * Default env reader.\n *\n * Note: this is the **only** code path in core that reads both\n * `import.meta.env` and `process.env`. 
Route handlers should not — they\n * always run inside the Astro/Vite bundle where `import.meta.env` is\n * the correct source. This resolver is shared with the CLI surface (via\n * `cli/commands/secrets.ts`) which runs outside the bundle, so we\n * deliberately consult both. `import.meta.env` wins so build-time\n * substitutions are honored when present.\n *\n * The convention documented in AGENTS.md (\"import.meta.env.EMDASH_X ||\n * import.meta.env.X\") is the route-handler convention; this is the\n * shared-with-CLI exception.\n */\nfunction readDefaultEnv(): SecretsEnv {\n\t// eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- import.meta.env is loose by design\n\tconst meta = (import.meta.env ?? {}) as Record<string, string | undefined>;\n\tconst proc = typeof process !== \"undefined\" && process.env ? process.env : {};\n\n\treturn {\n\t\tEMDASH_ENCRYPTION_KEY: meta.EMDASH_ENCRYPTION_KEY ?? proc.EMDASH_ENCRYPTION_KEY,\n\t\tEMDASH_PREVIEW_SECRET: meta.EMDASH_PREVIEW_SECRET ?? proc.EMDASH_PREVIEW_SECRET,\n\t\tPREVIEW_SECRET: meta.PREVIEW_SECRET ?? proc.PREVIEW_SECRET,\n\t\tEMDASH_IP_SALT: meta.EMDASH_IP_SALT ?? proc.EMDASH_IP_SALT,\n\t\tEMDASH_AUTH_SECRET: meta.EMDASH_AUTH_SECRET ?? proc.EMDASH_AUTH_SECRET,\n\t\tAUTH_SECRET: meta.AUTH_SECRET ?? 
proc.AUTH_SECRET,\n\t};\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAsCA,MAAa,wBAAwB;;AAGrC,MAAM,6BAA6B;;;;;;AASnC,MAAM,yBAAyB,IAAI,OAClC,IAAI,sBAAsB,QARA,uBAQ4B,OAAO,CAAC,gBAAgB,2BAA2B,IACzG;;AAGD,MAAa,qBAAqB;;AAGlC,MAAa,4BAA4B;;AAGzC,MAAM,yBAAyB;;;;;;;;AAoF/B,IAAa,qBAAb,cAAwC,MAAM;CAC7C,AAAkB,OAAO;CACzB,AAAS;CAET,YAAY,SAAiB,MAAc;AAC1C,QAAM,QAAQ;AACd,OAAK,OAAO;;;;;;;;;;;;;;AAmBd,eAAsB,oBACrB,KACwC;AACxC,KAAI,CAAC,IAAK,QAAO;CAEjB,MAAM,UAAU,IACd,MAAM,IAAI,CACV,KAAK,UAAU,MAAM,MAAM,CAAC,CAC5B,QAAQ,UAAU,MAAM,SAAS,EAAE;AAErC,KAAI,QAAQ,WAAW,EAAG,QAAO;CAEjC,MAAM,SAAgC,EAAE;CACxC,MAAM,2BAAW,IAAI,KAAa;AAElC,MAAK,MAAM,SAAS,SAAS;AAC5B,MAAI,CAAC,uBAAuB,KAAK,MAAM,CACtC,OAAM,IAAI,mBACT,uDAAuD,sBAAsB,gBAAgB,2BAA2B,oEACxH,yBACA;EAGF,MAAM,OAAO,MAAM,MAAM,GAA6B;EACtD,MAAM,MAAM,sBAAsB,KAAK;AACvC,MAAI,CAAC,IACJ,OAAM,IAAI,mBACT,qDACA,yBACA;AAEF,MAAI,IAAI,WAAW,uBAClB,OAAM,IAAI,mBACT,wCAAwC,uBAAuB,cAAc,IAAI,UACjF,yBACA;AASF,MADkB,gBAAgB,IAAI,KACpB,KACjB,OAAM,IAAI,mBACT,uGACA,yBACA;EAGF,MAAM,MAAM,oBAAoB,IAAI;AACpC,MAAI,SAAS,IAAI,IAAI,CAGpB;AAED,WAAS,IAAI,IAAI;AACjB,SAAO,KAAK;GAAE;GAAK;GAAK,KAAK;GAAO,CAAC;;AAMtC,QAAO;;;;;;;;;;;;;;;;;AAkBR,eAAsB,eAAe,KAA8B;AAClE,KAAI,CAAC,uBAAuB,KAAK,IAAI,CACpC,OAAM,IAAI,mBACT,mBAAmB,sBAAsB,gBAAgB,2BAA2B,mBACpF,yBACA;CAEF,MAAM,OAAO,IAAI,MAAM,GAA6B;CACpD,MAAM,QAAQ,sBAAsB,KAAK;AACzC,KAAI,CAAC,SAAS,MAAM,WAAW,0BAA0B,gBAAgB,MAAM,KAAK,KACnF,OAAM,IAAI,mBACT,2BAA2B,uBAAuB,6BAClD,yBACA;AAEF,QAAO,oBAAoB,MAAM;;;;;;;AAQlC,SAAS,oBAAoB,KAAyB;AACrD,QAAO,mBAAmB,OAAO,IAAI,CAAC,CAAC,MAAM,GAAG,EAAE;;;;;;AAOnD,SAAgB,wBAAgC;CAC/C,MAAM,QAAQ,IAAI,WAAW,uBAAuB;AACpD,QAAO,gBAAgB,MAAM;AAC7B,QAAO,GAAG,wBAAwB,gBAAgB,MAAM;;;;;;;;;;;;;;;AAoBzD,eAAsB,eAAe,SAA0D;CAC9F,MAAM,MAAM,QAAQ,OAAO,gBAAgB;CAC3C,MAAM,OAAO,QAAQ,SAAS,IAAI,kBAAkB,QAAQ,GAAG;CAE/D,MAAM,qBAAqB,kBAAkB,IAAI,uBAAuB,IAAI,eAAe;CAC3F,MAAM,oBAAoB,kBACzB,IAAI,gBACJ,IAAI,oBACJ,IAAI,YACJ;CAED,MAAM,CAAC,eAAe,UAAU,MAAM,QAAQ,IAAI,CACjD,uBAAuB,OACpB,QAAQ,QAAQ;EAAE,OAAO;EAAoB,QAAQ;EAAgB,CAAC,GACtE,sBAAsB,MAAM,0BAA0B,EACzD,sBAAsB,
OACnB,QAAQ,QAAQ;EAAE,OAAO;EAAmB,QAAQ;EAAgB,CAAC,GACrE,sBAAsB,MAAM,mBAAmB,CAClD,CAAC;AAEF,QAAO;EACN,eAAe,cAAc;EAC7B,qBAAqB,cAAc;EACnC,QAAQ,OAAO;EACf,cAAc,OAAO;EACrB;;;;;;;;;;;;AAaF,eAAsB,+BAA+B,KAAoC;CACxF,MAAM,WAAW,OAAO,gBAAgB;AACxC,KAAI;AACH,QAAM,oBAAoB,SAAS,sBAAsB;AACzD,SAAO;UACC,OAAO;AACf,MAAI,iBAAiB,oBAAoB;AACxC,WAAQ,MACP,8CAA8C,MAAM,QAAQ,2GAG5D;AACD,UAAO;;AAER,QAAM;;;;;;;;;;;;;;AAkBR,MAAM,oBAAoB,OAAO,IAAI,mCAAmC;AAMxE,SAAS,kBAAuE;CAE/E,MAAM,SAAS;CACf,IAAI,QAAQ,OAAO;AACnB,KAAI,CAAC,OAAO;AACX,UAAQ,EAAE,uBAAO,IAAI,SAAS,EAAE;AAChC,SAAO,qBAAqB;;AAE7B,QAAO,MAAM;;;;;;;;;;AAWd,SAAgB,qBAAqB,IAAgD;CACpF,MAAM,QAAQ,iBAAiB;CAC/B,MAAM,SAAS,MAAM,IAAI,GAAG;AAC5B,KAAI,OAAQ,QAAO;CACnB,MAAM,UAAU,eAAe,EAAE,IAAI,CAAC,CAAC,OAAO,UAAU;AAEvD,QAAM,OAAO,GAAG;AAChB,QAAM;GACL;AACF,OAAM,IAAI,IAAI,QAAQ;AACtB,QAAO;;;;;;;;;AA0BR,eAAe,sBACd,MACA,WAC2C;CAC3C,MAAM,WAAW,MAAM,KAAK,IAAY,UAAU;AAClD,KAAI,OAAO,aAAa,YAAY,SAAS,SAAS,EACrD,QAAO;EAAE,OAAO;EAAU,QAAQ;EAAM;CAGzC,MAAM,YAAY,sBAAsB;AAExC,KADiB,MAAM,KAAK,YAAY,WAAW,UAAU,CAE5D,QAAO;EAAE,OAAO;EAAW,QAAQ;EAAM;CAM1C,MAAM,SAAS,MAAM,KAAK,IAAY,UAAU;AAChD,KAAI,OAAO,WAAW,YAAY,OAAO,WAAW,EACnD,OAAM,IAAI,mBACT,2CAA2C,UAAU,IACrD,wBACA;AAEF,QAAO;EAAE,OAAO;EAAQ,QAAQ;EAAM;;;AAIvC,SAAS,uBAA+B;CACvC,MAAM,QAAQ,IAAI,WAAW,uBAAuB;AACpD,QAAO,gBAAgB,MAAM;AAC7B,QAAO,gBAAgB,MAAM;;;AAI9B,SAAS,kBAAkB,GAAG,QAA+C;AAC5E,MAAK,MAAM,SAAS,OACnB,KAAI,OAAO,UAAU,YAAY,MAAM,SAAS,EAC/C,QAAO;AAGT,QAAO;;AAGR,MAAM,4BAA4B;;;;;AAMlC,SAAS,sBAAsB,OAAkC;AAGhE,KAAI,CAAC,0BAA0B,KAAK,MAAM,CAAE,QAAO;AACnD,KAAI;AACH,SAAO,gBAAgB,MAAM;SACtB;AACP,SAAO;;;;;;;;;;;;;;;;;;AAmBT,SAAS,iBAA6B;CAErC,MAAM,OAAQ,OAAO,KAAK,OAAO,EAAE;CACnC,MAAM,OAAO,OAAO,YAAY,eAAe,QAAQ,MAAM,QAAQ,MAAM,EAAE;AAE7E,QAAO;EACN,uBAAuB,KAAK,yBAAyB,KAAK;EAC1D,uBAAuB,KAAK,yBAAyB,KAAK;EAC1D,gBAAgB,KAAK,kBAAkB,KAAK;EAC5C,gBAAgB,KAAK,kBAAkB,KAAK;EAC5C,oBAAoB,KAAK,sBAAsB,KAAK;EACpD,aAAa,KAAK,eAAe,KAAK;EACtC"}
|
package/dist/seed/index.d.mts
CHANGED
|
@@ -1,3 +1,3 @@
|
|
|
1
|
-
import "../types-
|
|
2
|
-
import { _ as SeedTaxonomyTerm, a as applySeed, b as ValidationResult, c as SeedCollection, d as SeedFile, f as SeedMenu, g as SeedTaxonomy, h as SeedSection, i as defaultSeed, l as SeedContentEntry, m as SeedRedirect, n as loadSeed, o as SeedApplyOptions, p as SeedMenuItem, r as loadUserSeed, s as SeedApplyResult, t as validateSeed, u as SeedField, v as SeedWidget, y as SeedWidgetArea } from "../validate-
|
|
1
|
+
import "../types-Dtx1mSMX.mjs";
|
|
2
|
+
import { _ as SeedTaxonomyTerm, a as applySeed, b as ValidationResult, c as SeedCollection, d as SeedFile, f as SeedMenu, g as SeedTaxonomy, h as SeedSection, i as defaultSeed, l as SeedContentEntry, m as SeedRedirect, n as loadSeed, o as SeedApplyOptions, p as SeedMenuItem, r as loadUserSeed, s as SeedApplyResult, t as validateSeed, u as SeedField, v as SeedWidget, y as SeedWidgetArea } from "../validate-DHGwADqO.mjs";
|
|
3
3
|
export { type SeedApplyOptions, type SeedApplyResult, type SeedCollection, type SeedContentEntry, type SeedField, type SeedFile, type SeedMenu, type SeedMenuItem, type SeedRedirect, type SeedSection, type SeedTaxonomy, type SeedTaxonomyTerm, type SeedWidget, type SeedWidgetArea, type ValidationResult, applySeed, defaultSeed, loadSeed, loadUserSeed, validateSeed };
|
package/dist/seed/index.mjs
CHANGED
|
@@ -1,17 +1,18 @@
|
|
|
1
1
|
import "../dialect-helpers-BKCvISIQ.mjs";
|
|
2
|
-
import "../content-
|
|
3
|
-
import "../base64-
|
|
4
|
-
import "../types-
|
|
5
|
-
import "../media-
|
|
6
|
-
import
|
|
7
|
-
import "../options-
|
|
8
|
-
import "../redirect-
|
|
9
|
-
import "../byline-
|
|
10
|
-
import "../registry-
|
|
11
|
-
import "../loader-
|
|
12
|
-
import "../request-cache-
|
|
13
|
-
import { t as
|
|
14
|
-
import { t as
|
|
15
|
-
import {
|
|
2
|
+
import "../content-C7G4QXkK.mjs";
|
|
3
|
+
import "../base64-MBPo9ozB.mjs";
|
|
4
|
+
import "../types-BIgulNsW.mjs";
|
|
5
|
+
import "../media-D8FbNsl0.mjs";
|
|
6
|
+
import "../taxonomy-DSxx2K2L.mjs";
|
|
7
|
+
import "../options-nPxWnrya.mjs";
|
|
8
|
+
import "../redirect-C5H7VGIX.mjs";
|
|
9
|
+
import "../byline-C3vnhIpU.mjs";
|
|
10
|
+
import "../registry-Beb7wxFc.mjs";
|
|
11
|
+
import "../loader-Bx2_9-5e.mjs";
|
|
12
|
+
import "../request-cache-C-tIpYIw.mjs";
|
|
13
|
+
import { t as applySeed } from "../apply-UsrFuO7l.mjs";
|
|
14
|
+
import { t as validateSeed } from "../validate-CBIbxM3L.mjs";
|
|
15
|
+
import { t as defaultSeed } from "../default-pHuz9WF6.mjs";
|
|
16
|
+
import { n as loadUserSeed, t as loadSeed } from "../load-sXRuM7Us.mjs";
|
|
16
17
|
|
|
17
18
|
export { applySeed, defaultSeed, loadSeed, loadUserSeed, validateSeed };
|
package/dist/seo/index.d.mts
CHANGED
package/dist/storage/local.d.mts
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import { a as ListOptions, d as Storage, l as SignedUploadOptions, n as DownloadResult, o as ListResult, p as UploadResult, s as LocalStorageConfig, u as SignedUploadUrl } from "../types-
|
|
1
|
+
import { a as ListOptions, d as Storage, l as SignedUploadOptions, n as DownloadResult, o as ListResult, p as UploadResult, s as LocalStorageConfig, u as SignedUploadUrl } from "../types-C-aFbqmA.mjs";
|
|
2
2
|
|
|
3
3
|
//#region src/storage/local.d.ts
|
|
4
4
|
/**
|
package/dist/storage/local.mjs
CHANGED
package/dist/storage/s3.d.mts
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import { a as ListOptions, c as S3StorageConfig, d as Storage, l as SignedUploadOptions, n as DownloadResult, o as ListResult, p as UploadResult, u as SignedUploadUrl } from "../types-
|
|
1
|
+
import { a as ListOptions, c as S3StorageConfig, d as Storage, l as SignedUploadOptions, n as DownloadResult, o as ListResult, p as UploadResult, u as SignedUploadUrl } from "../types-C-aFbqmA.mjs";
|
|
2
2
|
|
|
3
3
|
//#region src/storage/s3.d.ts
|
|
4
4
|
/**
|
package/dist/storage/s3.mjs
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import { t as EmDashStorageError } from "../types-
|
|
1
|
+
import { t as EmDashStorageError } from "../types-K-EkEQCI.mjs";
|
|
2
2
|
import { z } from "zod";
|
|
3
3
|
import { DeleteObjectCommand, GetObjectCommand, HeadObjectCommand, ListObjectsV2Command, PutObjectCommand, S3Client } from "@aws-sdk/client-s3";
|
|
4
4
|
import { getSignedUrl } from "@aws-sdk/s3-request-presigner";
|
|
@@ -0,0 +1,407 @@
|
|
|
1
|
+
import { i as __exportAll } from "./runner-DMnlIkh4.mjs";
|
|
2
|
+
import { getRequestContext } from "./request-context.mjs";
|
|
3
|
+
import { n as getI18nConfig, r as isI18nEnabled, t as getFallbackChain } from "./config-CVssduLe.mjs";
|
|
4
|
+
import { n as chunks, t as SQL_BATCH_SIZE } from "./chunks-Da2-b-oA.mjs";
|
|
5
|
+
import { t as isMissingTableError } from "./db-errors-B7P2pSCn.mjs";
|
|
6
|
+
import { r as getDb } from "./loader-Bx2_9-5e.mjs";
|
|
7
|
+
import { n as requestCached, r as setRequestCacheEntry, t as peekRequestCache } from "./request-cache-C-tIpYIw.mjs";
|
|
8
|
+
|
|
9
|
+
//#region src/i18n/resolve.ts
|
|
10
|
+
/**
|
|
11
|
+
* Shared locale-resolution helpers.
|
|
12
|
+
*
|
|
13
|
+
* Matches the pattern used by `query.ts` for content: an explicit locale wins,
|
|
14
|
+
* otherwise we fall back to the request-context locale, otherwise to
|
|
15
|
+
* `defaultLocale` when i18n is enabled, otherwise to `undefined` (meaning "do
|
|
16
|
+
* not filter by locale" — legacy single-locale behaviour).
|
|
17
|
+
*/
|
|
18
|
+
/**
|
|
19
|
+
* Resolve the locale to use for a query given an optional explicit value.
|
|
20
|
+
* Returns `undefined` when no locale information is available; callers should
|
|
21
|
+
* treat that as "do not filter by locale".
|
|
22
|
+
*/
|
|
23
|
+
function resolveLocale(explicit) {
|
|
24
|
+
if (explicit !== void 0) return explicit;
|
|
25
|
+
const ctxLocale = getRequestContext()?.locale;
|
|
26
|
+
if (ctxLocale !== void 0) return ctxLocale;
|
|
27
|
+
const cfg = getI18nConfig();
|
|
28
|
+
if (cfg && isI18nEnabled()) return cfg.defaultLocale;
|
|
29
|
+
}
|
|
30
|
+
/**
|
|
31
|
+
* Fallback chain to try when looking up a single item. When i18n is disabled
|
|
32
|
+
* or the locale is unspecified, returns a single-element array (or empty when
|
|
33
|
+
* no locale resolves) so callers can iterate uniformly.
|
|
34
|
+
*/
|
|
35
|
+
function resolveLocaleChain(explicit) {
|
|
36
|
+
const locale = resolveLocale(explicit);
|
|
37
|
+
if (locale === void 0) return [];
|
|
38
|
+
if (!isI18nEnabled()) return [locale];
|
|
39
|
+
return getFallbackChain(locale);
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
//#endregion
|
|
43
|
+
//#region src/taxonomies/index.ts
|
|
44
|
+
/**
|
|
45
|
+
* Runtime API for taxonomies.
|
|
46
|
+
*
|
|
47
|
+
* All helpers are locale-aware. When a locale is not passed explicitly we fall
|
|
48
|
+
* back to the request context or the configured `defaultLocale` (see
|
|
49
|
+
* `i18n/resolve.ts`).
|
|
50
|
+
*
|
|
51
|
+
* Because `content_taxonomies.taxonomy_id` stores the translation_group (not a
|
|
52
|
+
* specific term id), the joins here are `taxonomies.translation_group =
|
|
53
|
+
* content_taxonomies.taxonomy_id` + filter by `taxonomies.locale`, which picks
|
|
54
|
+
* the right per-locale term.
|
|
55
|
+
*/
|
|
56
|
+
var taxonomies_exports = /* @__PURE__ */ __exportAll({
|
|
57
|
+
getAllTermsForEntries: () => getAllTermsForEntries,
|
|
58
|
+
getEntriesByTerm: () => getEntriesByTerm,
|
|
59
|
+
getEntryTerms: () => getEntryTerms,
|
|
60
|
+
getTaxonomyDef: () => getTaxonomyDef,
|
|
61
|
+
getTaxonomyDefs: () => getTaxonomyDefs,
|
|
62
|
+
getTaxonomyTerms: () => getTaxonomyTerms,
|
|
63
|
+
getTerm: () => getTerm,
|
|
64
|
+
getTermsForEntries: () => getTermsForEntries,
|
|
65
|
+
invalidateTermCache: () => invalidateTermCache
|
|
66
|
+
});
|
|
67
|
+
/**
|
|
68
|
+
* No-op — kept for API compatibility.
|
|
69
|
+
*/
|
|
70
|
+
function invalidateTermCache() {}
|
|
71
|
+
/**
|
|
72
|
+
* Get every taxonomy definition. Definitions are per-locale (one row per
|
|
73
|
+
* locale inside the same translation_group) — by default we resolve to the
|
|
74
|
+
* active locale.
|
|
75
|
+
*/
|
|
76
|
+
async function getTaxonomyDefs(options = {}) {
|
|
77
|
+
const locale = resolveLocale(options.locale);
|
|
78
|
+
return requestCached(`taxonomy-defs:${locale ?? "*"}`, async () => {
|
|
79
|
+
let query = (await getDb()).selectFrom("_emdash_taxonomy_defs").selectAll();
|
|
80
|
+
if (locale !== void 0) query = query.where("locale", "=", locale);
|
|
81
|
+
return (await query.execute()).map(rowToTaxonomyDef);
|
|
82
|
+
});
|
|
83
|
+
}
|
|
84
|
+
/**
|
|
85
|
+
* Get a single taxonomy definition by name. Uses the fallback chain so even
|
|
86
|
+
* if there is no translation for the active locale we still return something.
|
|
87
|
+
*
|
|
88
|
+
* If `getTaxonomyDefs()` has already loaded the full list in this request
|
|
89
|
+
* (which happens during entry-term hydration on every page that renders a
|
|
90
|
+
* collection), search the matching def in memory rather than running a
|
|
91
|
+
* second query against `_emdash_taxonomy_defs`.
|
|
92
|
+
*/
|
|
93
|
+
async function getTaxonomyDef(name, options = {}) {
|
|
94
|
+
const chain = resolveLocaleChain(options.locale);
|
|
95
|
+
const allDefs = peekRequestCache(`taxonomy-defs:${resolveLocale(options.locale) ?? "*"}`);
|
|
96
|
+
if (allDefs) {
|
|
97
|
+
const defs = await allDefs;
|
|
98
|
+
if (chain.length === 0) return defs.find((d) => d.name === name) ?? null;
|
|
99
|
+
for (const locale of chain) {
|
|
100
|
+
const found = defs.find((d) => d.name === name && d.locale === locale);
|
|
101
|
+
if (found) return found;
|
|
102
|
+
}
|
|
103
|
+
return null;
|
|
104
|
+
}
|
|
105
|
+
return requestCached(`taxonomy-def:${name}:${chain.join(",")}`, async () => {
|
|
106
|
+
const db = await getDb();
|
|
107
|
+
if (chain.length === 0) {
|
|
108
|
+
const row = await db.selectFrom("_emdash_taxonomy_defs").selectAll().where("name", "=", name).orderBy("locale", "asc").executeTakeFirst();
|
|
109
|
+
return row ? rowToTaxonomyDef(row) : null;
|
|
110
|
+
}
|
|
111
|
+
for (const locale of chain) {
|
|
112
|
+
const row = await db.selectFrom("_emdash_taxonomy_defs").selectAll().where("name", "=", name).where("locale", "=", locale).executeTakeFirst();
|
|
113
|
+
if (row) return rowToTaxonomyDef(row);
|
|
114
|
+
}
|
|
115
|
+
return null;
|
|
116
|
+
});
|
|
117
|
+
}
|
|
118
|
+
/**
|
|
119
|
+
* All terms of a taxonomy in a specific locale (flat for non-hierarchical,
|
|
120
|
+
* tree for hierarchical).
|
|
121
|
+
*/
|
|
122
|
+
async function getTaxonomyTerms(taxonomyName, options = {}) {
|
|
123
|
+
const locale = resolveLocale(options.locale);
|
|
124
|
+
return requestCached(`taxonomy-terms:${taxonomyName}:${locale ?? "*"}`, async () => {
|
|
125
|
+
const db = await getDb();
|
|
126
|
+
const def = await getTaxonomyDef(taxonomyName, options);
|
|
127
|
+
if (!def) return [];
|
|
128
|
+
let termsQuery = db.selectFrom("taxonomies").selectAll().where("name", "=", taxonomyName).orderBy("label", "asc");
|
|
129
|
+
if (locale !== void 0) termsQuery = termsQuery.where("locale", "=", locale);
|
|
130
|
+
const rows = await termsQuery.execute();
|
|
131
|
+
const countsResult = await db.selectFrom("content_taxonomies").select(["taxonomy_id"]).select((eb) => eb.fn.count("entry_id").as("count")).groupBy("taxonomy_id").execute();
|
|
132
|
+
const counts = /* @__PURE__ */ new Map();
|
|
133
|
+
for (const row of countsResult) counts.set(row.taxonomy_id, row.count);
|
|
134
|
+
const flatTerms = rows.map((row) => ({
|
|
135
|
+
id: row.id,
|
|
136
|
+
name: row.name,
|
|
137
|
+
slug: row.slug,
|
|
138
|
+
label: row.label,
|
|
139
|
+
parent_id: row.parent_id,
|
|
140
|
+
data: row.data,
|
|
141
|
+
locale: row.locale,
|
|
142
|
+
translation_group: row.translation_group
|
|
143
|
+
}));
|
|
144
|
+
if (def.hierarchical) return buildTree(flatTerms, counts);
|
|
145
|
+
return flatTerms.map((term) => ({
|
|
146
|
+
id: term.id,
|
|
147
|
+
name: term.name,
|
|
148
|
+
slug: term.slug,
|
|
149
|
+
label: term.label,
|
|
150
|
+
children: [],
|
|
151
|
+
count: counts.get(term.translation_group ?? term.id) ?? 0,
|
|
152
|
+
locale: term.locale,
|
|
153
|
+
translationGroup: term.translation_group
|
|
154
|
+
}));
|
|
155
|
+
});
|
|
156
|
+
}
|
|
157
|
+
/**
|
|
158
|
+
* Get a single term by (taxonomy, slug). Honours the fallback chain — if the
|
|
159
|
+
* slug exists in a fallback locale, we return that row (useful for deep-linking
|
|
160
|
+
* to a term page when the translation is missing).
|
|
161
|
+
*/
|
|
162
|
+
async function getTerm(taxonomyName, slug, options = {}) {
|
|
163
|
+
const db = await getDb();
|
|
164
|
+
const chain = resolveLocaleChain(options.locale);
|
|
165
|
+
let row;
|
|
166
|
+
const selectTerm = () => db.selectFrom("taxonomies").selectAll().where("name", "=", taxonomyName).where("slug", "=", slug);
|
|
167
|
+
if (chain.length === 0) row = await selectTerm().orderBy("locale", "asc").executeTakeFirst();
|
|
168
|
+
else {
|
|
169
|
+
row = void 0;
|
|
170
|
+
for (const locale of chain) {
|
|
171
|
+
row = await selectTerm().where("locale", "=", locale).executeTakeFirst();
|
|
172
|
+
if (row) break;
|
|
173
|
+
}
|
|
174
|
+
}
|
|
175
|
+
if (!row) return null;
|
|
176
|
+
const count = (await db.selectFrom("content_taxonomies").select((eb) => eb.fn.count("entry_id").as("count")).where("taxonomy_id", "=", row.translation_group ?? row.id).executeTakeFirst())?.count ?? 0;
|
|
177
|
+
let childrenQuery = db.selectFrom("taxonomies").selectAll().where("parent_id", "=", row.id).orderBy("label", "asc");
|
|
178
|
+
const termLocale = row.locale;
|
|
179
|
+
if (termLocale) childrenQuery = childrenQuery.where("locale", "=", termLocale);
|
|
180
|
+
const children = (await childrenQuery.execute()).map((child) => ({
|
|
181
|
+
id: child.id,
|
|
182
|
+
name: child.name,
|
|
183
|
+
slug: child.slug,
|
|
184
|
+
label: child.label,
|
|
185
|
+
parentId: child.parent_id ?? void 0,
|
|
186
|
+
children: [],
|
|
187
|
+
locale: child.locale,
|
|
188
|
+
translationGroup: child.translation_group
|
|
189
|
+
}));
|
|
190
|
+
return {
|
|
191
|
+
id: row.id,
|
|
192
|
+
name: row.name,
|
|
193
|
+
slug: row.slug,
|
|
194
|
+
label: row.label,
|
|
195
|
+
parentId: row.parent_id ?? void 0,
|
|
196
|
+
description: row.data ? JSON.parse(row.data).description : void 0,
|
|
197
|
+
children,
|
|
198
|
+
count,
|
|
199
|
+
locale: row.locale,
|
|
200
|
+
translationGroup: row.translation_group
|
|
201
|
+
};
|
|
202
|
+
}
|
|
203
|
+
/**
|
|
204
|
+
* Terms assigned to a content entry, resolved into the active locale. Terms
|
|
205
|
+
* whose translation_group lacks a row in the requested locale are omitted.
|
|
206
|
+
*/
|
|
207
|
+
function getEntryTerms(collection, entryId, taxonomyName, options = {}) {
|
|
208
|
+
const locale = resolveLocale(options.locale);
|
|
209
|
+
return requestCached(`terms:${collection}:${entryId}:${taxonomyName ?? "*"}:${locale ?? "*"}`, async () => {
|
|
210
|
+
let query = (await getDb()).selectFrom("content_taxonomies").innerJoin("taxonomies", "taxonomies.translation_group", "content_taxonomies.taxonomy_id").selectAll("taxonomies").where("content_taxonomies.collection", "=", collection).where("content_taxonomies.entry_id", "=", entryId);
|
|
211
|
+
if (taxonomyName) query = query.where("taxonomies.name", "=", taxonomyName);
|
|
212
|
+
if (locale !== void 0) query = query.where("taxonomies.locale", "=", locale);
|
|
213
|
+
return (await query.execute()).map((row) => ({
|
|
214
|
+
id: row.id,
|
|
215
|
+
name: row.name,
|
|
216
|
+
slug: row.slug,
|
|
217
|
+
label: row.label,
|
|
218
|
+
parentId: row.parent_id ?? void 0,
|
|
219
|
+
children: [],
|
|
220
|
+
locale: row.locale,
|
|
221
|
+
translationGroup: row.translation_group
|
|
222
|
+
}));
|
|
223
|
+
});
|
|
224
|
+
}
|
|
225
|
+
/**
 * Resolve the terms of one taxonomy for many entries at once with batched
 * IN-queries (one per SQL_BATCH_SIZE chunk) instead of a query per entry.
 *
 * The returned Map is seeded with an empty array for every deduplicated
 * entry id, so callers never hit a missing key. When the taxonomy tables
 * do not exist yet, the seeded all-empty map is returned instead of throwing.
 */
async function getTermsForEntries(collection, entryIds, taxonomyName, options = {}) {
	const termsByEntry = new Map();
	const ids = [...new Set(entryIds)];
	for (const id of ids) termsByEntry.set(id, []);
	if (ids.length === 0) return termsByEntry;
	const db = await getDb();
	const locale = resolveLocale(options.locale);
	for (const batch of chunks(ids, SQL_BATCH_SIZE)) {
		let rows;
		try {
			let query = db
				.selectFrom("content_taxonomies")
				.innerJoin("taxonomies", "taxonomies.translation_group", "content_taxonomies.taxonomy_id")
				.select([
					"content_taxonomies.entry_id",
					"taxonomies.id",
					"taxonomies.name",
					"taxonomies.slug",
					"taxonomies.label",
					"taxonomies.parent_id",
					"taxonomies.locale",
					"taxonomies.translation_group"
				])
				.where("content_taxonomies.collection", "=", collection)
				.where("content_taxonomies.entry_id", "in", batch)
				.where("taxonomies.name", "=", taxonomyName);
			if (locale !== undefined) query = query.where("taxonomies.locale", "=", locale);
			rows = await query.execute();
		} catch (error) {
			// Taxonomies not set up yet: hand back the all-empty seeded map.
			if (isMissingTableError(error)) return termsByEntry;
			throw error;
		}
		for (const row of rows) {
			termsByEntry.get(row.entry_id)?.push({
				id: row.id,
				name: row.name,
				slug: row.slug,
				label: row.label,
				parentId: row.parent_id ?? undefined,
				children: [],
				locale: row.locale,
				translationGroup: row.translation_group
			});
		}
	}
	return termsByEntry;
}
|
|
271
|
+
/**
 * Batch-fetch terms for many entries across ALL taxonomies in one query per
 * chunk, grouped per entry by taxonomy name. Also primes the request-cache
 * so subsequent per-entry `getEntryTerms` calls are served without querying.
 */
async function getAllTermsForEntries(collection, entryIds, options = {}) {
	const grouped = new Map();
	const ids = [...new Set(entryIds)];
	for (const id of ids) grouped.set(id, {});
	if (ids.length === 0) return grouped;
	const db = await getDb();
	const locale = resolveLocale(options.locale);
	const applicableTaxonomyNames = await getCollectionTaxonomyNames(collection, { locale });
	for (const batch of chunks(ids, SQL_BATCH_SIZE)) {
		let rows;
		try {
			let query = db
				.selectFrom("content_taxonomies")
				.innerJoin("taxonomies", "taxonomies.translation_group", "content_taxonomies.taxonomy_id")
				.select([
					"content_taxonomies.entry_id",
					"taxonomies.id",
					"taxonomies.name",
					"taxonomies.slug",
					"taxonomies.label",
					"taxonomies.parent_id",
					"taxonomies.locale",
					"taxonomies.translation_group"
				])
				.where("content_taxonomies.collection", "=", collection)
				.where("content_taxonomies.entry_id", "in", batch)
				.orderBy("taxonomies.label", "asc");
			if (locale !== undefined) query = query.where("taxonomies.locale", "=", locale);
			rows = await query.execute();
		} catch (error) {
			if (isMissingTableError(error)) {
				// No taxonomy tables: still prime the cache with empties so
				// follow-up getEntryTerms calls short-circuit.
				for (const id of ids) primeEntryTermsCache(collection, id, {}, applicableTaxonomyNames, locale);
				return grouped;
			}
			throw error;
		}
		for (const row of rows) {
			const byTaxonomy = grouped.get(row.entry_id);
			if (!byTaxonomy) continue;
			const term = {
				id: row.id,
				name: row.name,
				slug: row.slug,
				label: row.label,
				parentId: row.parent_id ?? undefined,
				children: [],
				locale: row.locale,
				translationGroup: row.translation_group
			};
			(byTaxonomy[row.name] ??= []).push(term);
		}
	}
	for (const [entryId, byTaxonomy] of grouped) primeEntryTermsCache(collection, entryId, byTaxonomy, applicableTaxonomyNames, locale);
	return grouped;
}
|
|
326
|
+
/**
 * List the taxonomy names applicable to a collection. Backed by
 * getTaxonomyDefs, which is request-cached, so a page render pays for the
 * lookup only once.
 *
 * Returns an empty list when the taxonomy tables have not been created yet.
 */
async function getCollectionTaxonomyNames(collection, options) {
	try {
		const defs = await getTaxonomyDefs(options);
		const names = [];
		for (const def of defs) {
			if (def.collections.includes(collection)) names.push(def.name);
		}
		return names;
	} catch (error) {
		if (isMissingTableError(error)) return [];
		throw error;
	}
}
|
|
340
|
+
/**
 * Pre-populate the request-cache for every getEntryTerms call-shape that
 * could target this entry:
 *
 *   getEntryTerms(collection, entryId)            -> key `terms:C:E:*`
 *   getEntryTerms(collection, entryId, <name>)    -> key `terms:C:E:<name>`
 *   ...one per taxonomy applicable to this collection
 *
 * Applicable taxonomies with no rows on the entry are seeded with `[]`,
 * so callers get a cached empty array instead of re-querying.
 */
function primeEntryTermsCache(collection, entryId, byTaxonomy, applicableTaxonomyNames, locale) {
	const keyOf = (name) => `terms:${collection}:${entryId}:${name}:${locale ?? "*"}`;
	// Seed every taxonomy known to apply, falling back to an empty list.
	for (const name of applicableTaxonomyNames) {
		setRequestCacheEntry(keyOf(name), byTaxonomy[name] ?? []);
	}
	// Seed taxonomies that actually had rows (covers names absent from the
	// applicable list).
	for (const [name, terms] of Object.entries(byTaxonomy)) {
		setRequestCacheEntry(keyOf(name), terms);
	}
	// Finally the wildcard shape used when no taxonomy name is given.
	setRequestCacheEntry(keyOf("*"), Object.values(byTaxonomy).flat());
}
|
|
359
|
+
/**
 * Get all entries of a collection carrying a given term. Both the term-slug
 * lookup and the content query respect the active locale.
 */
async function getEntriesByTerm(collection, taxonomyName, termSlug, options = {}) {
	// Lazy-loaded to avoid a static circular dependency on the query module.
	const queryModule = await import("./query-Bo-msrmu.mjs");
	const { getEmDashCollection } = await queryModule.o;
	const queryOptions = { where: { [taxonomyName]: termSlug } };
	if (options.locale !== undefined) queryOptions.locale = options.locale;
	const { entries } = await getEmDashCollection(collection, queryOptions);
	return entries;
}
|
|
370
|
+
/**
 * Map a raw `taxonomies` row into the public taxonomy-definition shape.
 * `hierarchical` is stored as 0/1 and becomes a boolean; the `collections`
 * column holds a JSON-encoded array (empty when NULL).
 */
function rowToTaxonomyDef(row) {
	const { id, name, label, locale } = row;
	return {
		id,
		name,
		label,
		labelSingular: row.label_singular ?? undefined,
		hierarchical: row.hierarchical === 1,
		collections: row.collections ? JSON.parse(row.collections) : [],
		locale,
		translationGroup: row.translation_group
	};
}
|
|
382
|
+
/**
 * Assemble a nested term tree from flat DB rows.
 *
 * Each row becomes a node keyed by id; a node whose parentId resolves to
 * another node in the same set is attached as a child, otherwise it becomes
 * a root. Counts are looked up by translation_group, falling back to the
 * term id when the group is null.
 */
function buildTree(flatTerms, counts) {
	const byId = new Map();
	for (const raw of flatTerms) {
		byId.set(raw.id, {
			id: raw.id,
			name: raw.name,
			slug: raw.slug,
			label: raw.label,
			parentId: raw.parent_id ?? undefined,
			description: raw.data ? JSON.parse(raw.data).description : undefined,
			children: [],
			count: counts.get(raw.translation_group ?? raw.id) ?? 0,
			locale: raw.locale,
			translationGroup: raw.translation_group
		});
	}
	const roots = [];
	for (const node of byId.values()) {
		const parent = node.parentId ? byId.get(node.parentId) : undefined;
		if (parent) parent.children.push(node);
		else roots.push(node);
	}
	return roots;
}
|
|
404
|
+
|
|
405
|
+
//#endregion
|
|
406
|
+
export { getTaxonomyDefs as a, getTermsForEntries as c, resolveLocale as d, resolveLocaleChain as f, getTaxonomyDef as i, invalidateTermCache as l, getEntriesByTerm as n, getTaxonomyTerms as o, getEntryTerms as r, getTerm as s, getAllTermsForEntries as t, taxonomies_exports as u };
|
|
407
|
+
//# sourceMappingURL=taxonomies-CTtewrSQ.mjs.map
|