emdash 0.2.0 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (197)
  1. package/dist/{adapters-N6BF7RCD.d.mts → adapters-C2BzVy0p.d.mts} +1 -1
  2. package/dist/{adapters-N6BF7RCD.d.mts.map → adapters-C2BzVy0p.d.mts.map} +1 -1
  3. package/dist/{apply-wmVEOSbR.mjs → apply-Cma_PiF6.mjs} +38 -23
  4. package/dist/apply-Cma_PiF6.mjs.map +1 -0
  5. package/dist/astro/index.d.mts +25 -11
  6. package/dist/astro/index.d.mts.map +1 -1
  7. package/dist/astro/index.mjs +38 -25
  8. package/dist/astro/index.mjs.map +1 -1
  9. package/dist/astro/middleware/auth.d.mts +5 -5
  10. package/dist/astro/middleware/auth.mjs +2 -2
  11. package/dist/astro/middleware/redirect.d.mts.map +1 -1
  12. package/dist/astro/middleware/redirect.mjs +20 -8
  13. package/dist/astro/middleware/redirect.mjs.map +1 -1
  14. package/dist/astro/middleware/request-context.mjs +12 -2
  15. package/dist/astro/middleware/request-context.mjs.map +1 -1
  16. package/dist/astro/middleware/setup.mjs +1 -1
  17. package/dist/astro/middleware.d.mts.map +1 -1
  18. package/dist/astro/middleware.mjs +52 -45
  19. package/dist/astro/middleware.mjs.map +1 -1
  20. package/dist/astro/types.d.mts +9 -9
  21. package/dist/astro/types.d.mts.map +1 -1
  22. package/dist/{byline-1WQPlISL.mjs → byline-WuOq9MFJ.mjs} +5 -4
  23. package/dist/byline-WuOq9MFJ.mjs.map +1 -0
  24. package/dist/{bylines-BYdTYmia.mjs → bylines-C_Wsnz4L.mjs} +38 -6
  25. package/dist/bylines-C_Wsnz4L.mjs.map +1 -0
  26. package/dist/cache-E3Dts-yT.mjs +56 -0
  27. package/dist/cache-E3Dts-yT.mjs.map +1 -0
  28. package/dist/cli/index.mjs +13 -13
  29. package/dist/cli/index.mjs.map +1 -1
  30. package/dist/client/cf-access.d.mts +1 -1
  31. package/dist/client/index.d.mts +1 -1
  32. package/dist/client/index.mjs +1 -1
  33. package/dist/{config-Cq8H0SfX.mjs → config-DkxPrM9l.mjs} +1 -1
  34. package/dist/{config-Cq8H0SfX.mjs.map → config-DkxPrM9l.mjs.map} +1 -1
  35. package/dist/{content-BmXndhdi.mjs → content-BsBoyj8G.mjs} +20 -3
  36. package/dist/content-BsBoyj8G.mjs.map +1 -0
  37. package/dist/db/index.d.mts +3 -3
  38. package/dist/db/index.mjs +2 -2
  39. package/dist/db/libsql.d.mts +1 -1
  40. package/dist/db/postgres.d.mts +1 -1
  41. package/dist/db/sqlite.d.mts +1 -1
  42. package/dist/{default-WYlzADZL.mjs → default-PUx9RK6u.mjs} +1 -1
  43. package/dist/{default-WYlzADZL.mjs.map → default-PUx9RK6u.mjs.map} +1 -1
  44. package/dist/{dialect-helpers-B9uSp2GJ.mjs → dialect-helpers-DhTzaUxP.mjs} +4 -1
  45. package/dist/dialect-helpers-DhTzaUxP.mjs.map +1 -0
  46. package/dist/{error-DrxtnGPg.mjs → error-HBeQbVhV.mjs} +1 -1
  47. package/dist/{error-DrxtnGPg.mjs.map → error-HBeQbVhV.mjs.map} +1 -1
  48. package/dist/{index-UHEVQMus.d.mts → index-CRg3PWfZ.d.mts} +59 -33
  49. package/dist/index-CRg3PWfZ.d.mts.map +1 -0
  50. package/dist/index.d.mts +11 -11
  51. package/dist/index.mjs +20 -20
  52. package/dist/{load-Veizk2cT.mjs → load-BhSSm-TS.mjs} +1 -1
  53. package/dist/{load-Veizk2cT.mjs.map → load-BhSSm-TS.mjs.map} +1 -1
  54. package/dist/{loader-CHb2v0jm.mjs → loader-BYzwzORf.mjs} +4 -2
  55. package/dist/loader-BYzwzORf.mjs.map +1 -0
  56. package/dist/{manifest-schema-CuMio1A9.mjs → manifest-schema-BsXINkQD.mjs} +1 -1
  57. package/dist/{manifest-schema-CuMio1A9.mjs.map → manifest-schema-BsXINkQD.mjs.map} +1 -1
  58. package/dist/media/index.d.mts +1 -1
  59. package/dist/media/index.mjs +1 -1
  60. package/dist/media/local-runtime.d.mts +7 -7
  61. package/dist/{mode-CYeM2rPt.mjs → mode-CyPLdO3C.mjs} +1 -1
  62. package/dist/{mode-CYeM2rPt.mjs.map → mode-CyPLdO3C.mjs.map} +1 -1
  63. package/dist/page/index.d.mts +1 -1
  64. package/dist/patterns-CrCYkMBb.mjs +93 -0
  65. package/dist/patterns-CrCYkMBb.mjs.map +1 -0
  66. package/dist/{placeholder-bOx1xCTY.d.mts → placeholder-BBCtpTES.d.mts} +1 -1
  67. package/dist/{placeholder-bOx1xCTY.d.mts.map → placeholder-BBCtpTES.d.mts.map} +1 -1
  68. package/dist/{placeholder-aiCD8aSZ.mjs → placeholder-DntBEQo7.mjs} +1 -1
  69. package/dist/{placeholder-aiCD8aSZ.mjs.map → placeholder-DntBEQo7.mjs.map} +1 -1
  70. package/dist/plugins/adapt-sandbox-entry.d.mts +5 -5
  71. package/dist/plugins/adapt-sandbox-entry.mjs +1 -1
  72. package/dist/{query-5Hcv_5ER.mjs → query-B6Vu0d2i.mjs} +35 -16
  73. package/dist/{query-5Hcv_5ER.mjs.map → query-B6Vu0d2i.mjs.map} +1 -1
  74. package/dist/{redirect-DIfIni3r.mjs → redirect-7lGhLBNZ.mjs} +10 -93
  75. package/dist/redirect-7lGhLBNZ.mjs.map +1 -0
  76. package/dist/{registry-1EvbAfsC.mjs → registry-BgnP3ysR.mjs} +27 -37
  77. package/dist/registry-BgnP3ysR.mjs.map +1 -0
  78. package/dist/{runner-BoN0-FPi.mjs → runner-Cd-_WyDo.mjs} +18 -6
  79. package/dist/runner-Cd-_WyDo.mjs.map +1 -0
  80. package/dist/{runner-DTqkzOzc.d.mts → runner-DYv3rX8P.d.mts} +10 -3
  81. package/dist/runner-DYv3rX8P.d.mts.map +1 -0
  82. package/dist/runtime.d.mts +6 -6
  83. package/dist/runtime.mjs +2 -2
  84. package/dist/{search-BsYMed12.mjs → search-B5p9D36n.mjs} +108 -57
  85. package/dist/search-B5p9D36n.mjs.map +1 -0
  86. package/dist/seed/index.d.mts +2 -2
  87. package/dist/seed/index.mjs +10 -10
  88. package/dist/seo/index.d.mts +1 -1
  89. package/dist/storage/local.d.mts +1 -1
  90. package/dist/storage/local.mjs +1 -1
  91. package/dist/storage/s3.d.mts +11 -3
  92. package/dist/storage/s3.d.mts.map +1 -1
  93. package/dist/storage/s3.mjs +76 -15
  94. package/dist/storage/s3.mjs.map +1 -1
  95. package/dist/{tokens-DrB-W6Q-.mjs → tokens-DKHiCYCB.mjs} +1 -1
  96. package/dist/{tokens-DrB-W6Q-.mjs.map → tokens-DKHiCYCB.mjs.map} +1 -1
  97. package/dist/transaction-Cn2rjY78.mjs +28 -0
  98. package/dist/transaction-Cn2rjY78.mjs.map +1 -0
  99. package/dist/{transport-Bl8cTdYt.mjs → transport-BtcQ-Z7T.mjs} +1 -1
  100. package/dist/{transport-Bl8cTdYt.mjs.map → transport-BtcQ-Z7T.mjs.map} +1 -1
  101. package/dist/{transport-COOs9GSE.d.mts → transport-CKQA_G44.d.mts} +1 -1
  102. package/dist/{transport-COOs9GSE.d.mts.map → transport-CKQA_G44.d.mts.map} +1 -1
  103. package/dist/{types-7-UjSEyB.d.mts → types-B6BzlZxx.d.mts} +1 -1
  104. package/dist/{types-7-UjSEyB.d.mts.map → types-B6BzlZxx.d.mts.map} +1 -1
  105. package/dist/{types-6dqxBqsH.d.mts → types-BYWYxLcp.d.mts} +109 -5
  106. package/dist/types-BYWYxLcp.d.mts.map +1 -0
  107. package/dist/{types-CIsTnQvJ.d.mts → types-BmkQR1En.d.mts} +1 -1
  108. package/dist/{types-CIsTnQvJ.d.mts.map → types-BmkQR1En.d.mts.map} +1 -1
  109. package/dist/{types-BljtYPSd.d.mts → types-DNZpaCBk.d.mts} +14 -6
  110. package/dist/types-DNZpaCBk.d.mts.map +1 -0
  111. package/dist/{types-Bec-r_3_.mjs → types-Dz9_WMS6.mjs} +1 -1
  112. package/dist/types-Dz9_WMS6.mjs.map +1 -0
  113. package/dist/{types-CcreFIIH.d.mts → types-gLYVCXCQ.d.mts} +1 -1
  114. package/dist/{types-CcreFIIH.d.mts.map → types-gLYVCXCQ.d.mts.map} +1 -1
  115. package/dist/{types-DuNbGKjF.mjs → types-xxCWI3j0.mjs} +1 -1
  116. package/dist/{types-DuNbGKjF.mjs.map → types-xxCWI3j0.mjs.map} +1 -1
  117. package/dist/{validate-B7KP7VLM.d.mts → validate-CcNRWH6I.d.mts} +4 -4
  118. package/dist/{validate-B7KP7VLM.d.mts.map → validate-CcNRWH6I.d.mts.map} +1 -1
  119. package/dist/{validate-CXnRKfJK.mjs → validate-DuZDIxfy.mjs} +2 -2
  120. package/dist/{validate-CXnRKfJK.mjs.map → validate-DuZDIxfy.mjs.map} +1 -1
  121. package/dist/{validate-CqRJb_xU.mjs → validate-VPnKoIzW.mjs} +11 -11
  122. package/dist/{validate-CqRJb_xU.mjs.map → validate-VPnKoIzW.mjs.map} +1 -1
  123. package/dist/version-DlTDRdpv.mjs +7 -0
  124. package/dist/version-DlTDRdpv.mjs.map +1 -0
  125. package/package.json +7 -5
  126. package/src/api/handlers/content.ts +36 -25
  127. package/src/api/handlers/menus.ts +19 -16
  128. package/src/api/handlers/redirects.ts +95 -3
  129. package/src/api/schemas/redirects.ts +1 -0
  130. package/src/astro/integration/index.ts +2 -3
  131. package/src/astro/integration/runtime.ts +8 -14
  132. package/src/astro/integration/vite-config.ts +14 -4
  133. package/src/astro/middleware/redirect.ts +30 -15
  134. package/src/astro/middleware.ts +11 -19
  135. package/src/astro/routes/admin.astro +2 -2
  136. package/src/astro/routes/api/admin/bylines/[id]/index.ts +3 -0
  137. package/src/astro/routes/api/admin/bylines/index.ts +2 -0
  138. package/src/astro/routes/api/comments/[collection]/[contentId]/index.ts +2 -0
  139. package/src/astro/routes/api/import/wordpress/rewrite-urls.ts +2 -0
  140. package/src/astro/routes/api/manifest.ts +3 -1
  141. package/src/astro/routes/api/redirects/[id].ts +3 -0
  142. package/src/astro/routes/api/redirects/index.ts +2 -0
  143. package/src/astro/routes/api/schema/collections/[slug]/index.ts +2 -0
  144. package/src/astro/routes/api/schema/collections/index.ts +1 -0
  145. package/src/astro/storage/adapters.ts +19 -5
  146. package/src/astro/storage/types.ts +12 -4
  147. package/src/astro/types.ts +1 -0
  148. package/src/bylines/index.ts +50 -2
  149. package/src/cleanup.ts +3 -3
  150. package/src/cli/commands/bundle-utils.ts +5 -5
  151. package/src/database/dialect-helpers.ts +3 -0
  152. package/src/database/migrations/011_sections.ts +2 -2
  153. package/src/database/migrations/runner.ts +23 -2
  154. package/src/database/repositories/byline.ts +2 -1
  155. package/src/database/repositories/content.ts +5 -0
  156. package/src/database/repositories/redirect.ts +13 -0
  157. package/src/database/validate.ts +10 -10
  158. package/src/emdash-runtime.ts +23 -9
  159. package/src/index.ts +3 -0
  160. package/src/loader.ts +2 -0
  161. package/src/mcp/server.ts +40 -67
  162. package/src/menus/index.ts +4 -0
  163. package/src/plugins/context.ts +28 -4
  164. package/src/plugins/cron.ts +29 -4
  165. package/src/plugins/hooks.ts +22 -10
  166. package/src/plugins/index.ts +1 -0
  167. package/src/plugins/manager.ts +6 -2
  168. package/src/plugins/marketplace.ts +33 -3
  169. package/src/plugins/routes.ts +3 -3
  170. package/src/plugins/types.ts +7 -0
  171. package/src/query.ts +37 -14
  172. package/src/redirects/cache.ts +68 -0
  173. package/src/redirects/loops.ts +318 -0
  174. package/src/schema/registry.ts +3 -0
  175. package/src/search/fts-manager.ts +24 -11
  176. package/src/search/query.ts +8 -9
  177. package/src/seed/apply.ts +49 -28
  178. package/src/storage/s3.ts +94 -25
  179. package/src/storage/types.ts +13 -5
  180. package/src/utils/slugify.ts +11 -0
  181. package/src/version.ts +12 -0
  182. package/src/visual-editing/toolbar.ts +11 -1
  183. package/dist/apply-wmVEOSbR.mjs.map +0 -1
  184. package/dist/byline-1WQPlISL.mjs.map +0 -1
  185. package/dist/bylines-BYdTYmia.mjs.map +0 -1
  186. package/dist/content-BmXndhdi.mjs.map +0 -1
  187. package/dist/dialect-helpers-B9uSp2GJ.mjs.map +0 -1
  188. package/dist/index-UHEVQMus.d.mts.map +0 -1
  189. package/dist/loader-CHb2v0jm.mjs.map +0 -1
  190. package/dist/redirect-DIfIni3r.mjs.map +0 -1
  191. package/dist/registry-1EvbAfsC.mjs.map +0 -1
  192. package/dist/runner-BoN0-FPi.mjs.map +0 -1
  193. package/dist/runner-DTqkzOzc.d.mts.map +0 -1
  194. package/dist/search-BsYMed12.mjs.map +0 -1
  195. package/dist/types-6dqxBqsH.d.mts.map +0 -1
  196. package/dist/types-Bec-r_3_.mjs.map +0 -1
  197. package/dist/types-BljtYPSd.d.mts.map +0 -1
package/src/storage/s3.ts CHANGED
@@ -15,6 +15,7 @@ import {
15
15
  type ListObjectsV2Response,
16
16
  } from "@aws-sdk/client-s3";
17
17
  import { getSignedUrl } from "@aws-sdk/s3-request-presigner";
18
+ import { z } from "zod";
18
19
 
19
20
  import type {
20
21
  Storage,
@@ -28,6 +29,87 @@ import type {
28
29
  } from "./types.js";
29
30
  import { EmDashStorageError } from "./types.js";
30
31
 
32
+ const ENV_KEYS = {
33
+ endpoint: "S3_ENDPOINT",
34
+ bucket: "S3_BUCKET",
35
+ accessKeyId: "S3_ACCESS_KEY_ID",
36
+ secretAccessKey: "S3_SECRET_ACCESS_KEY",
37
+ region: "S3_REGION",
38
+ publicUrl: "S3_PUBLIC_URL",
39
+ } as const satisfies Record<keyof S3StorageConfig, string>;
40
+
41
+ function fail(msg: string): never {
42
+ throw new EmDashStorageError(msg, "MISSING_S3_CONFIG");
43
+ }
44
+
45
+ const s3ConfigSchema = z.object({
46
+ endpoint: z.url({ protocol: /^https?$/, error: "is not a valid http/https URL" }).optional(),
47
+ bucket: z.string().optional(),
48
+ accessKeyId: z.string().optional(),
49
+ secretAccessKey: z.string().optional(),
50
+ region: z.string().optional(),
51
+ publicUrl: z.string().optional(),
52
+ });
53
+
54
+ function isConfigKey(key: unknown): key is keyof S3StorageConfig {
55
+ return typeof key === "string" && key in ENV_KEYS;
56
+ }
57
+
58
+ /**
59
+ * Build the merged config: for each field, use the explicit value if present,
60
+ * otherwise fall back to the corresponding S3_* env var. Validate once on the
61
+ * final merged result so a malformed env var never breaks the build when the
62
+ * caller provides that field explicitly.
63
+ */
64
+ export function resolveS3Config(partial: Record<string, unknown>): S3StorageConfig {
65
+ const raw: Record<string, unknown> = {};
66
+ for (const [field, envKey] of Object.entries(ENV_KEYS)) {
67
+ const explicit = partial[field];
68
+ if (explicit !== undefined && explicit !== "") {
69
+ raw[field] = explicit;
70
+ continue;
71
+ }
72
+ const envVal = typeof process !== "undefined" && process.env ? process.env[envKey] : undefined;
73
+ if (envVal !== undefined && envVal !== "") {
74
+ raw[field] = envVal;
75
+ }
76
+ }
77
+
78
+ const result = s3ConfigSchema.safeParse(raw);
79
+ if (!result.success) {
80
+ const issue = result.error.issues[0];
81
+ const pathKey = issue?.path[0];
82
+ if (!issue || !isConfigKey(pathKey)) fail("S3 config validation failed");
83
+ const fromExplicit = partial[pathKey] !== undefined && partial[pathKey] !== "";
84
+ const label = fromExplicit ? `s3({ ${pathKey} })` : ENV_KEYS[pathKey];
85
+ fail(`${label} ${issue.message}`);
86
+ }
87
+ const merged = result.data;
88
+
89
+ const endpoint = merged.endpoint;
90
+ const bucket = merged.bucket;
91
+ if (!endpoint || !bucket) {
92
+ const missing: string[] = [];
93
+ if (!endpoint) missing.push(`endpoint: set ${ENV_KEYS.endpoint} or pass endpoint to s3({...})`);
94
+ if (!bucket) missing.push(`bucket: set ${ENV_KEYS.bucket} or pass bucket to s3({...})`);
95
+ fail(`missing required S3 config: ${missing.join("; ")}`);
96
+ }
97
+ const accessKeyId = merged.accessKeyId;
98
+ const secretAccessKey = merged.secretAccessKey;
99
+ if (accessKeyId && !secretAccessKey) {
100
+ fail(
101
+ `S3 credentials incomplete: accessKeyId is set but secretAccessKey is missing (set ${ENV_KEYS.secretAccessKey} or pass secretAccessKey to s3({...}))`,
102
+ );
103
+ }
104
+ if (secretAccessKey && !accessKeyId) {
105
+ fail(
106
+ `S3 credentials incomplete: secretAccessKey is set but accessKeyId is missing (set ${ENV_KEYS.accessKeyId} or pass accessKeyId to s3({...}))`,
107
+ );
108
+ }
109
+
110
+ return { ...merged, endpoint, bucket };
111
+ }
112
+
31
113
  const TRAILING_SLASH_PATTERN = /\/$/;
32
114
 
33
115
  /** Type guard for AWS SDK errors (have a `name` property) */
@@ -52,13 +134,17 @@ export class S3Storage implements Storage {
52
134
  this.client = new S3Client({
53
135
  endpoint: config.endpoint,
54
136
  region: config.region || "auto",
55
- credentials: {
56
- accessKeyId: config.accessKeyId,
57
- secretAccessKey: config.secretAccessKey,
58
- },
137
+ ...(config.accessKeyId && config.secretAccessKey
138
+ ? {
139
+ credentials: {
140
+ accessKeyId: config.accessKeyId,
141
+ secretAccessKey: config.secretAccessKey,
142
+ },
143
+ }
144
+ : {}),
59
145
  // Required for R2 and some S3-compatible services
60
146
  forcePathStyle: true,
61
- });
147
+ } as ConstructorParameters<typeof S3Client>[0]);
62
148
  }
63
149
 
64
150
  async upload(options: {
@@ -238,26 +324,9 @@ export class S3Storage implements Storage {
238
324
 
239
325
  /**
240
326
  * Create S3 storage adapter
241
- * This is the factory function called at runtime
327
+ * This is the factory function called at runtime.
328
+ * Config fields are merged with S3_* env vars; env vars fill in any missing fields.
242
329
  */
243
330
  export function createStorage(config: Record<string, unknown>): Storage {
244
- const { endpoint, bucket, accessKeyId, secretAccessKey, region, publicUrl } = config;
245
- if (
246
- typeof endpoint !== "string" ||
247
- typeof bucket !== "string" ||
248
- typeof accessKeyId !== "string" ||
249
- typeof secretAccessKey !== "string"
250
- ) {
251
- throw new Error(
252
- "S3Storage requires 'endpoint', 'bucket', 'accessKeyId', and 'secretAccessKey' string config values",
253
- );
254
- }
255
- return new S3Storage({
256
- endpoint,
257
- bucket,
258
- accessKeyId,
259
- secretAccessKey,
260
- region: typeof region === "string" ? region : undefined,
261
- publicUrl: typeof publicUrl === "string" ? publicUrl : undefined,
262
- });
331
+ return new S3Storage(resolveS3Config(config));
263
332
  }
@@ -13,11 +13,19 @@ export interface S3StorageConfig {
13
13
  endpoint: string;
14
14
  /** Bucket name */
15
15
  bucket: string;
16
- /** AWS access key ID */
17
- accessKeyId: string;
18
- /** AWS secret access key */
19
- secretAccessKey: string;
20
- /** Optional region (defaults to "auto" for R2) */
16
+ /**
17
+ * AWS access key ID.
18
+ * May be resolved from the `S3_ACCESS_KEY_ID` env var at runtime on Node.
19
+ * Must be provided together with `secretAccessKey`, or both omitted.
20
+ */
21
+ accessKeyId?: string;
22
+ /**
23
+ * AWS secret access key.
24
+ * May be resolved from the `S3_SECRET_ACCESS_KEY` env var at runtime on Node.
25
+ * Must be provided together with `accessKeyId`, or both omitted.
26
+ */
27
+ secretAccessKey?: string;
28
+ /** Optional region (defaults to "auto") */
21
29
  region?: string;
22
30
  /** Optional public URL prefix for generated URLs (e.g., CDN URL) */
23
31
  publicUrl?: string;
@@ -12,6 +12,17 @@ const TRAILING_HYPHEN_PATTERN = /-$/;
12
12
  * Handles unicode by normalizing to NFD and stripping diacritics,
13
13
  * so "café" becomes "cafe", "naïve" becomes "naive", etc.
14
14
  */
15
+ /**
16
+ * Decode a URI-encoded slug parameter.
17
+ *
18
+ * Browsers percent-encode non-ASCII characters in URLs, so a slug like
19
+ * "మేష-రాసి" arrives as "%e0%b0%ae%e0%b1%87%e0%b0%b7-%e0%b0%b0%e0%b0%be%e0%b0%b8%e0%b0%bf".
20
+ * Call this on `Astro.params.slug` before using it in database lookups.
21
+ */
22
+ export function decodeSlug(raw: string | undefined): string | undefined {
23
+ return raw ? decodeURIComponent(raw) : undefined;
24
+ }
25
+
15
26
  export function slugify(text: string, maxLength: number = 80): string {
16
27
  return (
17
28
  text
package/src/version.ts ADDED
@@ -0,0 +1,12 @@
1
+ /**
2
+ * Build-time version constants, replaced by tsdown/Vite `define`.
3
+ * Falls back to "dev" when running uncompiled (tests, dev).
4
+ */
5
+
6
+ declare const __EMDASH_VERSION__: string;
7
+ declare const __EMDASH_COMMIT__: string;
8
+
9
+ export const VERSION: string =
10
+ typeof __EMDASH_VERSION__ !== "undefined" ? __EMDASH_VERSION__ : "dev";
11
+
12
+ export const COMMIT: string = typeof __EMDASH_COMMIT__ !== "undefined" ? __EMDASH_COMMIT__ : "dev";
@@ -530,6 +530,7 @@ export function renderToolbar(config: ToolbarConfig): string {
530
530
  // --- Save status tracking ---
531
531
  var saveState = "idle"; // idle | unsaved | saving | saved | error
532
532
  var saveHideTimer = null;
533
+ var pendingSavePromise = null;
533
534
 
534
535
  function setSaveState(state) {
535
536
  saveState = state;
@@ -624,6 +625,11 @@ export function renderToolbar(config: ToolbarConfig): string {
624
625
 
625
626
  // Publish action
626
627
  function publish(collection, id) {
628
+ if (pendingSavePromise) {
629
+ pendingSavePromise.then(function() { publish(collection, id); });
630
+ return;
631
+ }
632
+
627
633
  publishBtn.disabled = true;
628
634
  publishBtn.textContent = "Publishing\u2026";
629
635
 
@@ -767,7 +773,11 @@ export function renderToolbar(config: ToolbarConfig): string {
767
773
 
768
774
  var newValue = (element.textContent || "").trim();
769
775
  if (newValue !== originalText.trim()) {
770
- saveField(annotation.collection, annotation.id, annotation.field, newValue);
776
+ pendingSavePromise = saveField(annotation.collection, annotation.id, annotation.field, newValue).then(function() {
777
+ pendingSavePromise = null;
778
+ }, function() {
779
+ pendingSavePromise = null;
780
+ });
771
781
  } else {
772
782
  setSaveState("idle");
773
783
  }
@@ -1 +0,0 @@
1
- {"version":3,"file":"apply-wmVEOSbR.mjs","names":[],"sources":["../src/database/repositories/taxonomy.ts","../src/database/repositories/options.ts","../src/settings/index.ts","../src/import/ssrf.ts","../src/seed/apply.ts"],"sourcesContent":["import type { Kysely } from \"kysely\";\nimport { ulid } from \"ulidx\";\n\nimport type { Database, TaxonomyTable, ContentTaxonomyTable } from \"../types.js\";\n\nexport interface Taxonomy {\n\tid: string;\n\tname: string;\n\tslug: string;\n\tlabel: string;\n\tparentId: string | null;\n\tdata: Record<string, unknown> | null;\n}\n\nexport interface CreateTaxonomyInput {\n\tname: string;\n\tslug: string;\n\tlabel: string;\n\tparentId?: string;\n\tdata?: Record<string, unknown>;\n}\n\nexport interface UpdateTaxonomyInput {\n\tslug?: string;\n\tlabel?: string;\n\tparentId?: string | null;\n\tdata?: Record<string, unknown>;\n}\n\n/**\n * Taxonomy repository for categories, tags, and other classification\n *\n * Taxonomies are hierarchical (via parentId) and can be attached to content entries.\n */\nexport class TaxonomyRepository {\n\tconstructor(private db: Kysely<Database>) {}\n\n\t/**\n\t * Create a new taxonomy term\n\t */\n\tasync create(input: CreateTaxonomyInput): Promise<Taxonomy> {\n\t\tconst id = ulid();\n\n\t\tconst row: TaxonomyTable = {\n\t\t\tid,\n\t\t\tname: input.name,\n\t\t\tslug: input.slug,\n\t\t\tlabel: input.label,\n\t\t\tparent_id: input.parentId ?? null,\n\t\t\tdata: input.data ? 
JSON.stringify(input.data) : null,\n\t\t};\n\n\t\tawait this.db.insertInto(\"taxonomies\").values(row).execute();\n\n\t\tconst taxonomy = await this.findById(id);\n\t\tif (!taxonomy) {\n\t\t\tthrow new Error(\"Failed to create taxonomy\");\n\t\t}\n\t\treturn taxonomy;\n\t}\n\n\t/**\n\t * Find taxonomy by ID\n\t */\n\tasync findById(id: string): Promise<Taxonomy | null> {\n\t\tconst row = await this.db\n\t\t\t.selectFrom(\"taxonomies\")\n\t\t\t.selectAll()\n\t\t\t.where(\"id\", \"=\", id)\n\t\t\t.executeTakeFirst();\n\n\t\treturn row ? this.rowToTaxonomy(row) : null;\n\t}\n\n\t/**\n\t * Find taxonomy by name and slug (unique constraint)\n\t */\n\tasync findBySlug(name: string, slug: string): Promise<Taxonomy | null> {\n\t\tconst row = await this.db\n\t\t\t.selectFrom(\"taxonomies\")\n\t\t\t.selectAll()\n\t\t\t.where(\"name\", \"=\", name)\n\t\t\t.where(\"slug\", \"=\", slug)\n\t\t\t.executeTakeFirst();\n\n\t\treturn row ? this.rowToTaxonomy(row) : null;\n\t}\n\n\t/**\n\t * Get all terms for a taxonomy (e.g., all categories)\n\t */\n\tasync findByName(name: string, options: { parentId?: string | null } = {}): Promise<Taxonomy[]> {\n\t\tlet query = this.db\n\t\t\t.selectFrom(\"taxonomies\")\n\t\t\t.selectAll()\n\t\t\t.where(\"name\", \"=\", name)\n\t\t\t.orderBy(\"label\", \"asc\");\n\n\t\tif (options.parentId !== undefined) {\n\t\t\tif (options.parentId === null) {\n\t\t\t\tquery = query.where(\"parent_id\", \"is\", null);\n\t\t\t} else {\n\t\t\t\tquery = query.where(\"parent_id\", \"=\", options.parentId);\n\t\t\t}\n\t\t}\n\n\t\tconst rows = await query.execute();\n\t\treturn rows.map((row) => this.rowToTaxonomy(row));\n\t}\n\n\t/**\n\t * Get children of a taxonomy term\n\t */\n\tasync findChildren(parentId: string): Promise<Taxonomy[]> {\n\t\tconst rows = await this.db\n\t\t\t.selectFrom(\"taxonomies\")\n\t\t\t.selectAll()\n\t\t\t.where(\"parent_id\", \"=\", parentId)\n\t\t\t.orderBy(\"label\", \"asc\")\n\t\t\t.execute();\n\n\t\treturn rows.map((row) => 
this.rowToTaxonomy(row));\n\t}\n\n\t/**\n\t * Update a taxonomy term\n\t */\n\tasync update(id: string, input: UpdateTaxonomyInput): Promise<Taxonomy | null> {\n\t\tconst existing = await this.findById(id);\n\t\tif (!existing) return null;\n\n\t\tconst updates: Partial<TaxonomyTable> = {};\n\t\tif (input.slug !== undefined) updates.slug = input.slug;\n\t\tif (input.label !== undefined) updates.label = input.label;\n\t\tif (input.parentId !== undefined) updates.parent_id = input.parentId;\n\t\tif (input.data !== undefined) updates.data = JSON.stringify(input.data);\n\n\t\tif (Object.keys(updates).length > 0) {\n\t\t\tawait this.db.updateTable(\"taxonomies\").set(updates).where(\"id\", \"=\", id).execute();\n\t\t}\n\n\t\treturn this.findById(id);\n\t}\n\n\t/**\n\t * Delete a taxonomy term\n\t */\n\tasync delete(id: string): Promise<boolean> {\n\t\t// First remove any content associations\n\t\tawait this.db.deleteFrom(\"content_taxonomies\").where(\"taxonomy_id\", \"=\", id).execute();\n\n\t\tconst result = await this.db.deleteFrom(\"taxonomies\").where(\"id\", \"=\", id).executeTakeFirst();\n\n\t\treturn (result.numDeletedRows ?? 
0) > 0;\n\t}\n\n\t// --- Content-Taxonomy Junction ---\n\n\t/**\n\t * Attach a taxonomy term to a content entry\n\t */\n\tasync attachToEntry(collection: string, entryId: string, taxonomyId: string): Promise<void> {\n\t\tconst row: ContentTaxonomyTable = {\n\t\t\tcollection,\n\t\t\tentry_id: entryId,\n\t\t\ttaxonomy_id: taxonomyId,\n\t\t};\n\n\t\t// Use INSERT OR IGNORE pattern for idempotency\n\t\tawait this.db\n\t\t\t.insertInto(\"content_taxonomies\")\n\t\t\t.values(row)\n\t\t\t.onConflict((oc) => oc.doNothing())\n\t\t\t.execute();\n\t}\n\n\t/**\n\t * Detach a taxonomy term from a content entry\n\t */\n\tasync detachFromEntry(collection: string, entryId: string, taxonomyId: string): Promise<void> {\n\t\tawait this.db\n\t\t\t.deleteFrom(\"content_taxonomies\")\n\t\t\t.where(\"collection\", \"=\", collection)\n\t\t\t.where(\"entry_id\", \"=\", entryId)\n\t\t\t.where(\"taxonomy_id\", \"=\", taxonomyId)\n\t\t\t.execute();\n\t}\n\n\t/**\n\t * Get all taxonomy terms for a content entry\n\t */\n\tasync getTermsForEntry(\n\t\tcollection: string,\n\t\tentryId: string,\n\t\ttaxonomyName?: string,\n\t): Promise<Taxonomy[]> {\n\t\tlet query = this.db\n\t\t\t.selectFrom(\"content_taxonomies\")\n\t\t\t.innerJoin(\"taxonomies\", \"taxonomies.id\", \"content_taxonomies.taxonomy_id\")\n\t\t\t.selectAll(\"taxonomies\")\n\t\t\t.where(\"content_taxonomies.collection\", \"=\", collection)\n\t\t\t.where(\"content_taxonomies.entry_id\", \"=\", entryId);\n\n\t\tif (taxonomyName) {\n\t\t\tquery = query.where(\"taxonomies.name\", \"=\", taxonomyName);\n\t\t}\n\n\t\tconst rows = await query.execute();\n\t\treturn rows.map((row) => this.rowToTaxonomy(row));\n\t}\n\n\t/**\n\t * Set all taxonomy terms for a content entry (replaces existing)\n\t * Uses batch operations to avoid N+1 queries.\n\t */\n\tasync setTermsForEntry(\n\t\tcollection: string,\n\t\tentryId: string,\n\t\ttaxonomyName: string,\n\t\ttaxonomyIds: string[],\n\t): Promise<void> {\n\t\t// Get current terms of this taxonomy 
type\n\t\tconst current = await this.getTermsForEntry(collection, entryId, taxonomyName);\n\t\tconst currentIds = new Set(current.map((t) => t.id));\n\t\tconst newIds = new Set(taxonomyIds);\n\n\t\t// Batch remove terms no longer present\n\t\tconst toRemove = current.filter((t) => !newIds.has(t.id)).map((t) => t.id);\n\t\tif (toRemove.length > 0) {\n\t\t\tawait this.db\n\t\t\t\t.deleteFrom(\"content_taxonomies\")\n\t\t\t\t.where(\"collection\", \"=\", collection)\n\t\t\t\t.where(\"entry_id\", \"=\", entryId)\n\t\t\t\t.where(\"taxonomy_id\", \"in\", toRemove)\n\t\t\t\t.execute();\n\t\t}\n\n\t\t// Batch add new terms\n\t\tconst toAdd = taxonomyIds.filter((id) => !currentIds.has(id));\n\t\tif (toAdd.length > 0) {\n\t\t\tawait this.db\n\t\t\t\t.insertInto(\"content_taxonomies\")\n\t\t\t\t.values(\n\t\t\t\t\ttoAdd.map((taxonomy_id) => ({\n\t\t\t\t\t\tcollection,\n\t\t\t\t\t\tentry_id: entryId,\n\t\t\t\t\t\ttaxonomy_id,\n\t\t\t\t\t})),\n\t\t\t\t)\n\t\t\t\t.onConflict((oc) => oc.doNothing())\n\t\t\t\t.execute();\n\t\t}\n\t}\n\n\t/**\n\t * Remove all taxonomy associations for an entry (use when entry is deleted)\n\t */\n\tasync clearEntryTerms(collection: string, entryId: string): Promise<number> {\n\t\tconst result = await this.db\n\t\t\t.deleteFrom(\"content_taxonomies\")\n\t\t\t.where(\"collection\", \"=\", collection)\n\t\t\t.where(\"entry_id\", \"=\", entryId)\n\t\t\t.executeTakeFirst();\n\n\t\treturn Number(result.numDeletedRows ?? 
0);\n\t}\n\n\t/**\n\t * Count entries that have a specific taxonomy term\n\t */\n\tasync countEntriesWithTerm(taxonomyId: string): Promise<number> {\n\t\tconst result = await this.db\n\t\t\t.selectFrom(\"content_taxonomies\")\n\t\t\t.select((eb) => eb.fn.count(\"entry_id\").as(\"count\"))\n\t\t\t.where(\"taxonomy_id\", \"=\", taxonomyId)\n\t\t\t.executeTakeFirst();\n\n\t\treturn Number(result?.count || 0);\n\t}\n\n\t/**\n\t * Convert database row to Taxonomy object\n\t */\n\tprivate rowToTaxonomy(row: TaxonomyTable): Taxonomy {\n\t\treturn {\n\t\t\tid: row.id,\n\t\t\tname: row.name,\n\t\t\tslug: row.slug,\n\t\t\tlabel: row.label,\n\t\t\tparentId: row.parent_id,\n\t\t\tdata: row.data ? JSON.parse(row.data) : null,\n\t\t};\n\t}\n}\n","import { sql, type Kysely, type SqlBool } from \"kysely\";\n\nimport type { Database, OptionTable } from \"../types.js\";\n\nfunction escapeLike(value: string): string {\n\treturn value.replaceAll(\"\\\\\", \"\\\\\\\\\").replaceAll(\"%\", \"\\\\%\").replaceAll(\"_\", \"\\\\_\");\n}\n\n/**\n * Options repository for key-value settings storage\n *\n * Used for site settings, plugin configuration, and other arbitrary key-value data.\n * Values are stored as JSON for flexibility.\n */\nexport class OptionsRepository {\n\tconstructor(private db: Kysely<Database>) {}\n\n\t/**\n\t * Get an option value\n\t */\n\tasync get<T = unknown>(name: string): Promise<T | null> {\n\t\tconst row = await this.db\n\t\t\t.selectFrom(\"options\")\n\t\t\t.select(\"value\")\n\t\t\t.where(\"name\", \"=\", name)\n\t\t\t.executeTakeFirst();\n\n\t\tif (!row) return null;\n\t\t// eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- JSON.parse returns any; generic callers provide T\n\t\treturn JSON.parse(row.value) as T;\n\t}\n\n\t/**\n\t * Get an option value with a default\n\t */\n\tasync getOrDefault<T>(name: string, defaultValue: T): Promise<T> {\n\t\tconst value = await this.get<T>(name);\n\t\treturn value ?? 
defaultValue;\n\t}\n\n\t/**\n\t * Set an option value (creates or updates)\n\t */\n\tasync set<T = unknown>(name: string, value: T): Promise<void> {\n\t\tconst row: OptionTable = {\n\t\t\tname,\n\t\t\tvalue: JSON.stringify(value),\n\t\t};\n\n\t\t// Upsert: insert or replace\n\t\tawait this.db\n\t\t\t.insertInto(\"options\")\n\t\t\t.values(row)\n\t\t\t.onConflict((oc) => oc.column(\"name\").doUpdateSet({ value: row.value }))\n\t\t\t.execute();\n\t}\n\n\t/**\n\t * Delete an option\n\t */\n\tasync delete(name: string): Promise<boolean> {\n\t\tconst result = await this.db.deleteFrom(\"options\").where(\"name\", \"=\", name).executeTakeFirst();\n\n\t\treturn (result.numDeletedRows ?? 0) > 0;\n\t}\n\n\t/**\n\t * Check if an option exists\n\t */\n\tasync exists(name: string): Promise<boolean> {\n\t\tconst row = await this.db\n\t\t\t.selectFrom(\"options\")\n\t\t\t.select(\"name\")\n\t\t\t.where(\"name\", \"=\", name)\n\t\t\t.executeTakeFirst();\n\n\t\treturn !!row;\n\t}\n\n\t/**\n\t * Get multiple options at once\n\t */\n\tasync getMany<T = unknown>(names: string[]): Promise<Map<string, T>> {\n\t\tif (names.length === 0) return new Map();\n\n\t\tconst rows = await this.db\n\t\t\t.selectFrom(\"options\")\n\t\t\t.select([\"name\", \"value\"])\n\t\t\t.where(\"name\", \"in\", names)\n\t\t\t.execute();\n\n\t\tconst result = new Map<string, T>();\n\t\tfor (const row of rows) {\n\t\t\t// eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- JSON.parse returns any; generic callers provide T\n\t\t\tresult.set(row.name, JSON.parse(row.value) as T);\n\t\t}\n\t\treturn result;\n\t}\n\n\t/**\n\t * Set multiple options at once\n\t */\n\tasync setMany<T = unknown>(options: Record<string, T>): Promise<void> {\n\t\tconst entries = Object.entries(options);\n\t\tif (entries.length === 0) return;\n\n\t\tfor (const [name, value] of entries) {\n\t\t\tawait this.set(name, value);\n\t\t}\n\t}\n\n\t/**\n\t * Get all options (use sparingly)\n\t */\n\tasync getAll(): 
Promise<Map<string, unknown>> {\n\t\tconst rows = await this.db.selectFrom(\"options\").select([\"name\", \"value\"]).execute();\n\n\t\tconst result = new Map<string, unknown>();\n\t\tfor (const row of rows) {\n\t\t\tresult.set(row.name, JSON.parse(row.value));\n\t\t}\n\t\treturn result;\n\t}\n\n\t/**\n\t * Get all options matching a prefix\n\t */\n\tasync getByPrefix<T = unknown>(prefix: string): Promise<Map<string, T>> {\n\t\tconst pattern = `${escapeLike(prefix)}%`;\n\t\tconst rows = await this.db\n\t\t\t.selectFrom(\"options\")\n\t\t\t.select([\"name\", \"value\"])\n\t\t\t.where(sql<SqlBool>`name LIKE ${pattern} ESCAPE '\\\\'`)\n\t\t\t.execute();\n\n\t\tconst result = new Map<string, T>();\n\t\tfor (const row of rows) {\n\t\t\t// eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- JSON.parse returns any; generic callers provide T\n\t\t\tresult.set(row.name, JSON.parse(row.value) as T);\n\t\t}\n\t\treturn result;\n\t}\n\n\t/**\n\t * Delete all options matching a prefix\n\t */\n\tasync deleteByPrefix(prefix: string): Promise<number> {\n\t\tconst pattern = `${escapeLike(prefix)}%`;\n\t\tconst result = await this.db\n\t\t\t.deleteFrom(\"options\")\n\t\t\t.where(sql<SqlBool>`name LIKE ${pattern} ESCAPE '\\\\'`)\n\t\t\t.executeTakeFirst();\n\n\t\treturn Number(result.numDeletedRows ?? 
0);\n\t}\n}\n","/**\n * Site Settings API\n *\n * Functions for getting and setting global site configuration.\n * Settings are stored in the options table with 'site:' prefix.\n */\n\nimport type { Kysely } from \"kysely\";\n\nimport { MediaRepository } from \"../database/repositories/media.js\";\nimport { OptionsRepository } from \"../database/repositories/options.js\";\nimport type { Database } from \"../database/types.js\";\nimport { getDb } from \"../loader.js\";\nimport type { Storage } from \"../storage/types.js\";\nimport type { SiteSettings, SiteSettingKey, MediaReference } from \"./types.js\";\n\n/** Prefix for site settings in the options table */\nconst SETTINGS_PREFIX = \"site:\";\n\n/**\n * Type guard for MediaReference values\n */\nfunction isMediaReference(value: unknown): value is MediaReference {\n\treturn typeof value === \"object\" && value !== null && \"mediaId\" in value;\n}\n\n/**\n * Resolve a media reference to include the full URL\n */\nasync function resolveMediaReference(\n\tmediaRef: MediaReference | undefined,\n\tdb: Kysely<Database>,\n\t_storage: Storage | null,\n): Promise<(MediaReference & { url?: string }) | undefined> {\n\tif (!mediaRef?.mediaId) {\n\t\treturn mediaRef;\n\t}\n\n\ttry {\n\t\tconst mediaRepo = new MediaRepository(db);\n\t\tconst media = await mediaRepo.findById(mediaRef.mediaId);\n\n\t\tif (media) {\n\t\t\t// Construct URL using the same pattern as API handlers\n\t\t\treturn {\n\t\t\t\t...mediaRef,\n\t\t\t\turl: `/_emdash/api/media/file/${media.storageKey}`,\n\t\t\t};\n\t\t}\n\t} catch {\n\t\t// If media not found or error, return the reference as-is\n\t}\n\n\treturn mediaRef;\n}\n\n/**\n * Get a single site setting by key\n *\n * Returns `undefined` if the setting has not been configured.\n * For media settings (logo, favicon), the URL is resolved automatically.\n *\n * @param key - The setting key (e.g., \"title\", \"logo\", \"social\")\n * @returns The setting value, or undefined if not set\n *\n * @example\n * 
```ts\n * import { getSiteSetting } from \"emdash\";\n *\n * const title = await getSiteSetting(\"title\");\n * const logo = await getSiteSetting(\"logo\");\n * console.log(logo?.url); // Resolved URL\n * ```\n */\nexport async function getSiteSetting<K extends SiteSettingKey>(\n\tkey: K,\n): Promise<SiteSettings[K] | undefined> {\n\tconst db = await getDb();\n\treturn getSiteSettingWithDb(key, db);\n}\n\n/**\n * Get a single site setting by key (with explicit db)\n *\n * @internal Use `getSiteSetting()` in templates. This variant is for admin routes\n * that already have a database handle.\n */\nexport async function getSiteSettingWithDb<K extends SiteSettingKey>(\n\tkey: K,\n\tdb: Kysely<Database>,\n\tstorage: Storage | null = null,\n): Promise<SiteSettings[K] | undefined> {\n\tconst options = new OptionsRepository(db);\n\tconst value = await options.get<SiteSettings[K]>(`${SETTINGS_PREFIX}${key}`);\n\n\tif (!value) {\n\t\treturn undefined;\n\t}\n\n\t// Resolve media references if needed.\n\t// TS cannot narrow generic K from key equality checks — this is a known limitation.\n\t// We use the non-generic getSiteSettingsWithDb for media resolution instead.\n\tif ((key === \"logo\" || key === \"favicon\") && isMediaReference(value)) {\n\t\tconst resolved = await resolveMediaReference(value, db, storage);\n\t\t// eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- TS can't narrow generic K from key equality; resolved type is correct\n\t\treturn resolved as SiteSettings[K] | undefined;\n\t}\n\n\treturn value;\n}\n\n/**\n * Get all site settings\n *\n * Returns all configured settings. 
Unset values are undefined.\n * Media references (logo/favicon) are resolved to include URLs.\n *\n * @example\n * ```ts\n * import { getSiteSettings } from \"emdash\";\n *\n * const settings = await getSiteSettings();\n * console.log(settings.title); // \"My Site\"\n * console.log(settings.logo?.url); // \"/_emdash/api/media/file/abc123\"\n * ```\n */\nexport async function getSiteSettings(): Promise<Partial<SiteSettings>> {\n\tconst db = await getDb();\n\treturn getSiteSettingsWithDb(db);\n}\n\n/**\n * Get all site settings (with explicit db)\n *\n * @internal Use `getSiteSettings()` in templates. This variant is for admin routes\n * that already have a database handle.\n */\nexport async function getSiteSettingsWithDb(\n\tdb: Kysely<Database>,\n\tstorage: Storage | null = null,\n): Promise<Partial<SiteSettings>> {\n\tconst options = new OptionsRepository(db);\n\tconst allOptions = await options.getByPrefix(SETTINGS_PREFIX);\n\n\tconst settings: Record<string, unknown> = {};\n\n\t// Convert Map to settings object, removing the prefix\n\tfor (const [key, value] of allOptions) {\n\t\tconst settingKey = key.replace(SETTINGS_PREFIX, \"\");\n\t\tsettings[settingKey] = value;\n\t}\n\n\tconst typedSettings = settings as Partial<SiteSettings>;\n\n\t// Resolve media references\n\tif (typedSettings.logo) {\n\t\ttypedSettings.logo = await resolveMediaReference(typedSettings.logo, db, storage);\n\t}\n\tif (typedSettings.favicon) {\n\t\ttypedSettings.favicon = await resolveMediaReference(typedSettings.favicon, db, storage);\n\t}\n\n\treturn typedSettings;\n}\n\n/**\n * Set site settings (internal function used by admin API)\n *\n * Merges provided settings with existing ones. 
Only provided fields are updated.\n * Media references should include just the mediaId; URLs are resolved on read.\n *\n * @param settings - Partial settings object with values to update\n * @param db - Kysely database instance\n * @returns Promise that resolves when settings are saved\n *\n * @internal\n *\n * @example\n * ```ts\n * // Update multiple settings at once\n * await setSiteSettings({\n * title: \"My Site\",\n * tagline: \"Welcome\",\n * logo: { mediaId: \"med_123\", alt: \"Logo\" }\n * }, db);\n * ```\n */\nexport async function setSiteSettings(\n\tsettings: Partial<SiteSettings>,\n\tdb: Kysely<Database>,\n): Promise<void> {\n\tconst options = new OptionsRepository(db);\n\n\t// Convert settings to options format\n\tconst updates: Record<string, unknown> = {};\n\tfor (const [key, value] of Object.entries(settings)) {\n\t\tif (value !== undefined) {\n\t\t\tupdates[`${SETTINGS_PREFIX}${key}`] = value;\n\t\t}\n\t}\n\n\tawait options.setMany(updates);\n}\n\n/**\n * Get a single plugin setting by key.\n *\n * Plugin settings are stored in the options table under\n * `plugin:<pluginId>:settings:<key>`.\n */\nexport async function getPluginSetting<T = unknown>(\n\tpluginId: string,\n\tkey: string,\n): Promise<T | undefined> {\n\tconst db = await getDb();\n\treturn getPluginSettingWithDb<T>(pluginId, key, db);\n}\n\n/**\n * Get a single plugin setting by key (with explicit db).\n *\n * @internal Use `getPluginSetting()` in templates and plugin rendering code.\n */\nexport async function getPluginSettingWithDb<T = unknown>(\n\tpluginId: string,\n\tkey: string,\n\tdb: Kysely<Database>,\n): Promise<T | undefined> {\n\tconst options = new OptionsRepository(db);\n\tconst value = await options.get<T>(`plugin:${pluginId}:settings:${key}`);\n\treturn value ?? 
undefined;\n}\n\n/**\n * Get all persisted plugin settings for a plugin.\n *\n * Defaults declared in `admin.settingsSchema` are not materialized\n * automatically; callers should apply their own fallback defaults.\n */\nexport async function getPluginSettings(pluginId: string): Promise<Record<string, unknown>> {\n\tconst db = await getDb();\n\treturn getPluginSettingsWithDb(pluginId, db);\n}\n\n/**\n * Get all persisted plugin settings for a plugin (with explicit db).\n *\n * @internal Use `getPluginSettings()` in templates and plugin rendering code.\n */\nexport async function getPluginSettingsWithDb(\n\tpluginId: string,\n\tdb: Kysely<Database>,\n): Promise<Record<string, unknown>> {\n\tconst prefix = `plugin:${pluginId}:settings:`;\n\tconst options = new OptionsRepository(db);\n\tconst allOptions = await options.getByPrefix(prefix);\n\n\tconst settings: Record<string, unknown> = {};\n\tfor (const [key, value] of allOptions) {\n\t\tif (!key.startsWith(prefix)) {\n\t\t\tcontinue;\n\t\t}\n\t\tsettings[key.slice(prefix.length)] = value;\n\t}\n\n\treturn settings;\n}\n","/**\n * SSRF protection for import URLs.\n *\n * Validates that URLs don't target internal/private network addresses.\n * Applied before any fetch() call in the import pipeline.\n */\n\nconst IPV4_MAPPED_IPV6_DOTTED_PATTERN = /^::ffff:(\\d+\\.\\d+\\.\\d+\\.\\d+)$/i;\nconst IPV4_MAPPED_IPV6_HEX_PATTERN = /^::ffff:([0-9a-f]{1,4}):([0-9a-f]{1,4})$/i;\nconst IPV4_TRANSLATED_HEX_PATTERN = /^::ffff:0:([0-9a-f]{1,4}):([0-9a-f]{1,4})$/i;\nconst IPV6_EXPANDED_MAPPED_PATTERN =\n\t/^0{0,4}:0{0,4}:0{0,4}:0{0,4}:0{0,4}:ffff:([0-9a-f]{1,4}):([0-9a-f]{1,4})$/i;\n\n/**\n * IPv4-compatible (deprecated) addresses: ::XXXX:XXXX\n *\n * The WHATWG URL parser normalizes [::127.0.0.1] to [::7f00:1] (no ffff prefix).\n * These are deprecated but still parsed, and bypass the ffff-based checks.\n */\nconst IPV4_COMPATIBLE_HEX_PATTERN = /^::([0-9a-f]{1,4}):([0-9a-f]{1,4})$/i;\n\n/**\n * NAT64 prefix (RFC 6052): 
64:ff9b::XXXX:XXXX\n *\n * Used by NAT64 gateways to embed IPv4 addresses in IPv6.\n * [64:ff9b::127.0.0.1] normalizes to [64:ff9b::7f00:1].\n */\nconst NAT64_HEX_PATTERN = /^64:ff9b::([0-9a-f]{1,4}):([0-9a-f]{1,4})$/i;\n\nconst IPV6_BRACKET_PATTERN = /^\\[|\\]$/g;\n\n/**\n * Private and reserved IP ranges that should never be fetched.\n *\n * Includes:\n * - Loopback (127.0.0.0/8)\n * - Private (10.0.0.0/8, 172.16.0.0/12, 192.168.0.0/16)\n * - Link-local (169.254.0.0/16)\n * - Cloud metadata (169.254.169.254 — AWS/GCP/Azure)\n * - IPv6 loopback and link-local\n */\nconst BLOCKED_PATTERNS: Array<{ start: number; end: number }> = [\n\t// 127.0.0.0/8 — loopback\n\t{ start: ip4ToNum(127, 0, 0, 0), end: ip4ToNum(127, 255, 255, 255) },\n\t// 10.0.0.0/8 — private\n\t{ start: ip4ToNum(10, 0, 0, 0), end: ip4ToNum(10, 255, 255, 255) },\n\t// 172.16.0.0/12 — private\n\t{ start: ip4ToNum(172, 16, 0, 0), end: ip4ToNum(172, 31, 255, 255) },\n\t// 192.168.0.0/16 — private\n\t{ start: ip4ToNum(192, 168, 0, 0), end: ip4ToNum(192, 168, 255, 255) },\n\t// 169.254.0.0/16 — link-local (includes cloud metadata endpoint)\n\t{ start: ip4ToNum(169, 254, 0, 0), end: ip4ToNum(169, 254, 255, 255) },\n\t// 0.0.0.0/8 — current network\n\t{ start: ip4ToNum(0, 0, 0, 0), end: ip4ToNum(0, 255, 255, 255) },\n];\n\nconst BLOCKED_HOSTNAMES = new Set([\n\t\"localhost\",\n\t\"metadata.google.internal\",\n\t\"metadata.google\",\n\t\"[::1]\",\n]);\n\n/** Blocked URL schemes */\nconst ALLOWED_SCHEMES = new Set([\"http:\", \"https:\"]);\n\nfunction ip4ToNum(a: number, b: number, c: number, d: number): number {\n\treturn ((a << 24) | (b << 16) | (c << 8) | d) >>> 0;\n}\n\nfunction parseIpv4(ip: string): number | null {\n\tconst parts = ip.split(\".\");\n\tif (parts.length !== 4) return null;\n\n\tconst nums = parts.map(Number);\n\tif (nums.some((n) => isNaN(n) || n < 0 || n > 255)) return null;\n\n\treturn ip4ToNum(nums[0], nums[1], nums[2], nums[3]);\n}\n\n/**\n * Convert IPv4-mapped/translated IPv6 
addresses from hex form back to IPv4.\n *\n * The WHATWG URL parser normalizes dotted-decimal to hex:\n * [::ffff:127.0.0.1] -> [::ffff:7f00:1]\n * [::ffff:169.254.169.254] -> [::ffff:a9fe:a9fe]\n *\n * Without this conversion, the hex forms bypass isPrivateIp() regex checks.\n */\nexport function normalizeIPv6MappedToIPv4(ip: string): string | null {\n\t// Match hex-form IPv4-mapped IPv6: ::ffff:XXXX:XXXX\n\tlet match = ip.match(IPV4_MAPPED_IPV6_HEX_PATTERN);\n\tif (!match) {\n\t\t// Match IPv4-translated (RFC 6052): ::ffff:0:XXXX:XXXX\n\t\tmatch = ip.match(IPV4_TRANSLATED_HEX_PATTERN);\n\t}\n\tif (!match) {\n\t\t// Match fully expanded form: 0000:0000:0000:0000:0000:ffff:XXXX:XXXX\n\t\tmatch = ip.match(IPV6_EXPANDED_MAPPED_PATTERN);\n\t}\n\tif (!match) {\n\t\t// Match IPv4-compatible (deprecated) form: ::XXXX:XXXX (no ffff prefix)\n\t\tmatch = ip.match(IPV4_COMPATIBLE_HEX_PATTERN);\n\t}\n\tif (!match) {\n\t\t// Match NAT64 prefix (RFC 6052): 64:ff9b::XXXX:XXXX\n\t\tmatch = ip.match(NAT64_HEX_PATTERN);\n\t}\n\tif (match) {\n\t\tconst high = parseInt(match[1] ?? \"\", 16);\n\t\tconst low = parseInt(match[2] ?? \"\", 16);\n\t\treturn `${(high >> 8) & 0xff}.${high & 0xff}.${(low >> 8) & 0xff}.${low & 0xff}`;\n\t}\n\treturn null;\n}\n\nfunction isPrivateIp(ip: string): boolean {\n\t// Handle IPv6 loopback\n\tif (ip === \"::1\" || ip === \"::ffff:127.0.0.1\") return true;\n\n\t// Handle IPv4-mapped IPv6 in hex form (WHATWG URL parser normalizes to this)\n\t// e.g. ::ffff:7f00:1 -> 127.0.0.1, ::ffff:a9fe:a9fe -> 169.254.169.254\n\tconst hexIpv4 = normalizeIPv6MappedToIPv4(ip);\n\tif (hexIpv4) return isPrivateIp(hexIpv4);\n\n\t// Handle IPv4-mapped IPv6 in dotted-decimal form\n\tconst v4Match = ip.match(IPV4_MAPPED_IPV6_DOTTED_PATTERN);\n\tconst ipv4 = v4Match ? 
v4Match[1] : ip;\n\n\tconst num = parseIpv4(ipv4);\n\tif (num === null) {\n\t\t// If we can't parse it, block IPv6 addresses that look internal\n\t\treturn ip.startsWith(\"fe80:\") || ip.startsWith(\"fc\") || ip.startsWith(\"fd\");\n\t}\n\n\treturn BLOCKED_PATTERNS.some((range) => num >= range.start && num <= range.end);\n}\n\n/**\n * Error thrown when SSRF protection blocks a URL.\n */\nexport class SsrfError extends Error {\n\tcode = \"SSRF_BLOCKED\" as const;\n\n\tconstructor(message: string) {\n\t\tsuper(message);\n\t\tthis.name = \"SsrfError\";\n\t}\n}\n\n/**\n * Validate that a URL is safe to fetch (not targeting internal networks).\n *\n * Checks:\n * 1. URL is well-formed with http/https scheme\n * 2. Hostname is not a known internal name (localhost, metadata endpoints)\n * 3. If hostname is an IP literal, it's not in a private range\n *\n * Note: DNS rebinding attacks are not fully mitigated (hostname could resolve\n * to a private IP). Full protection requires resolving DNS and checking the IP\n * before connecting, which needs a custom fetch implementation. 
This covers\n * the most common SSRF vectors.\n *\n * @throws SsrfError if the URL targets an internal address\n */\n/** Maximum number of redirects to follow in ssrfSafeFetch */\nconst MAX_REDIRECTS = 5;\n\nexport function validateExternalUrl(url: string): URL {\n\tlet parsed: URL;\n\ttry {\n\t\tparsed = new URL(url);\n\t} catch {\n\t\tthrow new SsrfError(\"Invalid URL\");\n\t}\n\n\t// Only allow http/https\n\tif (!ALLOWED_SCHEMES.has(parsed.protocol)) {\n\t\tthrow new SsrfError(`Scheme '${parsed.protocol}' is not allowed`);\n\t}\n\n\t// Strip brackets from IPv6 hostname\n\tconst hostname = parsed.hostname.replace(IPV6_BRACKET_PATTERN, \"\");\n\n\t// Check against known internal hostnames\n\tif (BLOCKED_HOSTNAMES.has(hostname.toLowerCase())) {\n\t\tthrow new SsrfError(\"URLs targeting internal hosts are not allowed\");\n\t}\n\n\t// Check if hostname is an IP address in a private range\n\tif (isPrivateIp(hostname)) {\n\t\tthrow new SsrfError(\"URLs targeting private IP addresses are not allowed\");\n\t}\n\n\treturn parsed;\n}\n\n/**\n * Fetch a URL with SSRF protection on redirects.\n *\n * Uses `redirect: \"manual\"` to intercept redirects and re-validate each\n * redirect target against SSRF rules before following it. This prevents\n * an attacker from setting up an allowed external URL that redirects to\n * an internal IP (e.g. 
169.254.169.254 for cloud metadata).\n *\n * @throws SsrfError if the initial URL or any redirect target is internal\n */\n/** Headers that must be stripped when a redirect crosses origins */\nconst CREDENTIAL_HEADERS = [\"authorization\", \"cookie\", \"proxy-authorization\"];\n\nexport async function ssrfSafeFetch(url: string, init?: RequestInit): Promise<Response> {\n\tlet currentUrl = url;\n\tlet currentInit = init;\n\n\tfor (let i = 0; i <= MAX_REDIRECTS; i++) {\n\t\tvalidateExternalUrl(currentUrl);\n\n\t\tconst response = await globalThis.fetch(currentUrl, {\n\t\t\t...currentInit,\n\t\t\tredirect: \"manual\",\n\t\t});\n\n\t\t// Not a redirect -- return directly\n\t\tif (response.status < 300 || response.status >= 400) {\n\t\t\treturn response;\n\t\t}\n\n\t\t// Extract redirect target\n\t\tconst location = response.headers.get(\"Location\");\n\t\tif (!location) {\n\t\t\treturn response;\n\t\t}\n\n\t\t// Resolve relative redirects against the current URL\n\t\tconst previousOrigin = new URL(currentUrl).origin;\n\t\tcurrentUrl = new URL(location, currentUrl).href;\n\t\tconst nextOrigin = new URL(currentUrl).origin;\n\n\t\t// Strip credential headers on cross-origin redirects\n\t\tif (previousOrigin !== nextOrigin && currentInit) {\n\t\t\tcurrentInit = stripCredentialHeaders(currentInit);\n\t\t}\n\t}\n\n\tthrow new SsrfError(`Too many redirects (max ${MAX_REDIRECTS})`);\n}\n\n/**\n * Return a copy of init with credential headers removed.\n */\nexport function stripCredentialHeaders(init: RequestInit): RequestInit {\n\tif (!init.headers) return init;\n\n\tconst headers = new Headers(init.headers);\n\tfor (const name of CREDENTIAL_HEADERS) {\n\t\theaders.delete(name);\n\t}\n\n\treturn { ...init, headers };\n}\n","/**\n * Seed engine - applies seed files to database\n *\n * This is the core implementation that bootstraps an EmDash site from a seed file.\n * Apply order is critical for foreign keys and references.\n */\n\nimport { imageSize } from 
\"image-size\";\nimport type { Kysely } from \"kysely\";\nimport mime from \"mime/lite\";\nimport { ulid } from \"ulidx\";\n\nimport { BylineRepository } from \"../database/repositories/byline.js\";\nimport { ContentRepository } from \"../database/repositories/content.js\";\nimport { MediaRepository } from \"../database/repositories/media.js\";\nimport { RedirectRepository } from \"../database/repositories/redirect.js\";\nimport { TaxonomyRepository } from \"../database/repositories/taxonomy.js\";\nimport type { Database } from \"../database/types.js\";\nimport type { MediaValue } from \"../fields/types.js\";\nimport { ssrfSafeFetch, validateExternalUrl } from \"../import/ssrf.js\";\nimport { SchemaRegistry } from \"../schema/registry.js\";\nimport { FTSManager } from \"../search/fts-manager.js\";\nimport { setSiteSettings } from \"../settings/index.js\";\nimport type { Storage } from \"../storage/types.js\";\nimport type {\n\tSeedFile,\n\tSeedApplyOptions,\n\tSeedApplyResult,\n\tSeedTaxonomyTerm,\n\tSeedMenuItem,\n\tSeedWidget,\n\tSeedMediaReference,\n} from \"./types.js\";\n\nconst FILE_EXTENSION_PATTERN = /\\.([a-z0-9]+)(?:\\?|$)/i;\nimport { validateSeed } from \"./validate.js\";\n\n/** Pattern to remove file extensions */\nconst EXTENSION_PATTERN = /\\.[^.]+$/;\n\n/** Pattern to remove query parameters */\nconst QUERY_PARAM_PATTERN = /\\?.*$/;\n\n/** Pattern to remove non-alphanumeric characters (except dash and underscore) */\nconst SANITIZE_PATTERN = /[^a-zA-Z0-9_-]/g;\n\n/** Pattern to collapse multiple hyphens */\nconst MULTIPLE_HYPHENS_PATTERN = /-+/g;\n\n/**\n * Apply a seed file to the database\n *\n * This function is idempotent - safe to run multiple times.\n *\n * @param db - Kysely database instance\n * @param seed - Seed file to apply\n * @param options - Application options\n * @returns Result summary\n */\nexport async function applySeed(\n\tdb: Kysely<Database>,\n\tseed: SeedFile,\n\toptions: SeedApplyOptions = {},\n): Promise<SeedApplyResult> 
{\n\t// Validate seed first\n\tconst validation = validateSeed(seed);\n\tif (!validation.valid) {\n\t\tthrow new Error(`Invalid seed file:\\n${validation.errors.join(\"\\n\")}`);\n\t}\n\n\tconst {\n\t\tincludeContent = false,\n\t\tstorage,\n\t\tskipMediaDownload = false,\n\t\tonConflict = \"skip\",\n\t} = options;\n\n\t// Result counters\n\tconst result: SeedApplyResult = {\n\t\tcollections: { created: 0, skipped: 0, updated: 0 },\n\t\tfields: { created: 0, skipped: 0, updated: 0 },\n\t\ttaxonomies: { created: 0, terms: 0 },\n\t\tbylines: { created: 0, skipped: 0, updated: 0 },\n\t\tmenus: { created: 0, items: 0 },\n\t\tredirects: { created: 0, skipped: 0, updated: 0 },\n\t\twidgetAreas: { created: 0, widgets: 0 },\n\t\tsections: { created: 0, skipped: 0, updated: 0 },\n\t\tsettings: { applied: 0 },\n\t\tcontent: { created: 0, skipped: 0, updated: 0 },\n\t\tmedia: { created: 0, skipped: 0 },\n\t};\n\n\t// Media context for $media resolution\n\tconst mediaContext: MediaContext = {\n\t\tdb,\n\t\tstorage: storage ?? null,\n\t\tskipMediaDownload,\n\t\tmediaCache: new Map(), // Cache downloaded media by URL to avoid re-downloading\n\t};\n\n\t// Apply order (critical for foreign keys and references):\n\t// 1. Site settings\n\t// 2. Collections + Fields\n\t// 3. Taxonomy definitions + Terms\n\t// 4. Content (so menu refs can resolve)\n\t// 5. Menus + Menu items (can now resolve content refs)\n\t// 6. Redirects\n\t// 7. Widget areas + Widgets\n\n\t// Track seed content IDs for reference resolution (shared across content and menus)\n\tconst seedIdMap = new Map<string, string>(); // seed id -> real entry id\n\tconst seedBylineIdMap = new Map<string, string>(); // seed byline id -> real byline id\n\n\t// 1. Site settings\n\tif (seed.settings) {\n\t\tawait setSiteSettings(seed.settings, db);\n\t\tresult.settings.applied = Object.keys(seed.settings).length;\n\t}\n\n\t// 2-3. 
Collections and Fields\n\tif (seed.collections) {\n\t\tconst registry = new SchemaRegistry(db);\n\n\t\tfor (const collection of seed.collections) {\n\t\t\t// Check if collection exists\n\t\t\tconst existing = await registry.getCollection(collection.slug);\n\n\t\t\tif (existing) {\n\t\t\t\tif (onConflict === \"error\") {\n\t\t\t\t\tthrow new Error(`Conflict: collection \"${collection.slug}\" already exists`);\n\t\t\t\t}\n\n\t\t\t\tif (onConflict === \"update\") {\n\t\t\t\t\tawait registry.updateCollection(collection.slug, {\n\t\t\t\t\t\tlabel: collection.label,\n\t\t\t\t\t\tlabelSingular: collection.labelSingular,\n\t\t\t\t\t\tdescription: collection.description,\n\t\t\t\t\t\ticon: collection.icon,\n\t\t\t\t\t\tsupports: collection.supports || [],\n\t\t\t\t\t\turlPattern: collection.urlPattern,\n\t\t\t\t\t\tcommentsEnabled: collection.commentsEnabled,\n\t\t\t\t\t});\n\t\t\t\t\tresult.collections.updated++;\n\n\t\t\t\t\t// Update or create fields\n\t\t\t\t\tfor (const field of collection.fields) {\n\t\t\t\t\t\tconst existingField = await registry.getField(collection.slug, field.slug);\n\t\t\t\t\t\tif (existingField) {\n\t\t\t\t\t\t\tawait registry.updateField(collection.slug, field.slug, {\n\t\t\t\t\t\t\t\tlabel: field.label,\n\t\t\t\t\t\t\t\trequired: field.required || false,\n\t\t\t\t\t\t\t\tunique: field.unique || false,\n\t\t\t\t\t\t\t\tsearchable: field.searchable || false,\n\t\t\t\t\t\t\t\tdefaultValue: field.defaultValue,\n\t\t\t\t\t\t\t\tvalidation: field.validation,\n\t\t\t\t\t\t\t\twidget: field.widget,\n\t\t\t\t\t\t\t\toptions: field.options,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\tresult.fields.updated++;\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\tawait registry.createField(collection.slug, {\n\t\t\t\t\t\t\t\tslug: field.slug,\n\t\t\t\t\t\t\t\tlabel: field.label,\n\t\t\t\t\t\t\t\ttype: field.type,\n\t\t\t\t\t\t\t\trequired: field.required || false,\n\t\t\t\t\t\t\t\tunique: field.unique || false,\n\t\t\t\t\t\t\t\tsearchable: field.searchable || 
false,\n\t\t\t\t\t\t\t\tdefaultValue: field.defaultValue,\n\t\t\t\t\t\t\t\tvalidation: field.validation,\n\t\t\t\t\t\t\t\twidget: field.widget,\n\t\t\t\t\t\t\t\toptions: field.options,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\tresult.fields.created++;\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\n\t\t\t\t// skip\n\t\t\t\tresult.collections.skipped++;\n\t\t\t\tresult.fields.skipped += collection.fields.length;\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\t// Create collection\n\t\t\tawait registry.createCollection({\n\t\t\t\tslug: collection.slug,\n\t\t\t\tlabel: collection.label,\n\t\t\t\tlabelSingular: collection.labelSingular,\n\t\t\t\tdescription: collection.description,\n\t\t\t\ticon: collection.icon,\n\t\t\t\tsupports: collection.supports || [],\n\t\t\t\tsource: \"seed\",\n\t\t\t\turlPattern: collection.urlPattern,\n\t\t\t\tcommentsEnabled: collection.commentsEnabled,\n\t\t\t});\n\t\t\tresult.collections.created++;\n\n\t\t\t// Create fields\n\t\t\tfor (const field of collection.fields) {\n\t\t\t\tawait registry.createField(collection.slug, {\n\t\t\t\t\tslug: field.slug,\n\t\t\t\t\tlabel: field.label,\n\t\t\t\t\ttype: field.type,\n\t\t\t\t\trequired: field.required || false,\n\t\t\t\t\tunique: field.unique || false,\n\t\t\t\t\tsearchable: field.searchable || false,\n\t\t\t\t\tdefaultValue: field.defaultValue,\n\t\t\t\t\tvalidation: field.validation,\n\t\t\t\t\twidget: field.widget,\n\t\t\t\t\toptions: field.options,\n\t\t\t\t});\n\t\t\t\tresult.fields.created++;\n\t\t\t}\n\t\t}\n\t}\n\n\t// 4-5. 
Taxonomies\n\tif (seed.taxonomies) {\n\t\tfor (const taxonomy of seed.taxonomies) {\n\t\t\t// Check if taxonomy definition exists\n\t\t\tconst existingDef = await db\n\t\t\t\t.selectFrom(\"_emdash_taxonomy_defs\")\n\t\t\t\t.selectAll()\n\t\t\t\t.where(\"name\", \"=\", taxonomy.name)\n\t\t\t\t.executeTakeFirst();\n\n\t\t\tif (existingDef) {\n\t\t\t\tif (onConflict === \"error\") {\n\t\t\t\t\tthrow new Error(`Conflict: taxonomy \"${taxonomy.name}\" already exists`);\n\t\t\t\t}\n\t\t\t\tif (onConflict === \"update\") {\n\t\t\t\t\tawait db\n\t\t\t\t\t\t.updateTable(\"_emdash_taxonomy_defs\")\n\t\t\t\t\t\t.set({\n\t\t\t\t\t\t\tlabel: taxonomy.label,\n\t\t\t\t\t\t\tlabel_singular: taxonomy.labelSingular ?? null,\n\t\t\t\t\t\t\thierarchical: taxonomy.hierarchical ? 1 : 0,\n\t\t\t\t\t\t\tcollections: JSON.stringify(taxonomy.collections),\n\t\t\t\t\t\t})\n\t\t\t\t\t\t.where(\"id\", \"=\", existingDef.id)\n\t\t\t\t\t\t.execute();\n\t\t\t\t\t// Taxonomy defs don't track an \"updated\" counter -- just the definition is updated\n\t\t\t\t}\n\t\t\t\t// skip: do nothing for the definition\n\t\t\t} else {\n\t\t\t\t// Create taxonomy definition\n\t\t\t\tawait db\n\t\t\t\t\t.insertInto(\"_emdash_taxonomy_defs\")\n\t\t\t\t\t.values({\n\t\t\t\t\t\tid: ulid(),\n\t\t\t\t\t\tname: taxonomy.name,\n\t\t\t\t\t\tlabel: taxonomy.label,\n\t\t\t\t\t\tlabel_singular: taxonomy.labelSingular ?? null,\n\t\t\t\t\t\thierarchical: taxonomy.hierarchical ? 
1 : 0,\n\t\t\t\t\t\tcollections: JSON.stringify(taxonomy.collections),\n\t\t\t\t\t})\n\t\t\t\t\t.execute();\n\t\t\t\tresult.taxonomies.created++;\n\t\t\t}\n\n\t\t\t// Create terms (if provided)\n\t\t\tif (taxonomy.terms && taxonomy.terms.length > 0) {\n\t\t\t\tconst termRepo = new TaxonomyRepository(db);\n\n\t\t\t\t// For hierarchical taxonomies, we need to create parents before children\n\t\t\t\tif (taxonomy.hierarchical) {\n\t\t\t\t\tawait applyHierarchicalTerms(termRepo, taxonomy.name, taxonomy.terms, result, onConflict);\n\t\t\t\t} else {\n\t\t\t\t\t// Flat taxonomy - create all terms\n\t\t\t\t\tfor (const term of taxonomy.terms) {\n\t\t\t\t\t\tconst existing = await termRepo.findBySlug(taxonomy.name, term.slug);\n\t\t\t\t\t\tif (existing) {\n\t\t\t\t\t\t\tif (onConflict === \"error\") {\n\t\t\t\t\t\t\t\tthrow new Error(\n\t\t\t\t\t\t\t\t\t`Conflict: taxonomy term \"${term.slug}\" in \"${taxonomy.name}\" already exists`,\n\t\t\t\t\t\t\t\t);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tif (onConflict === \"update\") {\n\t\t\t\t\t\t\t\tawait termRepo.update(existing.id, {\n\t\t\t\t\t\t\t\t\tlabel: term.label,\n\t\t\t\t\t\t\t\t\tdata: term.description ? { description: term.description } : {},\n\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t\tresult.taxonomies.terms++;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t// skip: do nothing\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\tawait termRepo.create({\n\t\t\t\t\t\t\t\tname: taxonomy.name,\n\t\t\t\t\t\t\t\tslug: term.slug,\n\t\t\t\t\t\t\t\tlabel: term.label,\n\t\t\t\t\t\t\t\tdata: term.description ? { description: term.description } : undefined,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\tresult.taxonomies.terms++;\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t// 6. 
Bylines\n\tif (seed.bylines) {\n\t\tconst bylineRepo = new BylineRepository(db);\n\t\tfor (const byline of seed.bylines) {\n\t\t\tconst existing = await bylineRepo.findBySlug(byline.slug);\n\t\t\tif (existing) {\n\t\t\t\tif (onConflict === \"error\") {\n\t\t\t\t\tthrow new Error(`Conflict: byline \"${byline.slug}\" already exists`);\n\t\t\t\t}\n\n\t\t\t\tif (onConflict === \"update\") {\n\t\t\t\t\tawait bylineRepo.update(existing.id, {\n\t\t\t\t\t\tdisplayName: byline.displayName,\n\t\t\t\t\t\tbio: byline.bio ?? null,\n\t\t\t\t\t\twebsiteUrl: byline.websiteUrl ?? null,\n\t\t\t\t\t\tisGuest: byline.isGuest,\n\t\t\t\t\t});\n\t\t\t\t\tseedBylineIdMap.set(byline.id, existing.id);\n\t\t\t\t\tresult.bylines.updated++;\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\n\t\t\t\t// skip\n\t\t\t\tseedBylineIdMap.set(byline.id, existing.id);\n\t\t\t\tresult.bylines.skipped++;\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\tconst created = await bylineRepo.create({\n\t\t\t\tslug: byline.slug,\n\t\t\t\tdisplayName: byline.displayName,\n\t\t\t\tbio: byline.bio ?? null,\n\t\t\t\twebsiteUrl: byline.websiteUrl ?? null,\n\t\t\t\tisGuest: byline.isGuest,\n\t\t\t});\n\t\t\tseedBylineIdMap.set(byline.id, created.id);\n\t\t\tresult.bylines.created++;\n\t\t}\n\t}\n\n\t// 7. 
Content (created before menus so refs can resolve)\n\tif (includeContent && seed.content) {\n\t\tconst contentRepo = new ContentRepository(db);\n\t\tconst bylineRepo = new BylineRepository(db);\n\n\t\t// Create content entries\n\t\tfor (const [collectionSlug, entries] of Object.entries(seed.content)) {\n\t\t\tfor (const entry of entries) {\n\t\t\t\t// Check if entry exists (by slug + locale for locale-aware lookup)\n\t\t\t\tconst existing = await contentRepo.findBySlug(collectionSlug, entry.slug, entry.locale);\n\n\t\t\t\tif (existing) {\n\t\t\t\t\tif (onConflict === \"error\") {\n\t\t\t\t\t\tthrow new Error(\n\t\t\t\t\t\t\t`Conflict: content \"${entry.slug}\" in \"${collectionSlug}\" already exists`,\n\t\t\t\t\t\t);\n\t\t\t\t\t}\n\n\t\t\t\t\tif (onConflict === \"update\") {\n\t\t\t\t\t\t// Resolve $ref and $media in data\n\t\t\t\t\t\tconst resolvedData = await resolveReferences(\n\t\t\t\t\t\t\tentry.data,\n\t\t\t\t\t\t\tseedIdMap,\n\t\t\t\t\t\t\tmediaContext,\n\t\t\t\t\t\t\tresult,\n\t\t\t\t\t\t);\n\n\t\t\t\t\t\tconst status = entry.status || \"published\";\n\t\t\t\t\t\tawait contentRepo.update(collectionSlug, existing.id, {\n\t\t\t\t\t\t\tstatus,\n\t\t\t\t\t\t\tdata: resolvedData,\n\t\t\t\t\t\t});\n\n\t\t\t\t\t\tseedIdMap.set(entry.id, existing.id);\n\t\t\t\t\t\tresult.content.updated++;\n\n\t\t\t\t\t\t// Update bylines and taxonomy assignments\n\t\t\t\t\t\tawait applyContentBylines(\n\t\t\t\t\t\t\tbylineRepo,\n\t\t\t\t\t\t\tcollectionSlug,\n\t\t\t\t\t\t\texisting.id,\n\t\t\t\t\t\t\tentry,\n\t\t\t\t\t\t\tseedBylineIdMap,\n\t\t\t\t\t\t\ttrue,\n\t\t\t\t\t\t);\n\t\t\t\t\t\tawait applyContentTaxonomies(db, collectionSlug, existing.id, entry, true);\n\t\t\t\t\t\tcontinue;\n\t\t\t\t\t}\n\n\t\t\t\t\t// skip\n\t\t\t\t\tresult.content.skipped++;\n\t\t\t\t\tseedIdMap.set(entry.id, existing.id);\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\n\t\t\t\t// Resolve $ref and $media in data\n\t\t\t\tconst resolvedData = await resolveReferences(entry.data, seedIdMap, mediaContext, 
result);\n\n\t\t\t\t// Resolve translationOf: map from seed-local ID to real EmDash ID\n\t\t\t\tlet translationOf: string | undefined;\n\t\t\t\tif (entry.translationOf) {\n\t\t\t\t\tconst sourceId = seedIdMap.get(entry.translationOf);\n\t\t\t\t\tif (!sourceId) {\n\t\t\t\t\t\tconsole.warn(\n\t\t\t\t\t\t\t`content.${collectionSlug}: translationOf \"${entry.translationOf}\" not found (not yet created or missing). Skipping translation link.`,\n\t\t\t\t\t\t);\n\t\t\t\t\t} else {\n\t\t\t\t\t\ttranslationOf = sourceId;\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// Create entry\n\t\t\t\tconst status = entry.status || \"published\";\n\t\t\t\tconst created = await contentRepo.create({\n\t\t\t\t\ttype: collectionSlug,\n\t\t\t\t\tslug: entry.slug,\n\t\t\t\t\tstatus,\n\t\t\t\t\tdata: resolvedData,\n\t\t\t\t\tlocale: entry.locale,\n\t\t\t\t\ttranslationOf,\n\t\t\t\t\t// Set published_at for published content so RSS/Archives work correctly\n\t\t\t\t\tpublishedAt: status === \"published\" ? new Date().toISOString() : null,\n\t\t\t\t});\n\n\t\t\t\tseedIdMap.set(entry.id, created.id);\n\t\t\t\tresult.content.created++;\n\n\t\t\t\tawait applyContentBylines(bylineRepo, collectionSlug, created.id, entry, seedBylineIdMap);\n\t\t\t\tawait applyContentTaxonomies(db, collectionSlug, created.id, entry, false);\n\t\t\t}\n\t\t}\n\t}\n\n\t// 8. 
Menus and Menu Items (after content so refs can resolve)\n\tif (seed.menus) {\n\t\tfor (const menu of seed.menus) {\n\t\t\t// Check if menu exists\n\t\t\tconst existingMenu = await db\n\t\t\t\t.selectFrom(\"_emdash_menus\")\n\t\t\t\t.selectAll()\n\t\t\t\t.where(\"name\", \"=\", menu.name)\n\t\t\t\t.executeTakeFirst();\n\n\t\t\tlet menuId: string;\n\n\t\t\tif (existingMenu) {\n\t\t\t\tmenuId = existingMenu.id;\n\t\t\t\t// Clear existing items (menus are recreated)\n\t\t\t\tawait db.deleteFrom(\"_emdash_menu_items\").where(\"menu_id\", \"=\", menuId).execute();\n\t\t\t} else {\n\t\t\t\t// Create menu\n\t\t\t\tmenuId = ulid();\n\t\t\t\tawait db\n\t\t\t\t\t.insertInto(\"_emdash_menus\")\n\t\t\t\t\t.values({\n\t\t\t\t\t\tid: menuId,\n\t\t\t\t\t\tname: menu.name,\n\t\t\t\t\t\tlabel: menu.label,\n\t\t\t\t\t\tcreated_at: new Date().toISOString(),\n\t\t\t\t\t\tupdated_at: new Date().toISOString(),\n\t\t\t\t\t})\n\t\t\t\t\t.execute();\n\t\t\t\tresult.menus.created++;\n\t\t\t}\n\n\t\t\t// Create menu items\n\t\t\tconst itemCount = await applyMenuItems(\n\t\t\t\tdb,\n\t\t\t\tmenuId,\n\t\t\t\tmenu.items,\n\t\t\t\tnull, // parent_id\n\t\t\t\t0, // sort_order\n\t\t\t\tseedIdMap,\n\t\t\t);\n\t\t\tresult.menus.items += itemCount;\n\t\t}\n\t}\n\n\t// 9. 
Redirects\n\tif (seed.redirects) {\n\t\tconst redirectRepo = new RedirectRepository(db);\n\n\t\tfor (const redirect of seed.redirects) {\n\t\t\tconst existing = await redirectRepo.findBySource(redirect.source);\n\t\t\tif (existing) {\n\t\t\t\tif (onConflict === \"error\") {\n\t\t\t\t\tthrow new Error(`Conflict: redirect \"${redirect.source}\" already exists`);\n\t\t\t\t}\n\n\t\t\t\tif (onConflict === \"update\") {\n\t\t\t\t\tawait redirectRepo.update(existing.id, {\n\t\t\t\t\t\tdestination: redirect.destination,\n\t\t\t\t\t\ttype: redirect.type,\n\t\t\t\t\t\tenabled: redirect.enabled,\n\t\t\t\t\t\tgroupName: redirect.groupName,\n\t\t\t\t\t});\n\t\t\t\t\tresult.redirects.updated++;\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\n\t\t\t\t// skip\n\t\t\t\tresult.redirects.skipped++;\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\tawait redirectRepo.create({\n\t\t\t\tsource: redirect.source,\n\t\t\t\tdestination: redirect.destination,\n\t\t\t\ttype: redirect.type,\n\t\t\t\tenabled: redirect.enabled,\n\t\t\t\tgroupName: redirect.groupName,\n\t\t\t});\n\t\t\tresult.redirects.created++;\n\t\t}\n\t}\n\n\t// 10. Widget Areas and Widgets\n\tif (seed.widgetAreas) {\n\t\tfor (const area of seed.widgetAreas) {\n\t\t\t// Check if area exists\n\t\t\tconst existingArea = await db\n\t\t\t\t.selectFrom(\"_emdash_widget_areas\")\n\t\t\t\t.selectAll()\n\t\t\t\t.where(\"name\", \"=\", area.name)\n\t\t\t\t.executeTakeFirst();\n\n\t\t\tlet areaId: string;\n\n\t\t\tif (existingArea) {\n\t\t\t\tareaId = existingArea.id;\n\t\t\t\t// Clear existing widgets (areas are recreated)\n\t\t\t\tawait db.deleteFrom(\"_emdash_widgets\").where(\"area_id\", \"=\", areaId).execute();\n\t\t\t} else {\n\t\t\t\t// Create area\n\t\t\t\tareaId = ulid();\n\t\t\t\tawait db\n\t\t\t\t\t.insertInto(\"_emdash_widget_areas\")\n\t\t\t\t\t.values({\n\t\t\t\t\t\tid: areaId,\n\t\t\t\t\t\tname: area.name,\n\t\t\t\t\t\tlabel: area.label,\n\t\t\t\t\t\tdescription: area.description ?? 
null,\n\t\t\t\t\t})\n\t\t\t\t\t.execute();\n\t\t\t\tresult.widgetAreas.created++;\n\t\t\t}\n\n\t\t\t// Create widgets\n\t\t\tfor (let i = 0; i < area.widgets.length; i++) {\n\t\t\t\tconst widget = area.widgets[i];\n\t\t\t\tawait applyWidget(db, areaId, widget, i);\n\t\t\t\tresult.widgetAreas.widgets++;\n\t\t\t}\n\t\t}\n\t}\n\n\t// 11. Sections\n\tif (seed.sections) {\n\t\tfor (const section of seed.sections) {\n\t\t\t// Check if section exists\n\t\t\tconst existing = await db\n\t\t\t\t.selectFrom(\"_emdash_sections\")\n\t\t\t\t.select(\"id\")\n\t\t\t\t.where(\"slug\", \"=\", section.slug)\n\t\t\t\t.executeTakeFirst();\n\n\t\t\tif (existing) {\n\t\t\t\tif (onConflict === \"error\") {\n\t\t\t\t\tthrow new Error(`Conflict: section \"${section.slug}\" already exists`);\n\t\t\t\t}\n\n\t\t\t\tif (onConflict === \"update\") {\n\t\t\t\t\tawait db\n\t\t\t\t\t\t.updateTable(\"_emdash_sections\")\n\t\t\t\t\t\t.set({\n\t\t\t\t\t\t\ttitle: section.title,\n\t\t\t\t\t\t\tdescription: section.description ?? null,\n\t\t\t\t\t\t\tkeywords: section.keywords ? JSON.stringify(section.keywords) : null,\n\t\t\t\t\t\t\tcontent: JSON.stringify(section.content),\n\t\t\t\t\t\t\tsource: section.source || \"theme\",\n\t\t\t\t\t\t\tupdated_at: new Date().toISOString(),\n\t\t\t\t\t\t})\n\t\t\t\t\t\t.where(\"id\", \"=\", existing.id)\n\t\t\t\t\t\t.execute();\n\t\t\t\t\tresult.sections.updated++;\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\n\t\t\t\t// skip\n\t\t\t\tresult.sections.skipped++;\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\tconst id = ulid();\n\t\t\tconst now = new Date().toISOString();\n\n\t\t\tawait db\n\t\t\t\t.insertInto(\"_emdash_sections\")\n\t\t\t\t.values({\n\t\t\t\t\tid,\n\t\t\t\t\tslug: section.slug,\n\t\t\t\t\ttitle: section.title,\n\t\t\t\t\tdescription: section.description ?? null,\n\t\t\t\t\tkeywords: section.keywords ? 
JSON.stringify(section.keywords) : null,\n\t\t\t\t\tcontent: JSON.stringify(section.content),\n\t\t\t\t\tpreview_media_id: null,\n\t\t\t\t\tsource: section.source || \"theme\",\n\t\t\t\t\ttheme_id: section.source === \"theme\" ? section.slug : null,\n\t\t\t\t\tcreated_at: now,\n\t\t\t\t\tupdated_at: now,\n\t\t\t\t})\n\t\t\t\t.execute();\n\n\t\t\tresult.sections.created++;\n\t\t}\n\t}\n\n\t// 11. Enable search for collections that have `search` in supports\n\tif (seed.collections) {\n\t\tconst ftsManager = new FTSManager(db);\n\n\t\tfor (const collection of seed.collections) {\n\t\t\tif (collection.supports?.includes(\"search\")) {\n\t\t\t\t// Check if there are searchable fields\n\t\t\t\tconst searchableFields = await ftsManager.getSearchableFields(collection.slug);\n\t\t\t\tif (searchableFields.length > 0) {\n\t\t\t\t\ttry {\n\t\t\t\t\t\tawait ftsManager.enableSearch(collection.slug);\n\t\t\t\t\t} catch (err) {\n\t\t\t\t\t\t// Log but don't fail - search can be enabled manually later\n\t\t\t\t\t\tconsole.warn(`Failed to enable search for ${collection.slug}:`, err);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\treturn result;\n}\n\n/**\n * Apply hierarchical taxonomy terms (parents before children)\n */\nasync function applyHierarchicalTerms(\n\ttermRepo: TaxonomyRepository,\n\ttaxonomyName: string,\n\tterms: SeedTaxonomyTerm[],\n\tresult: SeedApplyResult,\n\tonConflict: \"skip\" | \"update\" | \"error\" = \"skip\",\n): Promise<void> {\n\t// Map slugs to IDs\n\tconst slugToId = new Map<string, string>();\n\n\t// Multiple passes to handle deep nesting\n\tlet remaining = [...terms];\n\tlet maxPasses = 10; // Prevent infinite loop\n\n\twhile (remaining.length > 0 && maxPasses > 0) {\n\t\tconst processedThisPass: string[] = [];\n\n\t\tfor (const term of remaining) {\n\t\t\t// Check if parent exists (or no parent)\n\t\t\tif (!term.parent || slugToId.has(term.parent)) {\n\t\t\t\tconst parentId = term.parent ? 
slugToId.get(term.parent) : undefined;\n\n\t\t\t\tconst existing = await termRepo.findBySlug(taxonomyName, term.slug);\n\t\t\t\tif (existing) {\n\t\t\t\t\tif (onConflict === \"error\") {\n\t\t\t\t\t\tthrow new Error(\n\t\t\t\t\t\t\t`Conflict: taxonomy term \"${term.slug}\" in \"${taxonomyName}\" already exists`,\n\t\t\t\t\t\t);\n\t\t\t\t\t}\n\t\t\t\t\tif (onConflict === \"update\") {\n\t\t\t\t\t\tawait termRepo.update(existing.id, {\n\t\t\t\t\t\t\tlabel: term.label,\n\t\t\t\t\t\t\tparentId,\n\t\t\t\t\t\t\tdata: term.description ? { description: term.description } : {},\n\t\t\t\t\t\t});\n\t\t\t\t\t\tresult.taxonomies.terms++;\n\t\t\t\t\t}\n\t\t\t\t\tslugToId.set(term.slug, existing.id);\n\t\t\t\t} else {\n\t\t\t\t\tconst created = await termRepo.create({\n\t\t\t\t\t\tname: taxonomyName,\n\t\t\t\t\t\tslug: term.slug,\n\t\t\t\t\t\tlabel: term.label,\n\t\t\t\t\t\tparentId,\n\t\t\t\t\t\tdata: term.description ? { description: term.description } : undefined,\n\t\t\t\t\t});\n\t\t\t\t\tslugToId.set(term.slug, created.id);\n\t\t\t\t\tresult.taxonomies.terms++;\n\t\t\t\t}\n\n\t\t\t\tprocessedThisPass.push(term.slug);\n\t\t\t}\n\t\t}\n\n\t\t// Remove processed terms\n\t\tremaining = remaining.filter((t) => !processedThisPass.includes(t.slug));\n\t\tmaxPasses--;\n\t}\n\n\tif (remaining.length > 0) {\n\t\tconsole.warn(`Could not process ${remaining.length} terms due to missing parents`);\n\t}\n}\n\n/**\n * Apply byline credits to a content entry.\n * In update mode, clears existing credits even if the seed has none.\n */\nasync function applyContentBylines(\n\tbylineRepo: BylineRepository,\n\tcollectionSlug: string,\n\tcontentId: string,\n\tentry: { slug: string; bylines?: Array<{ byline: string; roleLabel?: string }> },\n\tseedBylineIdMap: Map<string, string>,\n\tisUpdate = false,\n): Promise<void> {\n\tif (!entry.bylines || entry.bylines.length === 0) {\n\t\t// In update mode, clear existing bylines when the seed entry has none\n\t\tif (isUpdate) {\n\t\t\tawait 
bylineRepo.setContentBylines(collectionSlug, contentId, []);\n\t\t}\n\t\treturn;\n\t}\n\n\tconst credits = entry.bylines\n\t\t.map((credit) => {\n\t\t\tconst bylineId = seedBylineIdMap.get(credit.byline);\n\t\t\tif (!bylineId) return null;\n\t\t\treturn {\n\t\t\t\tbylineId,\n\t\t\t\troleLabel: credit.roleLabel ?? null,\n\t\t\t};\n\t\t})\n\t\t.filter((credit): credit is { bylineId: string; roleLabel: string | null } => Boolean(credit));\n\n\tif (credits.length !== entry.bylines.length) {\n\t\tconsole.warn(\n\t\t\t`content.${collectionSlug}.${entry.slug}: one or more byline refs could not be resolved`,\n\t\t);\n\t}\n\n\t// In update mode, always call setContentBylines (even with empty credits)\n\t// to clear stale assignments when all byline refs fail to resolve.\n\t// In create mode, only call if there are credits to assign.\n\tif (credits.length > 0 || isUpdate) {\n\t\tawait bylineRepo.setContentBylines(collectionSlug, contentId, credits);\n\t}\n}\n\n/**\n * Apply taxonomy term assignments to a content entry.\n * In update mode, clears existing assignments before re-attaching.\n */\nasync function applyContentTaxonomies(\n\tdb: Kysely<Database>,\n\tcollectionSlug: string,\n\tcontentId: string,\n\tentry: { taxonomies?: Record<string, string[]> },\n\tisUpdate: boolean,\n): Promise<void> {\n\t// In update mode, clear existing taxonomy assignments first\n\tif (isUpdate) {\n\t\tawait db\n\t\t\t.deleteFrom(\"content_taxonomies\")\n\t\t\t.where(\"collection\", \"=\", collectionSlug)\n\t\t\t.where(\"entry_id\", \"=\", contentId)\n\t\t\t.execute();\n\t}\n\n\tif (!entry.taxonomies) return;\n\n\tfor (const [taxonomyName, termSlugs] of Object.entries(entry.taxonomies)) {\n\t\tconst termRepo = new TaxonomyRepository(db);\n\n\t\tfor (const termSlug of termSlugs) {\n\t\t\tconst term = await termRepo.findBySlug(taxonomyName, termSlug);\n\t\t\tif (term) {\n\t\t\t\tawait termRepo.attachToEntry(collectionSlug, contentId, term.id);\n\t\t\t}\n\t\t}\n\t}\n}\n\n/**\n * Apply menu items 
recursively\n */\nasync function applyMenuItems(\n\tdb: Kysely<Database>,\n\tmenuId: string,\n\titems: SeedMenuItem[],\n\tparentId: string | null,\n\tstartOrder: number,\n\tseedIdMap: Map<string, string>,\n): Promise<number> {\n\tlet count = 0;\n\tlet order = startOrder;\n\n\tfor (const item of items) {\n\t\tconst itemId = ulid();\n\n\t\t// Resolve reference if needed\n\t\tlet referenceId: string | null = null;\n\t\tlet referenceCollection: string | null = null;\n\n\t\tif (item.type === \"page\" || item.type === \"post\") {\n\t\t\t// Try to resolve from seedIdMap\n\t\t\tif (item.ref && seedIdMap.has(item.ref)) {\n\t\t\t\treferenceId = seedIdMap.get(item.ref)!;\n\t\t\t\t// Default to plural collection name (pages/posts) if not specified\n\t\t\t\treferenceCollection = item.collection || `${item.type}s`;\n\t\t\t}\n\t\t\t// If not in map, the content might not exist yet (will be broken link)\n\t\t}\n\n\t\t// Insert menu item\n\t\tawait db\n\t\t\t.insertInto(\"_emdash_menu_items\")\n\t\t\t.values({\n\t\t\t\tid: itemId,\n\t\t\t\tmenu_id: menuId,\n\t\t\t\tparent_id: parentId,\n\t\t\t\tsort_order: order,\n\t\t\t\ttype: item.type,\n\t\t\t\treference_collection: referenceCollection,\n\t\t\t\treference_id: referenceId,\n\t\t\t\tcustom_url: item.url ?? null,\n\t\t\t\tlabel: item.label || \"\",\n\t\t\t\ttitle_attr: item.titleAttr ?? null,\n\t\t\t\ttarget: item.target ?? null,\n\t\t\t\tcss_classes: item.cssClasses ?? 
null,\n\t\t\t\tcreated_at: new Date().toISOString(),\n\t\t\t})\n\t\t\t.execute();\n\n\t\tcount++;\n\t\torder++;\n\n\t\t// Process children\n\t\tif (item.children && item.children.length > 0) {\n\t\t\tconst childCount = await applyMenuItems(db, menuId, item.children, itemId, 0, seedIdMap);\n\t\t\tcount += childCount;\n\t\t}\n\t}\n\n\treturn count;\n}\n\n/**\n * Apply a widget\n */\nasync function applyWidget(\n\tdb: Kysely<Database>,\n\tareaId: string,\n\twidget: SeedWidget,\n\tsortOrder: number,\n): Promise<void> {\n\tawait db\n\t\t.insertInto(\"_emdash_widgets\")\n\t\t.values({\n\t\t\tid: ulid(),\n\t\t\tarea_id: areaId,\n\t\t\tsort_order: sortOrder,\n\t\t\ttype: widget.type,\n\t\t\ttitle: widget.title ?? null,\n\t\t\tcontent: widget.content ? JSON.stringify(widget.content) : null,\n\t\t\tmenu_name: widget.menuName ?? null,\n\t\t\tcomponent_id: widget.componentId ?? null,\n\t\t\tcomponent_props: widget.props ? JSON.stringify(widget.props) : null,\n\t\t})\n\t\t.execute();\n}\n\n/**\n * Context for media resolution during seed application\n */\ninterface MediaContext {\n\tdb: Kysely<Database>;\n\tstorage: Storage | null;\n\tskipMediaDownload: boolean;\n\tmediaCache: Map<string, MediaValue>; // URL -> resolved MediaValue\n}\n\n/**\n * Type guard for $media reference\n */\nfunction isSeedMediaReference(value: unknown): value is SeedMediaReference {\n\tif (typeof value !== \"object\" || value === null || !(\"$media\" in value)) {\n\t\treturn false;\n\t}\n\tconst media = (value as Record<string, unknown>).$media;\n\treturn (\n\t\ttypeof media === \"object\" &&\n\t\tmedia !== null &&\n\t\t\"url\" in media &&\n\t\ttypeof (media as Record<string, unknown>).url === \"string\"\n\t);\n}\n\n/**\n * Resolve $ref: and $media references in content data\n */\nasync function resolveReferences(\n\tdata: Record<string, unknown>,\n\tseedIdMap: Map<string, string>,\n\tmediaContext: MediaContext,\n\tresult: SeedApplyResult,\n): Promise<Record<string, unknown>> {\n\tconst resolved: 
Record<string, unknown> = {};\n\n\tfor (const [key, value] of Object.entries(data)) {\n\t\tresolved[key] = await resolveValue(value, seedIdMap, mediaContext, result);\n\t}\n\n\treturn resolved;\n}\n\n/**\n * Resolve a single value recursively\n */\nasync function resolveValue(\n\tvalue: unknown,\n\tseedIdMap: Map<string, string>,\n\tmediaContext: MediaContext,\n\tresult: SeedApplyResult,\n): Promise<unknown> {\n\t// Handle $ref: syntax\n\tif (typeof value === \"string\" && value.startsWith(\"$ref:\")) {\n\t\tconst seedId = value.slice(5);\n\t\treturn seedIdMap.get(seedId) ?? value; // Return unresolved if not found\n\t}\n\n\t// Handle $media syntax\n\tif (isSeedMediaReference(value)) {\n\t\treturn resolveMedia(value, mediaContext, result);\n\t}\n\n\t// Handle arrays\n\tif (Array.isArray(value)) {\n\t\treturn Promise.all(value.map((item) => resolveValue(item, seedIdMap, mediaContext, result)));\n\t}\n\n\t// Handle objects recursively\n\tif (typeof value === \"object\" && value !== null) {\n\t\tconst resolved: Record<string, unknown> = {};\n\t\tfor (const [k, v] of Object.entries(value)) {\n\t\t\tresolved[k] = await resolveValue(v, seedIdMap, mediaContext, result);\n\t\t}\n\t\treturn resolved;\n\t}\n\n\treturn value;\n}\n\n/**\n * Resolve a $media reference by downloading and uploading the media\n */\nasync function resolveMedia(\n\tref: SeedMediaReference,\n\tctx: MediaContext,\n\tresult: SeedApplyResult,\n): Promise<MediaValue | null> {\n\tconst { url, alt, filename, caption } = ref.$media;\n\n\t// Check cache first\n\tconst cached = ctx.mediaCache.get(url);\n\tif (cached) {\n\t\tresult.media.skipped++;\n\t\treturn { ...cached, alt: alt ?? cached.alt };\n\t}\n\n\t// When skipMediaDownload is set, resolve $media to an external URL reference\n\t// without downloading or storing anything. Used by playground mode.\n\tif (ctx.skipMediaDownload) {\n\t\tconst mediaValue: MediaValue = {\n\t\t\tprovider: \"external\",\n\t\t\tid: ulid(),\n\t\t\tsrc: url,\n\t\t\talt: alt ?? 
undefined,\n\t\t\tfilename: filename ?? undefined,\n\t\t};\n\t\tctx.mediaCache.set(url, mediaValue);\n\t\tresult.media.created++;\n\t\treturn mediaValue;\n\t}\n\n\t// Storage is required for $media resolution\n\tif (!ctx.storage) {\n\t\tconsole.warn(`Skipping $media reference (no storage configured): ${url}`);\n\t\tresult.media.skipped++;\n\t\treturn null;\n\t}\n\n\ttry {\n\t\t// SSRF protection: validate URL before downloading\n\t\tvalidateExternalUrl(url);\n\n\t\t// Download the media (ssrfSafeFetch re-validates redirect targets)\n\t\tconsole.log(` 📥 Downloading: ${url}`);\n\t\tconst response = await ssrfSafeFetch(url, {\n\t\t\theaders: {\n\t\t\t\t// Some services like Unsplash require a user-agent\n\t\t\t\t\"User-Agent\": \"EmDash-CMS/1.0\",\n\t\t\t},\n\t\t});\n\n\t\tif (!response.ok) {\n\t\t\tconsole.warn(` ⚠️ Failed to download ${url}: ${response.status}`);\n\t\t\tresult.media.skipped++;\n\t\t\treturn null;\n\t\t}\n\n\t\t// Get content type and determine extension\n\t\tconst contentType = response.headers.get(\"content-type\") || \"application/octet-stream\";\n\t\tconst ext = getExtensionFromContentType(contentType) || getExtensionFromUrl(url) || \".bin\";\n\n\t\t// Generate filename and storage key\n\t\tconst id = ulid();\n\t\tconst finalFilename = filename || generateFilename(url, ext);\n\t\tconst storageKey = `${id}${ext}`;\n\n\t\t// Get the body as buffer\n\t\tconst arrayBuffer = await response.arrayBuffer();\n\t\tconst body = new Uint8Array(arrayBuffer);\n\n\t\t// Get image dimensions if it's an image\n\t\tlet width: number | undefined;\n\t\tlet height: number | undefined;\n\t\tif (contentType.startsWith(\"image/\")) {\n\t\t\tconst dimensions = getImageDimensions(body);\n\t\t\twidth = dimensions?.width;\n\t\t\theight = dimensions?.height;\n\t\t}\n\n\t\t// Upload to storage\n\t\tawait ctx.storage.upload({\n\t\t\tkey: storageKey,\n\t\t\tbody,\n\t\t\tcontentType,\n\t\t});\n\n\t\t// Create media record\n\t\tconst mediaRepo = new 
MediaRepository(ctx.db);\n\t\tawait mediaRepo.create({\n\t\t\tfilename: finalFilename,\n\t\t\tmimeType: contentType,\n\t\t\tsize: body.length,\n\t\t\twidth,\n\t\t\theight,\n\t\t\talt,\n\t\t\tcaption,\n\t\t\tstorageKey,\n\t\t\tstatus: \"ready\",\n\t\t});\n\n\t\t// Create the MediaValue - only store id, URL is built at runtime by EmDashMedia\n\t\tconst mediaValue: MediaValue = {\n\t\t\tprovider: \"local\",\n\t\t\tid,\n\t\t\talt: alt ?? undefined,\n\t\t\twidth,\n\t\t\theight,\n\t\t\tmimeType: contentType,\n\t\t\tfilename: finalFilename,\n\t\t\tmeta: { storageKey },\n\t\t};\n\n\t\t// Cache for reuse\n\t\tctx.mediaCache.set(url, mediaValue);\n\t\tresult.media.created++;\n\n\t\tconsole.log(` ✅ Uploaded: ${finalFilename}`);\n\t\treturn mediaValue;\n\t} catch (error) {\n\t\tconsole.warn(\n\t\t\t` ⚠️ Error processing $media ${url}:`,\n\t\t\terror instanceof Error ? error.message : error,\n\t\t);\n\t\tresult.media.skipped++;\n\t\treturn null;\n\t}\n}\n\n/**\n * Get file extension from content type\n */\nfunction getExtensionFromContentType(contentType: string): string | null {\n\t// Handle content-type with parameters like \"image/jpeg; charset=utf-8\"\n\tconst baseMime = contentType.split(\";\")[0].trim();\n\tconst ext = mime.getExtension(baseMime);\n\treturn ext ? `.${ext}` : null;\n}\n\n/**\n * Get file extension from URL\n */\nfunction getExtensionFromUrl(url: string): string | null {\n\ttry {\n\t\tconst pathname = new URL(url).pathname;\n\t\tconst match = pathname.match(FILE_EXTENSION_PATTERN);\n\t\treturn match ? 
`.${match[1]}` : null;\n\t} catch {\n\t\treturn null;\n\t}\n}\n\n/**\n * Generate a filename from URL\n */\nfunction generateFilename(url: string, ext: string): string {\n\ttry {\n\t\tconst pathname = new URL(url).pathname;\n\t\tconst basename = pathname.split(\"/\").pop() || \"media\";\n\t\t// Remove any existing extension and query params\n\t\tconst name = basename.replace(EXTENSION_PATTERN, \"\").replace(QUERY_PARAM_PATTERN, \"\");\n\t\t// Sanitize: only alphanumeric, dash, underscore\n\t\tconst sanitized = name.replace(SANITIZE_PATTERN, \"-\").replace(MULTIPLE_HYPHENS_PATTERN, \"-\");\n\t\treturn `${sanitized || \"media\"}${ext}`;\n\t} catch {\n\t\treturn `media${ext}`;\n\t}\n}\n\n/**\n * Get image dimensions from buffer using image-size.\n * Supports PNG, JPEG, GIF, WebP, AVIF, SVG, TIFF, and more.\n */\nfunction getImageDimensions(buffer: Uint8Array): { width: number; height: number } | null {\n\ttry {\n\t\tconst result = imageSize(buffer);\n\t\tif (result.width != null && result.height != null) {\n\t\t\treturn { width: result.width, height: result.height };\n\t\t}\n\t\treturn null;\n\t} catch {\n\t\treturn 
null;\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;AAkCA,IAAa,qBAAb,MAAgC;CAC/B,YAAY,AAAQ,IAAsB;EAAtB;;;;;CAKpB,MAAM,OAAO,OAA+C;EAC3D,MAAM,KAAK,MAAM;EAEjB,MAAM,MAAqB;GAC1B;GACA,MAAM,MAAM;GACZ,MAAM,MAAM;GACZ,OAAO,MAAM;GACb,WAAW,MAAM,YAAY;GAC7B,MAAM,MAAM,OAAO,KAAK,UAAU,MAAM,KAAK,GAAG;GAChD;AAED,QAAM,KAAK,GAAG,WAAW,aAAa,CAAC,OAAO,IAAI,CAAC,SAAS;EAE5D,MAAM,WAAW,MAAM,KAAK,SAAS,GAAG;AACxC,MAAI,CAAC,SACJ,OAAM,IAAI,MAAM,4BAA4B;AAE7C,SAAO;;;;;CAMR,MAAM,SAAS,IAAsC;EACpD,MAAM,MAAM,MAAM,KAAK,GACrB,WAAW,aAAa,CACxB,WAAW,CACX,MAAM,MAAM,KAAK,GAAG,CACpB,kBAAkB;AAEpB,SAAO,MAAM,KAAK,cAAc,IAAI,GAAG;;;;;CAMxC,MAAM,WAAW,MAAc,MAAwC;EACtE,MAAM,MAAM,MAAM,KAAK,GACrB,WAAW,aAAa,CACxB,WAAW,CACX,MAAM,QAAQ,KAAK,KAAK,CACxB,MAAM,QAAQ,KAAK,KAAK,CACxB,kBAAkB;AAEpB,SAAO,MAAM,KAAK,cAAc,IAAI,GAAG;;;;;CAMxC,MAAM,WAAW,MAAc,UAAwC,EAAE,EAAuB;EAC/F,IAAI,QAAQ,KAAK,GACf,WAAW,aAAa,CACxB,WAAW,CACX,MAAM,QAAQ,KAAK,KAAK,CACxB,QAAQ,SAAS,MAAM;AAEzB,MAAI,QAAQ,aAAa,OACxB,KAAI,QAAQ,aAAa,KACxB,SAAQ,MAAM,MAAM,aAAa,MAAM,KAAK;MAE5C,SAAQ,MAAM,MAAM,aAAa,KAAK,QAAQ,SAAS;AAKzD,UADa,MAAM,MAAM,SAAS,EACtB,KAAK,QAAQ,KAAK,cAAc,IAAI,CAAC;;;;;CAMlD,MAAM,aAAa,UAAuC;AAQzD,UAPa,MAAM,KAAK,GACtB,WAAW,aAAa,CACxB,WAAW,CACX,MAAM,aAAa,KAAK,SAAS,CACjC,QAAQ,SAAS,MAAM,CACvB,SAAS,EAEC,KAAK,QAAQ,KAAK,cAAc,IAAI,CAAC;;;;;CAMlD,MAAM,OAAO,IAAY,OAAsD;AAE9E,MAAI,CADa,MAAM,KAAK,SAAS,GAAG,CACzB,QAAO;EAEtB,MAAM,UAAkC,EAAE;AAC1C,MAAI,MAAM,SAAS,OAAW,SAAQ,OAAO,MAAM;AACnD,MAAI,MAAM,UAAU,OAAW,SAAQ,QAAQ,MAAM;AACrD,MAAI,MAAM,aAAa,OAAW,SAAQ,YAAY,MAAM;AAC5D,MAAI,MAAM,SAAS,OAAW,SAAQ,OAAO,KAAK,UAAU,MAAM,KAAK;AAEvE,MAAI,OAAO,KAAK,QAAQ,CAAC,SAAS,EACjC,OAAM,KAAK,GAAG,YAAY,aAAa,CAAC,IAAI,QAAQ,CAAC,MAAM,MAAM,KAAK,GAAG,CAAC,SAAS;AAGpF,SAAO,KAAK,SAAS,GAAG;;;;;CAMzB,MAAM,OAAO,IAA8B;AAE1C,QAAM,KAAK,GAAG,WAAW,qBAAqB,CAAC,MAAM,eAAe,KAAK,GAAG,CAAC,SAAS;AAItF,WAFe,MAAM,KAAK,GAAG,WAAW,aAAa,CAAC,MAAM,MAAM,KAAK,GAAG,CAAC,kBAAkB,EAE9E,kBAAkB,KAAK;;;;;CAQvC,MAAM,cAAc,YAAoB,SAAiB,YAAmC;EAC3F,MAAM,MAA4B;GACjC;GACA,UAAU;GACV,aAAa;GACb;AAGD,QAAM,KAAK,GACT,WAAW,qBAAqB,CAChC,OAAO,IAAI,CACX,YAAY
,OAAO,GAAG,WAAW,CAAC,CAClC,SAAS;;;;;CAMZ,MAAM,gBAAgB,YAAoB,SAAiB,YAAmC;AAC7F,QAAM,KAAK,GACT,WAAW,qBAAqB,CAChC,MAAM,cAAc,KAAK,WAAW,CACpC,MAAM,YAAY,KAAK,QAAQ,CAC/B,MAAM,eAAe,KAAK,WAAW,CACrC,SAAS;;;;;CAMZ,MAAM,iBACL,YACA,SACA,cACsB;EACtB,IAAI,QAAQ,KAAK,GACf,WAAW,qBAAqB,CAChC,UAAU,cAAc,iBAAiB,iCAAiC,CAC1E,UAAU,aAAa,CACvB,MAAM,iCAAiC,KAAK,WAAW,CACvD,MAAM,+BAA+B,KAAK,QAAQ;AAEpD,MAAI,aACH,SAAQ,MAAM,MAAM,mBAAmB,KAAK,aAAa;AAI1D,UADa,MAAM,MAAM,SAAS,EACtB,KAAK,QAAQ,KAAK,cAAc,IAAI,CAAC;;;;;;CAOlD,MAAM,iBACL,YACA,SACA,cACA,aACgB;EAEhB,MAAM,UAAU,MAAM,KAAK,iBAAiB,YAAY,SAAS,aAAa;EAC9E,MAAM,aAAa,IAAI,IAAI,QAAQ,KAAK,MAAM,EAAE,GAAG,CAAC;EACpD,MAAM,SAAS,IAAI,IAAI,YAAY;EAGnC,MAAM,WAAW,QAAQ,QAAQ,MAAM,CAAC,OAAO,IAAI,EAAE,GAAG,CAAC,CAAC,KAAK,MAAM,EAAE,GAAG;AAC1E,MAAI,SAAS,SAAS,EACrB,OAAM,KAAK,GACT,WAAW,qBAAqB,CAChC,MAAM,cAAc,KAAK,WAAW,CACpC,MAAM,YAAY,KAAK,QAAQ,CAC/B,MAAM,eAAe,MAAM,SAAS,CACpC,SAAS;EAIZ,MAAM,QAAQ,YAAY,QAAQ,OAAO,CAAC,WAAW,IAAI,GAAG,CAAC;AAC7D,MAAI,MAAM,SAAS,EAClB,OAAM,KAAK,GACT,WAAW,qBAAqB,CAChC,OACA,MAAM,KAAK,iBAAiB;GAC3B;GACA,UAAU;GACV;GACA,EAAE,CACH,CACA,YAAY,OAAO,GAAG,WAAW,CAAC,CAClC,SAAS;;;;;CAOb,MAAM,gBAAgB,YAAoB,SAAkC;EAC3E,MAAM,SAAS,MAAM,KAAK,GACxB,WAAW,qBAAqB,CAChC,MAAM,cAAc,KAAK,WAAW,CACpC,MAAM,YAAY,KAAK,QAAQ,CAC/B,kBAAkB;AAEpB,SAAO,OAAO,OAAO,kBAAkB,EAAE;;;;;CAM1C,MAAM,qBAAqB,YAAqC;EAC/D,MAAM,SAAS,MAAM,KAAK,GACxB,WAAW,qBAAqB,CAChC,QAAQ,OAAO,GAAG,GAAG,MAAM,WAAW,CAAC,GAAG,QAAQ,CAAC,CACnD,MAAM,eAAe,KAAK,WAAW,CACrC,kBAAkB;AAEpB,SAAO,OAAO,QAAQ,SAAS,EAAE;;;;;CAMlC,AAAQ,cAAc,KAA8B;AACnD,SAAO;GACN,IAAI,IAAI;GACR,MAAM,IAAI;GACV,MAAM,IAAI;GACV,OAAO,IAAI;GACX,UAAU,IAAI;GACd,MAAM,IAAI,OAAO,KAAK,MAAM,IAAI,KAAK,GAAG;GACxC;;;;;;AC/RH,SAAS,WAAW,OAAuB;AAC1C,QAAO,MAAM,WAAW,MAAM,OAAO,CAAC,WAAW,KAAK,MAAM,CAAC,WAAW,KAAK,MAAM;;;;;;;;AASpF,IAAa,oBAAb,MAA+B;CAC9B,YAAY,AAAQ,IAAsB;EAAtB;;;;;CAKpB,MAAM,IAAiB,MAAiC;EACvD,MAAM,MAAM,MAAM,KAAK,GACrB,WAAW,UAAU,CACrB,OAAO,QAAQ,CACf,MAAM,QAAQ,KAAK,KAAK,CACxB,kBAAkB;AAEpB,MAAI,CAAC,IAAK,QAAO;AAEjB,SAAO,KAAK,MAAM,IAAI,MAAM;;;;;CAM7B,MAAM,aAAgB,MAAc,cA
A6B;AAEhE,SADc,MAAM,KAAK,IAAO,KAAK,IACrB;;;;;CAMjB,MAAM,IAAiB,MAAc,OAAyB;EAC7D,MAAM,MAAmB;GACxB;GACA,OAAO,KAAK,UAAU,MAAM;GAC5B;AAGD,QAAM,KAAK,GACT,WAAW,UAAU,CACrB,OAAO,IAAI,CACX,YAAY,OAAO,GAAG,OAAO,OAAO,CAAC,YAAY,EAAE,OAAO,IAAI,OAAO,CAAC,CAAC,CACvE,SAAS;;;;;CAMZ,MAAM,OAAO,MAAgC;AAG5C,WAFe,MAAM,KAAK,GAAG,WAAW,UAAU,CAAC,MAAM,QAAQ,KAAK,KAAK,CAAC,kBAAkB,EAE/E,kBAAkB,KAAK;;;;;CAMvC,MAAM,OAAO,MAAgC;AAO5C,SAAO,CAAC,CANI,MAAM,KAAK,GACrB,WAAW,UAAU,CACrB,OAAO,OAAO,CACd,MAAM,QAAQ,KAAK,KAAK,CACxB,kBAAkB;;;;;CAQrB,MAAM,QAAqB,OAA0C;AACpE,MAAI,MAAM,WAAW,EAAG,wBAAO,IAAI,KAAK;EAExC,MAAM,OAAO,MAAM,KAAK,GACtB,WAAW,UAAU,CACrB,OAAO,CAAC,QAAQ,QAAQ,CAAC,CACzB,MAAM,QAAQ,MAAM,MAAM,CAC1B,SAAS;EAEX,MAAM,yBAAS,IAAI,KAAgB;AACnC,OAAK,MAAM,OAAO,KAEjB,QAAO,IAAI,IAAI,MAAM,KAAK,MAAM,IAAI,MAAM,CAAM;AAEjD,SAAO;;;;;CAMR,MAAM,QAAqB,SAA2C;EACrE,MAAM,UAAU,OAAO,QAAQ,QAAQ;AACvC,MAAI,QAAQ,WAAW,EAAG;AAE1B,OAAK,MAAM,CAAC,MAAM,UAAU,QAC3B,OAAM,KAAK,IAAI,MAAM,MAAM;;;;;CAO7B,MAAM,SAAwC;EAC7C,MAAM,OAAO,MAAM,KAAK,GAAG,WAAW,UAAU,CAAC,OAAO,CAAC,QAAQ,QAAQ,CAAC,CAAC,SAAS;EAEpF,MAAM,yBAAS,IAAI,KAAsB;AACzC,OAAK,MAAM,OAAO,KACjB,QAAO,IAAI,IAAI,MAAM,KAAK,MAAM,IAAI,MAAM,CAAC;AAE5C,SAAO;;;;;CAMR,MAAM,YAAyB,QAAyC;EACvE,MAAM,UAAU,GAAG,WAAW,OAAO,CAAC;EACtC,MAAM,OAAO,MAAM,KAAK,GACtB,WAAW,UAAU,CACrB,OAAO,CAAC,QAAQ,QAAQ,CAAC,CACzB,MAAM,GAAY,aAAa,QAAQ,cAAc,CACrD,SAAS;EAEX,MAAM,yBAAS,IAAI,KAAgB;AACnC,OAAK,MAAM,OAAO,KAEjB,QAAO,IAAI,IAAI,MAAM,KAAK,MAAM,IAAI,MAAM,CAAM;AAEjD,SAAO;;;;;CAMR,MAAM,eAAe,QAAiC;EACrD,MAAM,UAAU,GAAG,WAAW,OAAO,CAAC;EACtC,MAAM,SAAS,MAAM,KAAK,GACxB,WAAW,UAAU,CACrB,MAAM,GAAY,aAAa,QAAQ,cAAc,CACrD,kBAAkB;AAEpB,SAAO,OAAO,OAAO,kBAAkB,EAAE;;;;;;;ACxI3C,MAAM,kBAAkB;;;;AAKxB,SAAS,iBAAiB,OAAyC;AAClE,QAAO,OAAO,UAAU,YAAY,UAAU,QAAQ,aAAa;;;;;AAMpE,eAAe,sBACd,UACA,IACA,UAC2D;AAC3D,KAAI,CAAC,UAAU,QACd,QAAO;AAGR,KAAI;EAEH,MAAM,QAAQ,MADI,IAAI,gBAAgB,GAAG,CACX,SAAS,SAAS,QAAQ;AAExD,MAAI,MAEH,QAAO;GACN,GAAG;GACH,KAAK,2BAA2B,MAAM;GACtC;SAEK;AAIR,QAAO;;;;;;;;;;;;;;;;;;;;AAqBR,eAAsB,eACrB,KACuC;AAEvC,QAAO,qBAAqB,KADjB,MAAM,OAAO,CACY;;;;;;
;;AASrC,eAAsB,qBACrB,KACA,IACA,UAA0B,MACa;CAEvC,MAAM,QAAQ,MADE,IAAI,kBAAkB,GAAG,CACb,IAAqB,GAAG,kBAAkB,MAAM;AAE5E,KAAI,CAAC,MACJ;AAMD,MAAK,QAAQ,UAAU,QAAQ,cAAc,iBAAiB,MAAM,CAGnE,QAFiB,MAAM,sBAAsB,OAAO,IAAI,QAAQ;AAKjE,QAAO;;;;;;;;;;;;;;;;;AAkBR,eAAsB,kBAAkD;AAEvE,QAAO,sBADI,MAAM,OAAO,CACQ;;;;;;;;AASjC,eAAsB,sBACrB,IACA,UAA0B,MACO;CAEjC,MAAM,aAAa,MADH,IAAI,kBAAkB,GAAG,CACR,YAAY,gBAAgB;CAE7D,MAAM,WAAoC,EAAE;AAG5C,MAAK,MAAM,CAAC,KAAK,UAAU,YAAY;EACtC,MAAM,aAAa,IAAI,QAAQ,iBAAiB,GAAG;AACnD,WAAS,cAAc;;CAGxB,MAAM,gBAAgB;AAGtB,KAAI,cAAc,KACjB,eAAc,OAAO,MAAM,sBAAsB,cAAc,MAAM,IAAI,QAAQ;AAElF,KAAI,cAAc,QACjB,eAAc,UAAU,MAAM,sBAAsB,cAAc,SAAS,IAAI,QAAQ;AAGxF,QAAO;;;;;;;;;;;;;;;;;;;;;;;;AAyBR,eAAsB,gBACrB,UACA,IACgB;CAChB,MAAM,UAAU,IAAI,kBAAkB,GAAG;CAGzC,MAAM,UAAmC,EAAE;AAC3C,MAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,SAAS,CAClD,KAAI,UAAU,OACb,SAAQ,GAAG,kBAAkB,SAAS;AAIxC,OAAM,QAAQ,QAAQ,QAAQ;;;;;;;;AAS/B,eAAsB,iBACrB,UACA,KACyB;AAEzB,QAAO,uBAA0B,UAAU,KADhC,MAAM,OAAO,CAC2B;;;;;;;AAQpD,eAAsB,uBACrB,UACA,KACA,IACyB;AAGzB,QADc,MADE,IAAI,kBAAkB,GAAG,CACb,IAAO,UAAU,SAAS,YAAY,MAAM,IACxD;;;;;;;;AASjB,eAAsB,kBAAkB,UAAoD;AAE3F,QAAO,wBAAwB,UADpB,MAAM,OAAO,CACoB;;;;;;;AAQ7C,eAAsB,wBACrB,UACA,IACmC;CACnC,MAAM,SAAS,UAAU,SAAS;CAElC,MAAM,aAAa,MADH,IAAI,kBAAkB,GAAG,CACR,YAAY,OAAO;CAEpD,MAAM,WAAoC,EAAE;AAC5C,MAAK,MAAM,CAAC,KAAK,UAAU,YAAY;AACtC,MAAI,CAAC,IAAI,WAAW,OAAO,CAC1B;AAED,WAAS,IAAI,MAAM,OAAO,OAAO,IAAI;;AAGtC,QAAO;;;;;;;;;;;AClQR,MAAM,kCAAkC;AACxC,MAAM,+BAA+B;AACrC,MAAM,8BAA8B;AACpC,MAAM,+BACL;;;;;;;AAQD,MAAM,8BAA8B;;;;;;;AAQpC,MAAM,oBAAoB;AAE1B,MAAM,uBAAuB;;;;;;;;;;;AAY7B,MAAM,mBAA0D;CAE/D;EAAE,OAAO,SAAS,KAAK,GAAG,GAAG,EAAE;EAAE,KAAK,SAAS,KAAK,KAAK,KAAK,IAAI;EAAE;CAEpE;EAAE,OAAO,SAAS,IAAI,GAAG,GAAG,EAAE;EAAE,KAAK,SAAS,IAAI,KAAK,KAAK,IAAI;EAAE;CAElE;EAAE,OAAO,SAAS,KAAK,IAAI,GAAG,EAAE;EAAE,KAAK,SAAS,KAAK,IAAI,KAAK,IAAI;EAAE;CAEpE;EAAE,OAAO,SAAS,KAAK,KAAK,GAAG,EAAE;EAAE,KAAK,SAAS,KAAK,KAAK,KAAK,IAAI;EAAE;CAEtE;EAAE,OAAO,SAAS,KAAK,KAAK,GAAG,EAAE;EAAE,KAAK,SAAS,KAAK,KAAK,KAAK,IAAI;EAAE;CAEtE;EAAE,OAAO,SAAS,G
AAG,GAAG,GAAG,EAAE;EAAE,KAAK,SAAS,GAAG,KAAK,KAAK,IAAI;EAAE;CAChE;AAED,MAAM,oBAAoB,IAAI,IAAI;CACjC;CACA;CACA;CACA;CACA,CAAC;;AAGF,MAAM,kBAAkB,IAAI,IAAI,CAAC,SAAS,SAAS,CAAC;AAEpD,SAAS,SAAS,GAAW,GAAW,GAAW,GAAmB;AACrE,SAAS,KAAK,KAAO,KAAK,KAAO,KAAK,IAAK,OAAO;;AAGnD,SAAS,UAAU,IAA2B;CAC7C,MAAM,QAAQ,GAAG,MAAM,IAAI;AAC3B,KAAI,MAAM,WAAW,EAAG,QAAO;CAE/B,MAAM,OAAO,MAAM,IAAI,OAAO;AAC9B,KAAI,KAAK,MAAM,MAAM,MAAM,EAAE,IAAI,IAAI,KAAK,IAAI,IAAI,CAAE,QAAO;AAE3D,QAAO,SAAS,KAAK,IAAI,KAAK,IAAI,KAAK,IAAI,KAAK,GAAG;;;;;;;;;;;AAYpD,SAAgB,0BAA0B,IAA2B;CAEpE,IAAI,QAAQ,GAAG,MAAM,6BAA6B;AAClD,KAAI,CAAC,MAEJ,SAAQ,GAAG,MAAM,4BAA4B;AAE9C,KAAI,CAAC,MAEJ,SAAQ,GAAG,MAAM,6BAA6B;AAE/C,KAAI,CAAC,MAEJ,SAAQ,GAAG,MAAM,4BAA4B;AAE9C,KAAI,CAAC,MAEJ,SAAQ,GAAG,MAAM,kBAAkB;AAEpC,KAAI,OAAO;EACV,MAAM,OAAO,SAAS,MAAM,MAAM,IAAI,GAAG;EACzC,MAAM,MAAM,SAAS,MAAM,MAAM,IAAI,GAAG;AACxC,SAAO,GAAI,QAAQ,IAAK,IAAK,GAAG,OAAO,IAAK,GAAI,OAAO,IAAK,IAAK,GAAG,MAAM;;AAE3E,QAAO;;AAGR,SAAS,YAAY,IAAqB;AAEzC,KAAI,OAAO,SAAS,OAAO,mBAAoB,QAAO;CAItD,MAAM,UAAU,0BAA0B,GAAG;AAC7C,KAAI,QAAS,QAAO,YAAY,QAAQ;CAGxC,MAAM,UAAU,GAAG,MAAM,gCAAgC;CAGzD,MAAM,MAAM,UAFC,UAAU,QAAQ,KAAK,GAET;AAC3B,KAAI,QAAQ,KAEX,QAAO,GAAG,WAAW,QAAQ,IAAI,GAAG,WAAW,KAAK,IAAI,GAAG,WAAW,KAAK;AAG5E,QAAO,iBAAiB,MAAM,UAAU,OAAO,MAAM,SAAS,OAAO,MAAM,IAAI;;;;;AAMhF,IAAa,YAAb,cAA+B,MAAM;CACpC,OAAO;CAEP,YAAY,SAAiB;AAC5B,QAAM,QAAQ;AACd,OAAK,OAAO;;;;;;;;;;;;;;;;;;;AAoBd,MAAM,gBAAgB;AAEtB,SAAgB,oBAAoB,KAAkB;CACrD,IAAI;AACJ,KAAI;AACH,WAAS,IAAI,IAAI,IAAI;SACd;AACP,QAAM,IAAI,UAAU,cAAc;;AAInC,KAAI,CAAC,gBAAgB,IAAI,OAAO,SAAS,CACxC,OAAM,IAAI,UAAU,WAAW,OAAO,SAAS,kBAAkB;CAIlE,MAAM,WAAW,OAAO,SAAS,QAAQ,sBAAsB,GAAG;AAGlE,KAAI,kBAAkB,IAAI,SAAS,aAAa,CAAC,CAChD,OAAM,IAAI,UAAU,gDAAgD;AAIrE,KAAI,YAAY,SAAS,CACxB,OAAM,IAAI,UAAU,sDAAsD;AAG3E,QAAO;;;;;;;;;;;;;AAcR,MAAM,qBAAqB;CAAC;CAAiB;CAAU;CAAsB;AAE7E,eAAsB,cAAc,KAAa,MAAuC;CACvF,IAAI,aAAa;CACjB,IAAI,cAAc;AAElB,MAAK,IAAI,IAAI,GAAG,KAAK,eAAe,KAAK;AACxC,sBAAoB,WAAW;EAE/B,MAAM,WAAW,MAAM,WAAW,MAAM,YAAY;GACnD,GAAG;GACH,UAAU;GACV,CAAC;AAGF,MAAI,SAAS,SAAS,OAAO,SAAS,UA
AU,IAC/C,QAAO;EAIR,MAAM,WAAW,SAAS,QAAQ,IAAI,WAAW;AACjD,MAAI,CAAC,SACJ,QAAO;EAIR,MAAM,iBAAiB,IAAI,IAAI,WAAW,CAAC;AAC3C,eAAa,IAAI,IAAI,UAAU,WAAW,CAAC;AAI3C,MAAI,mBAHe,IAAI,IAAI,WAAW,CAAC,UAGF,YACpC,eAAc,uBAAuB,YAAY;;AAInD,OAAM,IAAI,UAAU,2BAA2B,cAAc,GAAG;;;;;AAMjE,SAAgB,uBAAuB,MAAgC;AACtE,KAAI,CAAC,KAAK,QAAS,QAAO;CAE1B,MAAM,UAAU,IAAI,QAAQ,KAAK,QAAQ;AACzC,MAAK,MAAM,QAAQ,mBAClB,SAAQ,OAAO,KAAK;AAGrB,QAAO;EAAE,GAAG;EAAM;EAAS;;;;;;;;;;;;AChO5B,MAAM,yBAAyB;;AAI/B,MAAM,oBAAoB;;AAG1B,MAAM,sBAAsB;;AAG5B,MAAM,mBAAmB;;AAGzB,MAAM,2BAA2B;;;;;;;;;;;AAYjC,eAAsB,UACrB,IACA,MACA,UAA4B,EAAE,EACH;CAE3B,MAAM,aAAa,aAAa,KAAK;AACrC,KAAI,CAAC,WAAW,MACf,OAAM,IAAI,MAAM,uBAAuB,WAAW,OAAO,KAAK,KAAK,GAAG;CAGvE,MAAM,EACL,iBAAiB,OACjB,SACA,oBAAoB,OACpB,aAAa,WACV;CAGJ,MAAM,SAA0B;EAC/B,aAAa;GAAE,SAAS;GAAG,SAAS;GAAG,SAAS;GAAG;EACnD,QAAQ;GAAE,SAAS;GAAG,SAAS;GAAG,SAAS;GAAG;EAC9C,YAAY;GAAE,SAAS;GAAG,OAAO;GAAG;EACpC,SAAS;GAAE,SAAS;GAAG,SAAS;GAAG,SAAS;GAAG;EAC/C,OAAO;GAAE,SAAS;GAAG,OAAO;GAAG;EAC/B,WAAW;GAAE,SAAS;GAAG,SAAS;GAAG,SAAS;GAAG;EACjD,aAAa;GAAE,SAAS;GAAG,SAAS;GAAG;EACvC,UAAU;GAAE,SAAS;GAAG,SAAS;GAAG,SAAS;GAAG;EAChD,UAAU,EAAE,SAAS,GAAG;EACxB,SAAS;GAAE,SAAS;GAAG,SAAS;GAAG,SAAS;GAAG;EAC/C,OAAO;GAAE,SAAS;GAAG,SAAS;GAAG;EACjC;CAGD,MAAM,eAA6B;EAClC;EACA,SAAS,WAAW;EACpB;EACA,4BAAY,IAAI,KAAK;EACrB;CAYD,MAAM,4BAAY,IAAI,KAAqB;CAC3C,MAAM,kCAAkB,IAAI,KAAqB;AAGjD,KAAI,KAAK,UAAU;AAClB,QAAM,gBAAgB,KAAK,UAAU,GAAG;AACxC,SAAO,SAAS,UAAU,OAAO,KAAK,KAAK,SAAS,CAAC;;AAItD,KAAI,KAAK,aAAa;EACrB,MAAM,WAAW,IAAI,eAAe,GAAG;AAEvC,OAAK,MAAM,cAAc,KAAK,aAAa;AAI1C,OAFiB,MAAM,SAAS,cAAc,WAAW,KAAK,EAEhD;AACb,QAAI,eAAe,QAClB,OAAM,IAAI,MAAM,yBAAyB,WAAW,KAAK,kBAAkB;AAG5E,QAAI,eAAe,UAAU;AAC5B,WAAM,SAAS,iBAAiB,WAAW,MAAM;MAChD,OAAO,WAAW;MAClB,eAAe,WAAW;MAC1B,aAAa,WAAW;MACxB,MAAM,WAAW;MACjB,UAAU,WAAW,YAAY,EAAE;MACnC,YAAY,WAAW;MACvB,iBAAiB,WAAW;MAC5B,CAAC;AACF,YAAO,YAAY;AAGnB,UAAK,MAAM,SAAS,WAAW,OAE9B,KADsB,MAAM,SAAS,SAAS,WAAW,MAAM,MAAM,KAAK,EACvD;AAClB,YAAM,SAAS,YAAY,WAAW,MAAM,MAAM,MAAM;OACvD,OAAO,MAAM;OACb,UAAU,MAAM,YAAY;OAC5B,QAAQ,MAAM,UAAU;
OACxB,YAAY,MAAM,cAAc;OAChC,cAAc,MAAM;OACpB,YAAY,MAAM;OAClB,QAAQ,MAAM;OACd,SAAS,MAAM;OACf,CAAC;AACF,aAAO,OAAO;YACR;AACN,YAAM,SAAS,YAAY,WAAW,MAAM;OAC3C,MAAM,MAAM;OACZ,OAAO,MAAM;OACb,MAAM,MAAM;OACZ,UAAU,MAAM,YAAY;OAC5B,QAAQ,MAAM,UAAU;OACxB,YAAY,MAAM,cAAc;OAChC,cAAc,MAAM;OACpB,YAAY,MAAM;OAClB,QAAQ,MAAM;OACd,SAAS,MAAM;OACf,CAAC;AACF,aAAO,OAAO;;AAGhB;;AAID,WAAO,YAAY;AACnB,WAAO,OAAO,WAAW,WAAW,OAAO;AAC3C;;AAID,SAAM,SAAS,iBAAiB;IAC/B,MAAM,WAAW;IACjB,OAAO,WAAW;IAClB,eAAe,WAAW;IAC1B,aAAa,WAAW;IACxB,MAAM,WAAW;IACjB,UAAU,WAAW,YAAY,EAAE;IACnC,QAAQ;IACR,YAAY,WAAW;IACvB,iBAAiB,WAAW;IAC5B,CAAC;AACF,UAAO,YAAY;AAGnB,QAAK,MAAM,SAAS,WAAW,QAAQ;AACtC,UAAM,SAAS,YAAY,WAAW,MAAM;KAC3C,MAAM,MAAM;KACZ,OAAO,MAAM;KACb,MAAM,MAAM;KACZ,UAAU,MAAM,YAAY;KAC5B,QAAQ,MAAM,UAAU;KACxB,YAAY,MAAM,cAAc;KAChC,cAAc,MAAM;KACpB,YAAY,MAAM;KAClB,QAAQ,MAAM;KACd,SAAS,MAAM;KACf,CAAC;AACF,WAAO,OAAO;;;;AAMjB,KAAI,KAAK,WACR,MAAK,MAAM,YAAY,KAAK,YAAY;EAEvC,MAAM,cAAc,MAAM,GACxB,WAAW,wBAAwB,CACnC,WAAW,CACX,MAAM,QAAQ,KAAK,SAAS,KAAK,CACjC,kBAAkB;AAEpB,MAAI,aAAa;AAChB,OAAI,eAAe,QAClB,OAAM,IAAI,MAAM,uBAAuB,SAAS,KAAK,kBAAkB;AAExE,OAAI,eAAe,SAClB,OAAM,GACJ,YAAY,wBAAwB,CACpC,IAAI;IACJ,OAAO,SAAS;IAChB,gBAAgB,SAAS,iBAAiB;IAC1C,cAAc,SAAS,eAAe,IAAI;IAC1C,aAAa,KAAK,UAAU,SAAS,YAAY;IACjD,CAAC,CACD,MAAM,MAAM,KAAK,YAAY,GAAG,CAChC,SAAS;SAIN;AAEN,SAAM,GACJ,WAAW,wBAAwB,CACnC,OAAO;IACP,IAAI,MAAM;IACV,MAAM,SAAS;IACf,OAAO,SAAS;IAChB,gBAAgB,SAAS,iBAAiB;IAC1C,cAAc,SAAS,eAAe,IAAI;IAC1C,aAAa,KAAK,UAAU,SAAS,YAAY;IACjD,CAAC,CACD,SAAS;AACX,UAAO,WAAW;;AAInB,MAAI,SAAS,SAAS,SAAS,MAAM,SAAS,GAAG;GAChD,MAAM,WAAW,IAAI,mBAAmB,GAAG;AAG3C,OAAI,SAAS,aACZ,OAAM,uBAAuB,UAAU,SAAS,MAAM,SAAS,OAAO,QAAQ,WAAW;OAGzF,MAAK,MAAM,QAAQ,SAAS,OAAO;IAClC,MAAM,WAAW,MAAM,SAAS,WAAW,SAAS,MAAM,KAAK,KAAK;AACpE,QAAI,UAAU;AACb,SAAI,eAAe,QAClB,OAAM,IAAI,MACT,4BAA4B,KAAK,KAAK,QAAQ,SAAS,KAAK,kBAC5D;AAEF,SAAI,eAAe,UAAU;AAC5B,YAAM,SAAS,OAAO,SAAS,IAAI;OAClC,OAAO,KAAK;OACZ,MAAM,KAAK,cAAc,EAAE,aAAa,KAAK,aAAa,GAAG,EAAE;OAC/D,CAAC;AACF,aAAO,WAAW;;WAGb;AACN,WAAM,SAAS,OAAO;MACrB,MAAM,SAAS;MACf,MAAM,KAA
K;MACX,OAAO,KAAK;MACZ,MAAM,KAAK,cAAc,EAAE,aAAa,KAAK,aAAa,GAAG;MAC7D,CAAC;AACF,YAAO,WAAW;;;;;AASxB,KAAI,KAAK,SAAS;EACjB,MAAM,aAAa,IAAI,iBAAiB,GAAG;AAC3C,OAAK,MAAM,UAAU,KAAK,SAAS;GAClC,MAAM,WAAW,MAAM,WAAW,WAAW,OAAO,KAAK;AACzD,OAAI,UAAU;AACb,QAAI,eAAe,QAClB,OAAM,IAAI,MAAM,qBAAqB,OAAO,KAAK,kBAAkB;AAGpE,QAAI,eAAe,UAAU;AAC5B,WAAM,WAAW,OAAO,SAAS,IAAI;MACpC,aAAa,OAAO;MACpB,KAAK,OAAO,OAAO;MACnB,YAAY,OAAO,cAAc;MACjC,SAAS,OAAO;MAChB,CAAC;AACF,qBAAgB,IAAI,OAAO,IAAI,SAAS,GAAG;AAC3C,YAAO,QAAQ;AACf;;AAID,oBAAgB,IAAI,OAAO,IAAI,SAAS,GAAG;AAC3C,WAAO,QAAQ;AACf;;GAGD,MAAM,UAAU,MAAM,WAAW,OAAO;IACvC,MAAM,OAAO;IACb,aAAa,OAAO;IACpB,KAAK,OAAO,OAAO;IACnB,YAAY,OAAO,cAAc;IACjC,SAAS,OAAO;IAChB,CAAC;AACF,mBAAgB,IAAI,OAAO,IAAI,QAAQ,GAAG;AAC1C,UAAO,QAAQ;;;AAKjB,KAAI,kBAAkB,KAAK,SAAS;EACnC,MAAM,cAAc,IAAI,kBAAkB,GAAG;EAC7C,MAAM,aAAa,IAAI,iBAAiB,GAAG;AAG3C,OAAK,MAAM,CAAC,gBAAgB,YAAY,OAAO,QAAQ,KAAK,QAAQ,CACnE,MAAK,MAAM,SAAS,SAAS;GAE5B,MAAM,WAAW,MAAM,YAAY,WAAW,gBAAgB,MAAM,MAAM,MAAM,OAAO;AAEvF,OAAI,UAAU;AACb,QAAI,eAAe,QAClB,OAAM,IAAI,MACT,sBAAsB,MAAM,KAAK,QAAQ,eAAe,kBACxD;AAGF,QAAI,eAAe,UAAU;KAE5B,MAAM,eAAe,MAAM,kBAC1B,MAAM,MACN,WACA,cACA,OACA;KAED,MAAM,SAAS,MAAM,UAAU;AAC/B,WAAM,YAAY,OAAO,gBAAgB,SAAS,IAAI;MACrD;MACA,MAAM;MACN,CAAC;AAEF,eAAU,IAAI,MAAM,IAAI,SAAS,GAAG;AACpC,YAAO,QAAQ;AAGf,WAAM,oBACL,YACA,gBACA,SAAS,IACT,OACA,iBACA,KACA;AACD,WAAM,uBAAuB,IAAI,gBAAgB,SAAS,IAAI,OAAO,KAAK;AAC1E;;AAID,WAAO,QAAQ;AACf,cAAU,IAAI,MAAM,IAAI,SAAS,GAAG;AACpC;;GAID,MAAM,eAAe,MAAM,kBAAkB,MAAM,MAAM,WAAW,cAAc,OAAO;GAGzF,IAAI;AACJ,OAAI,MAAM,eAAe;IACxB,MAAM,WAAW,UAAU,IAAI,MAAM,cAAc;AACnD,QAAI,CAAC,SACJ,SAAQ,KACP,WAAW,eAAe,mBAAmB,MAAM,cAAc,sEACjE;QAED,iBAAgB;;GAKlB,MAAM,SAAS,MAAM,UAAU;GAC/B,MAAM,UAAU,MAAM,YAAY,OAAO;IACxC,MAAM;IACN,MAAM,MAAM;IACZ;IACA,MAAM;IACN,QAAQ,MAAM;IACd;IAEA,aAAa,WAAW,+BAAc,IAAI,MAAM,EAAC,aAAa,GAAG;IACjE,CAAC;AAEF,aAAU,IAAI,MAAM,IAAI,QAAQ,GAAG;AACnC,UAAO,QAAQ;AAEf,SAAM,oBAAoB,YAAY,gBAAgB,QAAQ,IAAI,OAAO,gBAAgB;AACzF,SAAM,uBAAuB,IAAI,gBAAgB,QAAQ,IAAI,OAAO,MAAM;;;AAM7E,KAAI,KAAK,MACR,MAAK,MAAM,QAAQ,KAAK,OAA
O;EAE9B,MAAM,eAAe,MAAM,GACzB,WAAW,gBAAgB,CAC3B,WAAW,CACX,MAAM,QAAQ,KAAK,KAAK,KAAK,CAC7B,kBAAkB;EAEpB,IAAI;AAEJ,MAAI,cAAc;AACjB,YAAS,aAAa;AAEtB,SAAM,GAAG,WAAW,qBAAqB,CAAC,MAAM,WAAW,KAAK,OAAO,CAAC,SAAS;SAC3E;AAEN,YAAS,MAAM;AACf,SAAM,GACJ,WAAW,gBAAgB,CAC3B,OAAO;IACP,IAAI;IACJ,MAAM,KAAK;IACX,OAAO,KAAK;IACZ,6BAAY,IAAI,MAAM,EAAC,aAAa;IACpC,6BAAY,IAAI,MAAM,EAAC,aAAa;IACpC,CAAC,CACD,SAAS;AACX,UAAO,MAAM;;EAId,MAAM,YAAY,MAAM,eACvB,IACA,QACA,KAAK,OACL,MACA,GACA,UACA;AACD,SAAO,MAAM,SAAS;;AAKxB,KAAI,KAAK,WAAW;EACnB,MAAM,eAAe,IAAI,mBAAmB,GAAG;AAE/C,OAAK,MAAM,YAAY,KAAK,WAAW;GACtC,MAAM,WAAW,MAAM,aAAa,aAAa,SAAS,OAAO;AACjE,OAAI,UAAU;AACb,QAAI,eAAe,QAClB,OAAM,IAAI,MAAM,uBAAuB,SAAS,OAAO,kBAAkB;AAG1E,QAAI,eAAe,UAAU;AAC5B,WAAM,aAAa,OAAO,SAAS,IAAI;MACtC,aAAa,SAAS;MACtB,MAAM,SAAS;MACf,SAAS,SAAS;MAClB,WAAW,SAAS;MACpB,CAAC;AACF,YAAO,UAAU;AACjB;;AAID,WAAO,UAAU;AACjB;;AAGD,SAAM,aAAa,OAAO;IACzB,QAAQ,SAAS;IACjB,aAAa,SAAS;IACtB,MAAM,SAAS;IACf,SAAS,SAAS;IAClB,WAAW,SAAS;IACpB,CAAC;AACF,UAAO,UAAU;;;AAKnB,KAAI,KAAK,YACR,MAAK,MAAM,QAAQ,KAAK,aAAa;EAEpC,MAAM,eAAe,MAAM,GACzB,WAAW,uBAAuB,CAClC,WAAW,CACX,MAAM,QAAQ,KAAK,KAAK,KAAK,CAC7B,kBAAkB;EAEpB,IAAI;AAEJ,MAAI,cAAc;AACjB,YAAS,aAAa;AAEtB,SAAM,GAAG,WAAW,kBAAkB,CAAC,MAAM,WAAW,KAAK,OAAO,CAAC,SAAS;SACxE;AAEN,YAAS,MAAM;AACf,SAAM,GACJ,WAAW,uBAAuB,CAClC,OAAO;IACP,IAAI;IACJ,MAAM,KAAK;IACX,OAAO,KAAK;IACZ,aAAa,KAAK,eAAe;IACjC,CAAC,CACD,SAAS;AACX,UAAO,YAAY;;AAIpB,OAAK,IAAI,IAAI,GAAG,IAAI,KAAK,QAAQ,QAAQ,KAAK;GAC7C,MAAM,SAAS,KAAK,QAAQ;AAC5B,SAAM,YAAY,IAAI,QAAQ,QAAQ,EAAE;AACxC,UAAO,YAAY;;;AAMtB,KAAI,KAAK,SACR,MAAK,MAAM,WAAW,KAAK,UAAU;EAEpC,MAAM,WAAW,MAAM,GACrB,WAAW,mBAAmB,CAC9B,OAAO,KAAK,CACZ,MAAM,QAAQ,KAAK,QAAQ,KAAK,CAChC,kBAAkB;AAEpB,MAAI,UAAU;AACb,OAAI,eAAe,QAClB,OAAM,IAAI,MAAM,sBAAsB,QAAQ,KAAK,kBAAkB;AAGtE,OAAI,eAAe,UAAU;AAC5B,UAAM,GACJ,YAAY,mBAAmB,CAC/B,IAAI;KACJ,OAAO,QAAQ;KACf,aAAa,QAAQ,eAAe;KACpC,UAAU,QAAQ,WAAW,KAAK,UAAU,QAAQ,SAAS,GAAG;KAChE,SAAS,KAAK,UAAU,QAAQ,QAAQ;KACxC,QAAQ,QAAQ,UAAU;KAC1B,6BAAY,IAAI,MAAM,EAAC,aAAa;KACpC,CAAC,CACD,MAAM,MAAM,KAAK,SAAS,GAAG,CAC
7B,SAAS;AACX,WAAO,SAAS;AAChB;;AAID,UAAO,SAAS;AAChB;;EAGD,MAAM,KAAK,MAAM;EACjB,MAAM,uBAAM,IAAI,MAAM,EAAC,aAAa;AAEpC,QAAM,GACJ,WAAW,mBAAmB,CAC9B,OAAO;GACP;GACA,MAAM,QAAQ;GACd,OAAO,QAAQ;GACf,aAAa,QAAQ,eAAe;GACpC,UAAU,QAAQ,WAAW,KAAK,UAAU,QAAQ,SAAS,GAAG;GAChE,SAAS,KAAK,UAAU,QAAQ,QAAQ;GACxC,kBAAkB;GAClB,QAAQ,QAAQ,UAAU;GAC1B,UAAU,QAAQ,WAAW,UAAU,QAAQ,OAAO;GACtD,YAAY;GACZ,YAAY;GACZ,CAAC,CACD,SAAS;AAEX,SAAO,SAAS;;AAKlB,KAAI,KAAK,aAAa;EACrB,MAAM,aAAa,IAAI,WAAW,GAAG;AAErC,OAAK,MAAM,cAAc,KAAK,YAC7B,KAAI,WAAW,UAAU,SAAS,SAAS,EAG1C;QADyB,MAAM,WAAW,oBAAoB,WAAW,KAAK,EACzD,SAAS,EAC7B,KAAI;AACH,UAAM,WAAW,aAAa,WAAW,KAAK;YACtC,KAAK;AAEb,YAAQ,KAAK,+BAA+B,WAAW,KAAK,IAAI,IAAI;;;;AAOzE,QAAO;;;;;AAMR,eAAe,uBACd,UACA,cACA,OACA,QACA,aAA0C,QAC1B;CAEhB,MAAM,2BAAW,IAAI,KAAqB;CAG1C,IAAI,YAAY,CAAC,GAAG,MAAM;CAC1B,IAAI,YAAY;AAEhB,QAAO,UAAU,SAAS,KAAK,YAAY,GAAG;EAC7C,MAAM,oBAA8B,EAAE;AAEtC,OAAK,MAAM,QAAQ,UAElB,KAAI,CAAC,KAAK,UAAU,SAAS,IAAI,KAAK,OAAO,EAAE;GAC9C,MAAM,WAAW,KAAK,SAAS,SAAS,IAAI,KAAK,OAAO,GAAG;GAE3D,MAAM,WAAW,MAAM,SAAS,WAAW,cAAc,KAAK,KAAK;AACnE,OAAI,UAAU;AACb,QAAI,eAAe,QAClB,OAAM,IAAI,MACT,4BAA4B,KAAK,KAAK,QAAQ,aAAa,kBAC3D;AAEF,QAAI,eAAe,UAAU;AAC5B,WAAM,SAAS,OAAO,SAAS,IAAI;MAClC,OAAO,KAAK;MACZ;MACA,MAAM,KAAK,cAAc,EAAE,aAAa,KAAK,aAAa,GAAG,EAAE;MAC/D,CAAC;AACF,YAAO,WAAW;;AAEnB,aAAS,IAAI,KAAK,MAAM,SAAS,GAAG;UAC9B;IACN,MAAM,UAAU,MAAM,SAAS,OAAO;KACrC,MAAM;KACN,MAAM,KAAK;KACX,OAAO,KAAK;KACZ;KACA,MAAM,KAAK,cAAc,EAAE,aAAa,KAAK,aAAa,GAAG;KAC7D,CAAC;AACF,aAAS,IAAI,KAAK,MAAM,QAAQ,GAAG;AACnC,WAAO,WAAW;;AAGnB,qBAAkB,KAAK,KAAK,KAAK;;AAKnC,cAAY,UAAU,QAAQ,MAAM,CAAC,kBAAkB,SAAS,EAAE,KAAK,CAAC;AACxE;;AAGD,KAAI,UAAU,SAAS,EACtB,SAAQ,KAAK,qBAAqB,UAAU,OAAO,+BAA+B;;;;;;AAQpF,eAAe,oBACd,YACA,gBACA,WACA,OACA,iBACA,WAAW,OACK;AAChB,KAAI,CAAC,MAAM,WAAW,MAAM,QAAQ,WAAW,GAAG;AAEjD,MAAI,SACH,OAAM,WAAW,kBAAkB,gBAAgB,WAAW,EAAE,CAAC;AAElE;;CAGD,MAAM,UAAU,MAAM,QACpB,KAAK,WAAW;EAChB,MAAM,WAAW,gBAAgB,IAAI,OAAO,OAAO;AACnD,MAAI,CAAC,SAAU,QAAO;AACtB,SAAO;GACN;GACA,WAAW,OAAO,aAAa;GAC/B;GACA,CACD,QAAQ,WAAqE,QAAQ,OAAO,CAAC;AAE/F,KAAI,
QAAQ,WAAW,MAAM,QAAQ,OACpC,SAAQ,KACP,WAAW,eAAe,GAAG,MAAM,KAAK,iDACxC;AAMF,KAAI,QAAQ,SAAS,KAAK,SACzB,OAAM,WAAW,kBAAkB,gBAAgB,WAAW,QAAQ;;;;;;AAQxE,eAAe,uBACd,IACA,gBACA,WACA,OACA,UACgB;AAEhB,KAAI,SACH,OAAM,GACJ,WAAW,qBAAqB,CAChC,MAAM,cAAc,KAAK,eAAe,CACxC,MAAM,YAAY,KAAK,UAAU,CACjC,SAAS;AAGZ,KAAI,CAAC,MAAM,WAAY;AAEvB,MAAK,MAAM,CAAC,cAAc,cAAc,OAAO,QAAQ,MAAM,WAAW,EAAE;EACzE,MAAM,WAAW,IAAI,mBAAmB,GAAG;AAE3C,OAAK,MAAM,YAAY,WAAW;GACjC,MAAM,OAAO,MAAM,SAAS,WAAW,cAAc,SAAS;AAC9D,OAAI,KACH,OAAM,SAAS,cAAc,gBAAgB,WAAW,KAAK,GAAG;;;;;;;AASpE,eAAe,eACd,IACA,QACA,OACA,UACA,YACA,WACkB;CAClB,IAAI,QAAQ;CACZ,IAAI,QAAQ;AAEZ,MAAK,MAAM,QAAQ,OAAO;EACzB,MAAM,SAAS,MAAM;EAGrB,IAAI,cAA6B;EACjC,IAAI,sBAAqC;AAEzC,MAAI,KAAK,SAAS,UAAU,KAAK,SAAS,QAEzC;OAAI,KAAK,OAAO,UAAU,IAAI,KAAK,IAAI,EAAE;AACxC,kBAAc,UAAU,IAAI,KAAK,IAAI;AAErC,0BAAsB,KAAK,cAAc,GAAG,KAAK,KAAK;;;AAMxD,QAAM,GACJ,WAAW,qBAAqB,CAChC,OAAO;GACP,IAAI;GACJ,SAAS;GACT,WAAW;GACX,YAAY;GACZ,MAAM,KAAK;GACX,sBAAsB;GACtB,cAAc;GACd,YAAY,KAAK,OAAO;GACxB,OAAO,KAAK,SAAS;GACrB,YAAY,KAAK,aAAa;GAC9B,QAAQ,KAAK,UAAU;GACvB,aAAa,KAAK,cAAc;GAChC,6BAAY,IAAI,MAAM,EAAC,aAAa;GACpC,CAAC,CACD,SAAS;AAEX;AACA;AAGA,MAAI,KAAK,YAAY,KAAK,SAAS,SAAS,GAAG;GAC9C,MAAM,aAAa,MAAM,eAAe,IAAI,QAAQ,KAAK,UAAU,QAAQ,GAAG,UAAU;AACxF,YAAS;;;AAIX,QAAO;;;;;AAMR,eAAe,YACd,IACA,QACA,QACA,WACgB;AAChB,OAAM,GACJ,WAAW,kBAAkB,CAC7B,OAAO;EACP,IAAI,MAAM;EACV,SAAS;EACT,YAAY;EACZ,MAAM,OAAO;EACb,OAAO,OAAO,SAAS;EACvB,SAAS,OAAO,UAAU,KAAK,UAAU,OAAO,QAAQ,GAAG;EAC3D,WAAW,OAAO,YAAY;EAC9B,cAAc,OAAO,eAAe;EACpC,iBAAiB,OAAO,QAAQ,KAAK,UAAU,OAAO,MAAM,GAAG;EAC/D,CAAC,CACD,SAAS;;;;;AAgBZ,SAAS,qBAAqB,OAA6C;AAC1E,KAAI,OAAO,UAAU,YAAY,UAAU,QAAQ,EAAE,YAAY,OAChE,QAAO;CAER,MAAM,QAAS,MAAkC;AACjD,QACC,OAAO,UAAU,YACjB,UAAU,QACV,SAAS,SACT,OAAQ,MAAkC,QAAQ;;;;;AAOpD,eAAe,kBACd,MACA,WACA,cACA,QACmC;CACnC,MAAM,WAAoC,EAAE;AAE5C,MAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,KAAK,CAC9C,UAAS,OAAO,MAAM,aAAa,OAAO,WAAW,cAAc,OAAO;AAG3E,QAAO;;;;;AAMR,eAAe,aACd,OACA,WACA,cACA,QACmB;AAEnB,KAAI,OAAO,UAAU,YAAY,MAAM,WAAW,QAAQ,EAAE;EAC3D,MAAM,SAAS,MAAM,MAAM,EA
AE;AAC7B,SAAO,UAAU,IAAI,OAAO,IAAI;;AAIjC,KAAI,qBAAqB,MAAM,CAC9B,QAAO,aAAa,OAAO,cAAc,OAAO;AAIjD,KAAI,MAAM,QAAQ,MAAM,CACvB,QAAO,QAAQ,IAAI,MAAM,KAAK,SAAS,aAAa,MAAM,WAAW,cAAc,OAAO,CAAC,CAAC;AAI7F,KAAI,OAAO,UAAU,YAAY,UAAU,MAAM;EAChD,MAAM,WAAoC,EAAE;AAC5C,OAAK,MAAM,CAAC,GAAG,MAAM,OAAO,QAAQ,MAAM,CACzC,UAAS,KAAK,MAAM,aAAa,GAAG,WAAW,cAAc,OAAO;AAErE,SAAO;;AAGR,QAAO;;;;;AAMR,eAAe,aACd,KACA,KACA,QAC6B;CAC7B,MAAM,EAAE,KAAK,KAAK,UAAU,YAAY,IAAI;CAG5C,MAAM,SAAS,IAAI,WAAW,IAAI,IAAI;AACtC,KAAI,QAAQ;AACX,SAAO,MAAM;AACb,SAAO;GAAE,GAAG;GAAQ,KAAK,OAAO,OAAO;GAAK;;AAK7C,KAAI,IAAI,mBAAmB;EAC1B,MAAM,aAAyB;GAC9B,UAAU;GACV,IAAI,MAAM;GACV,KAAK;GACL,KAAK,OAAO;GACZ,UAAU,YAAY;GACtB;AACD,MAAI,WAAW,IAAI,KAAK,WAAW;AACnC,SAAO,MAAM;AACb,SAAO;;AAIR,KAAI,CAAC,IAAI,SAAS;AACjB,UAAQ,KAAK,sDAAsD,MAAM;AACzE,SAAO,MAAM;AACb,SAAO;;AAGR,KAAI;AAEH,sBAAoB,IAAI;AAGxB,UAAQ,IAAI,qBAAqB,MAAM;EACvC,MAAM,WAAW,MAAM,cAAc,KAAK,EACzC,SAAS,EAER,cAAc,kBACd,EACD,CAAC;AAEF,MAAI,CAAC,SAAS,IAAI;AACjB,WAAQ,KAAK,2BAA2B,IAAI,IAAI,SAAS,SAAS;AAClE,UAAO,MAAM;AACb,UAAO;;EAIR,MAAM,cAAc,SAAS,QAAQ,IAAI,eAAe,IAAI;EAC5D,MAAM,MAAM,4BAA4B,YAAY,IAAI,oBAAoB,IAAI,IAAI;EAGpF,MAAM,KAAK,MAAM;EACjB,MAAM,gBAAgB,YAAY,iBAAiB,KAAK,IAAI;EAC5D,MAAM,aAAa,GAAG,KAAK;EAG3B,MAAM,cAAc,MAAM,SAAS,aAAa;EAChD,MAAM,OAAO,IAAI,WAAW,YAAY;EAGxC,IAAI;EACJ,IAAI;AACJ,MAAI,YAAY,WAAW,SAAS,EAAE;GACrC,MAAM,aAAa,mBAAmB,KAAK;AAC3C,WAAQ,YAAY;AACpB,YAAS,YAAY;;AAItB,QAAM,IAAI,QAAQ,OAAO;GACxB,KAAK;GACL;GACA;GACA,CAAC;AAIF,QADkB,IAAI,gBAAgB,IAAI,GAAG,CAC7B,OAAO;GACtB,UAAU;GACV,UAAU;GACV,MAAM,KAAK;GACX;GACA;GACA;GACA;GACA;GACA,QAAQ;GACR,CAAC;EAGF,MAAM,aAAyB;GAC9B,UAAU;GACV;GACA,KAAK,OAAO;GACZ;GACA;GACA,UAAU;GACV,UAAU;GACV,MAAM,EAAE,YAAY;GACpB;AAGD,MAAI,WAAW,IAAI,KAAK,WAAW;AACnC,SAAO,MAAM;AAEb,UAAQ,IAAI,iBAAiB,gBAAgB;AAC7C,SAAO;UACC,OAAO;AACf,UAAQ,KACP,gCAAgC,IAAI,IACpC,iBAAiB,QAAQ,MAAM,UAAU,MACzC;AACD,SAAO,MAAM;AACb,SAAO;;;;;;AAOT,SAAS,4BAA4B,aAAoC;CAExE,MAAM,WAAW,YAAY,MAAM,IAAI,CAAC,GAAG,MAAM;CACjD,MAAM,MAAM,KAAK,aAAa,SAAS;AACvC,QAAO,MAAM,IAAI,QAAQ;;;;;AAM1B,SAAS,oBAAoB,KAA4B;AACxD,KAA
I;EAEH,MAAM,QADW,IAAI,IAAI,IAAI,CAAC,SACP,MAAM,uBAAuB;AACpD,SAAO,QAAQ,IAAI,MAAM,OAAO;SACzB;AACP,SAAO;;;;;;AAOT,SAAS,iBAAiB,KAAa,KAAqB;AAC3D,KAAI;AAOH,SAAO,IANU,IAAI,IAAI,IAAI,CAAC,SACJ,MAAM,IAAI,CAAC,KAAK,IAAI,SAExB,QAAQ,mBAAmB,GAAG,CAAC,QAAQ,qBAAqB,GAAG,CAE9D,QAAQ,kBAAkB,IAAI,CAAC,QAAQ,0BAA0B,IAAI,IACrE,UAAU;SAC1B;AACP,SAAO,QAAQ;;;;;;;AAQjB,SAAS,mBAAmB,QAA8D;AACzF,KAAI;EACH,MAAM,SAAS,UAAU,OAAO;AAChC,MAAI,OAAO,SAAS,QAAQ,OAAO,UAAU,KAC5C,QAAO;GAAE,OAAO,OAAO;GAAO,QAAQ,OAAO;GAAQ;AAEtD,SAAO;SACA;AACP,SAAO"}
@@ -1 +0,0 @@
1
- {"version":3,"file":"byline-1WQPlISL.mjs","names":[],"sources":["../src/utils/chunks.ts","../src/database/repositories/byline.ts"],"sourcesContent":["/**\n * Split an array into chunks of at most `size` elements.\n *\n * Used to keep SQL `IN (?, ?, …)` clauses within Cloudflare D1's\n * bound-parameter limit (~100 per statement).\n */\nexport function chunks<T>(arr: T[], size: number): T[][] {\n\tif (arr.length === 0) return [];\n\tconst result: T[][] = [];\n\tfor (let i = 0; i < arr.length; i += size) {\n\t\tresult.push(arr.slice(i, i + size));\n\t}\n\treturn result;\n}\n\n/** Conservative default chunk size for SQL IN clauses (well within D1's limit). */\nexport const SQL_BATCH_SIZE = 50;\n","import { sql, type Kysely, type Selectable } from \"kysely\";\nimport { ulid } from \"ulidx\";\n\nimport { chunks, SQL_BATCH_SIZE } from \"../../utils/chunks.js\";\nimport { listTablesLike } from \"../dialect-helpers.js\";\nimport type { BylineTable, Database } from \"../types.js\";\nimport { validateIdentifier } from \"../validate.js\";\nimport {\n\tdecodeCursor,\n\tencodeCursor,\n\ttype BylineSummary,\n\ttype ContentBylineCredit,\n\ttype FindManyResult,\n} from \"./types.js\";\n\ntype BylineRow = Selectable<BylineTable>;\n\nexport interface CreateBylineInput {\n\tslug: string;\n\tdisplayName: string;\n\tbio?: string | null;\n\tavatarMediaId?: string | null;\n\twebsiteUrl?: string | null;\n\tuserId?: string | null;\n\tisGuest?: boolean;\n}\n\nexport interface UpdateBylineInput {\n\tslug?: string;\n\tdisplayName?: string;\n\tbio?: string | null;\n\tavatarMediaId?: string | null;\n\twebsiteUrl?: string | null;\n\tuserId?: string | null;\n\tisGuest?: boolean;\n}\n\nexport interface ContentBylineInput {\n\tbylineId: string;\n\troleLabel?: string | null;\n}\n\nfunction rowToByline(row: BylineRow): BylineSummary {\n\treturn {\n\t\tid: row.id,\n\t\tslug: row.slug,\n\t\tdisplayName: row.display_name,\n\t\tbio: row.bio,\n\t\tavatarMediaId: row.avatar_media_id,\n\t\twebsiteUrl: 
row.website_url,\n\t\tuserId: row.user_id,\n\t\tisGuest: row.is_guest === 1,\n\t\tcreatedAt: row.created_at,\n\t\tupdatedAt: row.updated_at,\n\t};\n}\n\nexport class BylineRepository {\n\tconstructor(private db: Kysely<Database>) {}\n\n\tasync findById(id: string): Promise<BylineSummary | null> {\n\t\tconst row = await this.db\n\t\t\t.selectFrom(\"_emdash_bylines\")\n\t\t\t.selectAll()\n\t\t\t.where(\"id\", \"=\", id)\n\t\t\t.executeTakeFirst();\n\t\treturn row ? rowToByline(row) : null;\n\t}\n\n\tasync findBySlug(slug: string): Promise<BylineSummary | null> {\n\t\tconst row = await this.db\n\t\t\t.selectFrom(\"_emdash_bylines\")\n\t\t\t.selectAll()\n\t\t\t.where(\"slug\", \"=\", slug)\n\t\t\t.executeTakeFirst();\n\t\treturn row ? rowToByline(row) : null;\n\t}\n\n\tasync findByUserId(userId: string): Promise<BylineSummary | null> {\n\t\tconst row = await this.db\n\t\t\t.selectFrom(\"_emdash_bylines\")\n\t\t\t.selectAll()\n\t\t\t.where(\"user_id\", \"=\", userId)\n\t\t\t.executeTakeFirst();\n\t\treturn row ? rowToByline(row) : null;\n\t}\n\n\tasync findMany(options?: {\n\t\tsearch?: string;\n\t\tisGuest?: boolean;\n\t\tuserId?: string;\n\t\tcursor?: string;\n\t\tlimit?: number;\n\t}): Promise<FindManyResult<BylineSummary>> {\n\t\tconst limit = Math.min(Math.max(options?.limit ?? 50, 1), 100);\n\n\t\tlet query = this.db\n\t\t\t.selectFrom(\"_emdash_bylines\")\n\t\t\t.selectAll()\n\t\t\t.orderBy(\"created_at\", \"desc\")\n\t\t\t.orderBy(\"id\", \"desc\")\n\t\t\t.limit(limit + 1);\n\n\t\tif (options?.search) {\n\t\t\tconst escaped = options.search\n\t\t\t\t.replaceAll(\"\\\\\", \"\\\\\\\\\")\n\t\t\t\t.replaceAll(\"%\", \"\\\\%\")\n\t\t\t\t.replaceAll(\"_\", \"\\\\_\");\n\t\t\tconst term = `%${escaped}%`;\n\t\t\tquery = query.where((eb) =>\n\t\t\t\teb.or([eb(\"display_name\", \"like\", term), eb(\"slug\", \"like\", term)]),\n\t\t\t);\n\t\t}\n\n\t\tif (options?.isGuest !== undefined) {\n\t\t\tquery = query.where(\"is_guest\", \"=\", options.isGuest ? 
1 : 0);\n\t\t}\n\n\t\tif (options?.userId !== undefined) {\n\t\t\tquery = query.where(\"user_id\", \"=\", options.userId);\n\t\t}\n\n\t\tif (options?.cursor) {\n\t\t\tconst decoded = decodeCursor(options.cursor);\n\t\t\tif (decoded) {\n\t\t\t\tquery = query.where((eb) =>\n\t\t\t\t\teb.or([\n\t\t\t\t\t\teb(\"created_at\", \"<\", decoded.orderValue),\n\t\t\t\t\t\teb.and([eb(\"created_at\", \"=\", decoded.orderValue), eb(\"id\", \"<\", decoded.id)]),\n\t\t\t\t\t]),\n\t\t\t\t);\n\t\t\t}\n\t\t}\n\n\t\tconst rows = await query.execute();\n\t\tconst items = rows.slice(0, limit).map(rowToByline);\n\t\tconst result: FindManyResult<BylineSummary> = { items };\n\n\t\tif (rows.length > limit) {\n\t\t\tconst last = items.at(-1);\n\t\t\tif (last) {\n\t\t\t\tresult.nextCursor = encodeCursor(last.createdAt, last.id);\n\t\t\t}\n\t\t}\n\n\t\treturn result;\n\t}\n\n\tasync create(input: CreateBylineInput): Promise<BylineSummary> {\n\t\tconst id = ulid();\n\t\tconst now = new Date().toISOString();\n\n\t\tawait this.db\n\t\t\t.insertInto(\"_emdash_bylines\")\n\t\t\t.values({\n\t\t\t\tid,\n\t\t\t\tslug: input.slug,\n\t\t\t\tdisplay_name: input.displayName,\n\t\t\t\tbio: input.bio ?? null,\n\t\t\t\tavatar_media_id: input.avatarMediaId ?? null,\n\t\t\t\twebsite_url: input.websiteUrl ?? null,\n\t\t\t\tuser_id: input.userId ?? null,\n\t\t\t\tis_guest: input.isGuest ? 
1 : 0,\n\t\t\t\tcreated_at: now,\n\t\t\t\tupdated_at: now,\n\t\t\t})\n\t\t\t.execute();\n\n\t\tconst byline = await this.findById(id);\n\t\tif (!byline) {\n\t\t\tthrow new Error(\"Failed to create byline\");\n\t\t}\n\t\treturn byline;\n\t}\n\n\tasync update(id: string, input: UpdateBylineInput): Promise<BylineSummary | null> {\n\t\tconst existing = await this.findById(id);\n\t\tif (!existing) return null;\n\n\t\tconst updates: Record<string, unknown> = {\n\t\t\tupdated_at: new Date().toISOString(),\n\t\t};\n\n\t\tif (input.slug !== undefined) updates.slug = input.slug;\n\t\tif (input.displayName !== undefined) updates.display_name = input.displayName;\n\t\tif (input.bio !== undefined) updates.bio = input.bio;\n\t\tif (input.avatarMediaId !== undefined) updates.avatar_media_id = input.avatarMediaId;\n\t\tif (input.websiteUrl !== undefined) updates.website_url = input.websiteUrl;\n\t\tif (input.userId !== undefined) updates.user_id = input.userId;\n\t\tif (input.isGuest !== undefined) updates.is_guest = input.isGuest ? 
1 : 0;\n\n\t\tawait this.db.updateTable(\"_emdash_bylines\").set(updates).where(\"id\", \"=\", id).execute();\n\t\treturn await this.findById(id);\n\t}\n\n\tasync delete(id: string): Promise<boolean> {\n\t\tconst existing = await this.findById(id);\n\t\tif (!existing) return false;\n\n\t\tawait this.db.transaction().execute(async (trx) => {\n\t\t\tawait trx.deleteFrom(\"_emdash_content_bylines\").where(\"byline_id\", \"=\", id).execute();\n\n\t\t\tawait trx.deleteFrom(\"_emdash_bylines\").where(\"id\", \"=\", id).execute();\n\n\t\t\tconst tableNames = await listTablesLike(trx, \"ec_%\");\n\t\t\tfor (const tableName of tableNames) {\n\t\t\t\tvalidateIdentifier(tableName, \"content table\");\n\t\t\t\tawait sql`\n\t\t\t\t\tUPDATE ${sql.ref(tableName)}\n\t\t\t\t\tSET primary_byline_id = NULL\n\t\t\t\t\tWHERE primary_byline_id = ${id}\n\t\t\t\t`.execute(trx);\n\t\t\t}\n\t\t});\n\n\t\treturn true;\n\t}\n\n\tasync getContentBylines(\n\t\tcollectionSlug: string,\n\t\tcontentId: string,\n\t): Promise<ContentBylineCredit[]> {\n\t\tconst rows = await this.db\n\t\t\t.selectFrom(\"_emdash_content_bylines as cb\")\n\t\t\t.innerJoin(\"_emdash_bylines as b\", \"b.id\", \"cb.byline_id\")\n\t\t\t.select([\n\t\t\t\t\"cb.sort_order as sort_order\",\n\t\t\t\t\"cb.role_label as role_label\",\n\t\t\t\t\"b.id as id\",\n\t\t\t\t\"b.slug as slug\",\n\t\t\t\t\"b.display_name as display_name\",\n\t\t\t\t\"b.bio as bio\",\n\t\t\t\t\"b.avatar_media_id as avatar_media_id\",\n\t\t\t\t\"b.website_url as website_url\",\n\t\t\t\t\"b.user_id as user_id\",\n\t\t\t\t\"b.is_guest as is_guest\",\n\t\t\t\t\"b.created_at as created_at\",\n\t\t\t\t\"b.updated_at as updated_at\",\n\t\t\t])\n\t\t\t.where(\"cb.collection_slug\", \"=\", collectionSlug)\n\t\t\t.where(\"cb.content_id\", \"=\", contentId)\n\t\t\t.orderBy(\"cb.sort_order\", \"asc\")\n\t\t\t.execute();\n\n\t\treturn rows.map((row) => ({\n\t\t\tbyline: rowToByline(row),\n\t\t\tsortOrder: row.sort_order,\n\t\t\troleLabel: 
row.role_label,\n\t\t}));\n\t}\n\n\t/**\n\t * Batch-fetch byline credits for multiple content items in a single query.\n\t * Returns a Map keyed by contentId.\n\t */\n\tasync getContentBylinesMany(\n\t\tcollectionSlug: string,\n\t\tcontentIds: string[],\n\t): Promise<Map<string, ContentBylineCredit[]>> {\n\t\tconst result = new Map<string, ContentBylineCredit[]>();\n\t\tif (contentIds.length === 0) return result;\n\n\t\tconst uniqueContentIds = [...new Set(contentIds)];\n\t\tfor (const chunk of chunks(uniqueContentIds, SQL_BATCH_SIZE)) {\n\t\t\tconst rows = await this.db\n\t\t\t\t.selectFrom(\"_emdash_content_bylines as cb\")\n\t\t\t\t.innerJoin(\"_emdash_bylines as b\", \"b.id\", \"cb.byline_id\")\n\t\t\t\t.select([\n\t\t\t\t\t\"cb.content_id as content_id\",\n\t\t\t\t\t\"cb.sort_order as sort_order\",\n\t\t\t\t\t\"cb.role_label as role_label\",\n\t\t\t\t\t\"b.id as id\",\n\t\t\t\t\t\"b.slug as slug\",\n\t\t\t\t\t\"b.display_name as display_name\",\n\t\t\t\t\t\"b.bio as bio\",\n\t\t\t\t\t\"b.avatar_media_id as avatar_media_id\",\n\t\t\t\t\t\"b.website_url as website_url\",\n\t\t\t\t\t\"b.user_id as user_id\",\n\t\t\t\t\t\"b.is_guest as is_guest\",\n\t\t\t\t\t\"b.created_at as created_at\",\n\t\t\t\t\t\"b.updated_at as updated_at\",\n\t\t\t\t])\n\t\t\t\t.where(\"cb.collection_slug\", \"=\", collectionSlug)\n\t\t\t\t.where(\"cb.content_id\", \"in\", chunk)\n\t\t\t\t.orderBy(\"cb.sort_order\", \"asc\")\n\t\t\t\t.execute();\n\n\t\t\tfor (const row of rows) {\n\t\t\t\tconst contentId = row.content_id;\n\t\t\t\tconst credit: ContentBylineCredit = {\n\t\t\t\t\tbyline: rowToByline(row),\n\t\t\t\t\tsortOrder: row.sort_order,\n\t\t\t\t\troleLabel: row.role_label,\n\t\t\t\t};\n\t\t\t\tconst existing = result.get(contentId);\n\t\t\t\tif (existing) {\n\t\t\t\t\texisting.push(credit);\n\t\t\t\t} else {\n\t\t\t\t\tresult.set(contentId, [credit]);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\treturn result;\n\t}\n\n\t/**\n\t * Batch-fetch byline profiles linked to user IDs in a single 
query.\n\t * Returns a Map keyed by userId.\n\t */\n\tasync findByUserIds(userIds: string[]): Promise<Map<string, BylineSummary>> {\n\t\tconst result = new Map<string, BylineSummary>();\n\t\tif (userIds.length === 0) return result;\n\n\t\tfor (const chunk of chunks(userIds, SQL_BATCH_SIZE)) {\n\t\t\tconst rows = await this.db\n\t\t\t\t.selectFrom(\"_emdash_bylines\")\n\t\t\t\t.selectAll()\n\t\t\t\t.where(\"user_id\", \"in\", chunk)\n\t\t\t\t.execute();\n\n\t\t\tfor (const row of rows) {\n\t\t\t\tif (row.user_id) {\n\t\t\t\t\tresult.set(row.user_id, rowToByline(row));\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\treturn result;\n\t}\n\n\tasync setContentBylines(\n\t\tcollectionSlug: string,\n\t\tcontentId: string,\n\t\tinputBylines: ContentBylineInput[],\n\t): Promise<ContentBylineCredit[]> {\n\t\tvalidateIdentifier(collectionSlug, \"collection slug\");\n\t\tconst tableName = `ec_${collectionSlug}`;\n\t\tvalidateIdentifier(tableName, \"content table\");\n\n\t\tconst seen = new Set<string>();\n\t\tconst bylines = inputBylines.filter((item) => {\n\t\t\tif (seen.has(item.bylineId)) return false;\n\t\t\tseen.add(item.bylineId);\n\t\t\treturn true;\n\t\t});\n\n\t\t// This method is expected to be called within a transaction context\n\t\t// (content handlers wrap in withTransaction, seed applies sequentially).\n\t\t// All operations use this.db directly -- callers are responsible for\n\t\t// wrapping in a transaction when atomicity is required.\n\t\tif (bylines.length > 0) {\n\t\t\tconst ids = bylines.map((item) => item.bylineId);\n\t\t\tconst rows = await this.db\n\t\t\t\t.selectFrom(\"_emdash_bylines\")\n\t\t\t\t.select(\"id\")\n\t\t\t\t.where(\"id\", \"in\", ids)\n\t\t\t\t.execute();\n\t\t\tif (rows.length !== ids.length) {\n\t\t\t\tthrow new Error(\"One or more byline IDs do not exist\");\n\t\t\t}\n\t\t}\n\n\t\tawait this.db\n\t\t\t.deleteFrom(\"_emdash_content_bylines\")\n\t\t\t.where(\"collection_slug\", \"=\", collectionSlug)\n\t\t\t.where(\"content_id\", \"=\", 
contentId)\n\t\t\t.execute();\n\n\t\tfor (let i = 0; i < bylines.length; i++) {\n\t\t\tconst item = bylines[i];\n\t\t\tawait this.db\n\t\t\t\t.insertInto(\"_emdash_content_bylines\")\n\t\t\t\t.values({\n\t\t\t\t\tid: ulid(),\n\t\t\t\t\tcollection_slug: collectionSlug,\n\t\t\t\t\tcontent_id: contentId,\n\t\t\t\t\tbyline_id: item.bylineId,\n\t\t\t\t\tsort_order: i,\n\t\t\t\t\trole_label: item.roleLabel ?? null,\n\t\t\t\t\tcreated_at: new Date().toISOString(),\n\t\t\t\t})\n\t\t\t\t.execute();\n\t\t}\n\n\t\tawait sql`\n\t\t\tUPDATE ${sql.ref(tableName)}\n\t\t\tSET primary_byline_id = ${bylines[0]?.bylineId ?? null}\n\t\t\tWHERE id = ${contentId}\n\t\t`.execute(this.db);\n\n\t\treturn await this.getContentBylines(collectionSlug, contentId);\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;AAMA,SAAgB,OAAU,KAAU,MAAqB;AACxD,KAAI,IAAI,WAAW,EAAG,QAAO,EAAE;CAC/B,MAAM,SAAgB,EAAE;AACxB,MAAK,IAAI,IAAI,GAAG,IAAI,IAAI,QAAQ,KAAK,KACpC,QAAO,KAAK,IAAI,MAAM,GAAG,IAAI,KAAK,CAAC;AAEpC,QAAO;;;AAIR,MAAa,iBAAiB;;;;AC0B9B,SAAS,YAAY,KAA+B;AACnD,QAAO;EACN,IAAI,IAAI;EACR,MAAM,IAAI;EACV,aAAa,IAAI;EACjB,KAAK,IAAI;EACT,eAAe,IAAI;EACnB,YAAY,IAAI;EAChB,QAAQ,IAAI;EACZ,SAAS,IAAI,aAAa;EAC1B,WAAW,IAAI;EACf,WAAW,IAAI;EACf;;AAGF,IAAa,mBAAb,MAA8B;CAC7B,YAAY,AAAQ,IAAsB;EAAtB;;CAEpB,MAAM,SAAS,IAA2C;EACzD,MAAM,MAAM,MAAM,KAAK,GACrB,WAAW,kBAAkB,CAC7B,WAAW,CACX,MAAM,MAAM,KAAK,GAAG,CACpB,kBAAkB;AACpB,SAAO,MAAM,YAAY,IAAI,GAAG;;CAGjC,MAAM,WAAW,MAA6C;EAC7D,MAAM,MAAM,MAAM,KAAK,GACrB,WAAW,kBAAkB,CAC7B,WAAW,CACX,MAAM,QAAQ,KAAK,KAAK,CACxB,kBAAkB;AACpB,SAAO,MAAM,YAAY,IAAI,GAAG;;CAGjC,MAAM,aAAa,QAA+C;EACjE,MAAM,MAAM,MAAM,KAAK,GACrB,WAAW,kBAAkB,CAC7B,WAAW,CACX,MAAM,WAAW,KAAK,OAAO,CAC7B,kBAAkB;AACpB,SAAO,MAAM,YAAY,IAAI,GAAG;;CAGjC,MAAM,SAAS,SAM4B;EAC1C,MAAM,QAAQ,KAAK,IAAI,KAAK,IAAI,SAAS,SAAS,IAAI,EAAE,EAAE,IAAI;EAE9D,IAAI,QAAQ,KAAK,GACf,WAAW,kBAAkB,CAC7B,WAAW,CACX,QAAQ,cAAc,OAAO,CAC7B,QAAQ,MAAM,OAAO,CACrB,MAAM,QAAQ,EAAE;AAElB,MAAI,SAAS,QAAQ;GAKpB,MAAM,OAAO,IAJG,QAAQ,OACtB,WAAW,MAAM,OAAO,CACxB,WAAW,KAAK,MAAM,CACtB,WAAW,KAAK,MAAM,CACC;AA
CzB,WAAQ,MAAM,OAAO,OACpB,GAAG,GAAG,CAAC,GAAG,gBAAgB,QAAQ,KAAK,EAAE,GAAG,QAAQ,QAAQ,KAAK,CAAC,CAAC,CACnE;;AAGF,MAAI,SAAS,YAAY,OACxB,SAAQ,MAAM,MAAM,YAAY,KAAK,QAAQ,UAAU,IAAI,EAAE;AAG9D,MAAI,SAAS,WAAW,OACvB,SAAQ,MAAM,MAAM,WAAW,KAAK,QAAQ,OAAO;AAGpD,MAAI,SAAS,QAAQ;GACpB,MAAM,UAAU,aAAa,QAAQ,OAAO;AAC5C,OAAI,QACH,SAAQ,MAAM,OAAO,OACpB,GAAG,GAAG,CACL,GAAG,cAAc,KAAK,QAAQ,WAAW,EACzC,GAAG,IAAI,CAAC,GAAG,cAAc,KAAK,QAAQ,WAAW,EAAE,GAAG,MAAM,KAAK,QAAQ,GAAG,CAAC,CAAC,CAC9E,CAAC,CACF;;EAIH,MAAM,OAAO,MAAM,MAAM,SAAS;EAClC,MAAM,QAAQ,KAAK,MAAM,GAAG,MAAM,CAAC,IAAI,YAAY;EACnD,MAAM,SAAwC,EAAE,OAAO;AAEvD,MAAI,KAAK,SAAS,OAAO;GACxB,MAAM,OAAO,MAAM,GAAG,GAAG;AACzB,OAAI,KACH,QAAO,aAAa,aAAa,KAAK,WAAW,KAAK,GAAG;;AAI3D,SAAO;;CAGR,MAAM,OAAO,OAAkD;EAC9D,MAAM,KAAK,MAAM;EACjB,MAAM,uBAAM,IAAI,MAAM,EAAC,aAAa;AAEpC,QAAM,KAAK,GACT,WAAW,kBAAkB,CAC7B,OAAO;GACP;GACA,MAAM,MAAM;GACZ,cAAc,MAAM;GACpB,KAAK,MAAM,OAAO;GAClB,iBAAiB,MAAM,iBAAiB;GACxC,aAAa,MAAM,cAAc;GACjC,SAAS,MAAM,UAAU;GACzB,UAAU,MAAM,UAAU,IAAI;GAC9B,YAAY;GACZ,YAAY;GACZ,CAAC,CACD,SAAS;EAEX,MAAM,SAAS,MAAM,KAAK,SAAS,GAAG;AACtC,MAAI,CAAC,OACJ,OAAM,IAAI,MAAM,0BAA0B;AAE3C,SAAO;;CAGR,MAAM,OAAO,IAAY,OAAyD;AAEjF,MAAI,CADa,MAAM,KAAK,SAAS,GAAG,CACzB,QAAO;EAEtB,MAAM,UAAmC,EACxC,6BAAY,IAAI,MAAM,EAAC,aAAa,EACpC;AAED,MAAI,MAAM,SAAS,OAAW,SAAQ,OAAO,MAAM;AACnD,MAAI,MAAM,gBAAgB,OAAW,SAAQ,eAAe,MAAM;AAClE,MAAI,MAAM,QAAQ,OAAW,SAAQ,MAAM,MAAM;AACjD,MAAI,MAAM,kBAAkB,OAAW,SAAQ,kBAAkB,MAAM;AACvE,MAAI,MAAM,eAAe,OAAW,SAAQ,cAAc,MAAM;AAChE,MAAI,MAAM,WAAW,OAAW,SAAQ,UAAU,MAAM;AACxD,MAAI,MAAM,YAAY,OAAW,SAAQ,WAAW,MAAM,UAAU,IAAI;AAExE,QAAM,KAAK,GAAG,YAAY,kBAAkB,CAAC,IAAI,QAAQ,CAAC,MAAM,MAAM,KAAK,GAAG,CAAC,SAAS;AACxF,SAAO,MAAM,KAAK,SAAS,GAAG;;CAG/B,MAAM,OAAO,IAA8B;AAE1C,MAAI,CADa,MAAM,KAAK,SAAS,GAAG,CACzB,QAAO;AAEtB,QAAM,KAAK,GAAG,aAAa,CAAC,QAAQ,OAAO,QAAQ;AAClD,SAAM,IAAI,WAAW,0BAA0B,CAAC,MAAM,aAAa,KAAK,GAAG,CAAC,SAAS;AAErF,SAAM,IAAI,WAAW,kBAAkB,CAAC,MAAM,MAAM,KAAK,GAAG,CAAC,SAAS;GAEtE,MAAM,aAAa,MAAM,eAAe,KAAK,OAAO;AACpD,QAAK,MAAM,aAAa,YAAY;AACnC,uBAAmB,WAAW,gBAAgB;AAC9C,UAAM,GAAG;cACC,IAAI,
IAAI,UAAU,CAAC;;iCAEA,GAAG;MAC9B,QAAQ,IAAI;;IAEd;AAEF,SAAO;;CAGR,MAAM,kBACL,gBACA,WACiC;AAuBjC,UAtBa,MAAM,KAAK,GACtB,WAAW,gCAAgC,CAC3C,UAAU,wBAAwB,QAAQ,eAAe,CACzD,OAAO;GACP;GACA;GACA;GACA;GACA;GACA;GACA;GACA;GACA;GACA;GACA;GACA;GACA,CAAC,CACD,MAAM,sBAAsB,KAAK,eAAe,CAChD,MAAM,iBAAiB,KAAK,UAAU,CACtC,QAAQ,iBAAiB,MAAM,CAC/B,SAAS,EAEC,KAAK,SAAS;GACzB,QAAQ,YAAY,IAAI;GACxB,WAAW,IAAI;GACf,WAAW,IAAI;GACf,EAAE;;;;;;CAOJ,MAAM,sBACL,gBACA,YAC8C;EAC9C,MAAM,yBAAS,IAAI,KAAoC;AACvD,MAAI,WAAW,WAAW,EAAG,QAAO;EAEpC,MAAM,mBAAmB,CAAC,GAAG,IAAI,IAAI,WAAW,CAAC;AACjD,OAAK,MAAM,SAAS,OAAO,kBAAkB,eAAe,EAAE;GAC7D,MAAM,OAAO,MAAM,KAAK,GACtB,WAAW,gCAAgC,CAC3C,UAAU,wBAAwB,QAAQ,eAAe,CACzD,OAAO;IACP;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA,CAAC,CACD,MAAM,sBAAsB,KAAK,eAAe,CAChD,MAAM,iBAAiB,MAAM,MAAM,CACnC,QAAQ,iBAAiB,MAAM,CAC/B,SAAS;AAEX,QAAK,MAAM,OAAO,MAAM;IACvB,MAAM,YAAY,IAAI;IACtB,MAAM,SAA8B;KACnC,QAAQ,YAAY,IAAI;KACxB,WAAW,IAAI;KACf,WAAW,IAAI;KACf;IACD,MAAM,WAAW,OAAO,IAAI,UAAU;AACtC,QAAI,SACH,UAAS,KAAK,OAAO;QAErB,QAAO,IAAI,WAAW,CAAC,OAAO,CAAC;;;AAKlC,SAAO;;;;;;CAOR,MAAM,cAAc,SAAwD;EAC3E,MAAM,yBAAS,IAAI,KAA4B;AAC/C,MAAI,QAAQ,WAAW,EAAG,QAAO;AAEjC,OAAK,MAAM,SAAS,OAAO,SAAS,eAAe,EAAE;GACpD,MAAM,OAAO,MAAM,KAAK,GACtB,WAAW,kBAAkB,CAC7B,WAAW,CACX,MAAM,WAAW,MAAM,MAAM,CAC7B,SAAS;AAEX,QAAK,MAAM,OAAO,KACjB,KAAI,IAAI,QACP,QAAO,IAAI,IAAI,SAAS,YAAY,IAAI,CAAC;;AAI5C,SAAO;;CAGR,MAAM,kBACL,gBACA,WACA,cACiC;AACjC,qBAAmB,gBAAgB,kBAAkB;EACrD,MAAM,YAAY,MAAM;AACxB,qBAAmB,WAAW,gBAAgB;EAE9C,MAAM,uBAAO,IAAI,KAAa;EAC9B,MAAM,UAAU,aAAa,QAAQ,SAAS;AAC7C,OAAI,KAAK,IAAI,KAAK,SAAS,CAAE,QAAO;AACpC,QAAK,IAAI,KAAK,SAAS;AACvB,UAAO;IACN;AAMF,MAAI,QAAQ,SAAS,GAAG;GACvB,MAAM,MAAM,QAAQ,KAAK,SAAS,KAAK,SAAS;AAMhD,QALa,MAAM,KAAK,GACtB,WAAW,kBAAkB,CAC7B,OAAO,KAAK,CACZ,MAAM,MAAM,MAAM,IAAI,CACtB,SAAS,EACF,WAAW,IAAI,OACvB,OAAM,IAAI,MAAM,sCAAsC;;AAIxD,QAAM,KAAK,GACT,WAAW,0BAA0B,CACrC,MAAM,mBAAmB,KAAK,eAAe,CAC7C,MAAM,cAAc,KAAK,UAAU,CACnC,SAAS;AAEX,OAAK,IAAI,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;GACxC,MAAM,OAAO,QAAQ;AACrB,SAAM,KAAK,GA
CT,WAAW,0BAA0B,CACrC,OAAO;IACP,IAAI,MAAM;IACV,iBAAiB;IACjB,YAAY;IACZ,WAAW,KAAK;IAChB,YAAY;IACZ,YAAY,KAAK,aAAa;IAC9B,6BAAY,IAAI,MAAM,EAAC,aAAa;IACpC,CAAC,CACD,SAAS;;AAGZ,QAAM,GAAG;YACC,IAAI,IAAI,UAAU,CAAC;6BACF,QAAQ,IAAI,YAAY,KAAK;gBAC1C,UAAU;IACtB,QAAQ,KAAK,GAAG;AAElB,SAAO,MAAM,KAAK,kBAAkB,gBAAgB,UAAU"}
@@ -1 +0,0 @@
1
- {"version":3,"file":"bylines-BYdTYmia.mjs","names":[],"sources":["../src/bylines/index.ts"],"sourcesContent":["/**\n * Runtime API for bylines\n *\n * Provides functions to query byline profiles and byline credits\n * associated with content entries. Follows the same pattern as\n * the taxonomies runtime API.\n */\n\nimport { sql } from \"kysely\";\n\nimport { BylineRepository } from \"../database/repositories/byline.js\";\nimport type { BylineSummary, ContentBylineCredit } from \"../database/repositories/types.js\";\nimport { validateIdentifier } from \"../database/validate.js\";\nimport { getDb } from \"../loader.js\";\nimport { chunks, SQL_BATCH_SIZE } from \"../utils/chunks.js\";\n\n/**\n * Get a byline by ID.\n *\n * @example\n * ```ts\n * import { getByline } from \"emdash\";\n *\n * const byline = await getByline(\"01HXYZ...\");\n * if (byline) {\n * console.log(byline.displayName);\n * }\n * ```\n */\nexport async function getByline(id: string): Promise<BylineSummary | null> {\n\tconst db = await getDb();\n\tconst repo = new BylineRepository(db);\n\treturn repo.findById(id);\n}\n\n/**\n * Get a byline by slug.\n *\n * @example\n * ```ts\n * import { getBylineBySlug } from \"emdash\";\n *\n * const byline = await getBylineBySlug(\"jane-doe\");\n * if (byline) {\n * console.log(byline.displayName); // \"Jane Doe\"\n * }\n * ```\n */\nexport async function getBylineBySlug(slug: string): Promise<BylineSummary | null> {\n\tconst db = await getDb();\n\tconst repo = new BylineRepository(db);\n\treturn repo.findBySlug(slug);\n}\n\n/**\n * Get byline credits for a single content entry.\n *\n * Returns explicit byline credits from the junction table. 
If none exist\n * but the entry has an `authorId`, falls back to the user-linked byline\n * (marked as source: \"inferred\").\n *\n * @example\n * ```ts\n * import { getEntryBylines } from \"emdash\";\n *\n * const bylines = await getEntryBylines(\"posts\", post.data.id);\n * for (const credit of bylines) {\n * console.log(credit.byline.displayName, credit.roleLabel);\n * }\n * ```\n */\nexport async function getEntryBylines(\n\tcollection: string,\n\tentryId: string,\n): Promise<ContentBylineCredit[]> {\n\tvalidateIdentifier(collection, \"collection\");\n\tconst db = await getDb();\n\tconst repo = new BylineRepository(db);\n\n\tconst explicit = await repo.getContentBylines(collection, entryId);\n\tif (explicit.length > 0) {\n\t\treturn explicit.map((c) => ({ ...c, source: \"explicit\" as const }));\n\t}\n\n\t// Fallback: look up user-linked byline from author_id\n\tconst authorId = await getAuthorId(db, collection, entryId);\n\tif (authorId) {\n\t\tconst fallback = await repo.findByUserId(authorId);\n\t\tif (fallback) {\n\t\t\treturn [{ byline: fallback, sortOrder: 0, roleLabel: null, source: \"inferred\" }];\n\t\t}\n\t}\n\n\treturn [];\n}\n\n/**\n * Batch-fetch byline credits for multiple content entries in a single query.\n *\n * This is more efficient than calling getEntryBylines for each entry\n * when you need bylines for a list of entries (e.g., a blog index page).\n *\n * @param collection - The collection slug (e.g., \"posts\")\n * @param entryIds - Array of entry IDs\n * @returns Map from entry ID to array of byline credits\n *\n * @example\n * ```ts\n * import { getBylinesForEntries, getEmDashCollection } from \"emdash\";\n *\n * const { entries } = await getEmDashCollection(\"posts\");\n * const ids = entries.map(e => e.data.id);\n * const bylinesMap = await getBylinesForEntries(\"posts\", ids);\n *\n * for (const entry of entries) {\n * const bylines = bylinesMap.get(entry.data.id) ?? 
[];\n * // render bylines\n * }\n * ```\n */\nexport async function getBylinesForEntries(\n\tcollection: string,\n\tentryIds: string[],\n): Promise<Map<string, ContentBylineCredit[]>> {\n\tvalidateIdentifier(collection, \"collection\");\n\tconst result = new Map<string, ContentBylineCredit[]>();\n\n\t// Initialize all entry IDs with empty arrays\n\tfor (const id of entryIds) {\n\t\tresult.set(id, []);\n\t}\n\n\tif (entryIds.length === 0) {\n\t\treturn result;\n\t}\n\n\tconst db = await getDb();\n\tconst repo = new BylineRepository(db);\n\n\t// 1. Batch fetch all explicit byline credits\n\tconst bylinesMap = await repo.getContentBylinesMany(collection, entryIds);\n\n\t// 2. Collect entry IDs that need fallback lookup\n\tconst fallbackEntryIds: string[] = [];\n\tconst needsFallback: Map<string, string> = new Map(); // entryId -> authorId\n\n\tfor (const id of entryIds) {\n\t\tif (!bylinesMap.has(id)) {\n\t\t\t// Need to check author_id for this entry — but we only have the IDs,\n\t\t\t// so batch-fetch them from the content table\n\t\t\tfallbackEntryIds.push(id);\n\t\t}\n\t}\n\n\t// Batch-fetch author_ids for entries that need fallback\n\tif (fallbackEntryIds.length > 0) {\n\t\tconst authorMap = await getAuthorIds(db, collection, fallbackEntryIds);\n\t\tfor (const [entryId, authorId] of authorMap) {\n\t\t\tneedsFallback.set(entryId, authorId);\n\t\t}\n\t}\n\n\t// 3. Batch fetch user-linked bylines for fallback\n\tconst uniqueAuthorIds = [...new Set(needsFallback.values())];\n\tconst authorBylineMap = await repo.findByUserIds(uniqueAuthorIds);\n\n\t// 4. 
Assign results\n\tfor (const id of entryIds) {\n\t\tconst explicit = bylinesMap.get(id);\n\t\tif (explicit && explicit.length > 0) {\n\t\t\tresult.set(\n\t\t\t\tid,\n\t\t\t\texplicit.map((c) => ({ ...c, source: \"explicit\" as const })),\n\t\t\t);\n\t\t\tcontinue;\n\t\t}\n\n\t\tconst authorId = needsFallback.get(id);\n\t\tif (authorId) {\n\t\t\tconst fallback = authorBylineMap.get(authorId);\n\t\t\tif (fallback) {\n\t\t\t\tresult.set(id, [{ byline: fallback, sortOrder: 0, roleLabel: null, source: \"inferred\" }]);\n\t\t\t\tcontinue;\n\t\t\t}\n\t\t}\n\n\t\t// Already initialized with empty array\n\t}\n\n\treturn result;\n}\n\n/**\n * Look up the author_id for a single content entry.\n * Uses raw SQL since we need dynamic table names.\n */\nasync function getAuthorId(\n\tdb: Awaited<ReturnType<typeof getDb>>,\n\tcollection: string,\n\tentryId: string,\n): Promise<string | null> {\n\tconst tableName = `ec_${collection}`;\n\tvalidateIdentifier(tableName, \"content table\");\n\n\tconst result = await sql<{ author_id: string | null }>`\n\t\tSELECT author_id FROM ${sql.ref(tableName)}\n\t\tWHERE id = ${entryId}\n\t\tLIMIT 1\n\t`.execute(db);\n\n\treturn result.rows[0]?.author_id ?? 
null;\n}\n\n/**\n * Batch-fetch author_ids for multiple content entries.\n * Returns Map<entryId, authorId> (only entries with non-null author_id).\n */\nasync function getAuthorIds(\n\tdb: Awaited<ReturnType<typeof getDb>>,\n\tcollection: string,\n\tentryIds: string[],\n): Promise<Map<string, string>> {\n\tconst tableName = `ec_${collection}`;\n\tvalidateIdentifier(tableName, \"content table\");\n\n\tconst map = new Map<string, string>();\n\tfor (const chunk of chunks(entryIds, SQL_BATCH_SIZE)) {\n\t\tconst result = await sql<{ id: string; author_id: string | null }>`\n\t\t\tSELECT id, author_id FROM ${sql.ref(tableName)}\n\t\t\tWHERE id IN (${sql.join(chunk.map((id) => sql`${id}`))})\n\t\t`.execute(db);\n\n\t\tfor (const row of result.rows) {\n\t\t\tif (row.author_id) {\n\t\t\t\tmap.set(row.id, row.author_id);\n\t\t\t}\n\t\t}\n\t}\n\treturn map;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA6BA,eAAsB,UAAU,IAA2C;AAG1E,QADa,IAAI,iBADN,MAAM,OAAO,CACa,CACzB,SAAS,GAAG;;;;;;;;;;;;;;;AAgBzB,eAAsB,gBAAgB,MAA6C;AAGlF,QADa,IAAI,iBADN,MAAM,OAAO,CACa,CACzB,WAAW,KAAK;;;;;;;;;;;;;;;;;;;;;;;;;;AAqE7B,eAAsB,qBACrB,YACA,UAC8C;AAC9C,oBAAmB,YAAY,aAAa;CAC5C,MAAM,yBAAS,IAAI,KAAoC;AAGvD,MAAK,MAAM,MAAM,SAChB,QAAO,IAAI,IAAI,EAAE,CAAC;AAGnB,KAAI,SAAS,WAAW,EACvB,QAAO;CAGR,MAAM,KAAK,MAAM,OAAO;CACxB,MAAM,OAAO,IAAI,iBAAiB,GAAG;CAGrC,MAAM,aAAa,MAAM,KAAK,sBAAsB,YAAY,SAAS;CAGzE,MAAM,mBAA6B,EAAE;CACrC,MAAM,gCAAqC,IAAI,KAAK;AAEpD,MAAK,MAAM,MAAM,SAChB,KAAI,CAAC,WAAW,IAAI,GAAG,CAGtB,kBAAiB,KAAK,GAAG;AAK3B,KAAI,iBAAiB,SAAS,GAAG;EAChC,MAAM,YAAY,MAAM,aAAa,IAAI,YAAY,iBAAiB;AACtE,OAAK,MAAM,CAAC,SAAS,aAAa,UACjC,eAAc,IAAI,SAAS,SAAS;;CAKtC,MAAM,kBAAkB,CAAC,GAAG,IAAI,IAAI,cAAc,QAAQ,CAAC,CAAC;CAC5D,MAAM,kBAAkB,MAAM,KAAK,cAAc,gBAAgB;AAGjE,MAAK,MAAM,MAAM,UAAU;EAC1B,MAAM,WAAW,WAAW,IAAI,GAAG;AACnC,MAAI,YAAY,SAAS,SAAS,GAAG;AACpC,UAAO,IACN,IACA,SAAS,KAAK,OAAO;IAAE,GAAG;IAAG,QAAQ;IAAqB,EAAE,CAC5D;AACD;;EAGD,MAAM,WAAW,cAAc,IAAI,GAAG;AACtC,MAAI,UAAU;GACb,MAAM,WAAW,gBAAgB,IAAI,SAAS;AAC9C,OAAI,UAAU;AACb,WAAO,IAAI
,IAAI,CAAC;KAAE,QAAQ;KAAU,WAAW;KAAG,WAAW;KAAM,QAAQ;KAAY,CAAC,CAAC;AACzF;;;;AAOH,QAAO;;;;;;AA4BR,eAAe,aACd,IACA,YACA,UAC+B;CAC/B,MAAM,YAAY,MAAM;AACxB,oBAAmB,WAAW,gBAAgB;CAE9C,MAAM,sBAAM,IAAI,KAAqB;AACrC,MAAK,MAAM,SAAS,OAAO,UAAU,eAAe,EAAE;EACrD,MAAM,SAAS,MAAM,GAA6C;+BACrC,IAAI,IAAI,UAAU,CAAC;kBAChC,IAAI,KAAK,MAAM,KAAK,OAAO,GAAG,GAAG,KAAK,CAAC,CAAC;IACtD,QAAQ,GAAG;AAEb,OAAK,MAAM,OAAO,OAAO,KACxB,KAAI,IAAI,UACP,KAAI,IAAI,IAAI,IAAI,IAAI,UAAU;;AAIjC,QAAO"}