@setzkasten-cms/astro-admin 1.1.0 → 1.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (29)
  1. package/package.json +13 -6
  2. package/src/api-routes/__tests__/feature-gate.test.ts +60 -0
  3. package/src/api-routes/__tests__/history-rollback.test.ts +196 -0
  4. package/src/api-routes/__tests__/history.test.ts +168 -0
  5. package/src/api-routes/__tests__/setup-github-app-callback.test.ts +7 -0
  6. package/src/api-routes/__tests__/webhook-signing.test.ts +39 -0
  7. package/src/api-routes/__tests__/webhooks.test.ts +219 -0
  8. package/src/api-routes/_feature-gate.ts +39 -0
  9. package/src/api-routes/_role-resolver.ts +60 -0
  10. package/src/api-routes/_storage-config.ts +15 -2
  11. package/src/api-routes/_webhook-dispatcher.ts +120 -0
  12. package/src/api-routes/_webhook-signing.ts +13 -0
  13. package/src/api-routes/_webhook-status-store.ts +31 -0
  14. package/src/api-routes/auth-callback.ts +2 -0
  15. package/src/api-routes/auth-setzkasten-login.ts +16 -1
  16. package/src/api-routes/editors.ts +15 -0
  17. package/src/api-routes/history-rollback.ts +144 -0
  18. package/src/api-routes/history-version.ts +57 -0
  19. package/src/api-routes/history.ts +119 -0
  20. package/src/api-routes/icons-local.ts +169 -0
  21. package/src/api-routes/section-commit-pending.ts +108 -9
  22. package/src/api-routes/section-delete.ts +14 -0
  23. package/src/api-routes/setup-github-app-callback.ts +20 -2
  24. package/src/api-routes/updater-register.ts +31 -2
  25. package/src/api-routes/webhooks-status.ts +17 -0
  26. package/src/api-routes/webhooks-test.ts +134 -0
  27. package/src/api-routes/webhooks.ts +163 -0
  28. package/src/init/__tests__/patcher-edge-cases.test.ts +34 -1
  29. package/src/init/template-patcher-v2.ts +9 -4
@@ -0,0 +1,144 @@
1
+ import type { APIRoute } from 'astro'
2
+ import { resolveStorageConfigForRequest } from './_storage-config'
3
+ import { resolveGitHubTokenForRequest } from './_github-token'
4
+ import { parseSession, requireAdmin } from './_auth-guard'
5
+ import { withTrailers } from './_commit-trailers'
6
+ import { invalidateCache } from './_github-cache'
7
+
8
+ interface RollbackBody {
9
+ path?: string
10
+ sha?: string
11
+ /** Optional ETag-style guard: client sends the SHA they think is HEAD;
12
+ * if HEAD has moved, we 409 to prevent stomping live edits. */
13
+ expectedHeadSha?: string
14
+ }
15
+
16
+ /**
17
+ * POST /api/setzkasten/history/rollback
18
+ *
19
+ * Body: { path, sha, expectedHeadSha? }
20
+ *
21
+ * Restores `path` to the contents from `sha` by writing a new commit
22
+ * (no `git revert` — JSON content is set wholesale). The original SHA
23
+ * stays in history so users can roll forward again.
24
+ *
25
+ * Conflict semantics: the client passes the SHA they currently render in
26
+ * the file picker. If the file's HEAD has moved between page-load and the
27
+ * rollback click, we return 409 with `code: 'head-moved'` so the UI can
28
+ * tell the user to refresh.
29
+ *
30
+ * Admin-only — editors can edit, but rollback is destructive enough to
31
+ * warrant the audit-log control.
32
+ */
33
+ export const POST: APIRoute = async ({ request, cookies }) => {
34
+ const denied = requireAdmin(cookies.get('setzkasten_session')?.value)
35
+ if (denied) return denied
36
+
37
+ const session = parseSession(cookies.get('setzkasten_session')?.value)
38
+ if (!session) return new Response('Unauthorized', { status: 401 })
39
+
40
+ let body: RollbackBody
41
+ try {
42
+ body = (await request.json()) as RollbackBody
43
+ } catch {
44
+ return Response.json({ error: 'Invalid JSON body' }, { status: 400 })
45
+ }
46
+ const { path, sha, expectedHeadSha } = body
47
+ if (!path || !sha) {
48
+ return Response.json({ error: 'path and sha are required' }, { status: 400 })
49
+ }
50
+
51
+ const tokenResult = await resolveGitHubTokenForRequest(request)
52
+ if (!tokenResult.ok) return new Response(tokenResult.error.message, { status: 500 })
53
+
54
+ const storage = await resolveStorageConfigForRequest(request)
55
+ if (!storage) {
56
+ return Response.json({ error: 'Could not resolve owner/repo' }, { status: 400 })
57
+ }
58
+ const { owner, repo, branch } = storage
59
+
60
+ const headers = {
61
+ Authorization: `Bearer ${tokenResult.value}`,
62
+ Accept: 'application/vnd.github+json',
63
+ 'X-GitHub-Api-Version': '2022-11-28',
64
+ 'Content-Type': 'application/json',
65
+ }
66
+
67
+ // 1. Fetch contents of the file at the target sha
68
+ const versionRes = await fetch(
69
+ `https://api.github.com/repos/${owner}/${repo}/contents/${path}?ref=${sha}`,
70
+ { headers },
71
+ )
72
+ if (versionRes.status === 404) {
73
+ return Response.json(
74
+ { error: 'File did not exist at the requested sha', code: 'version-not-found' },
75
+ { status: 404 },
76
+ )
77
+ }
78
+ if (!versionRes.ok) {
79
+ return Response.json(
80
+ { error: `Failed to read version: ${versionRes.status}` },
81
+ { status: 502 },
82
+ )
83
+ }
84
+ const versionData = (await versionRes.json()) as {
85
+ content: string
86
+ encoding: string
87
+ }
88
+ const targetContent =
89
+ versionData.encoding === 'base64'
90
+ ? Buffer.from(versionData.content, 'base64').toString('utf-8')
91
+ : versionData.content
92
+
93
+ // 2. Fetch current HEAD SHA of the file (for conflict detection + PUT sha param)
94
+ const headRes = await fetch(
95
+ `https://api.github.com/repos/${owner}/${repo}/contents/${path}?ref=${branch}`,
96
+ { headers },
97
+ )
98
+ let currentSha: string | null = null
99
+ if (headRes.ok) {
100
+ const data = (await headRes.json()) as { sha: string }
101
+ currentSha = data.sha
102
+ }
103
+
104
+ if (expectedHeadSha && currentSha && expectedHeadSha !== currentSha) {
105
+ return Response.json(
106
+ {
107
+ error: 'Datei wurde inzwischen geändert. Bitte den Verlauf neu laden.',
108
+ code: 'head-moved',
109
+ },
110
+ { status: 409 },
111
+ )
112
+ }
113
+
114
+ // 3. Write new commit with the historical content
115
+ const shortSha = sha.slice(0, 7)
116
+ const fileName = path.split('/').pop() ?? path
117
+ const message = withTrailers(
118
+ `revert(${fileName}): rollback to ${shortSha}`,
119
+ session.user.email,
120
+ )
121
+
122
+ const putBody: Record<string, unknown> = {
123
+ message,
124
+ content: Buffer.from(targetContent).toString('base64'),
125
+ branch,
126
+ }
127
+ if (currentSha) putBody.sha = currentSha
128
+
129
+ const putRes = await fetch(
130
+ `https://api.github.com/repos/${owner}/${repo}/contents/${path}`,
131
+ { method: 'PUT', headers, body: JSON.stringify(putBody) },
132
+ )
133
+
134
+ if (!putRes.ok) {
135
+ const text = await putRes.text()
136
+ return Response.json({ error: `Rollback write failed: ${text}` }, { status: 502 })
137
+ }
138
+
139
+ // Invalidate history cache for this path — fresh commit shows up in lists.
140
+ invalidateCache(`history:${owner}/${repo}:${branch}:${path}:head`)
141
+
142
+ const putData = (await putRes.json()) as { commit: { sha: string } }
143
+ return Response.json({ ok: true, commitSha: putData.commit.sha })
144
+ }
@@ -0,0 +1,57 @@
1
+ import type { APIRoute } from 'astro'
2
+ import { resolveStorageConfigForRequest } from './_storage-config'
3
+ import { resolveGitHubTokenForRequest } from './_github-token'
4
+ import { requireAdmin } from './_auth-guard'
5
+ import { cachedFetch } from './_github-cache'
6
+
7
+ /**
8
+ * GET /api/setzkasten/history/version?path=<file>&sha=<commit-sha>
9
+ *
10
+ * Returns the file content at a specific commit (for diff rendering).
11
+ * Cached per (path, sha) for 5 minutes — historical content is immutable.
12
+ */
13
+ export const GET: APIRoute = async ({ request, url, cookies }) => {
14
+ const denied = requireAdmin(cookies.get('setzkasten_session')?.value)
15
+ if (denied) return denied
16
+
17
+ const path = url.searchParams.get('path')
18
+ const sha = url.searchParams.get('sha')
19
+ if (!path || !sha) {
20
+ return Response.json({ error: 'Missing required `path` or `sha`.' }, { status: 400 })
21
+ }
22
+
23
+ const tokenResult = await resolveGitHubTokenForRequest(request)
24
+ if (!tokenResult.ok) return new Response(tokenResult.error.message, { status: 500 })
25
+
26
+ const storage = await resolveStorageConfigForRequest(request)
27
+ if (!storage) {
28
+ return Response.json({ error: 'Could not resolve owner/repo' }, { status: 400 })
29
+ }
30
+ const { owner, repo } = storage
31
+
32
+ const cacheKey = `history-version:${owner}/${repo}:${path}:${sha}`
33
+ const result = await cachedFetch(cacheKey, 5 * 60_000, async () => {
34
+ const u = new URL(
35
+ `https://api.github.com/repos/${owner}/${repo}/contents/${path}`,
36
+ )
37
+ u.searchParams.set('ref', sha)
38
+ const res = await fetch(u, {
39
+ headers: {
40
+ Authorization: `Bearer ${tokenResult.value}`,
41
+ Accept: 'application/vnd.github+json',
42
+ 'X-GitHub-Api-Version': '2022-11-28',
43
+ },
44
+ })
45
+ if (res.status === 404) return { ok: false as const, status: 404, error: 'File not found at given sha' }
46
+ if (!res.ok) return { ok: false as const, status: 502, error: `GitHub returned ${res.status}` }
47
+ const data = (await res.json()) as { content: string; encoding: string; sha: string }
48
+ const raw =
49
+ data.encoding === 'base64'
50
+ ? Buffer.from(data.content, 'base64').toString('utf-8')
51
+ : data.content
52
+ return { ok: true as const, value: { content: raw, sha: data.sha } }
53
+ })
54
+
55
+ if (!result.ok) return Response.json({ error: result.error }, { status: result.status })
56
+ return Response.json(result.value)
57
+ }
@@ -0,0 +1,119 @@
1
+ import type { APIRoute } from 'astro'
2
+ import { resolveStorageConfigForRequest } from './_storage-config'
3
+ import { resolveGitHubTokenForRequest } from './_github-token'
4
+ import { requireAdmin } from './_auth-guard'
5
+ import { parseCoAuthorTrailers, type CommitInfo } from '@setzkasten-cms/core'
6
+ import { cachedFetch } from './_github-cache'
7
+
8
+ /**
9
+ * GET /api/setzkasten/history?path=<contentPath>&before=<sha>
10
+ *
11
+ * Returns up to 5 most recent commits affecting the given file. Pagination
12
+ * via `before=<sha>` returns 10 more older commits — clients call this on
13
+ * "Mehr laden". Admin-only — editors can read content but not the audit
14
+ * trail (and certainly not roll back).
15
+ */
16
+ export const GET: APIRoute = async ({ request, url, cookies }) => {
17
+ const denied = requireAdmin(cookies.get('setzkasten_session')?.value)
18
+ if (denied) return denied
19
+
20
+ const path = url.searchParams.get('path')
21
+ const before = url.searchParams.get('before')
22
+ if (!path) {
23
+ return Response.json({ error: 'Missing required `path` parameter.' }, { status: 400 })
24
+ }
25
+
26
+ const tokenResult = await resolveGitHubTokenForRequest(request)
27
+ if (!tokenResult.ok) return new Response(tokenResult.error.message, { status: 500 })
28
+
29
+ const storage = await resolveStorageConfigForRequest(request)
30
+ if (!storage) {
31
+ return Response.json({ error: 'Could not resolve owner/repo' }, { status: 400 })
32
+ }
33
+ const { owner, repo, branch } = storage
34
+ const perPage = before ? 10 : 5
35
+
36
+ // Cache history per (path, before) for 60s — invalidated by rollback.
37
+ const cacheKey = `history:${owner}/${repo}:${branch}:${path}:${before ?? 'head'}`
38
+ const commits = await cachedFetch(cacheKey, 60_000, () =>
39
+ fetchCommits(owner, repo, branch, path, perPage, before, tokenResult.value),
40
+ )
41
+
42
+ if (!commits.ok) {
43
+ return Response.json({ error: commits.error }, { status: commits.status })
44
+ }
45
+ return Response.json({ commits: commits.value })
46
+ }
47
+
48
+ interface CommitsListSuccess {
49
+ ok: true
50
+ value: readonly CommitInfo[]
51
+ }
52
+ interface CommitsListFailure {
53
+ ok: false
54
+ status: number
55
+ error: string
56
+ }
57
+ type CommitsResult = CommitsListSuccess | CommitsListFailure
58
+
59
+ async function fetchCommits(
60
+ owner: string,
61
+ repo: string,
62
+ branch: string,
63
+ path: string,
64
+ perPage: number,
65
+ before: string | null,
66
+ token: string,
67
+ ): Promise<CommitsResult> {
68
+ // GitHub paginates by `?sha=<commit>` — passing `before` as `sha` gets
69
+ // commits older than (and including) that SHA. We start one before
70
+ // requested SHA so the same commit doesn't appear twice. The simplest
71
+ // way: pass sha=<before>, request perPage+1, and skip the first.
72
+ const sha = before ?? branch
73
+ const u = new URL(`https://api.github.com/repos/${owner}/${repo}/commits`)
74
+ u.searchParams.set('path', path)
75
+ u.searchParams.set('sha', sha)
76
+ u.searchParams.set('per_page', String(before ? perPage + 1 : perPage))
77
+
78
+ const res = await fetch(u, {
79
+ headers: {
80
+ Authorization: `Bearer ${token}`,
81
+ Accept: 'application/vnd.github+json',
82
+ 'X-GitHub-Api-Version': '2022-11-28',
83
+ },
84
+ })
85
+
86
+ if (res.status === 404) return { ok: true, value: [] }
87
+ if (!res.ok) {
88
+ return { ok: false, status: 502, error: `GitHub returned ${res.status}` }
89
+ }
90
+
91
+ const data = (await res.json()) as Array<{
92
+ sha: string
93
+ commit: {
94
+ author: { name: string; email: string; date: string }
95
+ message: string
96
+ }
97
+ author: { avatar_url?: string } | null
98
+ }>
99
+
100
+ // Skip first if we paginated (the `before` SHA itself).
101
+ const start = before ? 1 : 0
102
+ const slice = data.slice(start, start + perPage)
103
+ const commits: CommitInfo[] = slice.map((c) => {
104
+ const [firstLine, ...rest] = c.commit.message.split('\n')
105
+ const body = rest.join('\n')
106
+ return {
107
+ sha: c.sha,
108
+ shortSha: c.sha.slice(0, 7),
109
+ authoredAt: c.commit.author.date,
110
+ authorName: c.commit.author.name,
111
+ authorEmail: c.commit.author.email,
112
+ authorAvatarUrl: c.author?.avatar_url,
113
+ coAuthors: parseCoAuthorTrailers(body),
114
+ message: firstLine ?? '',
115
+ body,
116
+ }
117
+ })
118
+ return { ok: true, value: commits }
119
+ }
@@ -0,0 +1,169 @@
1
+ import type { APIRoute } from 'astro'
2
+ import {
3
+ LOCAL_ICONS_DISCOVERY_PATHS,
4
+ resolveLocalIconsPaths,
5
+ sanitizeSvg,
6
+ } from '@setzkasten-cms/core'
7
+ import { resolveStorageConfigForRequest, prefixPath } from './_storage-config'
8
+ import { resolveGitHubTokenForRequest } from './_github-token'
9
+
10
+ /**
11
+ * GET /api/setzkasten/icons/local
12
+ *
13
+ * Lists `.svg` files from the website's local icons folder(s) so the admin
14
+ * picker can render them as a "Lokal" tab.
15
+ *
16
+ * Path resolution:
17
+ * 1. If `icons.localPath` is set in the website config, scan exactly the
18
+ * listed folders (string or string[]). Whatever the user wrote wins.
19
+ * 2. Otherwise, scan `LOCAL_ICONS_DISCOVERY_PATHS` until a folder yields
20
+ * at least one SVG. Lets projects with `public/icons/` or
21
+ * `src/assets/svg/` work without touching their config — and surfaces
22
+ * the discovered path so the admin can copy it into setzkasten.config.ts.
23
+ *
24
+ * Response:
25
+ * {
26
+ * icons: Array<{ name, svg, source }>,
27
+ * paths: string[], // paths that contributed icons
28
+ * discovered: boolean, // true when discovery fallback ran
29
+ * }
30
+ *
31
+ * Each `svg` value is server-side sanitized.
32
+ */
33
+ export const GET: APIRoute = async ({ request, cookies }) => {
34
+ if (!cookies.get('setzkasten_session')?.value) {
35
+ return new Response('Unauthorized', { status: 401 })
36
+ }
37
+
38
+ const tokenResult = await resolveGitHubTokenForRequest(request)
39
+ if (!tokenResult.ok) {
40
+ return new Response(tokenResult.error.message, { status: 500 })
41
+ }
42
+ const token = tokenResult.value
43
+
44
+ const storage = await resolveStorageConfigForRequest(request)
45
+ if (!storage) {
46
+ return Response.json({ error: 'Could not resolve owner/repo' }, { status: 400 })
47
+ }
48
+ const { owner, repo, branch, projectPrefix } = storage
49
+
50
+ const fullConfig = (globalThis as any).__SETZKASTEN_FULL_CONFIG__ as
51
+ | { icons?: { localPath?: string | readonly string[] } }
52
+ | undefined
53
+ const configured = resolveLocalIconsPaths(fullConfig?.icons ?? null)
54
+ const discovered = configured === null
55
+ const candidatePaths = configured ?? LOCAL_ICONS_DISCOVERY_PATHS
56
+
57
+ const headers = {
58
+ Authorization: `Bearer ${token}`,
59
+ Accept: 'application/vnd.github+json',
60
+ 'X-GitHub-Api-Version': '2022-11-28',
61
+ }
62
+
63
+ // Scan paths in order. In configured mode, every path contributes —
64
+ // brand icons + product icons can happily coexist. In discovery mode,
65
+ // we stop after the first hit so we don't show e.g. raster assets in
66
+ // `public/icons` next to the real icons in `src/icons`.
67
+ const MAX_ICONS = 200
68
+ const yieldedPaths: string[] = []
69
+ type DirEntry = { name: string; type: 'file' | 'dir'; download_url?: string | null }
70
+ const allEntries: Array<{ path: string; entry: DirEntry }> = []
71
+
72
+ for (const relativePath of candidatePaths) {
73
+ if (allEntries.length >= MAX_ICONS) break
74
+ const path = prefixPath(relativePath, projectPrefix)
75
+ const listRes = await fetch(
76
+ `https://api.github.com/repos/${owner}/${repo}/contents/${path}?ref=${branch}`,
77
+ { headers },
78
+ )
79
+ if (listRes.status === 404) continue
80
+ if (!listRes.ok) continue
81
+ const json = await listRes.json().catch(() => null)
82
+ if (!Array.isArray(json)) continue
83
+
84
+ const svgs = (json as DirEntry[]).filter(
85
+ (e) => e.type === 'file' && e.name.toLowerCase().endsWith('.svg'),
86
+ )
87
+ if (svgs.length === 0) continue
88
+
89
+ yieldedPaths.push(relativePath)
90
+ const remaining = MAX_ICONS - allEntries.length
91
+ for (const entry of svgs.slice(0, remaining)) {
92
+ allEntries.push({ path: relativePath, entry })
93
+ }
94
+
95
+ if (discovered) break
96
+ }
97
+
98
+ // Bounded parallel fetch. Without this cap, hundreds of simultaneous
99
+ // requests against raw.githubusercontent.com carrying the user's token
100
+ // would burn rate limits on the first picker open.
101
+ const PARALLELISM = 8
102
+ type Entry = { path: string; entry: DirEntry }
103
+ type Result = { id: string; name: string; svg: string; source: string }
104
+ const results: Array<Result | null> = new Array(allEntries.length).fill(null)
105
+ let cursor = 0
106
+
107
+ async function worker() {
108
+ while (true) {
109
+ const idx = cursor++
110
+ if (idx >= allEntries.length) return
111
+ const { path, entry } = allEntries[idx]!
112
+ results[idx] = await fetchAndSanitize(path, entry, token)
113
+ }
114
+ }
115
+
116
+ await Promise.all(Array.from({ length: Math.min(PARALLELISM, allEntries.length) }, worker))
117
+
118
+ // De-dupe by stable id ("path/basename") so colliding basenames across
119
+ // folders don't corrupt React keys or storage. Stored value uses the
120
+ // namespaced id; the bare basename is kept as a label for the picker.
121
+ const seen = new Set<string>()
122
+ const icons: Result[] = []
123
+ for (const r of results) {
124
+ if (!r) continue
125
+ if (seen.has(r.id)) continue
126
+ seen.add(r.id)
127
+ icons.push(r)
128
+ }
129
+
130
+ return Response.json({
131
+ icons,
132
+ paths: yieldedPaths,
133
+ discovered,
134
+ })
135
+ }
136
+
137
+ async function fetchAndSanitize(
138
+ path: string,
139
+ entry: { name: string; download_url?: string | null },
140
+ token: string,
141
+ ): Promise<{ id: string; name: string; svg: string; source: string } | null> {
142
+ const baseName = entry.name.replace(/\.svg$/i, '')
143
+ const url = entry.download_url
144
+ if (!url) return null
145
+
146
+ // Token only goes to GitHub-controlled hosts. The Contents API can in
147
+ // principle return any download_url; refusing other hosts means a
148
+ // future API change can't quietly leak the token to third parties.
149
+ let parsed: URL
150
+ try {
151
+ parsed = new URL(url)
152
+ } catch {
153
+ return null
154
+ }
155
+ const host = parsed.hostname
156
+ const githubHost = host === 'raw.githubusercontent.com' || host.endsWith('.githubusercontent.com')
157
+ if (!githubHost) return null
158
+
159
+ try {
160
+ const res = await fetch(url, { headers: { Authorization: `Bearer ${token}` } })
161
+ if (!res.ok) return null
162
+ const raw = await res.text()
163
+ const svg = sanitizeSvg(raw)
164
+ if (!svg) return null
165
+ return { id: `${path}/${baseName}`, name: baseName, svg, source: path }
166
+ } catch {
167
+ return null
168
+ }
169
+ }
@@ -1,10 +1,13 @@
1
1
  import type { APIRoute } from 'astro'
2
2
  import { writeFile } from 'node:fs/promises'
3
3
  import { join } from 'node:path'
4
- import { resolveStorageConfigForRequest } from './_storage-config'
4
+ import { resolveStorageConfigForRequest, prefixPath } from './_storage-config'
5
5
  import { parseSession, guardPageAccess } from './_auth-guard'
6
6
  import { withTrailers } from './_commit-trailers'
7
7
  import { resolveGitHubTokenForRequest } from './_github-token'
8
+ import { convertToSetHtml } from '../init/template-patcher-v2'
9
+ import { readPagesMeta } from './_pages-meta-store'
10
+ import { setPageLastModified } from '@setzkasten-cms/core'
8
11
 
9
12
  /**
10
13
  * POST /api/setzkasten/sections/commit-pending
@@ -80,6 +83,45 @@ export const POST: APIRoute = async ({ request, cookies }) => {
80
83
  })),
81
84
  ]
82
85
 
86
+ // Auto-upgrade plain-text fields to set:html when the user introduces
87
+ // formatting via the inline RTE. Without this, Astro's `{value}` escapes
88
+ // tags and the published page shows literal `<strong>…</strong>`. We
89
+ // detect HTML in any committed string value, fetch the section template,
90
+ // run convertToSetHtml (idempotent — no-op if already converted), and
91
+ // include the patched template in the same batch commit.
92
+ const sectionsWithHtml = [...sections, ...edits]
93
+ .filter(s => containsHtmlValue(s.content))
94
+ .map(s => s.key)
95
+ const projectPrefix = (storage as { projectPrefix?: string }).projectPrefix
96
+ for (const sectionKey of sectionsWithHtml) {
97
+ const componentPath = prefixPath(
98
+ `src/components/sections/${pascalCase(sectionKey)}Section.astro`,
99
+ projectPrefix ?? '',
100
+ )
101
+ if (files.some(f => f.path === componentPath)) continue
102
+ const original = await fetchFileContent(owner, repo, branch, componentPath, githubToken)
103
+ if (!original) continue
104
+ const patched = convertToSetHtml(original)
105
+ if (patched !== original) {
106
+ files.push({ path: componentPath, content: patched })
107
+ }
108
+ }
109
+
110
+ // Fold the recency-meta update into this same batch commit. Previously
111
+ // we issued a follow-up PUT via recordPageEdit, which produced a second
112
+ // commit ("chore(meta): update _pages-meta.json") and a second deploy
113
+ // for every save — visible noise in history and wasted CI minutes.
114
+ const metaContentPath: string = serverConfig?.storage?.contentPath ?? 'content'
115
+ const metaTarget = { owner, repo, branch, contentPath: metaContentPath, token: githubToken }
116
+ const metaSnapshot = await readPagesMeta(metaTarget)
117
+ if (metaSnapshot.ok) {
118
+ const nextMeta = setPageLastModified(metaSnapshot.value.meta, pageKey, Date.now())
119
+ files.push({
120
+ path: `${metaContentPath}/_pages-meta.json`,
121
+ content: JSON.stringify(nextMeta, null, 2),
122
+ })
123
+ }
124
+
83
125
  const parts: string[] = []
84
126
  if (sections.length > 0) {
85
127
  const keys = sections.map(s => s.key).join(', ')
@@ -110,14 +152,25 @@ export const POST: APIRoute = async ({ request, cookies }) => {
110
152
  )
111
153
  }
112
154
 
113
- // Best-effort recency tracking. Metadata write must not derail the
114
- // primary save — surface failures via the trailing return only.
115
- const { recordPageEdit } = await import('./_pages-meta-store.js')
116
- const metaContentPath: string = serverConfig?.storage?.contentPath ?? 'content'
117
- await recordPageEdit(
118
- { owner, repo, branch, contentPath: metaContentPath, token: tokenResult.value },
119
- pageKey,
120
- ).catch(() => {})
155
+ // Fire content.save webhooks. Best-effort, fire-and-forget does
156
+ // not block the response.
157
+ const { fireWebhooks } = await import('./_webhook-dispatcher.js')
158
+ const parsedSession = parseSession(cookies.get('setzkasten_session')?.value)
159
+ void fireWebhooks(
160
+ 'content.save',
161
+ {
162
+ website: { id: owner, repo: `${owner}/${repo}`, branch },
163
+ user: {
164
+ email: parsedSession?.user?.email ?? 'unknown',
165
+ name: parsedSession?.user?.name,
166
+ },
167
+ commit: { sha: commitResult.sha, message: `Commit on ${pageKey}` },
168
+ files: sections.map((s: { key: string }) => ({
169
+ path: `${metaContentPath}/_sections/${s.key}.json`,
170
+ })),
171
+ },
172
+ request,
173
+ )
121
174
 
122
175
  return Response.json({ success: true, commitSha: commitResult.sha })
123
176
  } catch (error) {
@@ -129,6 +182,52 @@ export const POST: APIRoute = async ({ request, cookies }) => {
129
182
  }
130
183
  }
131
184
 
185
+ /** Recursively scan a section content tree for any string value containing
186
+ * inline HTML markup (a `<` followed by an ASCII letter or `/`). Used to
187
+ * decide whether the section template needs upgrading to set:html. */
188
+ function containsHtmlValue(value: unknown): boolean {
189
+ if (typeof value === 'string') return /<\/?[a-z]/i.test(value)
190
+ if (Array.isArray(value)) return value.some(containsHtmlValue)
191
+ if (value && typeof value === 'object') return Object.values(value).some(containsHtmlValue)
192
+ return false
193
+ }
194
+
195
+ function pascalCase(input: string): string {
196
+ return input
197
+ .split(/[-_\s]+/)
198
+ .filter(Boolean)
199
+ .map(s => s.charAt(0).toUpperCase() + s.slice(1))
200
+ .join('')
201
+ }
202
+
203
+ async function fetchFileContent(
204
+ owner: string,
205
+ repo: string,
206
+ branch: string,
207
+ path: string,
208
+ token: string,
209
+ ): Promise<string | null> {
210
+ try {
211
+ const res = await fetch(
212
+ `https://api.github.com/repos/${owner}/${repo}/contents/${path}?ref=${branch}`,
213
+ {
214
+ headers: {
215
+ Authorization: `Bearer ${token}`,
216
+ Accept: 'application/vnd.github+json',
217
+ 'X-GitHub-Api-Version': '2022-11-28',
218
+ },
219
+ },
220
+ )
221
+ if (!res.ok) return null
222
+ const data = await res.json() as { content: string; encoding: string }
223
+ return data.encoding === 'base64'
224
+ ? Buffer.from(data.content, 'base64').toString('utf-8')
225
+ : data.content
226
+ } catch {
227
+ return null
228
+ }
229
+ }
230
+
132
231
  async function batchCommit(
133
232
  owner: string, repo: string, branch: string,
134
233
  files: Array<{ path: string; content: string }>,
@@ -90,6 +90,20 @@ export const DELETE: APIRoute = async ({ request, cookies }) => {
90
90
  pageKey,
91
91
  ).catch(() => {})
92
92
 
93
+ // Fire content.delete webhooks (fire-and-forget).
94
+ const { fireWebhooks } = await import('./_webhook-dispatcher.js')
95
+ const session = parseSession(cookies.get('setzkasten_session')?.value)
96
+ void fireWebhooks(
97
+ 'content.delete',
98
+ {
99
+ website: { id: owner, repo: `${owner}/${repo}`, branch },
100
+ user: { email: session?.user?.email ?? 'unknown', name: session?.user?.name },
101
+ commit: { sha: commitResult.sha, message: `Delete ${sectionKey} from ${pageKey}` },
102
+ files: [{ path: sectionJsonPath }],
103
+ },
104
+ request,
105
+ )
106
+
93
107
  return Response.json({ success: true, commitSha: commitResult.sha })
94
108
  } catch (error) {
95
109
  console.error('[setzkasten] section-delete error:', error)