koguma 2.1.0 → 2.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/cli/constants.ts CHANGED
@@ -4,8 +4,26 @@
4
4
  * Eliminates magic strings scattered across modules.
5
5
  */
6
6
 
7
- /** CLI version string */
8
- export const CLI_VERSION = 'v2.0.0';
7
+ import { readFileSync } from 'fs';
8
+ import { resolve, dirname } from 'path';
9
+
10
+ /** CLI version string — read dynamically from package.json at runtime */
11
+ function readCliVersion(): string {
12
+ try {
13
+ const pkgPath = resolve(
14
+ dirname(new URL(import.meta.url).pathname),
15
+ '..',
16
+ 'package.json'
17
+ );
18
+ const pkg = JSON.parse(readFileSync(pkgPath, 'utf-8')) as {
19
+ version?: string;
20
+ };
21
+ return `v${pkg.version ?? '?'}`;
22
+ } catch {
23
+ return 'v?';
24
+ }
25
+ }
26
+ export const CLI_VERSION = readCliVersion();
9
27
 
10
28
  /** Project config file name */
11
29
  export const CONFIG_FILE = 'koguma.toml';
package/cli/content.ts CHANGED
@@ -7,15 +7,19 @@
7
7
  * ├── post/
8
8
  * │ ├── hello-world.md # markdown body + frontmatter
9
9
  * │ └── our-mission.md
10
- * ├── siteSettings/
11
- * │ └── index.md # singletons use index.md
10
+ * ├── landingPage/
11
+ * │ ├── index.md # singletons use index.md (1st md field = body)
12
+ * │ ├── heroBody.md # sibling file for extra markdown field
13
+ * │ └── aboutBody.md # sibling file for extra markdown field
12
14
  * └── media/ # optional local images (not managed here)
13
15
  * └── hero.jpg
14
16
  *
15
17
  * All content files use the .md extension with YAML frontmatter.
16
18
  * The file body (below the frontmatter) is mapped to the first `markdown`
17
- * field in the content type definition. Files without a markdown field
18
- * are frontmatter-only .md files (no body below the --- delimiter).
19
+ * field in the content type definition. Additional markdown fields are
20
+ * stored as sibling .md files (pure markdown, no frontmatter):
21
+ * - Singletons: {fieldId}.md alongside index.md
22
+ * - Collections: {slug}.{fieldId}.md alongside {slug}.md
19
23
  *
20
24
  * Legacy .yml files are still accepted on read for backwards compatibility.
21
25
  */
@@ -113,13 +117,40 @@ export function parseContentFile(
113
117
  /**
114
118
  * Scan the content/ directory and return all parsed entries.
115
119
  * Skips the `media/` subdirectory and any files/directories starting with `_`.
120
+ *
121
+ * Also detects sibling .md files for extra markdown fields:
122
+ * - Singletons: {fieldId}.md alongside index.md
123
+ * - Collections: {slug}.{fieldId}.md alongside {slug}.md
124
+ *
125
+ * Returns a siblingMap: Map<"contentType/slug", Record<fieldId, markdownContent>>
116
126
  */
117
- export function readContentDir(contentDir: string): ContentEntry[] {
118
- if (!existsSync(contentDir)) return [];
127
+ export function readContentDir(
128
+ contentDir: string,
129
+ contentTypes?: ContentTypeInfo[]
130
+ ): {
131
+ entries: ContentEntry[];
132
+ siblingMap: Map<string, Record<string, string>>;
133
+ } {
134
+ if (!existsSync(contentDir)) return { entries: [], siblingMap: new Map() };
119
135
 
120
136
  const entries: ContentEntry[] = [];
137
+ const siblingMap = new Map<string, Record<string, string>>();
121
138
  const subdirs = readdirSync(contentDir);
122
139
 
140
+ // Build a set of markdown field IDs per content type for sibling detection
141
+ const ctMdFields = new Map<string, Set<string>>();
142
+ if (contentTypes) {
143
+ for (const ct of contentTypes) {
144
+ const mdFields = Object.entries(ct.fieldMeta)
145
+ .filter(([, meta]) => meta.fieldType === 'markdown')
146
+ .map(([id]) => id);
147
+ // Only care about extra fields (skip the first — it goes in the main body)
148
+ if (mdFields.length > 1) {
149
+ ctMdFields.set(ct.id, new Set(mdFields.slice(1)));
150
+ }
151
+ }
152
+ }
153
+
123
154
  for (const subdir of subdirs) {
124
155
  // Skip media/ — it's for local image storage, not content
125
156
  if (subdir === 'media') continue;
@@ -130,25 +161,93 @@ export function readContentDir(contentDir: string): ContentEntry[] {
130
161
  if (!statSync(subdirPath).isDirectory()) continue;
131
162
 
132
163
  const files = readdirSync(subdirPath);
164
+ const extraMdFieldIds = ctMdFields.get(subdir);
165
+
166
+ // First pass: identify main content files vs sibling field files
167
+ const mainFiles: string[] = [];
168
+ const siblingFiles: string[] = [];
169
+
133
170
  for (const file of files) {
134
- // Skip _-prefixed files (_example.md, dev drafts, etc.)
135
171
  if (file.startsWith('_')) continue;
136
-
137
172
  const ext = extname(file).toLowerCase();
138
173
  if (!['.md', '.yml', '.yaml'].includes(ext)) continue;
139
174
 
140
175
  const filePath = resolve(subdirPath, file);
141
176
  if (!statSync(filePath).isFile()) continue;
142
177
 
178
+ const name = basename(file, ext);
179
+
180
+ // Check if this is a sibling field file
181
+ if (ext === '.md' && extraMdFieldIds) {
182
+ // Singleton sibling: fieldId.md (not index.md)
183
+ if (name !== 'index' && extraMdFieldIds.has(name)) {
184
+ siblingFiles.push(file);
185
+ continue;
186
+ }
187
+ // Collection sibling: slug.fieldId.md
188
+ const dotIdx = name.lastIndexOf('.');
189
+ if (dotIdx > 0) {
190
+ const maybeFieldId = name.slice(dotIdx + 1);
191
+ if (extraMdFieldIds.has(maybeFieldId)) {
192
+ siblingFiles.push(file);
193
+ continue;
194
+ }
195
+ }
196
+ }
197
+
198
+ mainFiles.push(file);
199
+ }
200
+
201
+ // Parse main content files
202
+ for (const file of mainFiles) {
203
+ const filePath = resolve(subdirPath, file);
143
204
  entries.push(parseContentFile(filePath, subdir));
144
205
  }
206
+
207
+ // Parse sibling field files and merge into siblingMap
208
+ for (const file of siblingFiles) {
209
+ const ext = extname(file).toLowerCase();
210
+ const name = basename(file, ext);
211
+ const content = readFileSync(resolve(subdirPath, file), 'utf-8').trim();
212
+
213
+ // Determine which entry this belongs to and which field
214
+ const dotIdx = name.lastIndexOf('.');
215
+ if (dotIdx > 0) {
216
+ // Collection: slug.fieldId.md
217
+ const slug = name.slice(0, dotIdx);
218
+ const fieldId = name.slice(dotIdx + 1);
219
+ const key = `${subdir}/${slug}`;
220
+ const existing = siblingMap.get(key) ?? {};
221
+ existing[fieldId] = content;
222
+ siblingMap.set(key, existing);
223
+ } else {
224
+ // Singleton: fieldId.md → key is contentType/contentType
225
+ const fieldId = name;
226
+ const key = `${subdir}/${subdir}`;
227
+ const existing = siblingMap.get(key) ?? {};
228
+ existing[fieldId] = content;
229
+ siblingMap.set(key, existing);
230
+ }
231
+ }
145
232
  }
146
233
 
147
- return entries;
234
+ return { entries, siblingMap };
148
235
  }
149
236
 
150
237
  // ── Convert ContentEntry → D1 row data ─────────────────────────────
151
238
 
239
+ /**
240
+ * Find ALL markdown field IDs in a content type's field metadata.
241
+ * Returns an ordered array (first element = primary body field).
242
+ */
243
+ export function findMarkdownFields(
244
+ fieldMeta: Record<string, { fieldType: string }>
245
+ ): string[] {
246
+ return Object.entries(fieldMeta)
247
+ .filter(([, meta]) => meta.fieldType === 'markdown')
248
+ .map(([id]) => id);
249
+ }
250
+
152
251
  /**
153
252
  * Find the first markdown field ID in a content type's field metadata.
154
253
  * Returns null if the content type has no markdown field.
@@ -156,19 +255,18 @@ export function readContentDir(contentDir: string): ContentEntry[] {
156
255
  export function findMarkdownField(
157
256
  fieldMeta: Record<string, { fieldType: string }>
158
257
  ): string | null {
159
- for (const [id, meta] of Object.entries(fieldMeta)) {
160
- if (meta.fieldType === 'markdown') return id;
161
- }
162
- return null;
258
+ return findMarkdownFields(fieldMeta)[0] ?? null;
163
259
  }
164
260
 
165
261
  /**
166
262
  * Convert a ContentEntry into the flat data object suitable for D1 insertion.
167
263
  * The markdown body (if present) is assigned to the first markdown field.
264
+ * Additional markdown fields can be passed in via siblingFields.
168
265
  */
169
266
  export function contentEntryToDbRow(
170
267
  entry: ContentEntry,
171
- ctInfo: ContentTypeInfo
268
+ ctInfo: ContentTypeInfo,
269
+ siblingFields?: Record<string, string>
172
270
  ): Record<string, unknown> {
173
271
  const data: Record<string, unknown> = { ...entry.fields };
174
272
 
@@ -180,6 +278,13 @@ export function contentEntryToDbRow(
180
278
  }
181
279
  }
182
280
 
281
+ // Merge sibling markdown fields
282
+ if (siblingFields) {
283
+ for (const [fieldId, content] of Object.entries(siblingFields)) {
284
+ data[fieldId] = content;
285
+ }
286
+ }
287
+
183
288
  // Set slug
184
289
  if (!data.slug && !entry.singleton) {
185
290
  data.slug = entry.slug;
@@ -219,7 +324,11 @@ export function contentEntryToDbRow(
219
324
  export function dbRowToContentFile(
220
325
  row: Record<string, unknown>,
221
326
  ctInfo: ContentTypeInfo
222
- ): { content: string; extension: string } {
327
+ ): {
328
+ content: string;
329
+ extension: string;
330
+ siblingFiles?: { fieldId: string; content: string }[];
331
+ } {
223
332
  // Parse the data blob if it's still a JSON string
224
333
  let fields: Record<string, unknown>;
225
334
  if (typeof row.data === 'string') {
@@ -242,22 +351,38 @@ export function dbRowToContentFile(
242
351
  // Strip system-only fields from frontmatter
243
352
  delete fields.slug;
244
353
 
245
- const mdFieldId = findMarkdownField(ctInfo.fieldMeta);
354
+ const mdFields = findMarkdownFields(ctInfo.fieldMeta);
355
+ const primaryMdField = mdFields[0] ?? null;
356
+ const extraMdFields = mdFields.slice(1);
357
+
358
+ // Collect sibling files for extra markdown fields
359
+ const siblingFiles: { fieldId: string; content: string }[] = [];
360
+ for (const fieldId of extraMdFields) {
361
+ if (fields[fieldId]) {
362
+ siblingFiles.push({ fieldId, content: String(fields[fieldId]) });
363
+ delete fields[fieldId]; // Don't put in frontmatter
364
+ }
365
+ }
246
366
 
247
- if (mdFieldId && fields[mdFieldId]) {
367
+ if (primaryMdField && fields[primaryMdField]) {
248
368
  // Markdown file: body goes below frontmatter
249
- const body = String(fields[mdFieldId]);
250
- delete fields[mdFieldId];
369
+ const body = String(fields[primaryMdField]);
370
+ delete fields[primaryMdField];
251
371
  const fm = matter.stringify('', fields).trim();
252
372
  return {
253
373
  content: `${fm}\n\n${body}\n`,
254
- extension: '.md'
374
+ extension: '.md',
375
+ ...(siblingFiles.length > 0 ? { siblingFiles } : {})
255
376
  };
256
377
  }
257
378
 
258
379
  // Frontmatter-only file (no markdown field or empty body)
259
380
  const fm = matter.stringify('', fields).trim();
260
- return { content: fm + '\n', extension: '.md' };
381
+ return {
382
+ content: fm + '\n',
383
+ extension: '.md',
384
+ ...(siblingFiles.length > 0 ? { siblingFiles } : {})
385
+ };
261
386
  }
262
387
 
263
388
  // ── Write content/ directory from D1 entries ───────────────────────
@@ -287,7 +412,10 @@ export function writeContentDir(
287
412
  const typeDir = resolve(contentDir, typeId);
288
413
  mkdirSync(typeDir, { recursive: true });
289
414
 
290
- const { content, extension } = dbRowToContentFile(entry, ctInfo);
415
+ const { content, extension, siblingFiles } = dbRowToContentFile(
416
+ entry,
417
+ ctInfo
418
+ );
291
419
 
292
420
  // Determine filename
293
421
  const slug = (entry.slug as string) || (entry.id as string);
@@ -296,6 +424,17 @@ export function writeContentDir(
296
424
 
297
425
  writeFileSync(resolve(typeDir, filename), content);
298
426
  count++;
427
+
428
+ // Write sibling files for extra markdown fields
429
+ if (siblingFiles) {
430
+ for (const { fieldId, content: siblingContent } of siblingFiles) {
431
+ const siblingName = isSingleton
432
+ ? `${fieldId}.md`
433
+ : `${slug}.${fieldId}.md`;
434
+ writeFileSync(resolve(typeDir, siblingName), siblingContent + '\n');
435
+ count++;
436
+ }
437
+ }
299
438
  }
300
439
 
301
440
  return count;
@@ -316,16 +455,21 @@ export function prepareContentForSync(
316
455
  contentTypes: ContentTypeInfo[]
317
456
  ): { contentType: string; rowData: Record<string, unknown> }[] {
318
457
  const ctMap = new Map(contentTypes.map(ct => [ct.id, ct]));
319
- const entries = readContentDir(contentDir);
458
+ const { entries, siblingMap } = readContentDir(contentDir, contentTypes);
320
459
  const results: { contentType: string; rowData: Record<string, unknown> }[] =
321
460
  [];
322
461
 
323
462
  for (const entry of entries) {
324
463
  const ctInfo = ctMap.get(entry.contentType);
325
464
  if (!ctInfo) continue;
465
+
466
+ // Look up sibling markdown fields for this entry
467
+ const entryKey = `${entry.contentType}/${entry.slug}`;
468
+ const siblings = siblingMap.get(entryKey);
469
+
326
470
  results.push({
327
471
  contentType: entry.contentType,
328
- rowData: contentEntryToDbRow(entry, ctInfo)
472
+ rowData: contentEntryToDbRow(entry, ctInfo, siblings)
329
473
  });
330
474
  }
331
475
 
package/cli/dev-sync.ts CHANGED
@@ -3,17 +3,32 @@
3
3
  *
4
4
  * Two event-driven sync paths:
5
5
  * 1. File watcher: content/ file changes → parse → INSERT OR REPLACE into D1
6
+ * content/media/ image changes → put/delete in local R2 + assets table
6
7
  * 2. Sync server: router webhook (POST /sync) → write content/ file
8
+ * router webhook (POST /sync/media) → write file to content/media/
9
+ * router webhook (DELETE /sync/media/:filename) → remove from content/media/
7
10
  *
8
11
  * Loop prevention: a shared cooldown map tracks recent writes by entry ID.
9
12
  * When either side writes, it records the ID. The other side skips that ID
10
13
  * for a short cooldown window to prevent infinite loops.
11
14
  */
12
15
 
13
- import { watch, existsSync, statSync } from 'fs';
14
- import { resolve, relative, join, extname, sep } from 'path';
16
+ import {
17
+ watch,
18
+ existsSync,
19
+ statSync,
20
+ writeFileSync,
21
+ unlinkSync,
22
+ mkdirSync
23
+ } from 'fs';
24
+ import { resolve, relative, join, extname, basename, sep } from 'path';
15
25
  import { log, ok, warn, ANSI } from './log.ts';
16
- import { d1InsertRow, applySchema, type D1Target } from './wrangler.ts';
26
+ import {
27
+ d1InsertRow,
28
+ applySchema,
29
+ r2PutLocal,
30
+ type D1Target
31
+ } from './wrangler.ts';
17
32
  import { buildInsertSql } from '../src/db/sql.ts';
18
33
  import {
19
34
  parseContentFile,
@@ -81,7 +96,7 @@ function startFileWatcher(opts: FileWatcherOptions): { stop: () => void } {
81
96
  const ext = extname(filename);
82
97
  if (ext !== '.md' && ext !== '.yml' && ext !== '.yaml') return;
83
98
 
84
- // Skip media/ subdirectory
99
+ // Skip media/ subdirectory — handled separately by the media watcher
85
100
  if (filename.startsWith('media' + sep) || filename.startsWith('media/'))
86
101
  return;
87
102
 
@@ -118,8 +133,14 @@ function startFileWatcher(opts: FileWatcherOptions): { stop: () => void } {
118
133
  // Pack into v2 format: entries(id, content_type, slug, data, status)
119
134
  const rowData = contentEntryToDbRow(entry, ctInfo);
120
135
  const {
121
- id, slug: rowSlug, status, publish_at, publishAt,
122
- created_at: _ca, updated_at: _ua, content_type: _ct,
136
+ id,
137
+ slug: rowSlug,
138
+ status,
139
+ publish_at,
140
+ publishAt,
141
+ created_at: _ca,
142
+ updated_at: _ua,
143
+ content_type: _ct,
123
144
  ...fields
124
145
  } = rowData;
125
146
  d1InsertRow(root, dbName, '--local', 'entries', {
@@ -152,6 +173,129 @@ function startFileWatcher(opts: FileWatcherOptions): { stop: () => void } {
152
173
  };
153
174
  }
154
175
 
176
+ // ── Media file watcher: content/media/ → local R2 ─────────────────
177
+
178
+ const MEDIA_EXTENSIONS = new Set([
179
+ '.png',
180
+ '.jpg',
181
+ '.jpeg',
182
+ '.gif',
183
+ '.webp',
184
+ '.svg',
185
+ '.avif',
186
+ '.ico'
187
+ ]);
188
+ const MIME_TYPES: Record<string, string> = {
189
+ '.png': 'image/png',
190
+ '.jpg': 'image/jpeg',
191
+ '.jpeg': 'image/jpeg',
192
+ '.gif': 'image/gif',
193
+ '.webp': 'image/webp',
194
+ '.svg': 'image/svg+xml',
195
+ '.avif': 'image/avif',
196
+ '.ico': 'image/x-icon'
197
+ };
198
+
199
+ interface MediaWatcherOptions {
200
+ root: string;
201
+ dbName: string;
202
+ }
203
+
204
+ function startMediaWatcher(opts: MediaWatcherOptions): { stop: () => void } {
205
+ const { root, dbName } = opts;
206
+ const mediaDir = resolve(root, 'content/media');
207
+
208
+ if (!existsSync(mediaDir)) return { stop: () => {} };
209
+
210
+ const debounceTimers = new Map<string, ReturnType<typeof setTimeout>>();
211
+
212
+ const watcher = watch(mediaDir, (_event, filename) => {
213
+ if (!filename) return;
214
+ const ext = extname(filename).toLowerCase();
215
+ if (!MEDIA_EXTENSIONS.has(ext)) return;
216
+ if (filename.startsWith('.') || filename.startsWith('_')) return;
217
+
218
+ const existing = debounceTimers.get(filename);
219
+ if (existing) clearTimeout(existing);
220
+
221
+ debounceTimers.set(
222
+ filename,
223
+ setTimeout(() => {
224
+ debounceTimers.delete(filename);
225
+ const filePath = resolve(mediaDir, filename);
226
+
227
+ if (!existsSync(filePath)) {
228
+ // File removed — delete from local R2 + assets table
229
+ const id = `media-${filename.replace(/\.\w+$/, '')}`;
230
+ const key = `media-${filename.replace(/\.\w+$/, '')}${ext}`;
231
+ try {
232
+ const { run } = require('./exec.ts');
233
+ // Remove from local R2
234
+ const { ensureWranglerConfig } = require('./config.ts');
235
+ const configPath = ensureWranglerConfig(root);
236
+ run(
237
+ `bunx wrangler r2 object delete ${key} --config ${configPath} --local`,
238
+ { cwd: root, silent: true }
239
+ );
240
+ } catch {
241
+ /* R2 object may not exist */
242
+ }
243
+ try {
244
+ d1InsertRow(root, dbName, '--local', 'assets', { id, _delete: 1 });
245
+ } catch {
246
+ /* may not exist */
247
+ }
248
+ // Use a direct DELETE instead
249
+ try {
250
+ const { run } = require('./exec.ts');
251
+ const { ensureWranglerConfig } = require('./config.ts');
252
+ const configPath = ensureWranglerConfig(root);
253
+ run(
254
+ `bunx wrangler d1 execute ${dbName} --local --config ${configPath} --command "DELETE FROM assets WHERE id = '${id}'"`,
255
+ { cwd: root, silent: true }
256
+ );
257
+ } catch {
258
+ /* ignore */
259
+ }
260
+ ok(
261
+ `${ANSI.DIM}sync:${ANSI.RESET} media/${filename} removed from local R2`
262
+ );
263
+ return;
264
+ }
265
+
266
+ // File added/changed — put into local R2 + upsert assets table
267
+ const id = `media-${filename.replace(/\.\w+$/, '')}`;
268
+ const key = `${id}${ext}`;
269
+ try {
270
+ r2PutLocal(root, 'media', key, filePath);
271
+ d1InsertRow(root, dbName, '--local', 'assets', {
272
+ id,
273
+ data: JSON.stringify({
274
+ title: filename,
275
+ url: `/api/media/${key}`,
276
+ content_type: MIME_TYPES[ext] ?? 'application/octet-stream',
277
+ width: null,
278
+ height: null,
279
+ file_size: null
280
+ })
281
+ });
282
+ ok(`${ANSI.DIM}sync:${ANSI.RESET} media/${filename} → local R2`);
283
+ } catch (e) {
284
+ warn(`Media sync failed for ${filename}: ${e}`);
285
+ }
286
+ }, 300)
287
+ );
288
+ });
289
+
290
+ return {
291
+ stop: () => {
292
+ watcher.close();
293
+ for (const timer of debounceTimers.values()) clearTimeout(timer);
294
+ debounceTimers.clear();
295
+ }
296
+ };
297
+ }
298
+
155
299
  // ── Sync server: router webhook → content/ files ───────────────────
156
300
 
157
301
  interface SyncServerOptions {
@@ -167,13 +311,19 @@ export function killStalePortHolder(port: number): void {
167
311
  for (const pid of out.split('\n')) {
168
312
  const n = parseInt(pid, 10);
169
313
  if (n > 0 && n !== process.pid) {
170
- try { process.kill(n, 'SIGTERM'); } catch { /* already gone */ }
314
+ try {
315
+ process.kill(n, 'SIGTERM');
316
+ } catch {
317
+ /* already gone */
318
+ }
171
319
  }
172
320
  }
173
321
  // Brief pause to let the OS release the socket
174
322
  execSync('sleep 0.2');
175
323
  }
176
- } catch { /* lsof not found or no process — fine */ }
324
+ } catch {
325
+ /* lsof not found or no process — fine */
326
+ }
177
327
  }
178
328
 
179
329
  function startSyncServer(opts: SyncServerOptions): {
@@ -248,6 +398,50 @@ function startSyncServer(opts: SyncServerOptions): {
248
398
  }
249
399
  }
250
400
 
401
+ // ── POST /sync/media — write uploaded file to content/media/ ──
402
+ if (req.method === 'POST' && url.pathname === '/sync/media') {
403
+ try {
404
+ const form = await req.formData();
405
+ const file = form.get('file') as File | null;
406
+ const filename = form.get('filename') as string | null;
407
+ if (!file || !filename)
408
+ return new Response('Missing file or filename', { status: 400 });
409
+
410
+ const mediaDir = resolve(contentDir, 'media');
411
+ if (!existsSync(mediaDir)) mkdirSync(mediaDir, { recursive: true });
412
+
413
+ const buf = Buffer.from(await file.arrayBuffer());
414
+ writeFileSync(resolve(mediaDir, filename), buf);
415
+ ok(
416
+ `${ANSI.DIM}sync:${ANSI.RESET} dashboard → content/media/${filename}`
417
+ );
418
+ return new Response('ok');
419
+ } catch (e) {
420
+ warn(`Media sync write error: ${e}`);
421
+ return new Response('error', { status: 500 });
422
+ }
423
+ }
424
+
425
+ // ── DELETE /sync/media/:filename — remove from content/media/ ──
426
+ if (req.method === 'DELETE' && url.pathname.startsWith('/sync/media/')) {
427
+ try {
428
+ const filename = decodeURIComponent(
429
+ url.pathname.slice('/sync/media/'.length)
430
+ );
431
+ const filePath = resolve(contentDir, 'media', filename);
432
+ if (existsSync(filePath)) {
433
+ unlinkSync(filePath);
434
+ ok(
435
+ `${ANSI.DIM}sync:${ANSI.RESET} dashboard deleted content/media/${filename}`
436
+ );
437
+ }
438
+ return new Response('ok');
439
+ } catch (e) {
440
+ warn(`Media sync delete error: ${e}`);
441
+ return new Response('error', { status: 500 });
442
+ }
443
+ }
444
+
251
445
  return new Response('not found', { status: 404 });
252
446
  }
253
447
  });
@@ -288,16 +482,20 @@ export function startDevSync(
288
482
 
289
483
  // Start both sync paths
290
484
  const fileWatcher = startFileWatcher({ root, dbName, contentTypes });
485
+ const mediaWatcher = startMediaWatcher({ root, dbName });
291
486
  const syncServer = startSyncServer({ root, contentTypes });
292
487
 
293
488
  if (!opts?.silent) {
294
- ok(`Dev sync active — file watcher + sync server on :${syncServer.port}`);
489
+ ok(
490
+ `Dev sync active — file watcher + media watcher + sync server on :${syncServer.port}`
491
+ );
295
492
  }
296
493
 
297
494
  return {
298
495
  syncUrl: `http://localhost:${syncServer.port}`,
299
496
  stop: () => {
300
497
  fileWatcher.stop();
498
+ mediaWatcher.stop();
301
499
  syncServer.stop();
302
500
  recentWrites.clear();
303
501
  }