koguma 2.0.0 → 2.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -149,7 +149,7 @@ content/
149
149
  │ ├── hello-world.md # file = one entry
150
150
  │ └── our-mission.md
151
151
  ├── siteSettings/
152
- │ └── index.yml # singletons use index.yml
152
+ │ └── index.md # singletons use index.md
153
153
  └── media/ # optional local images
154
154
  └── hero-banner.jpg
155
155
  ```
@@ -244,7 +244,7 @@ Your Project
244
244
  ├── post/
245
245
  │ └── hello-world.md
246
246
  └── siteSettings/
247
- └── index.yml
247
+ └── index.md
248
248
 
249
249
  Koguma (this package)
250
250
  ├── src/
package/cli/constants.ts CHANGED
@@ -4,8 +4,26 @@
4
4
  * Eliminates magic strings scattered across modules.
5
5
  */
6
6
 
7
- /** CLI version string */
8
- export const CLI_VERSION = 'v2.0.0';
7
+ import { readFileSync } from 'fs';
8
+ import { resolve, dirname } from 'path';
9
+
10
+ /** CLI version string — read dynamically from package.json at runtime */
11
+ function readCliVersion(): string {
12
+ try {
13
+ const pkgPath = resolve(
14
+ dirname(new URL(import.meta.url).pathname),
15
+ '..',
16
+ 'package.json'
17
+ );
18
+ const pkg = JSON.parse(readFileSync(pkgPath, 'utf-8')) as {
19
+ version?: string;
20
+ };
21
+ return `v${pkg.version ?? '?'}`;
22
+ } catch {
23
+ return 'v?';
24
+ }
25
+ }
26
+ export const CLI_VERSION = readCliVersion();
9
27
 
10
28
  /** Project config file name */
11
29
  export const CONFIG_FILE = 'koguma.toml';
package/cli/content.ts CHANGED
@@ -7,15 +7,21 @@
7
7
  * ├── post/
8
8
  * │ ├── hello-world.md # markdown body + frontmatter
9
9
  * │ └── our-mission.md
10
- * ├── siteSettings/
11
- * │ └── index.yml # singletons use index.yml
10
+ * ├── landingPage/
11
+ * │ ├── index.md # singletons use index.md (1st md field = body)
12
+ * │ ├── heroBody.md # sibling file for extra markdown field
13
+ * │ └── aboutBody.md # sibling file for extra markdown field
12
14
  * └── media/ # optional local images (not managed here)
13
15
  * └── hero.jpg
14
16
  *
15
- * Markdown files use YAML frontmatter for structured fields.
17
+ * All content files use the .md extension with YAML frontmatter.
16
18
  * The file body (below the frontmatter) is mapped to the first `markdown`
17
- * field in the content type definition. If the content type has no markdown
18
- * field, YAML-only files (.yml) are used.
19
+ * field in the content type definition. Additional markdown fields are
20
+ * stored as sibling .md files (pure markdown, no frontmatter):
21
+ * - Singletons: {fieldId}.md alongside index.md
22
+ * - Collections: {slug}.{fieldId}.md alongside {slug}.md
23
+ *
24
+ * Legacy .yml files are still accepted on read for backwards compatibility.
19
25
  */
20
26
 
21
27
  import matter from 'gray-matter';
@@ -36,7 +42,7 @@ export interface ContentEntry {
36
42
  contentType: string;
37
43
  /** Slug derived from filename (without extension) */
38
44
  slug: string;
39
- /** Whether this is a singleton (index.yml) */
45
+ /** Whether this is a singleton (index.md) */
40
46
  singleton: boolean;
41
47
  /** All frontmatter fields */
42
48
  fields: Record<string, unknown>;
@@ -56,7 +62,8 @@ export interface ContentTypeInfo {
56
62
  // ── Parse a single content file ────────────────────────────────────
57
63
 
58
64
  /**
59
- * Parse a .md or .yml file into a ContentEntry.
65
+ * Parse a .md or .yml/.yaml file into a ContentEntry.
66
+ * (.yml/.yaml are accepted for backwards compatibility)
60
67
  */
61
68
  export function parseContentFile(
62
69
  filePath: string,
@@ -65,7 +72,7 @@ export function parseContentFile(
65
72
  const raw = readFileSync(filePath, 'utf-8');
66
73
  const ext = extname(filePath).toLowerCase();
67
74
  const name = basename(filePath, ext);
68
- const singleton = name === 'index' && ext === '.yml';
75
+ const singleton = name === 'index' && (ext === '.yml' || ext === '.md');
69
76
 
70
77
  if (ext === '.yml' || ext === '.yaml') {
71
78
  // YAML-only file (no markdown body)
@@ -97,8 +104,8 @@ export function parseContentFile(
97
104
 
98
105
  return {
99
106
  contentType,
100
- slug: name,
101
- singleton: false,
107
+ slug: singleton ? contentType : name,
108
+ singleton,
102
109
  fields: parsed.data,
103
110
  body,
104
111
  filePath
@@ -110,13 +117,40 @@ export function parseContentFile(
110
117
  /**
111
118
  * Scan the content/ directory and return all parsed entries.
112
119
  * Skips the `media/` subdirectory and any files/directories starting with `_`.
120
+ *
121
+ * Also detects sibling .md files for extra markdown fields:
122
+ * - Singletons: {fieldId}.md alongside index.md
123
+ * - Collections: {slug}.{fieldId}.md alongside {slug}.md
124
+ *
125
+ * Returns a siblingMap: Map<"contentType/slug", Record<fieldId, markdownContent>>
113
126
  */
114
- export function readContentDir(contentDir: string): ContentEntry[] {
115
- if (!existsSync(contentDir)) return [];
127
+ export function readContentDir(
128
+ contentDir: string,
129
+ contentTypes?: ContentTypeInfo[]
130
+ ): {
131
+ entries: ContentEntry[];
132
+ siblingMap: Map<string, Record<string, string>>;
133
+ } {
134
+ if (!existsSync(contentDir)) return { entries: [], siblingMap: new Map() };
116
135
 
117
136
  const entries: ContentEntry[] = [];
137
+ const siblingMap = new Map<string, Record<string, string>>();
118
138
  const subdirs = readdirSync(contentDir);
119
139
 
140
+ // Build a set of markdown field IDs per content type for sibling detection
141
+ const ctMdFields = new Map<string, Set<string>>();
142
+ if (contentTypes) {
143
+ for (const ct of contentTypes) {
144
+ const mdFields = Object.entries(ct.fieldMeta)
145
+ .filter(([, meta]) => meta.fieldType === 'markdown')
146
+ .map(([id]) => id);
147
+ // Only care about extra fields (skip the first — it goes in the main body)
148
+ if (mdFields.length > 1) {
149
+ ctMdFields.set(ct.id, new Set(mdFields.slice(1)));
150
+ }
151
+ }
152
+ }
153
+
120
154
  for (const subdir of subdirs) {
121
155
  // Skip media/ — it's for local image storage, not content
122
156
  if (subdir === 'media') continue;
@@ -127,25 +161,93 @@ export function readContentDir(contentDir: string): ContentEntry[] {
127
161
  if (!statSync(subdirPath).isDirectory()) continue;
128
162
 
129
163
  const files = readdirSync(subdirPath);
164
+ const extraMdFieldIds = ctMdFields.get(subdir);
165
+
166
+ // First pass: identify main content files vs sibling field files
167
+ const mainFiles: string[] = [];
168
+ const siblingFiles: string[] = [];
169
+
130
170
  for (const file of files) {
131
- // Skip _-prefixed files (_example.md, dev drafts, etc.)
132
171
  if (file.startsWith('_')) continue;
133
-
134
172
  const ext = extname(file).toLowerCase();
135
173
  if (!['.md', '.yml', '.yaml'].includes(ext)) continue;
136
174
 
137
175
  const filePath = resolve(subdirPath, file);
138
176
  if (!statSync(filePath).isFile()) continue;
139
177
 
178
+ const name = basename(file, ext);
179
+
180
+ // Check if this is a sibling field file
181
+ if (ext === '.md' && extraMdFieldIds) {
182
+ // Singleton sibling: fieldId.md (not index.md)
183
+ if (name !== 'index' && extraMdFieldIds.has(name)) {
184
+ siblingFiles.push(file);
185
+ continue;
186
+ }
187
+ // Collection sibling: slug.fieldId.md
188
+ const dotIdx = name.lastIndexOf('.');
189
+ if (dotIdx > 0) {
190
+ const maybeFieldId = name.slice(dotIdx + 1);
191
+ if (extraMdFieldIds.has(maybeFieldId)) {
192
+ siblingFiles.push(file);
193
+ continue;
194
+ }
195
+ }
196
+ }
197
+
198
+ mainFiles.push(file);
199
+ }
200
+
201
+ // Parse main content files
202
+ for (const file of mainFiles) {
203
+ const filePath = resolve(subdirPath, file);
140
204
  entries.push(parseContentFile(filePath, subdir));
141
205
  }
206
+
207
+ // Parse sibling field files and merge into siblingMap
208
+ for (const file of siblingFiles) {
209
+ const ext = extname(file).toLowerCase();
210
+ const name = basename(file, ext);
211
+ const content = readFileSync(resolve(subdirPath, file), 'utf-8').trim();
212
+
213
+ // Determine which entry this belongs to and which field
214
+ const dotIdx = name.lastIndexOf('.');
215
+ if (dotIdx > 0) {
216
+ // Collection: slug.fieldId.md
217
+ const slug = name.slice(0, dotIdx);
218
+ const fieldId = name.slice(dotIdx + 1);
219
+ const key = `${subdir}/${slug}`;
220
+ const existing = siblingMap.get(key) ?? {};
221
+ existing[fieldId] = content;
222
+ siblingMap.set(key, existing);
223
+ } else {
224
+ // Singleton: fieldId.md → key is contentType/contentType
225
+ const fieldId = name;
226
+ const key = `${subdir}/${subdir}`;
227
+ const existing = siblingMap.get(key) ?? {};
228
+ existing[fieldId] = content;
229
+ siblingMap.set(key, existing);
230
+ }
231
+ }
142
232
  }
143
233
 
144
- return entries;
234
+ return { entries, siblingMap };
145
235
  }
146
236
 
147
237
  // ── Convert ContentEntry → D1 row data ─────────────────────────────
148
238
 
239
+ /**
240
+ * Find ALL markdown field IDs in a content type's field metadata.
241
+ * Returns an ordered array (first element = primary body field).
242
+ */
243
+ export function findMarkdownFields(
244
+ fieldMeta: Record<string, { fieldType: string }>
245
+ ): string[] {
246
+ return Object.entries(fieldMeta)
247
+ .filter(([, meta]) => meta.fieldType === 'markdown')
248
+ .map(([id]) => id);
249
+ }
250
+
149
251
  /**
150
252
  * Find the first markdown field ID in a content type's field metadata.
151
253
  * Returns null if the content type has no markdown field.
@@ -153,19 +255,18 @@ export function readContentDir(contentDir: string): ContentEntry[] {
153
255
  export function findMarkdownField(
154
256
  fieldMeta: Record<string, { fieldType: string }>
155
257
  ): string | null {
156
- for (const [id, meta] of Object.entries(fieldMeta)) {
157
- if (meta.fieldType === 'markdown') return id;
158
- }
159
- return null;
258
+ return findMarkdownFields(fieldMeta)[0] ?? null;
160
259
  }
161
260
 
162
261
  /**
163
262
  * Convert a ContentEntry into the flat data object suitable for D1 insertion.
164
263
  * The markdown body (if present) is assigned to the first markdown field.
264
+ * Additional markdown fields can be passed in via siblingFields.
165
265
  */
166
266
  export function contentEntryToDbRow(
167
267
  entry: ContentEntry,
168
- ctInfo: ContentTypeInfo
268
+ ctInfo: ContentTypeInfo,
269
+ siblingFields?: Record<string, string>
169
270
  ): Record<string, unknown> {
170
271
  const data: Record<string, unknown> = { ...entry.fields };
171
272
 
@@ -177,6 +278,13 @@ export function contentEntryToDbRow(
177
278
  }
178
279
  }
179
280
 
281
+ // Merge sibling markdown fields
282
+ if (siblingFields) {
283
+ for (const [fieldId, content] of Object.entries(siblingFields)) {
284
+ data[fieldId] = content;
285
+ }
286
+ }
287
+
180
288
  // Set slug
181
289
  if (!data.slug && !entry.singleton) {
182
290
  data.slug = entry.slug;
@@ -216,7 +324,11 @@ export function contentEntryToDbRow(
216
324
  export function dbRowToContentFile(
217
325
  row: Record<string, unknown>,
218
326
  ctInfo: ContentTypeInfo
219
- ): { content: string; extension: string } {
327
+ ): {
328
+ content: string;
329
+ extension: string;
330
+ siblingFiles?: { fieldId: string; content: string }[];
331
+ } {
220
332
  // Parse the data blob if it's still a JSON string
221
333
  let fields: Record<string, unknown>;
222
334
  if (typeof row.data === 'string') {
@@ -239,28 +351,44 @@ export function dbRowToContentFile(
239
351
  // Strip system-only fields from frontmatter
240
352
  delete fields.slug;
241
353
 
242
- const mdFieldId = findMarkdownField(ctInfo.fieldMeta);
354
+ const mdFields = findMarkdownFields(ctInfo.fieldMeta);
355
+ const primaryMdField = mdFields[0] ?? null;
356
+ const extraMdFields = mdFields.slice(1);
357
+
358
+ // Collect sibling files for extra markdown fields
359
+ const siblingFiles: { fieldId: string; content: string }[] = [];
360
+ for (const fieldId of extraMdFields) {
361
+ if (fields[fieldId]) {
362
+ siblingFiles.push({ fieldId, content: String(fields[fieldId]) });
363
+ delete fields[fieldId]; // Don't put in frontmatter
364
+ }
365
+ }
243
366
 
244
- if (mdFieldId && fields[mdFieldId]) {
367
+ if (primaryMdField && fields[primaryMdField]) {
245
368
  // Markdown file: body goes below frontmatter
246
- const body = String(fields[mdFieldId]);
247
- delete fields[mdFieldId];
369
+ const body = String(fields[primaryMdField]);
370
+ delete fields[primaryMdField];
248
371
  const fm = matter.stringify('', fields).trim();
249
372
  return {
250
373
  content: `${fm}\n\n${body}\n`,
251
- extension: '.md'
374
+ extension: '.md',
375
+ ...(siblingFiles.length > 0 ? { siblingFiles } : {})
252
376
  };
253
377
  }
254
378
 
255
- // YAML-only file (no markdown field or empty body)
379
+ // Frontmatter-only file (no markdown field or empty body)
256
380
  const fm = matter.stringify('', fields).trim();
257
- return { content: fm + '\n', extension: '.yml' };
381
+ return {
382
+ content: fm + '\n',
383
+ extension: '.md',
384
+ ...(siblingFiles.length > 0 ? { siblingFiles } : {})
385
+ };
258
386
  }
259
387
 
260
388
  // ── Write content/ directory from D1 entries ───────────────────────
261
389
 
262
390
  /**
263
- * Write D1 entries to the content/ directory as .md/.yml files.
391
+ * Write D1 entries to the content/ directory as .md files.
264
392
  * Each content type gets its own subdirectory.
265
393
  *
266
394
  * @param contentDir - Absolute path to the content/ directory
@@ -284,7 +412,10 @@ export function writeContentDir(
284
412
  const typeDir = resolve(contentDir, typeId);
285
413
  mkdirSync(typeDir, { recursive: true });
286
414
 
287
- const { content, extension } = dbRowToContentFile(entry, ctInfo);
415
+ const { content, extension, siblingFiles } = dbRowToContentFile(
416
+ entry,
417
+ ctInfo
418
+ );
288
419
 
289
420
  // Determine filename
290
421
  const slug = (entry.slug as string) || (entry.id as string);
@@ -293,6 +424,17 @@ export function writeContentDir(
293
424
 
294
425
  writeFileSync(resolve(typeDir, filename), content);
295
426
  count++;
427
+
428
+ // Write sibling files for extra markdown fields
429
+ if (siblingFiles) {
430
+ for (const { fieldId, content: siblingContent } of siblingFiles) {
431
+ const siblingName = isSingleton
432
+ ? `${fieldId}.md`
433
+ : `${slug}.${fieldId}.md`;
434
+ writeFileSync(resolve(typeDir, siblingName), siblingContent + '\n');
435
+ count++;
436
+ }
437
+ }
296
438
  }
297
439
 
298
440
  return count;
@@ -313,16 +455,21 @@ export function prepareContentForSync(
313
455
  contentTypes: ContentTypeInfo[]
314
456
  ): { contentType: string; rowData: Record<string, unknown> }[] {
315
457
  const ctMap = new Map(contentTypes.map(ct => [ct.id, ct]));
316
- const entries = readContentDir(contentDir);
458
+ const { entries, siblingMap } = readContentDir(contentDir, contentTypes);
317
459
  const results: { contentType: string; rowData: Record<string, unknown> }[] =
318
460
  [];
319
461
 
320
462
  for (const entry of entries) {
321
463
  const ctInfo = ctMap.get(entry.contentType);
322
464
  if (!ctInfo) continue;
465
+
466
+ // Look up sibling markdown fields for this entry
467
+ const entryKey = `${entry.contentType}/${entry.slug}`;
468
+ const siblings = siblingMap.get(entryKey);
469
+
323
470
  results.push({
324
471
  contentType: entry.contentType,
325
- rowData: contentEntryToDbRow(entry, ctInfo)
472
+ rowData: contentEntryToDbRow(entry, ctInfo, siblings)
326
473
  });
327
474
  }
328
475
 
package/cli/dev-sync.ts CHANGED
@@ -3,17 +3,32 @@
3
3
  *
4
4
  * Two event-driven sync paths:
5
5
  * 1. File watcher: content/ file changes → parse → INSERT OR REPLACE into D1
6
+ * content/media/ image changes → put/delete in local R2 + assets table
6
7
  * 2. Sync server: router webhook (POST /sync) → write content/ file
8
+ * router webhook (POST /sync/media) → write file to content/media/
9
+ * router webhook (DELETE /sync/media/:filename) → remove from content/media/
7
10
  *
8
11
  * Loop prevention: a shared cooldown map tracks recent writes by entry ID.
9
12
  * When either side writes, it records the ID. The other side skips that ID
10
13
  * for a short cooldown window to prevent infinite loops.
11
14
  */
12
15
 
13
- import { watch, existsSync, statSync } from 'fs';
14
- import { resolve, relative, join, extname, sep } from 'path';
16
+ import {
17
+ watch,
18
+ existsSync,
19
+ statSync,
20
+ writeFileSync,
21
+ unlinkSync,
22
+ mkdirSync
23
+ } from 'fs';
24
+ import { resolve, relative, join, extname, basename, sep } from 'path';
15
25
  import { log, ok, warn, ANSI } from './log.ts';
16
- import { d1InsertRow, applySchema, type D1Target } from './wrangler.ts';
26
+ import {
27
+ d1InsertRow,
28
+ applySchema,
29
+ r2PutLocal,
30
+ type D1Target
31
+ } from './wrangler.ts';
17
32
  import { buildInsertSql } from '../src/db/sql.ts';
18
33
  import {
19
34
  parseContentFile,
@@ -81,7 +96,7 @@ function startFileWatcher(opts: FileWatcherOptions): { stop: () => void } {
81
96
  const ext = extname(filename);
82
97
  if (ext !== '.md' && ext !== '.yml' && ext !== '.yaml') return;
83
98
 
84
- // Skip media/ subdirectory
99
+ // Skip media/ subdirectory — handled separately by the media watcher
85
100
  if (filename.startsWith('media' + sep) || filename.startsWith('media/'))
86
101
  return;
87
102
 
@@ -118,8 +133,14 @@ function startFileWatcher(opts: FileWatcherOptions): { stop: () => void } {
118
133
  // Pack into v2 format: entries(id, content_type, slug, data, status)
119
134
  const rowData = contentEntryToDbRow(entry, ctInfo);
120
135
  const {
121
- id, slug: rowSlug, status, publish_at, publishAt,
122
- created_at: _ca, updated_at: _ua, content_type: _ct,
136
+ id,
137
+ slug: rowSlug,
138
+ status,
139
+ publish_at,
140
+ publishAt,
141
+ created_at: _ca,
142
+ updated_at: _ua,
143
+ content_type: _ct,
123
144
  ...fields
124
145
  } = rowData;
125
146
  d1InsertRow(root, dbName, '--local', 'entries', {
@@ -152,6 +173,129 @@ function startFileWatcher(opts: FileWatcherOptions): { stop: () => void } {
152
173
  };
153
174
  }
154
175
 
176
+ // ── Media file watcher: content/media/ → local R2 ─────────────────
177
+
178
+ const MEDIA_EXTENSIONS = new Set([
179
+ '.png',
180
+ '.jpg',
181
+ '.jpeg',
182
+ '.gif',
183
+ '.webp',
184
+ '.svg',
185
+ '.avif',
186
+ '.ico'
187
+ ]);
188
+ const MIME_TYPES: Record<string, string> = {
189
+ '.png': 'image/png',
190
+ '.jpg': 'image/jpeg',
191
+ '.jpeg': 'image/jpeg',
192
+ '.gif': 'image/gif',
193
+ '.webp': 'image/webp',
194
+ '.svg': 'image/svg+xml',
195
+ '.avif': 'image/avif',
196
+ '.ico': 'image/x-icon'
197
+ };
198
+
199
+ interface MediaWatcherOptions {
200
+ root: string;
201
+ dbName: string;
202
+ }
203
+
204
+ function startMediaWatcher(opts: MediaWatcherOptions): { stop: () => void } {
205
+ const { root, dbName } = opts;
206
+ const mediaDir = resolve(root, 'content/media');
207
+
208
+ if (!existsSync(mediaDir)) return { stop: () => {} };
209
+
210
+ const debounceTimers = new Map<string, ReturnType<typeof setTimeout>>();
211
+
212
+ const watcher = watch(mediaDir, (_event, filename) => {
213
+ if (!filename) return;
214
+ const ext = extname(filename).toLowerCase();
215
+ if (!MEDIA_EXTENSIONS.has(ext)) return;
216
+ if (filename.startsWith('.') || filename.startsWith('_')) return;
217
+
218
+ const existing = debounceTimers.get(filename);
219
+ if (existing) clearTimeout(existing);
220
+
221
+ debounceTimers.set(
222
+ filename,
223
+ setTimeout(() => {
224
+ debounceTimers.delete(filename);
225
+ const filePath = resolve(mediaDir, filename);
226
+
227
+ if (!existsSync(filePath)) {
228
+ // File removed — delete from local R2 + assets table
229
+ const id = `media-${filename.replace(/\.\w+$/, '')}`;
230
+ const key = `media-${filename.replace(/\.\w+$/, '')}${ext}`;
231
+ try {
232
+ const { run } = require('./exec.ts');
233
+ // Remove from local R2
234
+ const { ensureWranglerConfig } = require('./config.ts');
235
+ const configPath = ensureWranglerConfig(root);
236
+ run(
237
+ `bunx wrangler r2 object delete ${key} --config ${configPath} --local`,
238
+ { cwd: root, silent: true }
239
+ );
240
+ } catch {
241
+ /* R2 object may not exist */
242
+ }
243
+ try {
244
+ d1InsertRow(root, dbName, '--local', 'assets', { id, _delete: 1 });
245
+ } catch {
246
+ /* may not exist */
247
+ }
248
+ // Use a direct DELETE instead
249
+ try {
250
+ const { run } = require('./exec.ts');
251
+ const { ensureWranglerConfig } = require('./config.ts');
252
+ const configPath = ensureWranglerConfig(root);
253
+ run(
254
+ `bunx wrangler d1 execute ${dbName} --local --config ${configPath} --command "DELETE FROM assets WHERE id = '${id}'"`,
255
+ { cwd: root, silent: true }
256
+ );
257
+ } catch {
258
+ /* ignore */
259
+ }
260
+ ok(
261
+ `${ANSI.DIM}sync:${ANSI.RESET} media/${filename} removed from local R2`
262
+ );
263
+ return;
264
+ }
265
+
266
+ // File added/changed — put into local R2 + upsert assets table
267
+ const id = `media-${filename.replace(/\.\w+$/, '')}`;
268
+ const key = `${id}${ext}`;
269
+ try {
270
+ r2PutLocal(root, 'media', key, filePath);
271
+ d1InsertRow(root, dbName, '--local', 'assets', {
272
+ id,
273
+ data: JSON.stringify({
274
+ title: filename,
275
+ url: `/api/media/${key}`,
276
+ content_type: MIME_TYPES[ext] ?? 'application/octet-stream',
277
+ width: null,
278
+ height: null,
279
+ file_size: null
280
+ })
281
+ });
282
+ ok(`${ANSI.DIM}sync:${ANSI.RESET} media/${filename} → local R2`);
283
+ } catch (e) {
284
+ warn(`Media sync failed for ${filename}: ${e}`);
285
+ }
286
+ }, 300)
287
+ );
288
+ });
289
+
290
+ return {
291
+ stop: () => {
292
+ watcher.close();
293
+ for (const timer of debounceTimers.values()) clearTimeout(timer);
294
+ debounceTimers.clear();
295
+ }
296
+ };
297
+ }
298
+
155
299
  // ── Sync server: router webhook → content/ files ───────────────────
156
300
 
157
301
  interface SyncServerOptions {
@@ -167,13 +311,19 @@ export function killStalePortHolder(port: number): void {
167
311
  for (const pid of out.split('\n')) {
168
312
  const n = parseInt(pid, 10);
169
313
  if (n > 0 && n !== process.pid) {
170
- try { process.kill(n, 'SIGTERM'); } catch { /* already gone */ }
314
+ try {
315
+ process.kill(n, 'SIGTERM');
316
+ } catch {
317
+ /* already gone */
318
+ }
171
319
  }
172
320
  }
173
321
  // Brief pause to let the OS release the socket
174
322
  execSync('sleep 0.2');
175
323
  }
176
- } catch { /* lsof not found or no process — fine */ }
324
+ } catch {
325
+ /* lsof not found or no process — fine */
326
+ }
177
327
  }
178
328
 
179
329
  function startSyncServer(opts: SyncServerOptions): {
@@ -248,6 +398,50 @@ function startSyncServer(opts: SyncServerOptions): {
248
398
  }
249
399
  }
250
400
 
401
+ // ── POST /sync/media — write uploaded file to content/media/ ──
402
+ if (req.method === 'POST' && url.pathname === '/sync/media') {
403
+ try {
404
+ const form = await req.formData();
405
+ const file = form.get('file') as File | null;
406
+ const filename = form.get('filename') as string | null;
407
+ if (!file || !filename)
408
+ return new Response('Missing file or filename', { status: 400 });
409
+
410
+ const mediaDir = resolve(contentDir, 'media');
411
+ if (!existsSync(mediaDir)) mkdirSync(mediaDir, { recursive: true });
412
+
413
+ const buf = Buffer.from(await file.arrayBuffer());
414
+ writeFileSync(resolve(mediaDir, filename), buf);
415
+ ok(
416
+ `${ANSI.DIM}sync:${ANSI.RESET} dashboard → content/media/${filename}`
417
+ );
418
+ return new Response('ok');
419
+ } catch (e) {
420
+ warn(`Media sync write error: ${e}`);
421
+ return new Response('error', { status: 500 });
422
+ }
423
+ }
424
+
425
+ // ── DELETE /sync/media/:filename — remove from content/media/ ──
426
+ if (req.method === 'DELETE' && url.pathname.startsWith('/sync/media/')) {
427
+ try {
428
+ const filename = decodeURIComponent(
429
+ url.pathname.slice('/sync/media/'.length)
430
+ );
431
+ const filePath = resolve(contentDir, 'media', filename);
432
+ if (existsSync(filePath)) {
433
+ unlinkSync(filePath);
434
+ ok(
435
+ `${ANSI.DIM}sync:${ANSI.RESET} dashboard deleted content/media/${filename}`
436
+ );
437
+ }
438
+ return new Response('ok');
439
+ } catch (e) {
440
+ warn(`Media sync delete error: ${e}`);
441
+ return new Response('error', { status: 500 });
442
+ }
443
+ }
444
+
251
445
  return new Response('not found', { status: 404 });
252
446
  }
253
447
  });
@@ -288,16 +482,20 @@ export function startDevSync(
288
482
 
289
483
  // Start both sync paths
290
484
  const fileWatcher = startFileWatcher({ root, dbName, contentTypes });
485
+ const mediaWatcher = startMediaWatcher({ root, dbName });
291
486
  const syncServer = startSyncServer({ root, contentTypes });
292
487
 
293
488
  if (!opts?.silent) {
294
- ok(`Dev sync active — file watcher + sync server on :${syncServer.port}`);
489
+ ok(
490
+ `Dev sync active — file watcher + media watcher + sync server on :${syncServer.port}`
491
+ );
295
492
  }
296
493
 
297
494
  return {
298
495
  syncUrl: `http://localhost:${syncServer.port}`,
299
496
  stop: () => {
300
497
  fileWatcher.stop();
498
+ mediaWatcher.stop();
301
499
  syncServer.stop();
302
500
  recentWrites.clear();
303
501
  }