koguma 2.1.0 → 2.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/cli/index.ts CHANGED
@@ -13,7 +13,13 @@
13
13
  * All commands auto-detect the project root by looking for koguma.toml.
14
14
  */
15
15
 
16
- import { existsSync, readFileSync, writeFileSync, mkdirSync, readdirSync } from 'fs';
16
+ import {
17
+ existsSync,
18
+ readFileSync,
19
+ writeFileSync,
20
+ mkdirSync,
21
+ readdirSync
22
+ } from 'fs';
17
23
  import { resolve, dirname, extname } from 'path';
18
24
 
19
25
  import { ANSI, log, ok, warn, fail, header } from './log.ts';
@@ -47,6 +53,7 @@ import {
47
53
  d1ExecuteBatchSqlAsync,
48
54
  r2PutLocal,
49
55
  r2PutLocalAsync,
56
+ r2PutRemoteAsync,
50
57
  wranglerDev,
51
58
  wranglerDeploy,
52
59
  createD1Database,
@@ -59,7 +66,11 @@ import {
59
66
  validateContent,
60
67
  type ContentTypeInfo
61
68
  } from './content.ts';
62
- import { startDevSync, DEV_SYNC_ENV_VAR, killStalePortHolder } from './dev-sync.ts';
69
+ import {
70
+ startDevSync,
71
+ DEV_SYNC_ENV_VAR,
72
+ killStalePortHolder
73
+ } from './dev-sync.ts';
63
74
  import { buildInsertSql, wrapForShell } from '../src/db/sql.ts';
64
75
  import { intro, outro, handleCancel, p, BRAND } from './ui.ts';
65
76
 
@@ -156,8 +167,12 @@ async function syncContentToLocalD1(
156
167
  f => !f.startsWith('.') && !f.startsWith('_')
157
168
  );
158
169
  const mimeTypes: Record<string, string> = {
159
- '.png': 'image/png', '.jpg': 'image/jpeg', '.jpeg': 'image/jpeg',
160
- '.gif': 'image/gif', '.webp': 'image/webp', '.svg': 'image/svg+xml',
170
+ '.png': 'image/png',
171
+ '.jpg': 'image/jpeg',
172
+ '.jpeg': 'image/jpeg',
173
+ '.gif': 'image/gif',
174
+ '.webp': 'image/webp',
175
+ '.svg': 'image/svg+xml'
161
176
  };
162
177
 
163
178
  for (const file of mediaFiles) {
@@ -167,15 +182,19 @@ async function syncContentToLocalD1(
167
182
 
168
183
  assetIdMap.set(file, id);
169
184
  mediaUploads.push({ filePath: resolve(mediaDir, file), key });
170
- assetSql.push(buildInsertSql('assets', {
171
- id,
172
- data: JSON.stringify({
173
- title: file,
174
- url: `/api/media/${key}`,
175
- content_type: mimeTypes[ext] ?? 'application/octet-stream',
176
- width: null, height: null, file_size: null
185
+ assetSql.push(
186
+ buildInsertSql('assets', {
187
+ id,
188
+ data: JSON.stringify({
189
+ title: file,
190
+ url: `/api/media/${key}`,
191
+ content_type: mimeTypes[ext] ?? 'application/octet-stream',
192
+ width: null,
193
+ height: null,
194
+ file_size: null
195
+ })
177
196
  })
178
- }));
197
+ );
179
198
  }
180
199
  }
181
200
 
@@ -184,8 +203,14 @@ async function syncContentToLocalD1(
184
203
  const entrySql: string[] = [];
185
204
  for (const { contentType, rowData } of prepared) {
186
205
  const {
187
- id, slug, status, publish_at, publishAt,
188
- created_at: _ca, updated_at: _ua, content_type: _ct,
206
+ id,
207
+ slug,
208
+ status,
209
+ publish_at,
210
+ publishAt,
211
+ created_at: _ca,
212
+ updated_at: _ua,
213
+ content_type: _ct,
189
214
  ...fields
190
215
  } = rowData;
191
216
 
@@ -193,9 +218,16 @@ async function syncContentToLocalD1(
193
218
  const ct = config.contentTypes.find(c => c.id === contentType);
194
219
  if (ct && assetIdMap.size > 0) {
195
220
  for (const [fieldId, meta] of Object.entries(ct.fieldMeta)) {
196
- if (meta.fieldType === 'image' && typeof fields[fieldId] === 'string') {
197
- fields[fieldId] = assetIdMap.get(fields[fieldId] as string) ?? fields[fieldId];
198
- } else if (meta.fieldType === 'images' && Array.isArray(fields[fieldId])) {
221
+ if (
222
+ meta.fieldType === 'image' &&
223
+ typeof fields[fieldId] === 'string'
224
+ ) {
225
+ fields[fieldId] =
226
+ assetIdMap.get(fields[fieldId] as string) ?? fields[fieldId];
227
+ } else if (
228
+ meta.fieldType === 'images' &&
229
+ Array.isArray(fields[fieldId])
230
+ ) {
199
231
  fields[fieldId] = (fields[fieldId] as string[]).map(
200
232
  v => assetIdMap.get(v) ?? v
201
233
  );
@@ -203,21 +235,26 @@ async function syncContentToLocalD1(
203
235
  }
204
236
  }
205
237
 
206
- entrySql.push(buildInsertSql('entries', {
207
- id: id as string,
208
- content_type: contentType,
209
- slug: (slug as string | undefined) ?? null,
210
- data: JSON.stringify(fields),
211
- status: (status as string | undefined) ?? 'draft',
212
- ...(publish_at !== undefined ? { publish_at } : {}),
213
- ...(publishAt !== undefined ? { publish_at: publishAt } : {})
214
- }));
238
+ entrySql.push(
239
+ buildInsertSql('entries', {
240
+ id: id as string,
241
+ content_type: contentType,
242
+ slug: (slug as string | undefined) ?? null,
243
+ data: JSON.stringify(fields),
244
+ status: (status as string | undefined) ?? 'draft',
245
+ ...(publish_at !== undefined ? { publish_at } : {}),
246
+ ...(publishAt !== undefined ? { publish_at: publishAt } : {})
247
+ })
248
+ );
215
249
  }
216
250
 
217
251
  // Single batch: assets + entries written to one SQL file
218
252
  const allStatements = [...assetSql, ...entrySql];
219
253
  if (allStatements.length > 0) {
220
- if (s) s.message(`Syncing ${entrySql.length} entries + ${assetSql.length} assets...`);
254
+ if (s)
255
+ s.message(
256
+ `Syncing ${entrySql.length} entries + ${assetSql.length} assets...`
257
+ );
221
258
  await d1ExecuteBatchSqlAsync(root, dbName, '--local', allStatements);
222
259
  }
223
260
 
@@ -542,7 +579,10 @@ async function cmdDev(): Promise<void> {
542
579
  /**
543
580
  * koguma push — Ship to production.
544
581
  *
545
- * Content sync → build admin → deploy → sync data + media to remote.
582
+ * Reads content/ directly as the source of truth:
583
+ * - Entries + media metadata → remote D1 (no local D1 roundtrip)
584
+ * - content/media/ files → remote R2 (no localhost dependency)
585
+ * - Then builds and deploys the worker.
546
586
  */
547
587
  async function cmdPush(): Promise<void> {
548
588
  intro('push');
@@ -557,127 +597,178 @@ async function cmdPush(): Promise<void> {
557
597
 
558
598
  const { dbName, bucketName } = getProjectNames(root);
559
599
 
560
- // ── Confirmation ──
561
- const confirmed = await p.confirm({
562
- message: `Push local content to ${BRAND.ACCENT}${remoteUrl}${BRAND.RESET}?`,
563
- initialValue: true
564
- });
565
- if (handleCancel(confirmed)) return;
566
- if (!confirmed) {
567
- outro('Push cancelled.');
568
- return;
600
+ // ── Confirmation (Enter = yes, n = cancel, --yes to skip) ──
601
+ const skipConfirm =
602
+ process.argv.includes('--yes') || process.argv.includes('-y');
603
+ if (!skipConfirm) {
604
+ const answer = await p.text({
605
+ message: `Push to ${BRAND.ACCENT}${remoteUrl}${BRAND.RESET}? (Y/n)`,
606
+ defaultValue: 'y',
607
+ placeholder: 'y'
608
+ });
609
+ if (handleCancel(answer)) return;
610
+ if ((answer as string).toLowerCase().startsWith('n')) {
611
+ outro('Push cancelled.');
612
+ return;
613
+ }
614
+ } else {
615
+ p.log.info(`Pushing to ${BRAND.ACCENT}${remoteUrl}${BRAND.RESET} (--yes)`);
569
616
  }
570
617
 
571
- // ── Step 1: Apply schema to remote ──
618
+ // ── Step 1/4: Apply schema to remote ──
572
619
  const s1 = p.spinner();
573
620
  s1.start('Applying schema to remote...');
574
621
  applySchema(root, dbName, '--remote');
575
622
  s1.stop('Remote schema applied');
576
623
 
577
- // ── Step 2: Sync content/ → local D1 ──
624
+ // ── Step 2/4: content/ → remote D1 (direct, no local roundtrip) ──
578
625
  const s2 = p.spinner();
579
- s2.start('Syncing content/ to local D1...');
580
- await syncContentToLocalD1(root, dbName, s2);
581
- s2.stop('Local content synced');
582
-
583
- // ── Step 3: Export local content ──
584
- const s3 = p.spinner();
585
- s3.start('Exporting local content...');
586
-
587
- let localEntries: Record<string, unknown>[] = [];
588
- try {
589
- localEntries = d1Query(root, dbName, '--local', 'SELECT * FROM entries');
590
- } catch {
591
- p.log.warn('Could not export entries from local');
592
- }
626
+ s2.start('Reading content/ and syncing to remote D1...');
627
+ const contentDir = resolve(root, CONTENT_DIR);
628
+ const mediaDir = resolve(contentDir, 'media');
629
+ let syncedEntries = 0;
630
+ let syncedAssets = 0;
593
631
 
594
- let localAssets: Record<string, unknown>[] = [];
595
632
  try {
596
- localAssets = d1Query(root, dbName, '--local', 'SELECT * FROM assets');
597
- } catch {
598
- p.log.warn('Could not export assets from local');
599
- }
600
- s3.stop(
601
- `Exported ${localEntries.length} entries + ${localAssets.length} assets`
602
- );
633
+ const config = await loadSiteConfig(root);
603
634
 
604
- // ── Step 4: Import to remote ──
605
- const total = localEntries.length + localAssets.length;
606
- if (total > 0) {
607
- const prog = p.progress({ max: total });
608
- prog.start('Importing content to remote...');
635
+ // Build asset rows from content/media/ filenames
636
+ const mimeTypes: Record<string, string> = {
637
+ '.png': 'image/png',
638
+ '.jpg': 'image/jpeg',
639
+ '.jpeg': 'image/jpeg',
640
+ '.gif': 'image/gif',
641
+ '.webp': 'image/webp',
642
+ '.svg': 'image/svg+xml',
643
+ '.avif': 'image/avif',
644
+ '.ico': 'image/x-icon'
645
+ };
646
+ const assetIdMap = new Map<string, string>();
647
+ const allStatements: string[] = [];
609
648
 
610
- let done = 0;
611
- for (const asset of localAssets) {
612
- d1InsertRow(root, dbName, '--remote', 'assets', asset);
613
- done++;
614
- prog.advance(done, `Importing... (${done}/${total})`);
615
- }
616
- for (const entry of localEntries) {
617
- d1InsertRow(root, dbName, '--remote', 'entries', entry);
618
- done++;
619
- prog.advance(done, `Importing... (${done}/${total})`);
649
+ if (existsSync(mediaDir)) {
650
+ const mediaFiles = readdirSync(mediaDir).filter(
651
+ f => !f.startsWith('.') && !f.startsWith('_')
652
+ );
653
+ for (const file of mediaFiles) {
654
+ const ext = extname(file).toLowerCase();
655
+ const id = `media-${file.replace(/\.\w+$/, '')}`;
656
+ const key = `${id}${ext}`;
657
+ assetIdMap.set(file, id);
658
+ allStatements.push(
659
+ buildInsertSql('assets', {
660
+ id,
661
+ data: JSON.stringify({
662
+ title: file,
663
+ url: `/api/media/${key}`,
664
+ content_type: mimeTypes[ext] ?? 'application/octet-stream',
665
+ width: null,
666
+ height: null,
667
+ file_size: null
668
+ })
669
+ })
670
+ );
671
+ }
672
+ syncedAssets = assetIdMap.size;
620
673
  }
621
- prog.stop(
622
- `Synced ${localEntries.length} entries + ${localAssets.length} assets to remote`
623
- );
624
- }
625
674
 
626
- // ── Step 5: Upload media to remote ──
627
- if (localAssets.length > 0) {
628
- const cookie = await authenticate(remoteUrl, root);
629
- const sProg = p.progress({ max: localAssets.length });
630
- sProg.start('Uploading media to remote...');
631
-
632
- let uploaded = 0;
633
- for (const asset of localAssets) {
634
- const assetData =
635
- typeof asset.data === 'string'
636
- ? JSON.parse(asset.data)
637
- : (asset.data as Record<string, string>);
638
- const assetUrl = assetData.url ?? '';
639
- const assetTitle = assetData.title ?? '';
640
- const contentType = assetData.content_type ?? 'application/octet-stream';
641
- const key = assetUrl.replace('/api/media/', '');
675
+ // Read content/ entries → build SQL
676
+ const prepared = prepareContentForSync(contentDir, config.contentTypes);
677
+ for (const { contentType, rowData } of prepared) {
678
+ const {
679
+ id,
680
+ slug,
681
+ status,
682
+ publish_at,
683
+ publishAt,
684
+ created_at: _ca,
685
+ updated_at: _ua,
686
+ content_type: _ct,
687
+ ...fields
688
+ } = rowData;
642
689
 
643
- try {
644
- const dlRes = await fetch(`http://localhost:8787${assetUrl}`);
645
- if (!dlRes.ok) {
646
- uploaded++;
647
- sProg.advance(uploaded);
648
- continue;
690
+ // Resolve media filenames → asset IDs
691
+ const ct = config.contentTypes.find(c => c.id === contentType);
692
+ if (ct && assetIdMap.size > 0) {
693
+ for (const [fieldId, meta] of Object.entries(ct.fieldMeta)) {
694
+ if (
695
+ meta.fieldType === 'image' &&
696
+ typeof fields[fieldId] === 'string'
697
+ ) {
698
+ fields[fieldId] =
699
+ assetIdMap.get(fields[fieldId] as string) ?? fields[fieldId];
700
+ } else if (
701
+ meta.fieldType === 'images' &&
702
+ Array.isArray(fields[fieldId])
703
+ ) {
704
+ fields[fieldId] = (fields[fieldId] as string[]).map(
705
+ v => assetIdMap.get(v) ?? v
706
+ );
707
+ }
649
708
  }
709
+ }
650
710
 
651
- const blob = await dlRes.blob();
652
- const formData = new FormData();
653
- formData.append('file', new File([blob], key, { type: contentType }));
654
- formData.append('title', assetTitle || key);
711
+ allStatements.push(
712
+ buildInsertSql('entries', {
713
+ id: id as string,
714
+ content_type: contentType,
715
+ slug: (slug as string | undefined) ?? null,
716
+ data: JSON.stringify(fields),
717
+ status: (status as string | undefined) ?? 'published',
718
+ ...(publish_at !== undefined ? { publish_at } : {}),
719
+ ...(publishAt !== undefined ? { publish_at: publishAt } : {})
720
+ })
721
+ );
722
+ }
723
+ syncedEntries = prepared.length;
655
724
 
656
- const upRes = await fetch(`${remoteUrl}/api/admin/media`, {
657
- method: 'POST',
658
- headers: { Cookie: cookie },
659
- body: formData
660
- });
725
+ if (allStatements.length > 0) {
726
+ await d1ExecuteBatchSqlAsync(root, dbName, '--remote', allStatements);
727
+ }
728
+ s2.stop(
729
+ `Synced ${syncedEntries} entries + ${syncedAssets} assets to remote D1`
730
+ );
731
+ } catch (e) {
732
+ s2.stop('Content sync failed');
733
+ warn(`${e}`);
734
+ }
661
735
 
662
- if (!upRes.ok) {
663
- warn(`Upload failed for ${assetTitle}: ${await upRes.text()}`);
736
+ // ── Step 3/4: content/media/ → remote R2 (direct file upload) ──
737
+ if (existsSync(mediaDir)) {
738
+ const mediaFiles = readdirSync(mediaDir).filter(
739
+ f => !f.startsWith('.') && !f.startsWith('_')
740
+ );
741
+ if (mediaFiles.length > 0) {
742
+ const sProg = p.progress({ max: mediaFiles.length });
743
+ sProg.start('Uploading media files to remote R2...');
744
+ let uploaded = 0;
745
+ for (const file of mediaFiles) {
746
+ const ext = extname(file).toLowerCase();
747
+ const id = `media-${file.replace(/\.\w+$/, '')}`;
748
+ const key = `${id}${ext}`;
749
+ try {
750
+ await r2PutRemoteAsync(
751
+ root,
752
+ bucketName,
753
+ key,
754
+ resolve(mediaDir, file)
755
+ );
756
+ } catch (e) {
757
+ warn(`Failed to upload ${file}: ${e}`);
664
758
  }
665
- } catch (e) {
666
- warn(`Error uploading ${assetTitle}: ${e}`);
759
+ uploaded++;
760
+ sProg.advance(
761
+ uploaded,
762
+ `Uploading... (${uploaded}/${mediaFiles.length})`
763
+ );
667
764
  }
668
- uploaded++;
669
- sProg.advance(
670
- uploaded,
671
- `Uploading media... (${uploaded}/${localAssets.length})`
672
- );
765
+ sProg.stop(`Uploaded ${uploaded} media files to remote R2`);
673
766
  }
674
- sProg.stop(`Uploaded ${uploaded} media assets`);
675
767
  }
676
768
 
677
- // ── Step 6: Build + Deploy ──
769
+ // ── Step 4/4: Build + Deploy ──
678
770
  const sBuild = p.spinner();
679
771
  sBuild.start('Building admin dashboard...');
680
-
681
772
  const kogumaRoot = findKogumaRoot();
682
773
  const adminDir = resolve(kogumaRoot, 'admin');
683
774
  if (existsSync(adminDir)) {
@@ -949,7 +1040,7 @@ function cmdHelp(): void {
949
1040
  [
950
1041
  `${BRAND.ACCENT}init${BRAND.RESET} Set up a new project ${BRAND.DIM}(scaffold, login, D1, R2, secret)${BRAND.RESET}`,
951
1042
  `${BRAND.ACCENT}dev${BRAND.RESET} Start local dev server ${BRAND.DIM}with auto-sync + typegen${BRAND.RESET}`,
952
- `${BRAND.ACCENT}push${BRAND.RESET} Build, deploy, and sync content to remote`,
1043
+ `${BRAND.ACCENT}push${BRAND.RESET} Build, deploy, and sync content to remote ${BRAND.DIM}(--yes to skip confirm)${BRAND.RESET}`,
953
1044
  `${BRAND.ACCENT}pull${BRAND.RESET} Download remote content + media to local`,
954
1045
  `${BRAND.ACCENT}gen-types${BRAND.RESET} Generate ${BRAND.DIM}koguma.d.ts${BRAND.RESET} typed interfaces`,
955
1046
  `${BRAND.ACCENT}tidy${BRAND.RESET} Sync content/ dirs with config + validate`,
@@ -962,7 +1053,7 @@ function cmdHelp(): void {
962
1053
  [
963
1054
  `${BRAND.DIM}$${BRAND.RESET} koguma init`,
964
1055
  `${BRAND.DIM}$${BRAND.RESET} koguma dev`,
965
- `${BRAND.DIM}$${BRAND.RESET} koguma push --remote https://my-site.dev`,
1056
+ `${BRAND.DIM}$${BRAND.RESET} koguma push --remote https://my-site.dev${BRAND.DIM} --yes${BRAND.RESET}`,
966
1057
  `${BRAND.DIM}$${BRAND.RESET} koguma pull --remote https://my-site.dev`,
967
1058
  `${BRAND.DIM}$${BRAND.RESET} koguma gen-types`,
968
1059
  `${BRAND.DIM}$${BRAND.RESET} koguma tidy --dry`
package/cli/scaffold.ts CHANGED
@@ -11,7 +11,11 @@ import {
11
11
  import { resolve } from 'path';
12
12
  import { ok, warn } from './log.ts';
13
13
  import { generateKogumaToml } from './config.ts';
14
- import { findMarkdownField, type ContentTypeInfo } from './content.ts';
14
+ import {
15
+ findMarkdownField,
16
+ findMarkdownFields,
17
+ type ContentTypeInfo
18
+ } from './content.ts';
15
19
  import matter from 'gray-matter';
16
20
 
17
21
  // ── Template types ─────────────────────────────────────────────────
@@ -315,29 +319,50 @@ export function generateExampleFile(
315
319
  ctId: string,
316
320
  fields: Record<string, { fieldType: string }>,
317
321
  singleton?: boolean
318
- ): { content: string; extension: string } {
322
+ ): {
323
+ content: string;
324
+ extension: string;
325
+ siblingFiles?: { fieldId: string; content: string }[];
326
+ } {
319
327
  const frontmatter: Record<string, unknown> = {};
320
- let hasMarkdown = false;
328
+ const mdFields = Object.entries(fields)
329
+ .filter(([, meta]) => meta.fieldType === 'markdown')
330
+ .map(([id]) => id);
331
+ const primaryMdField = mdFields[0] ?? null;
332
+ const extraMdFields = mdFields.slice(1);
321
333
 
322
334
  for (const [fieldId, meta] of Object.entries(fields)) {
323
335
  if (meta.fieldType === 'markdown') {
324
- hasMarkdown = true;
325
- continue; // markdown goes in body, not frontmatter
336
+ continue; // markdown goes in body or sibling files, not frontmatter
326
337
  }
327
338
  frontmatter[fieldId] = placeholderForFieldType(meta.fieldType);
328
339
  }
329
340
 
330
- if (hasMarkdown) {
341
+ // Build sibling example files for extra markdown fields
342
+ const siblingFiles: { fieldId: string; content: string }[] = [];
343
+ for (const fieldId of extraMdFields) {
344
+ siblingFiles.push({
345
+ fieldId,
346
+ content: `Write your ${fieldId} content here.`
347
+ });
348
+ }
349
+
350
+ if (primaryMdField) {
331
351
  const fm = matter.stringify('', frontmatter).trim();
332
352
  const bodyHint = singleton ? '' : `\nWrite your ${ctId} content here.\n`;
333
353
  return {
334
354
  content: `${fm}\n${bodyHint}`,
335
- extension: '.md'
355
+ extension: '.md',
356
+ ...(siblingFiles.length > 0 ? { siblingFiles } : {})
336
357
  };
337
358
  }
338
359
 
339
360
  const fm = matter.stringify('', frontmatter).trim();
340
- return { content: fm + '\n', extension: '.md' };
361
+ return {
362
+ content: fm + '\n',
363
+ extension: '.md',
364
+ ...(siblingFiles.length > 0 ? { siblingFiles } : {})
365
+ };
341
366
  }
342
367
 
343
368
  /**
@@ -367,7 +392,7 @@ export function scaffoldContentDirFromTemplate(
367
392
  fields[fid] = { fieldType: fieldTypeFromExpression(expr) };
368
393
  }
369
394
 
370
- const { content, extension } = generateExampleFile(
395
+ const { content, extension, siblingFiles } = generateExampleFile(
371
396
  ct.id,
372
397
  fields,
373
398
  ct.singleton
@@ -375,6 +400,14 @@ export function scaffoldContentDirFromTemplate(
375
400
  const filename = `_example${extension}`;
376
401
  writeFileSync(resolve(typeDir, filename), content);
377
402
  ok(`Created content/${ct.id}/${filename}`);
403
+
404
+ // Write sibling example files for extra markdown fields
405
+ if (siblingFiles) {
406
+ for (const { fieldId, content: siblingContent } of siblingFiles) {
407
+ const siblingName = `_example.${fieldId}.md`;
408
+ writeFileSync(resolve(typeDir, siblingName), siblingContent + '\n');
409
+ }
410
+ }
378
411
  } else if (!dirExisted) {
379
412
  ok(`Created content/${ct.id}/`);
380
413
  }
@@ -408,7 +441,7 @@ export function scaffoldContentDir(
408
441
  const isEmpty = dirExisted ? readdirSync(typeDir).length === 0 : true;
409
442
 
410
443
  if (isEmpty) {
411
- const { content, extension } = generateExampleFile(
444
+ const { content, extension, siblingFiles } = generateExampleFile(
412
445
  ct.id,
413
446
  ct.fieldMeta,
414
447
  ct.singleton
@@ -416,6 +449,14 @@ export function scaffoldContentDir(
416
449
  const filename = `_example${extension}`;
417
450
  writeFileSync(resolve(typeDir, filename), content);
418
451
  ok(`Created content/${ct.id}/${filename}`);
452
+
453
+ // Write sibling example files for extra markdown fields
454
+ if (siblingFiles) {
455
+ for (const { fieldId, content: siblingContent } of siblingFiles) {
456
+ const siblingName = `_example.${fieldId}.md`;
457
+ writeFileSync(resolve(typeDir, siblingName), siblingContent + '\n');
458
+ }
459
+ }
419
460
  } else if (!dirExisted) {
420
461
  ok(`Created content/${ct.id}/`);
421
462
  }
@@ -538,7 +579,7 @@ export function syncContentDirsWithConfig(
538
579
  mkdirSync(typeDir, { recursive: true });
539
580
  }
540
581
 
541
- const { content, extension } = generateExampleFile(
582
+ const { content, extension, siblingFiles } = generateExampleFile(
542
583
  ct.id,
543
584
  ct.fieldMeta,
544
585
  ct.singleton
@@ -551,6 +592,14 @@ export function syncContentDirsWithConfig(
551
592
  if (!dryRun) {
552
593
  writeFileSync(resolve(typeDir, filename), content);
553
594
  ok(`Created content/${ct.id}/${filename}`);
595
+
596
+ // Write sibling example files for extra markdown fields
597
+ if (siblingFiles) {
598
+ for (const { fieldId, content: siblingContent } of siblingFiles) {
599
+ const siblingName = `_example.${fieldId}.md`;
600
+ writeFileSync(resolve(typeDir, siblingName), siblingContent + '\n');
601
+ }
602
+ }
554
603
  }
555
604
  continue;
556
605
  }
@@ -559,7 +608,7 @@ export function syncContentDirsWithConfig(
559
608
 
560
609
  // If dir is empty, create _example
561
610
  if (files.length === 0) {
562
- const { content, extension } = generateExampleFile(
611
+ const { content, extension, siblingFiles } = generateExampleFile(
563
612
  ct.id,
564
613
  ct.fieldMeta,
565
614
  ct.singleton
@@ -572,6 +621,14 @@ export function syncContentDirsWithConfig(
572
621
  if (!dryRun) {
573
622
  writeFileSync(resolve(typeDir, filename), content);
574
623
  ok(`Created content/${ct.id}/${filename}`);
624
+
625
+ // Write sibling example files for extra markdown fields
626
+ if (siblingFiles) {
627
+ for (const { fieldId, content: siblingContent } of siblingFiles) {
628
+ const siblingName = `_example.${fieldId}.md`;
629
+ writeFileSync(resolve(typeDir, siblingName), siblingContent + '\n');
630
+ }
631
+ }
575
632
  }
576
633
  continue;
577
634
  }