koguma 2.0.0 → 2.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/cli/index.ts CHANGED
@@ -13,7 +13,13 @@
13
13
  * All commands auto-detect the project root by looking for koguma.toml.
14
14
  */
15
15
 
16
- import { existsSync, readFileSync, writeFileSync, mkdirSync, readdirSync } from 'fs';
16
+ import {
17
+ existsSync,
18
+ readFileSync,
19
+ writeFileSync,
20
+ mkdirSync,
21
+ readdirSync
22
+ } from 'fs';
17
23
  import { resolve, dirname, extname } from 'path';
18
24
 
19
25
  import { ANSI, log, ok, warn, fail, header } from './log.ts';
@@ -47,6 +53,7 @@ import {
47
53
  d1ExecuteBatchSqlAsync,
48
54
  r2PutLocal,
49
55
  r2PutLocalAsync,
56
+ r2PutRemoteAsync,
50
57
  wranglerDev,
51
58
  wranglerDeploy,
52
59
  createD1Database,
@@ -59,7 +66,11 @@ import {
59
66
  validateContent,
60
67
  type ContentTypeInfo
61
68
  } from './content.ts';
62
- import { startDevSync, DEV_SYNC_ENV_VAR, killStalePortHolder } from './dev-sync.ts';
69
+ import {
70
+ startDevSync,
71
+ DEV_SYNC_ENV_VAR,
72
+ killStalePortHolder
73
+ } from './dev-sync.ts';
63
74
  import { buildInsertSql, wrapForShell } from '../src/db/sql.ts';
64
75
  import { intro, outro, handleCancel, p, BRAND } from './ui.ts';
65
76
 
@@ -156,8 +167,12 @@ async function syncContentToLocalD1(
156
167
  f => !f.startsWith('.') && !f.startsWith('_')
157
168
  );
158
169
  const mimeTypes: Record<string, string> = {
159
- '.png': 'image/png', '.jpg': 'image/jpeg', '.jpeg': 'image/jpeg',
160
- '.gif': 'image/gif', '.webp': 'image/webp', '.svg': 'image/svg+xml',
170
+ '.png': 'image/png',
171
+ '.jpg': 'image/jpeg',
172
+ '.jpeg': 'image/jpeg',
173
+ '.gif': 'image/gif',
174
+ '.webp': 'image/webp',
175
+ '.svg': 'image/svg+xml'
161
176
  };
162
177
 
163
178
  for (const file of mediaFiles) {
@@ -167,15 +182,19 @@ async function syncContentToLocalD1(
167
182
 
168
183
  assetIdMap.set(file, id);
169
184
  mediaUploads.push({ filePath: resolve(mediaDir, file), key });
170
- assetSql.push(buildInsertSql('assets', {
171
- id,
172
- data: JSON.stringify({
173
- title: file,
174
- url: `/api/media/${key}`,
175
- content_type: mimeTypes[ext] ?? 'application/octet-stream',
176
- width: null, height: null, file_size: null
185
+ assetSql.push(
186
+ buildInsertSql('assets', {
187
+ id,
188
+ data: JSON.stringify({
189
+ title: file,
190
+ url: `/api/media/${key}`,
191
+ content_type: mimeTypes[ext] ?? 'application/octet-stream',
192
+ width: null,
193
+ height: null,
194
+ file_size: null
195
+ })
177
196
  })
178
- }));
197
+ );
179
198
  }
180
199
  }
181
200
 
@@ -184,8 +203,14 @@ async function syncContentToLocalD1(
184
203
  const entrySql: string[] = [];
185
204
  for (const { contentType, rowData } of prepared) {
186
205
  const {
187
- id, slug, status, publish_at, publishAt,
188
- created_at: _ca, updated_at: _ua, content_type: _ct,
206
+ id,
207
+ slug,
208
+ status,
209
+ publish_at,
210
+ publishAt,
211
+ created_at: _ca,
212
+ updated_at: _ua,
213
+ content_type: _ct,
189
214
  ...fields
190
215
  } = rowData;
191
216
 
@@ -193,9 +218,16 @@ async function syncContentToLocalD1(
193
218
  const ct = config.contentTypes.find(c => c.id === contentType);
194
219
  if (ct && assetIdMap.size > 0) {
195
220
  for (const [fieldId, meta] of Object.entries(ct.fieldMeta)) {
196
- if (meta.fieldType === 'image' && typeof fields[fieldId] === 'string') {
197
- fields[fieldId] = assetIdMap.get(fields[fieldId] as string) ?? fields[fieldId];
198
- } else if (meta.fieldType === 'images' && Array.isArray(fields[fieldId])) {
221
+ if (
222
+ meta.fieldType === 'image' &&
223
+ typeof fields[fieldId] === 'string'
224
+ ) {
225
+ fields[fieldId] =
226
+ assetIdMap.get(fields[fieldId] as string) ?? fields[fieldId];
227
+ } else if (
228
+ meta.fieldType === 'images' &&
229
+ Array.isArray(fields[fieldId])
230
+ ) {
199
231
  fields[fieldId] = (fields[fieldId] as string[]).map(
200
232
  v => assetIdMap.get(v) ?? v
201
233
  );
@@ -203,21 +235,26 @@ async function syncContentToLocalD1(
203
235
  }
204
236
  }
205
237
 
206
- entrySql.push(buildInsertSql('entries', {
207
- id: id as string,
208
- content_type: contentType,
209
- slug: (slug as string | undefined) ?? null,
210
- data: JSON.stringify(fields),
211
- status: (status as string | undefined) ?? 'draft',
212
- ...(publish_at !== undefined ? { publish_at } : {}),
213
- ...(publishAt !== undefined ? { publish_at: publishAt } : {})
214
- }));
238
+ entrySql.push(
239
+ buildInsertSql('entries', {
240
+ id: id as string,
241
+ content_type: contentType,
242
+ slug: (slug as string | undefined) ?? null,
243
+ data: JSON.stringify(fields),
244
+ status: (status as string | undefined) ?? 'draft',
245
+ ...(publish_at !== undefined ? { publish_at } : {}),
246
+ ...(publishAt !== undefined ? { publish_at: publishAt } : {})
247
+ })
248
+ );
215
249
  }
216
250
 
217
251
  // Single batch: assets + entries written to one SQL file
218
252
  const allStatements = [...assetSql, ...entrySql];
219
253
  if (allStatements.length > 0) {
220
- if (s) s.message(`Syncing ${entrySql.length} entries + ${assetSql.length} assets...`);
254
+ if (s)
255
+ s.message(
256
+ `Syncing ${entrySql.length} entries + ${assetSql.length} assets...`
257
+ );
221
258
  await d1ExecuteBatchSqlAsync(root, dbName, '--local', allStatements);
222
259
  }
223
260
 
@@ -542,7 +579,10 @@ async function cmdDev(): Promise<void> {
542
579
  /**
543
580
  * koguma push — Ship to production.
544
581
  *
545
- * Content sync → build admin → deploy → sync data + media to remote.
582
+ * Reads content/ directly as the source of truth:
583
+ * - Entries + media metadata → remote D1 (no local D1 roundtrip)
584
+ * - content/media/ files → remote R2 (no localhost dependency)
585
+ * - Then builds and deploys the worker.
546
586
  */
547
587
  async function cmdPush(): Promise<void> {
548
588
  intro('push');
@@ -557,127 +597,178 @@ async function cmdPush(): Promise<void> {
557
597
 
558
598
  const { dbName, bucketName } = getProjectNames(root);
559
599
 
560
- // ── Confirmation ──
561
- const confirmed = await p.confirm({
562
- message: `Push local content to ${BRAND.ACCENT}${remoteUrl}${BRAND.RESET}?`,
563
- initialValue: true
564
- });
565
- if (handleCancel(confirmed)) return;
566
- if (!confirmed) {
567
- outro('Push cancelled.');
568
- return;
600
+ // ── Confirmation (Enter = yes, n = cancel, --yes to skip) ──
601
+ const skipConfirm =
602
+ process.argv.includes('--yes') || process.argv.includes('-y');
603
+ if (!skipConfirm) {
604
+ const answer = await p.text({
605
+ message: `Push to ${BRAND.ACCENT}${remoteUrl}${BRAND.RESET}? (Y/n)`,
606
+ defaultValue: 'y',
607
+ placeholder: 'y'
608
+ });
609
+ if (handleCancel(answer)) return;
610
+ if ((answer as string).toLowerCase().startsWith('n')) {
611
+ outro('Push cancelled.');
612
+ return;
613
+ }
614
+ } else {
615
+ p.log.info(`Pushing to ${BRAND.ACCENT}${remoteUrl}${BRAND.RESET} (--yes)`);
569
616
  }
570
617
 
571
- // ── Step 1: Apply schema to remote ──
618
+ // ── Step 1/4: Apply schema to remote ──
572
619
  const s1 = p.spinner();
573
620
  s1.start('Applying schema to remote...');
574
621
  applySchema(root, dbName, '--remote');
575
622
  s1.stop('Remote schema applied');
576
623
 
577
- // ── Step 2: Sync content/ → local D1 ──
624
+ // ── Step 2/4: content/ → remote D1 (direct, no local roundtrip) ──
578
625
  const s2 = p.spinner();
579
- s2.start('Syncing content/ to local D1...');
580
- await syncContentToLocalD1(root, dbName, s2);
581
- s2.stop('Local content synced');
582
-
583
- // ── Step 3: Export local content ──
584
- const s3 = p.spinner();
585
- s3.start('Exporting local content...');
586
-
587
- let localEntries: Record<string, unknown>[] = [];
588
- try {
589
- localEntries = d1Query(root, dbName, '--local', 'SELECT * FROM entries');
590
- } catch {
591
- p.log.warn('Could not export entries from local');
592
- }
626
+ s2.start('Reading content/ and syncing to remote D1...');
627
+ const contentDir = resolve(root, CONTENT_DIR);
628
+ const mediaDir = resolve(contentDir, 'media');
629
+ let syncedEntries = 0;
630
+ let syncedAssets = 0;
593
631
 
594
- let localAssets: Record<string, unknown>[] = [];
595
632
  try {
596
- localAssets = d1Query(root, dbName, '--local', 'SELECT * FROM assets');
597
- } catch {
598
- p.log.warn('Could not export assets from local');
599
- }
600
- s3.stop(
601
- `Exported ${localEntries.length} entries + ${localAssets.length} assets`
602
- );
633
+ const config = await loadSiteConfig(root);
603
634
 
604
- // ── Step 4: Import to remote ──
605
- const total = localEntries.length + localAssets.length;
606
- if (total > 0) {
607
- const prog = p.progress({ max: total });
608
- prog.start('Importing content to remote...');
635
+ // Build asset rows from content/media/ filenames
636
+ const mimeTypes: Record<string, string> = {
637
+ '.png': 'image/png',
638
+ '.jpg': 'image/jpeg',
639
+ '.jpeg': 'image/jpeg',
640
+ '.gif': 'image/gif',
641
+ '.webp': 'image/webp',
642
+ '.svg': 'image/svg+xml',
643
+ '.avif': 'image/avif',
644
+ '.ico': 'image/x-icon'
645
+ };
646
+ const assetIdMap = new Map<string, string>();
647
+ const allStatements: string[] = [];
609
648
 
610
- let done = 0;
611
- for (const asset of localAssets) {
612
- d1InsertRow(root, dbName, '--remote', 'assets', asset);
613
- done++;
614
- prog.advance(done, `Importing... (${done}/${total})`);
615
- }
616
- for (const entry of localEntries) {
617
- d1InsertRow(root, dbName, '--remote', 'entries', entry);
618
- done++;
619
- prog.advance(done, `Importing... (${done}/${total})`);
649
+ if (existsSync(mediaDir)) {
650
+ const mediaFiles = readdirSync(mediaDir).filter(
651
+ f => !f.startsWith('.') && !f.startsWith('_')
652
+ );
653
+ for (const file of mediaFiles) {
654
+ const ext = extname(file).toLowerCase();
655
+ const id = `media-${file.replace(/\.\w+$/, '')}`;
656
+ const key = `${id}${ext}`;
657
+ assetIdMap.set(file, id);
658
+ allStatements.push(
659
+ buildInsertSql('assets', {
660
+ id,
661
+ data: JSON.stringify({
662
+ title: file,
663
+ url: `/api/media/${key}`,
664
+ content_type: mimeTypes[ext] ?? 'application/octet-stream',
665
+ width: null,
666
+ height: null,
667
+ file_size: null
668
+ })
669
+ })
670
+ );
671
+ }
672
+ syncedAssets = assetIdMap.size;
620
673
  }
621
- prog.stop(
622
- `Synced ${localEntries.length} entries + ${localAssets.length} assets to remote`
623
- );
624
- }
625
674
 
626
- // ── Step 5: Upload media to remote ──
627
- if (localAssets.length > 0) {
628
- const cookie = await authenticate(remoteUrl, root);
629
- const sProg = p.progress({ max: localAssets.length });
630
- sProg.start('Uploading media to remote...');
631
-
632
- let uploaded = 0;
633
- for (const asset of localAssets) {
634
- const assetData =
635
- typeof asset.data === 'string'
636
- ? JSON.parse(asset.data)
637
- : (asset.data as Record<string, string>);
638
- const assetUrl = assetData.url ?? '';
639
- const assetTitle = assetData.title ?? '';
640
- const contentType = assetData.content_type ?? 'application/octet-stream';
641
- const key = assetUrl.replace('/api/media/', '');
675
+ // Read content/ entries → build SQL
676
+ const prepared = prepareContentForSync(contentDir, config.contentTypes);
677
+ for (const { contentType, rowData } of prepared) {
678
+ const {
679
+ id,
680
+ slug,
681
+ status,
682
+ publish_at,
683
+ publishAt,
684
+ created_at: _ca,
685
+ updated_at: _ua,
686
+ content_type: _ct,
687
+ ...fields
688
+ } = rowData;
642
689
 
643
- try {
644
- const dlRes = await fetch(`http://localhost:8787${assetUrl}`);
645
- if (!dlRes.ok) {
646
- uploaded++;
647
- sProg.advance(uploaded);
648
- continue;
690
+ // Resolve media filenames → asset IDs
691
+ const ct = config.contentTypes.find(c => c.id === contentType);
692
+ if (ct && assetIdMap.size > 0) {
693
+ for (const [fieldId, meta] of Object.entries(ct.fieldMeta)) {
694
+ if (
695
+ meta.fieldType === 'image' &&
696
+ typeof fields[fieldId] === 'string'
697
+ ) {
698
+ fields[fieldId] =
699
+ assetIdMap.get(fields[fieldId] as string) ?? fields[fieldId];
700
+ } else if (
701
+ meta.fieldType === 'images' &&
702
+ Array.isArray(fields[fieldId])
703
+ ) {
704
+ fields[fieldId] = (fields[fieldId] as string[]).map(
705
+ v => assetIdMap.get(v) ?? v
706
+ );
707
+ }
649
708
  }
709
+ }
650
710
 
651
- const blob = await dlRes.blob();
652
- const formData = new FormData();
653
- formData.append('file', new File([blob], key, { type: contentType }));
654
- formData.append('title', assetTitle || key);
711
+ allStatements.push(
712
+ buildInsertSql('entries', {
713
+ id: id as string,
714
+ content_type: contentType,
715
+ slug: (slug as string | undefined) ?? null,
716
+ data: JSON.stringify(fields),
717
+ status: (status as string | undefined) ?? 'published',
718
+ ...(publish_at !== undefined ? { publish_at } : {}),
719
+ ...(publishAt !== undefined ? { publish_at: publishAt } : {})
720
+ })
721
+ );
722
+ }
723
+ syncedEntries = prepared.length;
655
724
 
656
- const upRes = await fetch(`${remoteUrl}/api/admin/media`, {
657
- method: 'POST',
658
- headers: { Cookie: cookie },
659
- body: formData
660
- });
725
+ if (allStatements.length > 0) {
726
+ await d1ExecuteBatchSqlAsync(root, dbName, '--remote', allStatements);
727
+ }
728
+ s2.stop(
729
+ `Synced ${syncedEntries} entries + ${syncedAssets} assets to remote D1`
730
+ );
731
+ } catch (e) {
732
+ s2.stop('Content sync failed');
733
+ warn(`${e}`);
734
+ }
661
735
 
662
- if (!upRes.ok) {
663
- warn(`Upload failed for ${assetTitle}: ${await upRes.text()}`);
736
+ // ── Step 3/4: content/media/ → remote R2 (direct file upload) ──
737
+ if (existsSync(mediaDir)) {
738
+ const mediaFiles = readdirSync(mediaDir).filter(
739
+ f => !f.startsWith('.') && !f.startsWith('_')
740
+ );
741
+ if (mediaFiles.length > 0) {
742
+ const sProg = p.progress({ max: mediaFiles.length });
743
+ sProg.start('Uploading media files to remote R2...');
744
+ let uploaded = 0;
745
+ for (const file of mediaFiles) {
746
+ const ext = extname(file).toLowerCase();
747
+ const id = `media-${file.replace(/\.\w+$/, '')}`;
748
+ const key = `${id}${ext}`;
749
+ try {
750
+ await r2PutRemoteAsync(
751
+ root,
752
+ bucketName,
753
+ key,
754
+ resolve(mediaDir, file)
755
+ );
756
+ } catch (e) {
757
+ warn(`Failed to upload ${file}: ${e}`);
664
758
  }
665
- } catch (e) {
666
- warn(`Error uploading ${assetTitle}: ${e}`);
759
+ uploaded++;
760
+ sProg.advance(
761
+ uploaded,
762
+ `Uploading... (${uploaded}/${mediaFiles.length})`
763
+ );
667
764
  }
668
- uploaded++;
669
- sProg.advance(
670
- uploaded,
671
- `Uploading media... (${uploaded}/${localAssets.length})`
672
- );
765
+ sProg.stop(`Uploaded ${uploaded} media files to remote R2`);
673
766
  }
674
- sProg.stop(`Uploaded ${uploaded} media assets`);
675
767
  }
676
768
 
677
- // ── Step 6: Build + Deploy ──
769
+ // ── Step 4/4: Build + Deploy ──
678
770
  const sBuild = p.spinner();
679
771
  sBuild.start('Building admin dashboard...');
680
-
681
772
  const kogumaRoot = findKogumaRoot();
682
773
  const adminDir = resolve(kogumaRoot, 'admin');
683
774
  if (existsSync(adminDir)) {
@@ -949,7 +1040,7 @@ function cmdHelp(): void {
949
1040
  [
950
1041
  `${BRAND.ACCENT}init${BRAND.RESET} Set up a new project ${BRAND.DIM}(scaffold, login, D1, R2, secret)${BRAND.RESET}`,
951
1042
  `${BRAND.ACCENT}dev${BRAND.RESET} Start local dev server ${BRAND.DIM}with auto-sync + typegen${BRAND.RESET}`,
952
- `${BRAND.ACCENT}push${BRAND.RESET} Build, deploy, and sync content to remote`,
1043
+ `${BRAND.ACCENT}push${BRAND.RESET} Build, deploy, and sync content to remote ${BRAND.DIM}(--yes to skip confirm)${BRAND.RESET}`,
953
1044
  `${BRAND.ACCENT}pull${BRAND.RESET} Download remote content + media to local`,
954
1045
  `${BRAND.ACCENT}gen-types${BRAND.RESET} Generate ${BRAND.DIM}koguma.d.ts${BRAND.RESET} typed interfaces`,
955
1046
  `${BRAND.ACCENT}tidy${BRAND.RESET} Sync content/ dirs with config + validate`,
@@ -962,7 +1053,7 @@ function cmdHelp(): void {
962
1053
  [
963
1054
  `${BRAND.DIM}$${BRAND.RESET} koguma init`,
964
1055
  `${BRAND.DIM}$${BRAND.RESET} koguma dev`,
965
- `${BRAND.DIM}$${BRAND.RESET} koguma push --remote https://my-site.dev`,
1056
+ `${BRAND.DIM}$${BRAND.RESET} koguma push --remote https://my-site.dev${BRAND.DIM} --yes${BRAND.RESET}`,
966
1057
  `${BRAND.DIM}$${BRAND.RESET} koguma pull --remote https://my-site.dev`,
967
1058
  `${BRAND.DIM}$${BRAND.RESET} koguma gen-types`,
968
1059
  `${BRAND.DIM}$${BRAND.RESET} koguma tidy --dry`
package/cli/scaffold.ts CHANGED
@@ -337,7 +337,7 @@ export function generateExampleFile(
337
337
  }
338
338
 
339
339
  const fm = matter.stringify('', frontmatter).trim();
340
- return { content: fm + '\n', extension: '.yml' };
340
+ return { content: fm + '\n', extension: '.md' };
341
341
  }
342
342
 
343
343
  /**
package/cli/wrangler.ts CHANGED
@@ -207,7 +207,10 @@ export async function d1ExecuteBatchSqlAsync(
207
207
  if (!existsSync(dbDir)) mkdirSync(dbDir, { recursive: true });
208
208
 
209
209
  const sqlFile = resolve(dbDir, 'batch-sync.sql');
210
- writeFileSync(sqlFile, statements.map(s => s.endsWith(';') ? s : s + ';').join('\n'));
210
+ writeFileSync(
211
+ sqlFile,
212
+ statements.map(s => (s.endsWith(';') ? s : s + ';')).join('\n')
213
+ );
211
214
  await runAsync(
212
215
  `bunx wrangler d1 execute ${dbName} ${target} ${configFlag(root)} --file=${sqlFile}`,
213
216
  { cwd: root, silent: true }
@@ -244,6 +247,36 @@ export async function r2PutLocalAsync(
244
247
  );
245
248
  }
246
249
 
250
+ /**
251
+ * Put an object into the remote (production) Cloudflare R2 bucket.
252
+ */
253
+ export function r2PutRemote(
254
+ root: string,
255
+ bucketName: string,
256
+ key: string,
257
+ filePath: string
258
+ ): void {
259
+ run(
260
+ `bunx wrangler r2 object put ${bucketName}/${key} ${configFlag(root)} --file=${filePath}`,
261
+ { cwd: root, silent: true }
262
+ );
263
+ }
264
+
265
+ /**
266
+ * Put an object into the remote (production) Cloudflare R2 bucket (async).
267
+ */
268
+ export async function r2PutRemoteAsync(
269
+ root: string,
270
+ bucketName: string,
271
+ key: string,
272
+ filePath: string
273
+ ): Promise<void> {
274
+ await runAsync(
275
+ `bunx wrangler r2 object put ${bucketName}/${key} ${configFlag(root)} --file=${filePath}`,
276
+ { cwd: root, silent: true }
277
+ );
278
+ }
279
+
247
280
  // ── Wrangler dev / deploy ──────────────────────────────────────────
248
281
 
249
282
  /**
@@ -280,6 +313,13 @@ export function wranglerDev(
280
313
  /│.*\[b\].*\[c\].*\[x\]/, // keyboard shortcuts content
281
314
  /╰.*╯/, // keyboard shortcuts box bottom
282
315
  /\[wrangler:/, // [wrangler:inf] prefixed messages (handled separately below)
316
+ /Resolving dependencies/, // npm dep install noise
317
+ /Resolved, downloaded/, // npm dep install noise
318
+ /Saved lockfile/, // npm dep install noise
319
+ /^✓\s+/, // npm dep install success lines
320
+ /^✘\s+/, // npm dep install error lines (let warn() handle these)
321
+ /^▲\s+/, // npm dep install warning lines
322
+ /^>\s+/, // npm progress lines
283
323
  /^\s*$/ // blank lines
284
324
  ];
285
325
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "koguma",
3
- "version": "2.0.0",
3
+ "version": "2.2.0",
4
4
  "description": "🐻 A little CMS with big heart — schema-driven, runs on Cloudflare's free tier",
5
5
  "type": "module",
6
6
  "license": "MIT",