koguma 0.6.4 → 0.6.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/cli/index.ts +581 -54
- package/package.json +1 -1
- package/src/config/define.ts +3 -3
- package/src/db/migrate.ts +7 -1
- package/src/db/sql.ts +166 -0
- package/src/rich-text/markdown-to-koguma.ts +164 -0
package/cli/index.ts
CHANGED
|
@@ -17,6 +17,13 @@ import { execSync } from 'child_process';
|
|
|
17
17
|
import { existsSync, readFileSync, writeFileSync, mkdirSync } from 'fs';
|
|
18
18
|
import { resolve, dirname, basename } from 'path';
|
|
19
19
|
import { generateSchema } from '../src/db/schema.ts';
|
|
20
|
+
import {
|
|
21
|
+
buildInsertSql,
|
|
22
|
+
wrapForShell,
|
|
23
|
+
buildAssetIndex,
|
|
24
|
+
processSeedEntry,
|
|
25
|
+
buildImportSql
|
|
26
|
+
} from '../src/db/sql.ts';
|
|
20
27
|
|
|
21
28
|
// ── Helpers ─────────────────────────────────────────────────────────
|
|
22
29
|
|
|
@@ -472,13 +479,9 @@ async function cmdBuild() {
|
|
|
472
479
|
async function cmdSeed() {
|
|
473
480
|
header('koguma seed');
|
|
474
481
|
const root = findProjectRoot();
|
|
482
|
+
const seedTs = resolve(root, 'db/seed.ts');
|
|
475
483
|
const seedSql = resolve(root, 'db/seed.sql');
|
|
476
484
|
|
|
477
|
-
if (!existsSync(seedSql)) {
|
|
478
|
-
fail('db/seed.sql not found. Generate it first with your seed script.');
|
|
479
|
-
process.exit(1);
|
|
480
|
-
}
|
|
481
|
-
|
|
482
485
|
// Parse database name from wrangler.toml
|
|
483
486
|
const toml = readFileSync(resolve(root, 'wrangler.toml'), 'utf-8');
|
|
484
487
|
const dbNameMatch = toml.match(/database_name\s*=\s*"([^"]+)"/);
|
|
@@ -487,14 +490,94 @@ async function cmdSeed() {
|
|
|
487
490
|
const isRemote = process.argv.includes('--remote');
|
|
488
491
|
const target = isRemote ? '--remote' : '--local';
|
|
489
492
|
|
|
490
|
-
|
|
491
|
-
|
|
492
|
-
|
|
493
|
-
|
|
494
|
-
|
|
495
|
-
|
|
493
|
+
if (existsSync(seedTs)) {
|
|
494
|
+
// ── seed.ts path — structured seeding with smart field resolution ──
|
|
495
|
+
log(`Using ${CYAN}db/seed.ts${RESET} (structured seed)`);
|
|
496
|
+
const seedModule = await import(seedTs);
|
|
497
|
+
const seedData = seedModule.default as Record<
|
|
498
|
+
string,
|
|
499
|
+
Record<string, unknown>[]
|
|
500
|
+
>;
|
|
501
|
+
|
|
502
|
+
// Import config for field metadata
|
|
503
|
+
const configPath = resolve(root, 'site.config.ts');
|
|
504
|
+
const configModule = await import(configPath);
|
|
505
|
+
const config = configModule.default as {
|
|
506
|
+
contentTypes: {
|
|
507
|
+
id: string;
|
|
508
|
+
fieldMeta: Record<string, { fieldType: string; required: boolean }>;
|
|
509
|
+
}[];
|
|
510
|
+
};
|
|
511
|
+
const ctMap = new Map(config.contentTypes.map(ct => [ct.id, ct]));
|
|
512
|
+
|
|
513
|
+
// Build asset title→id lookup
|
|
514
|
+
log('Loading asset index...');
|
|
515
|
+
let assetIndex = buildAssetIndex([]);
|
|
516
|
+
try {
|
|
517
|
+
const output = runCapture(
|
|
518
|
+
`bunx wrangler d1 execute ${dbName} ${target} --command "SELECT id, title FROM _assets" --json`,
|
|
519
|
+
root
|
|
520
|
+
);
|
|
521
|
+
const parsed = JSON.parse(output);
|
|
522
|
+
const assets = parsed?.[0]?.results ?? [];
|
|
523
|
+
assetIndex = buildAssetIndex(assets);
|
|
524
|
+
ok(`Loaded ${assetIndex.titleMap.size} asset mappings`);
|
|
525
|
+
} catch {
|
|
526
|
+
warn('No _assets table found — image title resolution disabled');
|
|
527
|
+
}
|
|
528
|
+
|
|
529
|
+
// Lazy-load markdown converter
|
|
530
|
+
const { markdownToKoguma } =
|
|
531
|
+
await import('../src/rich-text/markdown-to-koguma.ts');
|
|
532
|
+
|
|
533
|
+
let totalEntries = 0;
|
|
534
|
+
|
|
535
|
+
for (const [typeId, entries] of Object.entries(seedData)) {
|
|
536
|
+
const ct = ctMap.get(typeId);
|
|
537
|
+
if (!ct) {
|
|
538
|
+
warn(`Content type '${typeId}' not found in site.config.ts — skipping`);
|
|
539
|
+
continue;
|
|
540
|
+
}
|
|
541
|
+
|
|
542
|
+
log(`Seeding ${CYAN}${typeId}${RESET} (${entries.length} entries)...`);
|
|
496
543
|
|
|
497
|
-
|
|
544
|
+
for (const entry of entries) {
|
|
545
|
+
const { processed, resolutions } = processSeedEntry(
|
|
546
|
+
entry,
|
|
547
|
+
ct.fieldMeta,
|
|
548
|
+
assetIndex,
|
|
549
|
+
markdownToKoguma
|
|
550
|
+
);
|
|
551
|
+
|
|
552
|
+
for (const r of resolutions) {
|
|
553
|
+
ok(` ${r}`);
|
|
554
|
+
}
|
|
555
|
+
|
|
556
|
+
const sql = buildInsertSql(typeId, processed);
|
|
557
|
+
run(
|
|
558
|
+
`bunx wrangler d1 execute ${dbName} ${target} --command "${wrapForShell(sql)}"`,
|
|
559
|
+
{ cwd: root, silent: true }
|
|
560
|
+
);
|
|
561
|
+
totalEntries++;
|
|
562
|
+
}
|
|
563
|
+
}
|
|
564
|
+
|
|
565
|
+
ok(`Seeded ${totalEntries} entries (${isRemote ? 'remote' : 'local'})!`);
|
|
566
|
+
} else if (existsSync(seedSql)) {
|
|
567
|
+
// ── seed.sql path — legacy SQL seeding ──
|
|
568
|
+
log(
|
|
569
|
+
`Seeding ${isRemote ? 'REMOTE' : 'local'} database: ${CYAN}${dbName}${RESET}`
|
|
570
|
+
);
|
|
571
|
+
run(`bunx wrangler d1 execute ${dbName} ${target} --file=${seedSql}`, {
|
|
572
|
+
cwd: root
|
|
573
|
+
});
|
|
574
|
+
ok(`Database seeded (${isRemote ? 'remote' : 'local'})!`);
|
|
575
|
+
} else {
|
|
576
|
+
fail(
|
|
577
|
+
'No seed file found. Create db/seed.ts (structured) or db/seed.sql (raw SQL).'
|
|
578
|
+
);
|
|
579
|
+
process.exit(1);
|
|
580
|
+
}
|
|
498
581
|
}
|
|
499
582
|
|
|
500
583
|
async function cmdDeploy() {
|
|
@@ -672,9 +755,9 @@ function fieldTypeToTs(
|
|
|
672
755
|
case 'date':
|
|
673
756
|
return 'string';
|
|
674
757
|
case 'richText':
|
|
675
|
-
return '
|
|
758
|
+
return 'KogumaDocument';
|
|
676
759
|
case 'image':
|
|
677
|
-
return '
|
|
760
|
+
return 'KogumaAsset';
|
|
678
761
|
case 'boolean':
|
|
679
762
|
return 'boolean';
|
|
680
763
|
case 'number':
|
|
@@ -689,6 +772,14 @@ function fieldTypeToTs(
|
|
|
689
772
|
return meta.refContentType
|
|
690
773
|
? `${capitalize(meta.refContentType)}Entry[]`
|
|
691
774
|
: 'Record<string, unknown>[]';
|
|
775
|
+
case 'youtube':
|
|
776
|
+
case 'instagram':
|
|
777
|
+
case 'email':
|
|
778
|
+
case 'phone':
|
|
779
|
+
case 'color':
|
|
780
|
+
return 'string';
|
|
781
|
+
case 'images':
|
|
782
|
+
return 'string[]';
|
|
692
783
|
default:
|
|
693
784
|
return 'unknown';
|
|
694
785
|
}
|
|
@@ -739,6 +830,8 @@ async function cmdTypegen() {
|
|
|
739
830
|
' * Do not edit manually.',
|
|
740
831
|
' */',
|
|
741
832
|
'',
|
|
833
|
+
'import type { KogumaDocument, KogumaAsset } from "koguma/types";',
|
|
834
|
+
'',
|
|
742
835
|
'// ── System fields ── common to all entries',
|
|
743
836
|
'interface KogumaSystemFields {',
|
|
744
837
|
' id: string;',
|
|
@@ -856,7 +949,8 @@ async function cmdMigrate() {
|
|
|
856
949
|
const results = parsed?.[0]?.results ?? [];
|
|
857
950
|
existingColumns[ct.id] = results;
|
|
858
951
|
} catch {
|
|
859
|
-
warn(`Table '${ct.id}' does not exist yet.`);
|
|
952
|
+
warn(`Table '${ct.id}' does not exist yet — will create it.`);
|
|
953
|
+
existingColumns[ct.id] = [];
|
|
860
954
|
}
|
|
861
955
|
}
|
|
862
956
|
|
|
@@ -1119,16 +1213,9 @@ async function cmdImport() {
|
|
|
1119
1213
|
);
|
|
1120
1214
|
|
|
1121
1215
|
for (const entry of data.entries) {
|
|
1122
|
-
const
|
|
1123
|
-
const vals = Object.values(entry).map(v => {
|
|
1124
|
-
if (v === null) return 'NULL';
|
|
1125
|
-
if (typeof v === 'number') return String(v);
|
|
1126
|
-
return `'${String(v).replace(/'/g, "''")}'`;
|
|
1127
|
-
});
|
|
1128
|
-
|
|
1129
|
-
const sql = `INSERT OR REPLACE INTO ${typeId} (${cols.join(', ')}) VALUES (${vals.join(', ')})`;
|
|
1216
|
+
const sql = buildInsertSql(typeId, entry);
|
|
1130
1217
|
run(
|
|
1131
|
-
`bunx wrangler d1 execute ${dbName} ${target} --command "${sql
|
|
1218
|
+
`bunx wrangler d1 execute ${dbName} ${target} --command "${wrapForShell(sql)}"`,
|
|
1132
1219
|
{ cwd: root, silent: true }
|
|
1133
1220
|
);
|
|
1134
1221
|
totalEntries++;
|
|
@@ -1137,16 +1224,9 @@ async function cmdImport() {
|
|
|
1137
1224
|
// Import join tables
|
|
1138
1225
|
for (const [jtName, rows] of Object.entries(data.joinTables)) {
|
|
1139
1226
|
for (const row of rows) {
|
|
1140
|
-
const
|
|
1141
|
-
const vals = Object.values(row).map(v => {
|
|
1142
|
-
if (v === null) return 'NULL';
|
|
1143
|
-
if (typeof v === 'number') return String(v);
|
|
1144
|
-
return `'${String(v).replace(/'/g, "''")}'`;
|
|
1145
|
-
});
|
|
1146
|
-
|
|
1147
|
-
const sql = `INSERT OR REPLACE INTO ${jtName} (${cols.join(', ')}) VALUES (${vals.join(', ')})`;
|
|
1227
|
+
const sql = buildInsertSql(jtName, row);
|
|
1148
1228
|
run(
|
|
1149
|
-
`bunx wrangler d1 execute ${dbName} ${target} --command "${sql
|
|
1229
|
+
`bunx wrangler d1 execute ${dbName} ${target} --command "${wrapForShell(sql)}"`,
|
|
1150
1230
|
{ cwd: root, silent: true }
|
|
1151
1231
|
);
|
|
1152
1232
|
}
|
|
@@ -1158,37 +1238,469 @@ async function cmdImport() {
|
|
|
1158
1238
|
);
|
|
1159
1239
|
}
|
|
1160
1240
|
|
|
1241
|
+
// ── Shared auth helper ──────────────────────────────────────────────
|
|
1242
|
+
|
|
1243
|
+
async function authenticate(targetUrl: string, root: string): Promise<string> {
|
|
1244
|
+
const devVarsPath = resolve(root, '.dev.vars');
|
|
1245
|
+
let password = '';
|
|
1246
|
+
if (existsSync(devVarsPath)) {
|
|
1247
|
+
const content = readFileSync(devVarsPath, 'utf-8');
|
|
1248
|
+
const match = content.match(/KOGUMA_SECRET=(.+)/);
|
|
1249
|
+
if (match?.[1]) password = match[1].trim();
|
|
1250
|
+
}
|
|
1251
|
+
if (!password) {
|
|
1252
|
+
fail('KOGUMA_SECRET not found in .dev.vars');
|
|
1253
|
+
process.exit(1);
|
|
1254
|
+
}
|
|
1255
|
+
|
|
1256
|
+
const loginRes = await fetch(`${targetUrl}/api/auth/login`, {
|
|
1257
|
+
method: 'POST',
|
|
1258
|
+
headers: { 'Content-Type': 'application/json' },
|
|
1259
|
+
body: JSON.stringify({ password }),
|
|
1260
|
+
redirect: 'manual'
|
|
1261
|
+
});
|
|
1262
|
+
const setCookie = loginRes.headers.get('set-cookie') ?? '';
|
|
1263
|
+
const cookieMatch = setCookie.match(/koguma_session=[^;]+/);
|
|
1264
|
+
if (!cookieMatch) {
|
|
1265
|
+
fail('Login failed — check your KOGUMA_SECRET');
|
|
1266
|
+
process.exit(1);
|
|
1267
|
+
}
|
|
1268
|
+
return cookieMatch[0];
|
|
1269
|
+
}
|
|
1270
|
+
|
|
1271
|
+
function getRemoteUrl(): string {
|
|
1272
|
+
const idx = process.argv.indexOf('--remote');
|
|
1273
|
+
const url = idx >= 0 ? process.argv[idx + 1] : undefined;
|
|
1274
|
+
if (!url || url.startsWith('-')) {
|
|
1275
|
+
fail('Usage: koguma <command> --remote https://your-site.workers.dev');
|
|
1276
|
+
process.exit(1);
|
|
1277
|
+
}
|
|
1278
|
+
return url.replace(/\/$/, '');
|
|
1279
|
+
}
|
|
1280
|
+
|
|
1281
|
+
function getDbName(root: string): string {
|
|
1282
|
+
const toml = readFileSync(resolve(root, 'wrangler.toml'), 'utf-8');
|
|
1283
|
+
const match = toml.match(/database_name\s*=\s*"([^"]+)"/);
|
|
1284
|
+
return match?.[1] ?? 'my-db';
|
|
1285
|
+
}
|
|
1286
|
+
|
|
1287
|
+
// ── Pull ────────────────────────────────────────────────────────────
|
|
1288
|
+
|
|
1289
|
+
async function cmdPull() {
|
|
1290
|
+
header('koguma pull');
|
|
1291
|
+
const root = findProjectRoot();
|
|
1292
|
+
const remoteUrl = getRemoteUrl();
|
|
1293
|
+
const dbName = getDbName(root);
|
|
1294
|
+
|
|
1295
|
+
// 1. Migrate local schema
|
|
1296
|
+
log('Step 1: Migrating local schema...');
|
|
1297
|
+
// Re-use migrate logic inline (avoid process.argv mutation)
|
|
1298
|
+
const configPath = resolve(root, 'site.config.ts');
|
|
1299
|
+
const configModule = await import(configPath);
|
|
1300
|
+
const config = configModule.default as {
|
|
1301
|
+
contentTypes: {
|
|
1302
|
+
id: string;
|
|
1303
|
+
name: string;
|
|
1304
|
+
fieldMeta: Record<
|
|
1305
|
+
string,
|
|
1306
|
+
{ fieldType: string; required: boolean; refContentType?: string }
|
|
1307
|
+
>;
|
|
1308
|
+
}[];
|
|
1309
|
+
};
|
|
1310
|
+
|
|
1311
|
+
// 2. Export remote content
|
|
1312
|
+
log('\nStep 2: Exporting remote content...');
|
|
1313
|
+
const exportData: Record<
|
|
1314
|
+
string,
|
|
1315
|
+
{ entries: unknown[]; joinTables: Record<string, unknown[]> }
|
|
1316
|
+
> = {};
|
|
1317
|
+
|
|
1318
|
+
for (const ct of config.contentTypes) {
|
|
1319
|
+
try {
|
|
1320
|
+
const output = runCapture(
|
|
1321
|
+
`bunx wrangler d1 execute ${dbName} --remote --command "SELECT * FROM ${ct.id}" --json`,
|
|
1322
|
+
root
|
|
1323
|
+
);
|
|
1324
|
+
const parsed = JSON.parse(output);
|
|
1325
|
+
const entries = parsed?.[0]?.results ?? [];
|
|
1326
|
+
|
|
1327
|
+
const joinTables: Record<string, unknown[]> = {};
|
|
1328
|
+
for (const [fieldId, meta] of Object.entries(ct.fieldMeta)) {
|
|
1329
|
+
if (meta.fieldType === 'references') {
|
|
1330
|
+
const joinTable = `${ct.id}__${fieldId}`;
|
|
1331
|
+
try {
|
|
1332
|
+
const jtOutput = runCapture(
|
|
1333
|
+
`bunx wrangler d1 execute ${dbName} --remote --command "SELECT * FROM ${joinTable}" --json`,
|
|
1334
|
+
root
|
|
1335
|
+
);
|
|
1336
|
+
const jtParsed = JSON.parse(jtOutput);
|
|
1337
|
+
joinTables[joinTable] = jtParsed?.[0]?.results ?? [];
|
|
1338
|
+
} catch {
|
|
1339
|
+
// Join table may not exist
|
|
1340
|
+
}
|
|
1341
|
+
}
|
|
1342
|
+
}
|
|
1343
|
+
|
|
1344
|
+
exportData[ct.id] = { entries, joinTables };
|
|
1345
|
+
ok(`${ct.id}: ${entries.length} entries`);
|
|
1346
|
+
} catch {
|
|
1347
|
+
warn(`Could not export ${ct.id} from remote`);
|
|
1348
|
+
}
|
|
1349
|
+
}
|
|
1350
|
+
|
|
1351
|
+
// Also export _assets
|
|
1352
|
+
let remoteAssets: Record<string, unknown>[] = [];
|
|
1353
|
+
try {
|
|
1354
|
+
const output = runCapture(
|
|
1355
|
+
`bunx wrangler d1 execute ${dbName} --remote --command "SELECT * FROM _assets" --json`,
|
|
1356
|
+
root
|
|
1357
|
+
);
|
|
1358
|
+
const parsed = JSON.parse(output);
|
|
1359
|
+
remoteAssets = parsed?.[0]?.results ?? [];
|
|
1360
|
+
ok(`_assets: ${remoteAssets.length} assets`);
|
|
1361
|
+
} catch {
|
|
1362
|
+
warn('Could not export _assets from remote');
|
|
1363
|
+
}
|
|
1364
|
+
|
|
1365
|
+
// 3. Import content into local
|
|
1366
|
+
log('\nStep 3: Importing content to local...');
|
|
1367
|
+
|
|
1368
|
+
// Import _assets first
|
|
1369
|
+
for (const asset of remoteAssets) {
|
|
1370
|
+
const sql = buildInsertSql('_assets', asset);
|
|
1371
|
+
run(
|
|
1372
|
+
`bunx wrangler d1 execute ${dbName} --local --command "${wrapForShell(sql)}"`,
|
|
1373
|
+
{ cwd: root, silent: true }
|
|
1374
|
+
);
|
|
1375
|
+
}
|
|
1376
|
+
|
|
1377
|
+
// Import content types
|
|
1378
|
+
for (const [typeId, data] of Object.entries(exportData)) {
|
|
1379
|
+
for (const entry of data.entries as Record<string, unknown>[]) {
|
|
1380
|
+
const sql = buildInsertSql(typeId, entry);
|
|
1381
|
+
run(
|
|
1382
|
+
`bunx wrangler d1 execute ${dbName} --local --command "${wrapForShell(sql)}"`,
|
|
1383
|
+
{ cwd: root, silent: true }
|
|
1384
|
+
);
|
|
1385
|
+
}
|
|
1386
|
+
for (const [jtName, rows] of Object.entries(data.joinTables)) {
|
|
1387
|
+
for (const row of rows as Record<string, unknown>[]) {
|
|
1388
|
+
const sql = buildInsertSql(jtName, row);
|
|
1389
|
+
run(
|
|
1390
|
+
`bunx wrangler d1 execute ${dbName} --local --command "${wrapForShell(sql)}"`,
|
|
1391
|
+
{ cwd: root, silent: true }
|
|
1392
|
+
);
|
|
1393
|
+
}
|
|
1394
|
+
}
|
|
1395
|
+
}
|
|
1396
|
+
|
|
1397
|
+
// 4. Download R2 media
|
|
1398
|
+
log('\nStep 4: Downloading remote media...');
|
|
1399
|
+
const cookie = await authenticate(remoteUrl, root);
|
|
1400
|
+
|
|
1401
|
+
const mediaRes = await fetch(`${remoteUrl}/api/admin/media`, {
|
|
1402
|
+
headers: { Cookie: cookie }
|
|
1403
|
+
});
|
|
1404
|
+
if (!mediaRes.ok) {
|
|
1405
|
+
warn('Could not list remote media');
|
|
1406
|
+
} else {
|
|
1407
|
+
const { assets } = (await mediaRes.json()) as {
|
|
1408
|
+
assets: { id: string; url: string; title: string }[];
|
|
1409
|
+
};
|
|
1410
|
+
log(`Found ${assets.length} remote assets`);
|
|
1411
|
+
|
|
1412
|
+
const bucketMatch = readFileSync(
|
|
1413
|
+
resolve(root, 'wrangler.toml'),
|
|
1414
|
+
'utf-8'
|
|
1415
|
+
).match(/bucket_name\s*=\s*"([^"]+)"/);
|
|
1416
|
+
const bucketName = bucketMatch?.[1] ?? 'media';
|
|
1417
|
+
|
|
1418
|
+
for (const asset of assets) {
|
|
1419
|
+
const key = asset.url.replace('/api/media/', '');
|
|
1420
|
+
log(`⬇ ${asset.title}`);
|
|
1421
|
+
try {
|
|
1422
|
+
const dlRes = await fetch(`${remoteUrl}${asset.url}`);
|
|
1423
|
+
if (!dlRes.ok) {
|
|
1424
|
+
warn(` Download failed: ${dlRes.status}`);
|
|
1425
|
+
continue;
|
|
1426
|
+
}
|
|
1427
|
+
const buf = Buffer.from(await dlRes.arrayBuffer());
|
|
1428
|
+
const tmpPath = resolve(root, `db/.media-tmp-${key}`);
|
|
1429
|
+
writeFileSync(tmpPath, buf);
|
|
1430
|
+
run(
|
|
1431
|
+
`bunx wrangler r2 object put ${bucketName}/${key} --file=${tmpPath} --local`,
|
|
1432
|
+
{ cwd: root, silent: true }
|
|
1433
|
+
);
|
|
1434
|
+
// Clean up temp file
|
|
1435
|
+
try {
|
|
1436
|
+
const { unlinkSync } = await import('fs');
|
|
1437
|
+
unlinkSync(tmpPath);
|
|
1438
|
+
} catch {
|
|
1439
|
+
/* ignore */
|
|
1440
|
+
}
|
|
1441
|
+
ok(` → local R2`);
|
|
1442
|
+
} catch (e) {
|
|
1443
|
+
warn(` Error: ${e}`);
|
|
1444
|
+
}
|
|
1445
|
+
}
|
|
1446
|
+
}
|
|
1447
|
+
|
|
1448
|
+
ok('Pull complete! Local now mirrors remote.');
|
|
1449
|
+
}
|
|
1450
|
+
|
|
1451
|
+
// ── Push ────────────────────────────────────────────────────────────
|
|
1452
|
+
|
|
1453
|
+
async function cmdPush() {
|
|
1454
|
+
header('koguma push');
|
|
1455
|
+
const root = findProjectRoot();
|
|
1456
|
+
const remoteUrl = getRemoteUrl();
|
|
1457
|
+
const dbName = getDbName(root);
|
|
1458
|
+
|
|
1459
|
+
// 1. Migrate remote schema
|
|
1460
|
+
log('Step 1: Migrating remote schema...');
|
|
1461
|
+
const configPath = resolve(root, 'site.config.ts');
|
|
1462
|
+
const configModule = await import(configPath);
|
|
1463
|
+
const config = configModule.default as {
|
|
1464
|
+
contentTypes: {
|
|
1465
|
+
id: string;
|
|
1466
|
+
name: string;
|
|
1467
|
+
fieldMeta: Record<
|
|
1468
|
+
string,
|
|
1469
|
+
{ fieldType: string; required: boolean; refContentType?: string }
|
|
1470
|
+
>;
|
|
1471
|
+
}[];
|
|
1472
|
+
};
|
|
1473
|
+
|
|
1474
|
+
// Run migrate --remote
|
|
1475
|
+
const existingColumns: Record<string, { name: string; type: string }[]> = {};
|
|
1476
|
+
for (const ct of config.contentTypes) {
|
|
1477
|
+
try {
|
|
1478
|
+
const output = runCapture(
|
|
1479
|
+
`bunx wrangler d1 execute ${dbName} --remote --command "SELECT name, type FROM pragma_table_info('${ct.id}')" --json`,
|
|
1480
|
+
root
|
|
1481
|
+
);
|
|
1482
|
+
const parsed = JSON.parse(output);
|
|
1483
|
+
existingColumns[ct.id] = parsed?.[0]?.results ?? [];
|
|
1484
|
+
} catch {
|
|
1485
|
+
warn(`Table '${ct.id}' does not exist on remote — will create it.`);
|
|
1486
|
+
existingColumns[ct.id] = [];
|
|
1487
|
+
}
|
|
1488
|
+
}
|
|
1489
|
+
|
|
1490
|
+
const { detectDrift } = await import('../src/db/migrate.ts');
|
|
1491
|
+
const driftResult = detectDrift(config.contentTypes as any, existingColumns);
|
|
1492
|
+
|
|
1493
|
+
if (driftResult.sql.length > 0) {
|
|
1494
|
+
const sqlFile = resolve(root, 'db/migration.sql');
|
|
1495
|
+
writeFileSync(sqlFile, driftResult.sql.join('\n'));
|
|
1496
|
+
run(`bunx wrangler d1 execute ${dbName} --remote --file=${sqlFile}`, {
|
|
1497
|
+
cwd: root
|
|
1498
|
+
});
|
|
1499
|
+
ok('Remote schema migrated');
|
|
1500
|
+
} else {
|
|
1501
|
+
ok('Remote schema is up to date');
|
|
1502
|
+
}
|
|
1503
|
+
|
|
1504
|
+
// 2. Export local content
|
|
1505
|
+
log('\nStep 2: Exporting local content...');
|
|
1506
|
+
const exportData: Record<
|
|
1507
|
+
string,
|
|
1508
|
+
{ entries: unknown[]; joinTables: Record<string, unknown[]> }
|
|
1509
|
+
> = {};
|
|
1510
|
+
|
|
1511
|
+
for (const ct of config.contentTypes) {
|
|
1512
|
+
try {
|
|
1513
|
+
const output = runCapture(
|
|
1514
|
+
`bunx wrangler d1 execute ${dbName} --local --command "SELECT * FROM ${ct.id}" --json`,
|
|
1515
|
+
root
|
|
1516
|
+
);
|
|
1517
|
+
const parsed = JSON.parse(output);
|
|
1518
|
+
const entries = parsed?.[0]?.results ?? [];
|
|
1519
|
+
|
|
1520
|
+
const joinTables: Record<string, unknown[]> = {};
|
|
1521
|
+
for (const [fieldId, meta] of Object.entries(ct.fieldMeta)) {
|
|
1522
|
+
if (meta.fieldType === 'references') {
|
|
1523
|
+
const joinTable = `${ct.id}__${fieldId}`;
|
|
1524
|
+
try {
|
|
1525
|
+
const jtOutput = runCapture(
|
|
1526
|
+
`bunx wrangler d1 execute ${dbName} --local --command "SELECT * FROM ${joinTable}" --json`,
|
|
1527
|
+
root
|
|
1528
|
+
);
|
|
1529
|
+
const jtParsed = JSON.parse(jtOutput);
|
|
1530
|
+
joinTables[joinTable] = jtParsed?.[0]?.results ?? [];
|
|
1531
|
+
} catch {
|
|
1532
|
+
/* */
|
|
1533
|
+
}
|
|
1534
|
+
}
|
|
1535
|
+
}
|
|
1536
|
+
|
|
1537
|
+
exportData[ct.id] = { entries, joinTables };
|
|
1538
|
+
ok(`${ct.id}: ${entries.length} entries`);
|
|
1539
|
+
} catch {
|
|
1540
|
+
warn(`Could not export ${ct.id} from local`);
|
|
1541
|
+
}
|
|
1542
|
+
}
|
|
1543
|
+
|
|
1544
|
+
// Also export _assets
|
|
1545
|
+
let localAssets: Record<string, unknown>[] = [];
|
|
1546
|
+
try {
|
|
1547
|
+
const output = runCapture(
|
|
1548
|
+
`bunx wrangler d1 execute ${dbName} --local --command "SELECT * FROM _assets" --json`,
|
|
1549
|
+
root
|
|
1550
|
+
);
|
|
1551
|
+
const parsed = JSON.parse(output);
|
|
1552
|
+
localAssets = parsed?.[0]?.results ?? [];
|
|
1553
|
+
ok(`_assets: ${localAssets.length} assets`);
|
|
1554
|
+
} catch {
|
|
1555
|
+
warn('Could not export _assets from local');
|
|
1556
|
+
}
|
|
1557
|
+
|
|
1558
|
+
// 3. Import content to remote
|
|
1559
|
+
log('\nStep 3: Importing content to remote...');
|
|
1560
|
+
|
|
1561
|
+
// Import _assets first
|
|
1562
|
+
for (const asset of localAssets) {
|
|
1563
|
+
const sql = buildInsertSql('_assets', asset);
|
|
1564
|
+
run(
|
|
1565
|
+
`bunx wrangler d1 execute ${dbName} --remote --command "${wrapForShell(sql)}"`,
|
|
1566
|
+
{ cwd: root, silent: true }
|
|
1567
|
+
);
|
|
1568
|
+
}
|
|
1569
|
+
|
|
1570
|
+
// Import content types
|
|
1571
|
+
for (const [typeId, data] of Object.entries(exportData)) {
|
|
1572
|
+
for (const entry of data.entries as Record<string, unknown>[]) {
|
|
1573
|
+
const sql = buildInsertSql(typeId, entry);
|
|
1574
|
+
run(
|
|
1575
|
+
`bunx wrangler d1 execute ${dbName} --remote --command "${wrapForShell(sql)}"`,
|
|
1576
|
+
{ cwd: root, silent: true }
|
|
1577
|
+
);
|
|
1578
|
+
}
|
|
1579
|
+
for (const [jtName, rows] of Object.entries(data.joinTables)) {
|
|
1580
|
+
for (const row of rows as Record<string, unknown>[]) {
|
|
1581
|
+
const sql = buildInsertSql(jtName, row);
|
|
1582
|
+
run(
|
|
1583
|
+
`bunx wrangler d1 execute ${dbName} --remote --command "${wrapForShell(sql)}"`,
|
|
1584
|
+
{ cwd: root, silent: true }
|
|
1585
|
+
);
|
|
1586
|
+
}
|
|
1587
|
+
}
|
|
1588
|
+
}
|
|
1589
|
+
|
|
1590
|
+
// 4. Upload local media to remote
|
|
1591
|
+
log('\nStep 4: Uploading local media to remote...');
|
|
1592
|
+
const cookie = await authenticate(remoteUrl, root);
|
|
1593
|
+
|
|
1594
|
+
for (const asset of localAssets as {
|
|
1595
|
+
id: string;
|
|
1596
|
+
url: string;
|
|
1597
|
+
title: string;
|
|
1598
|
+
content_type: string;
|
|
1599
|
+
}[]) {
|
|
1600
|
+
const key = (asset.url as string).replace('/api/media/', '');
|
|
1601
|
+
log(`⬆ ${asset.title}`);
|
|
1602
|
+
try {
|
|
1603
|
+
// Download from local wrangler dev
|
|
1604
|
+
const dlRes = await fetch(`http://localhost:8787${asset.url}`);
|
|
1605
|
+
if (!dlRes.ok) {
|
|
1606
|
+
warn(` Local download failed: ${dlRes.status}`);
|
|
1607
|
+
continue;
|
|
1608
|
+
}
|
|
1609
|
+
|
|
1610
|
+
const blob = await dlRes.blob();
|
|
1611
|
+
const fileName = key;
|
|
1612
|
+
const formData = new FormData();
|
|
1613
|
+
formData.append(
|
|
1614
|
+
'file',
|
|
1615
|
+
new File([blob], fileName, {
|
|
1616
|
+
type: asset.content_type ?? 'application/octet-stream'
|
|
1617
|
+
})
|
|
1618
|
+
);
|
|
1619
|
+
formData.append('title', asset.title ?? fileName);
|
|
1620
|
+
|
|
1621
|
+
const upRes = await fetch(`${remoteUrl}/api/admin/media`, {
|
|
1622
|
+
method: 'POST',
|
|
1623
|
+
headers: { Cookie: cookie },
|
|
1624
|
+
body: formData
|
|
1625
|
+
});
|
|
1626
|
+
|
|
1627
|
+
if (!upRes.ok) {
|
|
1628
|
+
warn(` Upload failed: ${await upRes.text()}`);
|
|
1629
|
+
continue;
|
|
1630
|
+
}
|
|
1631
|
+
ok(` → remote R2`);
|
|
1632
|
+
} catch (e) {
|
|
1633
|
+
warn(` Error: ${e}`);
|
|
1634
|
+
}
|
|
1635
|
+
}
|
|
1636
|
+
|
|
1637
|
+
// 5. Deploy
|
|
1638
|
+
log('\nStep 5: Deploying...');
|
|
1639
|
+
await cmdDeploy();
|
|
1640
|
+
|
|
1641
|
+
ok('Push complete! Remote now mirrors local.');
|
|
1642
|
+
}
|
|
1643
|
+
|
|
1644
|
+
// ── Wrangler wrappers ───────────────────────────────────────────────
|
|
1645
|
+
|
|
1646
|
+
function cmdDev() {
|
|
1647
|
+
const root = findProjectRoot();
|
|
1648
|
+
const extra = process.argv.slice(3).join(' ');
|
|
1649
|
+
run(`bunx wrangler dev ${extra}`.trim(), { cwd: root });
|
|
1650
|
+
}
|
|
1651
|
+
|
|
1652
|
+
function cmdLogin() {
|
|
1653
|
+
run('bunx wrangler login');
|
|
1654
|
+
}
|
|
1655
|
+
|
|
1656
|
+
function cmdTail() {
|
|
1657
|
+
const root = findProjectRoot();
|
|
1658
|
+
const extra = process.argv.slice(3).join(' ');
|
|
1659
|
+
run(`bunx wrangler tail ${extra}`.trim(), { cwd: root });
|
|
1660
|
+
}
|
|
1661
|
+
|
|
1662
|
+
// ── Help ────────────────────────────────────────────────────────────
|
|
1663
|
+
|
|
1161
1664
|
function cmdHelp() {
|
|
1162
1665
|
console.log(`
|
|
1163
|
-
${BOLD}🐻 Koguma CLI${RESET} ${DIM}v0.
|
|
1666
|
+
${BOLD}🐻 Koguma CLI${RESET} ${DIM}v0.6.5${RESET}
|
|
1164
1667
|
|
|
1165
1668
|
${BOLD}Usage:${RESET} koguma <command>
|
|
1166
1669
|
|
|
1167
|
-
${BOLD}
|
|
1168
|
-
${CYAN}
|
|
1169
|
-
${CYAN}
|
|
1170
|
-
${CYAN}
|
|
1171
|
-
|
|
1172
|
-
|
|
1670
|
+
${BOLD}Development:${RESET}
|
|
1671
|
+
${CYAN}dev${RESET} Start local dev server (wrangler dev)
|
|
1672
|
+
${CYAN}login${RESET} Authenticate with Cloudflare
|
|
1673
|
+
${CYAN}tail${RESET} Stream live logs from production
|
|
1674
|
+
|
|
1675
|
+
${BOLD}Schema & Types:${RESET}
|
|
1676
|
+
${CYAN}schema${RESET} Generate DDL from site.config.ts → db/schema.sql
|
|
1173
1677
|
${CYAN}typegen${RESET} Generate koguma.d.ts typed interfaces
|
|
1174
|
-
${CYAN}migrate${RESET} Detect schema drift
|
|
1678
|
+
${CYAN}migrate${RESET} Detect schema drift, apply CREATE/ALTER TABLE
|
|
1679
|
+
|
|
1680
|
+
${BOLD}Content:${RESET}
|
|
1681
|
+
${CYAN}seed${RESET} Seed database from db/seed.ts or db/seed.sql
|
|
1175
1682
|
${CYAN}export${RESET} Export all content to JSON
|
|
1176
1683
|
${CYAN}import${RESET} Import content from JSON file
|
|
1177
|
-
|
|
1684
|
+
|
|
1685
|
+
${BOLD}Media:${RESET}
|
|
1686
|
+
${CYAN}migrate-media${RESET} Download external images and upload to R2
|
|
1687
|
+
|
|
1688
|
+
${BOLD}Sync:${RESET}
|
|
1689
|
+
${CYAN}pull${RESET} Download remote content + media → local
|
|
1690
|
+
${CYAN}push${RESET} Upload local content + media → remote + deploy
|
|
1691
|
+
|
|
1692
|
+
${BOLD}Deploy:${RESET}
|
|
1693
|
+
${CYAN}init${RESET} Create D1 database and R2 bucket, patch wrangler.toml
|
|
1694
|
+
${CYAN}secret${RESET} Set the admin password on Cloudflare
|
|
1695
|
+
${CYAN}build${RESET} Build the admin dashboard bundle
|
|
1178
1696
|
${CYAN}deploy${RESET} Build admin + frontend, then deploy via wrangler
|
|
1179
1697
|
|
|
1180
|
-
${BOLD}
|
|
1181
|
-
${DIM}koguma
|
|
1182
|
-
${DIM}koguma migrate --remote${
|
|
1183
|
-
${DIM}koguma
|
|
1184
|
-
${DIM}koguma
|
|
1185
|
-
${DIM}koguma
|
|
1186
|
-
|
|
1187
|
-
${BOLD}First deploy:${RESET}
|
|
1188
|
-
${DIM}$${RESET} koguma init ${DIM}# Create D1 + R2${RESET}
|
|
1189
|
-
${DIM}$${RESET} koguma secret ${DIM}# Set admin password${RESET}
|
|
1190
|
-
${DIM}$${RESET} koguma seed --remote ${DIM}# Seed production DB${RESET}
|
|
1191
|
-
${DIM}$${RESET} koguma deploy ${DIM}# Build + deploy${RESET}
|
|
1698
|
+
${BOLD}Examples:${RESET}
|
|
1699
|
+
${DIM}$${RESET} koguma dev ${DIM}# Local dev server${RESET}
|
|
1700
|
+
${DIM}$${RESET} koguma migrate --remote ${DIM}# Migrate production DB${RESET}
|
|
1701
|
+
${DIM}$${RESET} koguma pull --remote https://my-site.dev ${DIM}# Sync remote → local${RESET}
|
|
1702
|
+
${DIM}$${RESET} koguma push --remote https://my-site.dev ${DIM}# Sync local → remote${RESET}
|
|
1703
|
+
${DIM}$${RESET} koguma seed --remote ${DIM}# Seed production DB${RESET}
|
|
1192
1704
|
`);
|
|
1193
1705
|
}
|
|
1194
1706
|
|
|
@@ -1230,6 +1742,21 @@ switch (command) {
|
|
|
1230
1742
|
case 'deploy':
|
|
1231
1743
|
await cmdDeploy();
|
|
1232
1744
|
break;
|
|
1745
|
+
case 'pull':
|
|
1746
|
+
await cmdPull();
|
|
1747
|
+
break;
|
|
1748
|
+
case 'push':
|
|
1749
|
+
await cmdPush();
|
|
1750
|
+
break;
|
|
1751
|
+
case 'dev':
|
|
1752
|
+
cmdDev();
|
|
1753
|
+
break;
|
|
1754
|
+
case 'login':
|
|
1755
|
+
cmdLogin();
|
|
1756
|
+
break;
|
|
1757
|
+
case 'tail':
|
|
1758
|
+
cmdTail();
|
|
1759
|
+
break;
|
|
1233
1760
|
case 'help':
|
|
1234
1761
|
case '--help':
|
|
1235
1762
|
case '-h':
|
package/package.json
CHANGED
package/src/config/define.ts
CHANGED
|
@@ -82,7 +82,7 @@ export function contentType<
|
|
|
82
82
|
displayField: string;
|
|
83
83
|
singleton?: boolean;
|
|
84
84
|
fields: F;
|
|
85
|
-
})
|
|
85
|
+
}) {
|
|
86
86
|
// Collect all field builders (from groups or flat)
|
|
87
87
|
const allFields: Record<string, FieldBuilder> = {};
|
|
88
88
|
const groups: GroupConfig[] = [];
|
|
@@ -116,11 +116,11 @@ export function contentType<
|
|
|
116
116
|
name: opts.name,
|
|
117
117
|
displayField: opts.displayField,
|
|
118
118
|
singleton: opts.singleton,
|
|
119
|
-
schema
|
|
119
|
+
schema,
|
|
120
120
|
fieldMeta,
|
|
121
121
|
groups,
|
|
122
122
|
flatFields
|
|
123
|
-
};
|
|
123
|
+
} satisfies ContentTypeConfig;
|
|
124
124
|
}
|
|
125
125
|
|
|
126
126
|
// ── defineConfig ─────────────────────────────────────────────────────
|
package/src/db/migrate.ts
CHANGED
|
@@ -48,7 +48,13 @@ function sqlType(fieldType: string): string {
|
|
|
48
48
|
case 'longText':
|
|
49
49
|
case 'richText':
|
|
50
50
|
case 'url':
|
|
51
|
+
case 'email':
|
|
52
|
+
case 'phone':
|
|
53
|
+
case 'color':
|
|
54
|
+
case 'youtube':
|
|
55
|
+
case 'instagram':
|
|
51
56
|
case 'image':
|
|
57
|
+
case 'images':
|
|
52
58
|
case 'reference':
|
|
53
59
|
case 'date':
|
|
54
60
|
case 'select':
|
|
@@ -76,7 +82,7 @@ export function detectDrift(
|
|
|
76
82
|
|
|
77
83
|
for (const ct of contentTypes) {
|
|
78
84
|
const tableCols = existingColumns[ct.id];
|
|
79
|
-
if (!tableCols) {
|
|
85
|
+
if (!tableCols || tableCols.length === 0) {
|
|
80
86
|
// Table doesn't exist — generate CREATE TABLE
|
|
81
87
|
const columns: string[] = [
|
|
82
88
|
'id TEXT PRIMARY KEY',
|
package/src/db/sql.ts
ADDED
|
@@ -0,0 +1,166 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* sql.ts — Pure SQL generation utilities for D1 operations.
|
|
3
|
+
*
|
|
4
|
+
* These functions are used by seed, import, push, pull, and export commands.
|
|
5
|
+
* Extracted here to enable thorough unit testing without wrangler dependencies.
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
// ── Value escaping ──────────────────────────────────────────────────
|
|
9
|
+
|
|
10
|
+
/**
|
|
11
|
+
* Escape a JavaScript value for safe embedding in a SQL string.
|
|
12
|
+
* Returns a SQL-safe string (already quoted if needed).
|
|
13
|
+
*/
|
|
14
|
+
export function escapeValue(v: unknown): string {
|
|
15
|
+
if (v === null || v === undefined) return 'NULL';
|
|
16
|
+
if (typeof v === 'number') return String(v);
|
|
17
|
+
if (typeof v === 'boolean') return v ? '1' : '0';
|
|
18
|
+
return `'${String(v).replace(/'/g, "''")}'`;
|
|
19
|
+
}
|
|
20
|
+
|
|
21
|
+
// ── INSERT OR REPLACE generation ────────────────────────────────────
|
|
22
|
+
|
|
23
|
+
/**
|
|
24
|
+
* Generate an INSERT OR REPLACE statement from a table name and row data.
|
|
25
|
+
*/
|
|
26
|
+
export function buildInsertSql(
|
|
27
|
+
table: string,
|
|
28
|
+
row: Record<string, unknown>
|
|
29
|
+
): string {
|
|
30
|
+
const cols = Object.keys(row);
|
|
31
|
+
const vals = Object.values(row).map(escapeValue);
|
|
32
|
+
return `INSERT OR REPLACE INTO ${table} (${cols.join(', ')}) VALUES (${vals.join(', ')})`;
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
// ── Shell-safe SQL escaping ─────────────────────────────────────────
|
|
36
|
+
|
|
37
|
+
/**
|
|
38
|
+
* Wrap a SQL statement for safe use in `wrangler d1 execute --command "..."`.
|
|
39
|
+
* Escapes inner double quotes.
|
|
40
|
+
*/
|
|
41
|
+
export function wrapForShell(sql: string): string {
|
|
42
|
+
return sql.replace(/"/g, '\\"');
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
// ── Asset title resolution ──────────────────────────────────────────
|
|
46
|
+
|
|
47
|
+
export interface AssetIndex {
|
|
48
|
+
/** Map of lowercase title → asset ID */
|
|
49
|
+
titleMap: Map<string, string>;
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
/**
|
|
53
|
+
* Build an asset index from raw _assets rows.
|
|
54
|
+
* Maps both full title and title-without-extension to asset IDs.
|
|
55
|
+
*/
|
|
56
|
+
export function buildAssetIndex(
|
|
57
|
+
assets: { id: string; title: string }[]
|
|
58
|
+
): AssetIndex {
|
|
59
|
+
const titleMap = new Map<string, string>();
|
|
60
|
+
for (const a of assets) {
|
|
61
|
+
titleMap.set(a.title.toLowerCase(), a.id);
|
|
62
|
+
const noExt = a.title.replace(/\.\w+$/, '').toLowerCase();
|
|
63
|
+
if (noExt !== a.title.toLowerCase()) titleMap.set(noExt, a.id);
|
|
64
|
+
}
|
|
65
|
+
return { titleMap };
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
/**
|
|
69
|
+
* Resolve a human-friendly asset reference (title or filename) to an asset ID.
|
|
70
|
+
* Returns the original value if not found.
|
|
71
|
+
*/
|
|
72
|
+
export function resolveAssetRef(
|
|
73
|
+
value: string,
|
|
74
|
+
index: AssetIndex
|
|
75
|
+
): { id: string; resolved: boolean } {
|
|
76
|
+
const lower = value.toLowerCase();
|
|
77
|
+
const id = index.titleMap.get(lower);
|
|
78
|
+
if (id) return { id, resolved: true };
|
|
79
|
+
const noExt = value.replace(/\.\w+$/, '').toLowerCase();
|
|
80
|
+
const id2 = index.titleMap.get(noExt);
|
|
81
|
+
if (id2) return { id: id2, resolved: true };
|
|
82
|
+
return { id: value, resolved: false };
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
// ── Seed entry processing ───────────────────────────────────────────
|
|
86
|
+
|
|
87
|
+
export interface FieldMeta {
|
|
88
|
+
fieldType: string;
|
|
89
|
+
required: boolean;
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
/**
|
|
93
|
+
* Process a seed entry by resolving image references and converting
|
|
94
|
+
* markdown strings in richText fields to KogumaDocument JSON.
|
|
95
|
+
*
|
|
96
|
+
* Returns a new processed entry (does not mutate the input).
|
|
97
|
+
*/
|
|
98
|
+
export function processSeedEntry(
|
|
99
|
+
entry: Record<string, unknown>,
|
|
100
|
+
fieldMeta: Record<string, FieldMeta>,
|
|
101
|
+
assetIndex: AssetIndex,
|
|
102
|
+
markdownToKoguma: (md: string) => { nodes: unknown[] }
|
|
103
|
+
): { processed: Record<string, unknown>; resolutions: string[] } {
|
|
104
|
+
const processed: Record<string, unknown> = {
|
|
105
|
+
id: (entry.id as string) ?? crypto.randomUUID(),
|
|
106
|
+
status: (entry.status as string) ?? 'published',
|
|
107
|
+
...entry
|
|
108
|
+
};
|
|
109
|
+
|
|
110
|
+
const resolutions: string[] = [];
|
|
111
|
+
|
|
112
|
+
for (const [fieldId, meta] of Object.entries(fieldMeta)) {
|
|
113
|
+
const value = processed[fieldId];
|
|
114
|
+
if (value === undefined || value === null) continue;
|
|
115
|
+
|
|
116
|
+
if (meta.fieldType === 'image' && typeof value === 'string') {
|
|
117
|
+
const result = resolveAssetRef(value, assetIndex);
|
|
118
|
+
if (result.resolved) {
|
|
119
|
+
processed[fieldId] = result.id;
|
|
120
|
+
resolutions.push(`${fieldId}: "${value}" → ${result.id}`);
|
|
121
|
+
}
|
|
122
|
+
} else if (meta.fieldType === 'richText' && typeof value === 'string') {
|
|
123
|
+
const doc = markdownToKoguma(value);
|
|
124
|
+
processed[fieldId] = JSON.stringify(doc);
|
|
125
|
+
resolutions.push(`${fieldId}: markdown → KogumaDocument`);
|
|
126
|
+
} else if (meta.fieldType === 'images' && Array.isArray(value)) {
|
|
127
|
+
const ids = value.map((v: unknown) => {
|
|
128
|
+
if (typeof v !== 'string') return v;
|
|
129
|
+
const result = resolveAssetRef(v, assetIndex);
|
|
130
|
+
if (result.resolved) {
|
|
131
|
+
resolutions.push(`${fieldId}: "${v}" → ${result.id}`);
|
|
132
|
+
return result.id;
|
|
133
|
+
}
|
|
134
|
+
return v;
|
|
135
|
+
});
|
|
136
|
+
processed[fieldId] = JSON.stringify(ids);
|
|
137
|
+
}
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
return { processed, resolutions };
|
|
141
|
+
}
|
|
142
|
+
|
|
143
|
+
// ── Batch SQL generation ────────────────────────────────────────────
|
|
144
|
+
|
|
145
|
+
/**
|
|
146
|
+
* Generate all INSERT OR REPLACE statements for importing a set of entries
|
|
147
|
+
* into a table, including join table rows.
|
|
148
|
+
*/
|
|
149
|
+
export function buildImportSql(
|
|
150
|
+
typeId: string,
|
|
151
|
+
entries: Record<string, unknown>[],
|
|
152
|
+
joinTables?: Record<string, Record<string, unknown>[]>
|
|
153
|
+
): string[] {
|
|
154
|
+
const statements: string[] = [];
|
|
155
|
+
for (const entry of entries) {
|
|
156
|
+
statements.push(buildInsertSql(typeId, entry));
|
|
157
|
+
}
|
|
158
|
+
if (joinTables) {
|
|
159
|
+
for (const [jtName, rows] of Object.entries(joinTables)) {
|
|
160
|
+
for (const row of rows) {
|
|
161
|
+
statements.push(buildInsertSql(jtName, row));
|
|
162
|
+
}
|
|
163
|
+
}
|
|
164
|
+
}
|
|
165
|
+
return statements;
|
|
166
|
+
}
|
|
@@ -0,0 +1,164 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* markdown-to-koguma.ts — Lightweight markdown → KogumaDocument converter.
|
|
3
|
+
*
|
|
4
|
+
* Designed for seed content (author-controlled markdown), not arbitrary user input.
|
|
5
|
+
* Supports: headings, paragraphs, bold, italic, code, links, lists, code blocks, HR.
|
|
6
|
+
*/
|
|
7
|
+
import type {
|
|
8
|
+
KogumaDocument,
|
|
9
|
+
KogumaBlockNode,
|
|
10
|
+
KogumaInlineNode,
|
|
11
|
+
KogumaListItem
|
|
12
|
+
} from '../config/types.ts';
|
|
13
|
+
|
|
14
|
+
// ── Inline parsing ──────────────────────────────────────────────────
|
|
15
|
+
|
|
16
|
+
function parseInline(text: string): KogumaInlineNode[] {
|
|
17
|
+
const nodes: KogumaInlineNode[] = [];
|
|
18
|
+
// Regex: links, bold, italic, inline code
|
|
19
|
+
const re =
|
|
20
|
+
/\[([^\]]+)\]\(([^)]+)\)|`([^`]+)`|\*\*(.+?)\*\*|\*(.+?)\*|([^[`*]+)/g;
|
|
21
|
+
let match: RegExpExecArray | null;
|
|
22
|
+
|
|
23
|
+
while ((match = re.exec(text)) !== null) {
|
|
24
|
+
if (match[1] !== undefined && match[2] !== undefined) {
|
|
25
|
+
// Link
|
|
26
|
+
nodes.push({
|
|
27
|
+
type: 'link',
|
|
28
|
+
url: match[2],
|
|
29
|
+
children: [{ type: 'text', text: match[1] }]
|
|
30
|
+
});
|
|
31
|
+
} else if (match[3] !== undefined) {
|
|
32
|
+
// Inline code
|
|
33
|
+
nodes.push({ type: 'text', text: match[3], code: true });
|
|
34
|
+
} else if (match[4] !== undefined) {
|
|
35
|
+
// Bold
|
|
36
|
+
nodes.push({ type: 'text', text: match[4], bold: true });
|
|
37
|
+
} else if (match[5] !== undefined) {
|
|
38
|
+
// Italic
|
|
39
|
+
nodes.push({ type: 'text', text: match[5], italic: true });
|
|
40
|
+
} else if (match[6] !== undefined && match[6].length > 0) {
|
|
41
|
+
// Plain text
|
|
42
|
+
nodes.push({ type: 'text', text: match[6] });
|
|
43
|
+
}
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
return nodes.length > 0 ? nodes : [{ type: 'text', text }];
|
|
47
|
+
}
|
|
48
|
+
|
|
49
|
+
// ── Block parsing ───────────────────────────────────────────────────
|
|
50
|
+
|
|
51
|
+
/**
 * Convert a markdown string into a KogumaDocument block tree.
 *
 * Line-oriented, single pass. Dispatch order per line matters:
 * fenced code → heading → horizontal rule → unordered list → ordered list
 * → blockquote → paragraph (the catch-all). The paragraph loop's stop
 * condition must mirror every other block's start pattern, or a following
 * block would be swallowed into the paragraph.
 *
 * @param markdown Author-controlled markdown (seed content) — no HTML
 *                 handling, no nested lists.
 * @returns A KogumaDocument whose `nodes` are the parsed top-level blocks.
 */
export function markdownToKoguma(markdown: string): KogumaDocument {
  const lines = markdown.split('\n');
  const nodes: KogumaBlockNode[] = [];
  let i = 0;

  while (i < lines.length) {
    const line = lines[i]!;

    // Blank line — skip
    if (line.trim() === '') {
      i++;
      continue;
    }

    // Fenced code block: ``` or ```lang alone on the line
    const codeMatch = line.match(/^```(\w*)$/);
    if (codeMatch) {
      const language = codeMatch[1] || undefined; // '' → undefined (no language tag)
      const codeLines: string[] = [];
      i++;
      // Consume until the closing fence; an unterminated fence runs to EOF.
      while (i < lines.length && !lines[i]!.startsWith('```')) {
        codeLines.push(lines[i]!);
        i++;
      }
      i++; // skip closing ```
      nodes.push({ type: 'code', language, text: codeLines.join('\n') });
      continue;
    }

    // Heading: 1–6 '#' characters followed by whitespace
    const headingMatch = line.match(/^(#{1,6})\s+(.+)$/);
    if (headingMatch) {
      const level = headingMatch[1]!.length as 1 | 2 | 3 | 4 | 5 | 6;
      nodes.push({
        type: 'heading',
        level,
        children: parseInline(headingMatch[2]!)
      });
      i++;
      continue;
    }

    // Horizontal rule (checked before lists, so '---' is not read as a '-' list item)
    if (/^(-{3,}|\*{3,}|_{3,})$/.test(line.trim())) {
      nodes.push({ type: 'hr' });
      i++;
      continue;
    }

    // Unordered list: consecutive '-', '*' or '+' items (flat — no nesting)
    if (/^[-*+]\s/.test(line)) {
      const items: KogumaListItem[] = [];
      while (i < lines.length && /^[-*+]\s/.test(lines[i]!)) {
        items.push({
          children: parseInline(lines[i]!.replace(/^[-*+]\s/, ''))
        });
        i++;
      }
      nodes.push({ type: 'list', ordered: false, items });
      continue;
    }

    // Ordered list: consecutive 'N.' items
    if (/^\d+\.\s/.test(line)) {
      const items: KogumaListItem[] = [];
      while (i < lines.length && /^\d+\.\s/.test(lines[i]!)) {
        items.push({
          children: parseInline(lines[i]!.replace(/^\d+\.\s/, ''))
        });
        i++;
      }
      nodes.push({ type: 'list', ordered: true, items });
      continue;
    }

    // Blockquote: consecutive '> ' lines folded into a single quote node
    if (line.startsWith('> ')) {
      const quoteLines: string[] = [];
      while (i < lines.length && lines[i]!.startsWith('> ')) {
        quoteLines.push(lines[i]!.slice(2));
        i++;
      }
      nodes.push({
        type: 'quote',
        children: parseInline(quoteLines.join(' '))
      });
      continue;
    }

    // Paragraph (default): gather lines until a blank line or the start of
    // any other block type — this guard list mirrors the dispatches above.
    const paraLines: string[] = [];
    while (
      i < lines.length &&
      lines[i]!.trim() !== '' &&
      !lines[i]!.startsWith('#') &&
      !lines[i]!.startsWith('```') &&
      !/^[-*+]\s/.test(lines[i]!) &&
      !/^\d+\.\s/.test(lines[i]!) &&
      !lines[i]!.startsWith('> ') &&
      !/^(-{3,}|\*{3,}|_{3,})$/.test(lines[i]!.trim())
    ) {
      paraLines.push(lines[i]!);
      i++;
    }
    if (paraLines.length > 0) {
      nodes.push({
        type: 'paragraph',
        children: parseInline(paraLines.join(' '))
      });
    }
  }

  return { nodes };
}
|