@dboio/cli 0.19.7 → 0.20.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +18 -12
- package/bin/dbo.js +5 -0
- package/package.json +1 -1
- package/plugins/claude/dbo/.claude-plugin/plugin.json +1 -1
- package/plugins/claude/dbo/commands/dbo.md +39 -12
- package/plugins/claude/dbo/docs/dbo-cli-readme.md +18 -12
- package/plugins/claude/dbo/docs/dual-platform-maintenance.md +135 -0
- package/plugins/claude/dbo/skills/cookbook/SKILL.md +13 -3
- package/plugins/claude/dbo/skills/white-paper/SKILL.md +49 -8
- package/plugins/claude/dbo/skills/white-paper/references/api-reference.md +1 -1
- package/plugins/claude/track/.claude-plugin/plugin.json +1 -1
- package/src/commands/adopt.js +22 -19
- package/src/commands/clone.js +412 -57
- package/src/commands/init.js +2 -2
- package/src/commands/input.js +2 -2
- package/src/commands/login.js +3 -3
- package/src/commands/push.js +142 -43
- package/src/commands/status.js +15 -7
- package/src/lib/config.js +117 -11
- package/src/lib/dependencies.js +11 -9
- package/src/lib/filenames.js +54 -66
- package/src/lib/ignore.js +3 -0
- package/src/lib/input-parser.js +2 -6
- package/src/lib/insert.js +34 -49
- package/src/lib/structure.js +23 -8
- package/src/lib/ticketing.js +66 -9
- package/src/lib/toe-stepping.js +103 -3
- package/src/migrations/008-metadata-uid-in-suffix.js +4 -2
- package/src/migrations/009-fix-media-collision-metadata-names.js +9 -3
- package/src/migrations/013-remove-uid-from-meta-filenames.js +117 -0
- package/src/migrations/014-entity-dir-to-data-source.js +68 -0
package/src/lib/toe-stepping.js
CHANGED
|
@@ -1,11 +1,11 @@
|
|
|
1
1
|
import chalk from 'chalk';
|
|
2
2
|
import { dirname, basename, join } from 'path';
|
|
3
|
-
import { readFile } from 'fs/promises';
|
|
4
|
-
import { findBaselineEntry, shouldSkipColumn, normalizeValue, isReference, resolveReferencePath } from './delta.js';
|
|
3
|
+
import { readFile, writeFile } from 'fs/promises';
|
|
4
|
+
import { findBaselineEntry, shouldSkipColumn, normalizeValue, isReference, resolveReferencePath, saveBaseline } from './delta.js';
|
|
5
5
|
import { resolveContentValue } from '../commands/clone.js';
|
|
6
6
|
import { computeLineDiff, formatDiff } from './diff.js';
|
|
7
7
|
import { parseMetaFilename } from './filenames.js';
|
|
8
|
-
import { parseServerDate } from './timestamps.js';
|
|
8
|
+
import { parseServerDate, setFileTimestamps } from './timestamps.js';
|
|
9
9
|
import { log } from './logger.js';
|
|
10
10
|
|
|
11
11
|
/**
|
|
@@ -335,6 +335,7 @@ export async function checkToeStepping(records, client, baseline, options, appSh
|
|
|
335
335
|
let skippedUIDs = new Set();
|
|
336
336
|
let bulkAction = null; // 'push_all' | 'skip_all'
|
|
337
337
|
let hasConflicts = false;
|
|
338
|
+
let baselineModified = false;
|
|
338
339
|
|
|
339
340
|
for (const { meta, metaPath } of records) {
|
|
340
341
|
const uid = meta.UID;
|
|
@@ -381,6 +382,14 @@ export async function checkToeStepping(records, client, baseline, options, appSh
|
|
|
381
382
|
if (options.yes || bulkAction === 'push_all') {
|
|
382
383
|
continue; // push this record
|
|
383
384
|
}
|
|
385
|
+
if (bulkAction === 'pull_all') {
|
|
386
|
+
await applyServerToLocal(serverEntry, meta, metaPath, serverTz);
|
|
387
|
+
_updateBaselineEntry(baseline, entity, uid, serverEntry);
|
|
388
|
+
baselineModified = true;
|
|
389
|
+
log.success(` Pulled server version of "${label}" to local`);
|
|
390
|
+
skippedUIDs.add(uid);
|
|
391
|
+
continue;
|
|
392
|
+
}
|
|
384
393
|
if (bulkAction === 'skip_all') {
|
|
385
394
|
skippedUIDs.add(uid);
|
|
386
395
|
continue;
|
|
@@ -395,9 +404,11 @@ export async function checkToeStepping(records, client, baseline, options, appSh
|
|
|
395
404
|
message: `"${label}" has server changes. How to proceed?`,
|
|
396
405
|
choices: [
|
|
397
406
|
{ name: 'Push anyway (overwrite server changes)', value: 'push' },
|
|
407
|
+
{ name: 'Pull from server (overwrite local changes)', value: 'pull' },
|
|
398
408
|
{ name: 'Compare differences', value: 'compare' },
|
|
399
409
|
{ name: 'Skip this record', value: 'skip' },
|
|
400
410
|
{ name: 'Push all remaining (overwrite all)', value: 'push_all' },
|
|
411
|
+
{ name: 'Pull all remaining (overwrite all local)', value: 'pull_all' },
|
|
401
412
|
{ name: 'Skip all remaining', value: 'skip_all' },
|
|
402
413
|
{ name: 'Cancel entire push', value: 'cancel' },
|
|
403
414
|
],
|
|
@@ -414,6 +425,14 @@ export async function checkToeStepping(records, client, baseline, options, appSh
|
|
|
414
425
|
log.info('Push cancelled. Run "dbo pull" to fetch server changes first.');
|
|
415
426
|
return false;
|
|
416
427
|
}
|
|
428
|
+
if (action === 'pull' || action === 'pull_all') {
|
|
429
|
+
await applyServerToLocal(serverEntry, meta, metaPath, serverTz);
|
|
430
|
+
_updateBaselineEntry(baseline, entity, uid, serverEntry);
|
|
431
|
+
baselineModified = true;
|
|
432
|
+
log.success(` Pulled server version of "${label}" to local`);
|
|
433
|
+
skippedUIDs.add(uid);
|
|
434
|
+
if (action === 'pull_all') bulkAction = 'pull_all';
|
|
435
|
+
}
|
|
417
436
|
if (action === 'skip' || action === 'skip_all') {
|
|
418
437
|
skippedUIDs.add(uid);
|
|
419
438
|
if (action === 'skip_all') bulkAction = 'skip_all';
|
|
@@ -423,6 +442,12 @@ export async function checkToeStepping(records, client, baseline, options, appSh
|
|
|
423
442
|
}
|
|
424
443
|
}
|
|
425
444
|
|
|
445
|
+
if (baselineModified) {
|
|
446
|
+
try {
|
|
447
|
+
await saveBaseline(baseline);
|
|
448
|
+
} catch { /* non-critical — next push will re-detect */ }
|
|
449
|
+
}
|
|
450
|
+
|
|
426
451
|
if (!hasConflicts) return true;
|
|
427
452
|
|
|
428
453
|
// Return skipped UIDs so the caller can filter them out
|
|
@@ -496,3 +521,78 @@ async function showPushDiff(serverEntry, localMeta, metaPath) {
|
|
|
496
521
|
|
|
497
522
|
log.plain('');
|
|
498
523
|
}
|
|
524
|
+
|
|
525
|
+
/**
|
|
526
|
+
* Update the in-memory baseline entry for a record with the server's current
|
|
527
|
+
* values. Called after pulling from server so the next toe-stepping check
|
|
528
|
+
* sees the pulled state as the new baseline and does not re-raise the conflict.
|
|
529
|
+
*
|
|
530
|
+
* @param {Object} baseline - Loaded baseline object (mutated in place)
|
|
531
|
+
* @param {string} entity - Entity type (e.g., "content")
|
|
532
|
+
* @param {string} uid - Record UID
|
|
533
|
+
* @param {Object} serverEntry - Live server record
|
|
534
|
+
*/
|
|
535
|
+
function _updateBaselineEntry(baseline, entity, uid, serverEntry) {
|
|
536
|
+
if (!baseline?.children) return;
|
|
537
|
+
const arr = baseline.children[entity];
|
|
538
|
+
if (!Array.isArray(arr)) return;
|
|
539
|
+
const idx = arr.findIndex(e => e.UID === uid);
|
|
540
|
+
if (idx < 0) return;
|
|
541
|
+
|
|
542
|
+
const SKIP = new Set(['_entity', '_companionReferenceColumns', '_contentColumns',
|
|
543
|
+
'_mediaFile', '_pathConfirmed', 'children', '_id']);
|
|
544
|
+
for (const [col, rawVal] of Object.entries(serverEntry)) {
|
|
545
|
+
if (SKIP.has(col)) continue;
|
|
546
|
+
const decoded = resolveContentValue(rawVal);
|
|
547
|
+
arr[idx][col] = decoded !== null ? decoded : rawVal;
|
|
548
|
+
}
|
|
549
|
+
}
|
|
550
|
+
|
|
551
|
+
/**
|
|
552
|
+
* Overwrite local metadata and companion content files with server values.
|
|
553
|
+
*
|
|
554
|
+
* Called when the user chooses "Pull from server" during conflict resolution.
|
|
555
|
+
* Updates:
|
|
556
|
+
* - companion content files (columns listed in _companionReferenceColumns)
|
|
557
|
+
* - all non-system metadata fields
|
|
558
|
+
* - _LastUpdated / _CreatedOn timestamps in the metadata JSON
|
|
559
|
+
* - file timestamps on both the metadata file and any companion files
|
|
560
|
+
*
|
|
561
|
+
* @param {Object} serverEntry - Live server record (from fetchServerRecord*)
|
|
562
|
+
* @param {Object} localMeta - Currently loaded metadata object
|
|
563
|
+
* @param {string} metaPath - Absolute path to the .metadata.json file
|
|
564
|
+
* @param {string} [serverTz] - Server timezone string (e.g. "America/Chicago")
|
|
565
|
+
*/
|
|
566
|
+
async function applyServerToLocal(serverEntry, localMeta, metaPath, serverTz) {
|
|
567
|
+
const metaDir = dirname(metaPath);
|
|
568
|
+
const companions = new Set(localMeta._companionReferenceColumns || []);
|
|
569
|
+
|
|
570
|
+
// Write companion content files from server values
|
|
571
|
+
for (const col of companions) {
|
|
572
|
+
const ref = localMeta[col];
|
|
573
|
+
if (!ref || !String(ref).startsWith('@')) continue;
|
|
574
|
+
const filePath = join(metaDir, String(ref).substring(1));
|
|
575
|
+
const serverValue = resolveContentValue(serverEntry[col]);
|
|
576
|
+
if (serverValue !== null) {
|
|
577
|
+
await writeFile(filePath, serverValue, 'utf8');
|
|
578
|
+
try {
|
|
579
|
+
await setFileTimestamps(filePath, serverEntry._CreatedOn, serverEntry._LastUpdated, serverTz);
|
|
580
|
+
} catch { /* non-critical */ }
|
|
581
|
+
}
|
|
582
|
+
}
|
|
583
|
+
|
|
584
|
+
// Merge non-system, non-companion server columns into localMeta
|
|
585
|
+
const skipMeta = new Set(['_entity', '_companionReferenceColumns', '_contentColumns', '_mediaFile',
|
|
586
|
+
'_pathConfirmed', 'children', '_id']);
|
|
587
|
+
for (const [col, rawVal] of Object.entries(serverEntry)) {
|
|
588
|
+
if (skipMeta.has(col)) continue;
|
|
589
|
+
if (companions.has(col)) continue; // companion already handled as a file
|
|
590
|
+
const decoded = resolveContentValue(rawVal);
|
|
591
|
+
localMeta[col] = decoded !== null ? decoded : rawVal;
|
|
592
|
+
}
|
|
593
|
+
|
|
594
|
+
await writeFile(metaPath, JSON.stringify(localMeta, null, 2) + '\n');
|
|
595
|
+
try {
|
|
596
|
+
await setFileTimestamps(metaPath, serverEntry._CreatedOn, serverEntry._LastUpdated, serverTz);
|
|
597
|
+
} catch { /* non-critical */ }
|
|
598
|
+
}
|
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
import { readdir, rename, access } from 'fs/promises';
|
|
2
2
|
import { join, basename, dirname } from 'path';
|
|
3
3
|
import { log } from '../lib/logger.js';
|
|
4
|
-
import { detectLegacyTildeMetadata, buildMetaFilename } from '../lib/filenames.js';
|
|
4
|
+
import { detectLegacyTildeMetadata } from '../lib/filenames.js';
|
|
5
5
|
|
|
6
6
|
export const description = 'Rename metadata files from name~uid.metadata.json to name.metadata~uid.json';
|
|
7
7
|
|
|
@@ -32,7 +32,9 @@ export default async function run(_options) {
|
|
|
32
32
|
if (!parsed) continue;
|
|
33
33
|
|
|
34
34
|
const { naturalBase, uid } = parsed;
|
|
35
|
-
|
|
35
|
+
// Migration 008 target is the intermediate name.metadata~uid.json format;
|
|
36
|
+
// migration 013 will later remove the uid suffix entirely.
|
|
37
|
+
const newFilename = `${naturalBase}.metadata~${uid}.json`;
|
|
36
38
|
const newPath = join(dir, newFilename);
|
|
37
39
|
|
|
38
40
|
// Skip if target already exists (avoid overwrite)
|
|
@@ -1,7 +1,13 @@
|
|
|
1
1
|
import { readdir, readFile, rename, unlink, access } from 'fs/promises';
|
|
2
2
|
import { join, basename, dirname, extname } from 'path';
|
|
3
3
|
import { log } from '../lib/logger.js';
|
|
4
|
-
import { parseMetaFilename, buildMetaFilename } from '../lib/filenames.js';
|
|
4
|
+
import { parseMetaFilename } from '../lib/filenames.js';
|
|
5
|
+
|
|
6
|
+
// Build the intermediate name.metadata~uid.json format this migration targets.
|
|
7
|
+
// (Migration 013 will later strip the uid from filenames entirely.)
|
|
8
|
+
function buildLegacySuffixFilename(naturalBase, uid) {
|
|
9
|
+
return `${naturalBase}.metadata~${uid}.json`;
|
|
10
|
+
}
|
|
5
11
|
|
|
6
12
|
export const description = 'Fix media collision suffix: rename (media) → _media and fix mismatched metadata filenames';
|
|
7
13
|
|
|
@@ -87,7 +93,7 @@ export default async function run(_options) {
|
|
|
87
93
|
// Rename metadata file itself if it contains (media)
|
|
88
94
|
if (parsed.naturalBase.includes('(media)')) {
|
|
89
95
|
const newBase = parsed.naturalBase.replace('(media)', '_media');
|
|
90
|
-
const newMetaFilename = buildMetaFilename(newBase, parsed.uid);
|
|
96
|
+
const newMetaFilename = buildLegacySuffixFilename(newBase, parsed.uid);
|
|
91
97
|
const newMetaPath = join(dirname(metaPath), newMetaFilename);
|
|
92
98
|
try { await access(newMetaPath); } catch {
|
|
93
99
|
await rename(metaPath, newMetaPath);
|
|
@@ -112,7 +118,7 @@ export default async function run(_options) {
|
|
|
112
118
|
// Already correct
|
|
113
119
|
if (currentParsed.naturalBase === refFilename) continue;
|
|
114
120
|
|
|
115
|
-
const correctFilename = buildMetaFilename(refFilename, currentParsed.uid);
|
|
121
|
+
const correctFilename = buildLegacySuffixFilename(refFilename, currentParsed.uid);
|
|
116
122
|
const correctPath = join(dirname(metaPath), correctFilename);
|
|
117
123
|
|
|
118
124
|
// If correct metadata already exists, this one is an orphan
|
|
@@ -0,0 +1,117 @@
|
|
|
1
|
+
import { readFile, writeFile, rename, readdir, access } from 'fs/promises';
|
|
2
|
+
import { join, basename, dirname } from 'path';
|
|
3
|
+
|
|
4
|
+
export const description = 'Rename metadata files from name.metadata~uid.json to name.metadata.json';
|
|
5
|
+
|
|
6
|
+
/**
|
|
7
|
+
* Migration 013 — Remove UID from metadata filenames.
|
|
8
|
+
*
|
|
9
|
+
* Old format: colors.metadata~abc123.json
|
|
10
|
+
* New format: colors.metadata.json
|
|
11
|
+
*
|
|
12
|
+
* The UID is already stored inside the JSON as the "UID" field — no information is lost.
|
|
13
|
+
*
|
|
14
|
+
* Collision resolution uses entity priority:
|
|
15
|
+
* content > output > everything else
|
|
16
|
+
*
|
|
17
|
+
* Within the same entity type, the first record (alphabetically by old filename) wins
|
|
18
|
+
* the unsuffixed slot; subsequent ones get -1, -2, etc.
|
|
19
|
+
*
|
|
20
|
+
* Does NOT rename companion files (they use natural names already).
|
|
21
|
+
*/
|
|
22
|
+
export default async function run(_options) {
|
|
23
|
+
const cwd = process.cwd();
|
|
24
|
+
let totalRenamed = 0;
|
|
25
|
+
|
|
26
|
+
const legacyFiles = await findLegacySuffixMetadataFiles(cwd);
|
|
27
|
+
if (legacyFiles.length === 0) return;
|
|
28
|
+
|
|
29
|
+
// Group by directory so we can resolve collisions per-dir
|
|
30
|
+
const byDir = new Map();
|
|
31
|
+
for (const filePath of legacyFiles) {
|
|
32
|
+
const dir = dirname(filePath);
|
|
33
|
+
if (!byDir.has(dir)) byDir.set(dir, []);
|
|
34
|
+
byDir.get(dir).push(filePath);
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
for (const [dir, files] of byDir) {
|
|
38
|
+
// Read entity type from each file to apply priority ordering
|
|
39
|
+
const withMeta = [];
|
|
40
|
+
for (const filePath of files) {
|
|
41
|
+
let entity = 'other';
|
|
42
|
+
let uid = null;
|
|
43
|
+
try {
|
|
44
|
+
const content = JSON.parse(await readFile(filePath, 'utf8'));
|
|
45
|
+
entity = content._entity || 'other';
|
|
46
|
+
uid = content.UID || null;
|
|
47
|
+
} catch { /* use defaults */ }
|
|
48
|
+
|
|
49
|
+
// Parse naturalBase from legacy filename: name.metadata~uid.json
|
|
50
|
+
const filename = basename(filePath);
|
|
51
|
+
const m = filename.match(/^(.+)\.metadata~([a-z0-9_]+)\.json$/i);
|
|
52
|
+
const naturalBase = m ? m[1] : filename.replace(/\.metadata~[^.]+\.json$/i, '');
|
|
53
|
+
|
|
54
|
+
withMeta.push({ filePath, naturalBase, entity, uid });
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
// Sort by entity priority: content first, output second, then others (alphabetically within tier)
|
|
58
|
+
const PRIORITY = { content: 0, output: 1 };
|
|
59
|
+
withMeta.sort((a, b) => {
|
|
60
|
+
const pa = PRIORITY[a.entity] ?? 2;
|
|
61
|
+
const pb = PRIORITY[b.entity] ?? 2;
|
|
62
|
+
if (pa !== pb) return pa - pb;
|
|
63
|
+
return a.naturalBase.localeCompare(b.naturalBase);
|
|
64
|
+
});
|
|
65
|
+
|
|
66
|
+
// Assign new filenames with collision resolution
|
|
67
|
+
const usedBases = new Map(); // naturalBase (lowercase) → count of times used
|
|
68
|
+
|
|
69
|
+
for (const { filePath, naturalBase, entity } of withMeta) {
|
|
70
|
+
const baseKey = naturalBase.toLowerCase();
|
|
71
|
+
const count = usedBases.get(baseKey) || 0;
|
|
72
|
+
usedBases.set(baseKey, count + 1);
|
|
73
|
+
|
|
74
|
+
const newFilename = count === 0
|
|
75
|
+
? `${naturalBase}.metadata.json`
|
|
76
|
+
: `${naturalBase}-${count}.metadata.json`;
|
|
77
|
+
|
|
78
|
+
const newPath = join(dir, newFilename);
|
|
79
|
+
|
|
80
|
+
// Skip if target already exists (safe guard against re-running migration)
|
|
81
|
+
try { await access(newPath); continue; } catch { /* doesn't exist — safe to rename */ }
|
|
82
|
+
|
|
83
|
+
try {
|
|
84
|
+
await rename(filePath, newPath);
|
|
85
|
+
if (newFilename !== basename(filePath)) {
|
|
86
|
+
console.log(` [${entity}] ${basename(filePath)} → ${newFilename}`);
|
|
87
|
+
totalRenamed++;
|
|
88
|
+
}
|
|
89
|
+
} catch (err) {
|
|
90
|
+
console.warn(` (skip) Could not rename ${basename(filePath)}: ${err.message}`);
|
|
91
|
+
}
|
|
92
|
+
}
|
|
93
|
+
}
|
|
94
|
+
|
|
95
|
+
if (totalRenamed > 0) {
|
|
96
|
+
console.log(` Renamed ${totalRenamed} metadata file(s) — UID now stored in JSON only`);
|
|
97
|
+
}
|
|
98
|
+
}
|
|
99
|
+
|
|
100
|
+
const SKIP = new Set(['.app', 'node_modules', 'trash', '.git', '.claude', 'app_dependencies']);
|
|
101
|
+
|
|
102
|
+
async function findLegacySuffixMetadataFiles(dir) {
|
|
103
|
+
const results = [];
|
|
104
|
+
try {
|
|
105
|
+
const entries = await readdir(dir, { withFileTypes: true });
|
|
106
|
+
for (const entry of entries) {
|
|
107
|
+
if (SKIP.has(entry.name)) continue;
|
|
108
|
+
const full = join(dir, entry.name);
|
|
109
|
+
if (entry.isDirectory()) {
|
|
110
|
+
results.push(...await findLegacySuffixMetadataFiles(full));
|
|
111
|
+
} else if (/\.metadata~[a-z0-9_]+\.json$/i.test(entry.name)) {
|
|
112
|
+
results.push(full);
|
|
113
|
+
}
|
|
114
|
+
}
|
|
115
|
+
} catch { /* skip unreadable dirs */ }
|
|
116
|
+
return results;
|
|
117
|
+
}
|
|
@@ -0,0 +1,68 @@
|
|
|
1
|
+
import { readdir, rename, mkdir, rmdir, access } from 'fs/promises';
|
|
2
|
+
import { join } from 'path';
|
|
3
|
+
import { log } from '../lib/logger.js';
|
|
4
|
+
|
|
5
|
+
export const description = 'Move lib/entity/ files into lib/data_source/';
|
|
6
|
+
|
|
7
|
+
/**
|
|
8
|
+
* Migration 014 — Relocate entity records from lib/entity/ into lib/data_source/.
|
|
9
|
+
*
|
|
10
|
+
* Entity (table-definition) records are now co-located with data source records
|
|
11
|
+
* under lib/data_source/. Files keep their _entity: "entity" metadata value —
|
|
12
|
+
* only their directory changes.
|
|
13
|
+
*
|
|
14
|
+
* If lib/data_source/ already contains a file with the same name, the source
|
|
15
|
+
* file is left in place and a warning is emitted; no data is overwritten.
|
|
16
|
+
*/
|
|
17
|
+
export default async function run() {
|
|
18
|
+
const cwd = process.cwd();
|
|
19
|
+
const srcDir = join(cwd, 'lib', 'entity');
|
|
20
|
+
|
|
21
|
+
// Nothing to do if lib/entity/ doesn't exist
|
|
22
|
+
try {
|
|
23
|
+
await access(srcDir);
|
|
24
|
+
} catch {
|
|
25
|
+
return;
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
const entries = await readdir(srcDir, { withFileTypes: true });
|
|
29
|
+
if (entries.length === 0) {
|
|
30
|
+
// Empty directory — just remove it
|
|
31
|
+
try { await rmdir(srcDir); } catch { /* ignore */ }
|
|
32
|
+
return;
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
const destDir = join(cwd, 'lib', 'data_source');
|
|
36
|
+
await mkdir(destDir, { recursive: true });
|
|
37
|
+
|
|
38
|
+
let movedCount = 0;
|
|
39
|
+
let skippedCount = 0;
|
|
40
|
+
|
|
41
|
+
for (const entry of entries) {
|
|
42
|
+
const srcPath = join(srcDir, entry.name);
|
|
43
|
+
const destPath = join(destDir, entry.name);
|
|
44
|
+
|
|
45
|
+
// Check for collision
|
|
46
|
+
try {
|
|
47
|
+
await access(destPath);
|
|
48
|
+
log.warn(` Migration 014: skipped ${entry.name} — already exists in lib/data_source/`);
|
|
49
|
+
skippedCount++;
|
|
50
|
+
continue;
|
|
51
|
+
} catch { /* dest absent — safe to move */ }
|
|
52
|
+
|
|
53
|
+
await rename(srcPath, destPath);
|
|
54
|
+
movedCount++;
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
// Remove lib/entity/ if now empty
|
|
58
|
+
try {
|
|
59
|
+
const remaining = await readdir(srcDir);
|
|
60
|
+
if (remaining.length === 0) {
|
|
61
|
+
await rmdir(srcDir);
|
|
62
|
+
}
|
|
63
|
+
} catch { /* ignore */ }
|
|
64
|
+
|
|
65
|
+
if (movedCount > 0) {
|
|
66
|
+
log.success(` Migration 014: moved ${movedCount} file(s) from lib/entity/ → lib/data_source/${skippedCount > 0 ? ` (${skippedCount} skipped — collision)` : ''}`);
|
|
67
|
+
}
|
|
68
|
+
}
|