@dboio/cli 0.15.2 → 0.16.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +103 -25
- package/package.json +1 -1
- package/plugins/claude/dbo/docs/dbo-cli-readme.md +103 -25
- package/src/commands/add.js +18 -18
- package/src/commands/clone.js +390 -157
- package/src/commands/init.js +42 -1
- package/src/commands/input.js +2 -32
- package/src/commands/mv.js +3 -3
- package/src/commands/push.js +29 -11
- package/src/commands/rm.js +2 -2
- package/src/lib/columns.js +1 -0
- package/src/lib/config.js +83 -1
- package/src/lib/delta.js +31 -7
- package/src/lib/dependencies.js +217 -2
- package/src/lib/diff.js +9 -11
- package/src/lib/filenames.js +2 -2
- package/src/lib/ignore.js +1 -0
- package/src/lib/logger.js +35 -0
- package/src/lib/metadata-schema.js +492 -0
- package/src/lib/save-to-disk.js +1 -1
- package/src/lib/schema.js +53 -0
- package/src/lib/structure.js +3 -3
- package/src/lib/tagging.js +1 -1
- package/src/lib/ticketing.js +18 -2
- package/src/lib/toe-stepping.js +9 -6
- package/src/migrations/007-natural-entity-companion-filenames.js +5 -2
- package/src/migrations/009-fix-media-collision-metadata-names.js +161 -0
- package/src/migrations/010-delete-paren-media-orphans.js +61 -0
- package/src/migrations/011-schema-driven-metadata.js +120 -0
package/src/lib/toe-stepping.js
CHANGED
|
@@ -5,6 +5,7 @@ import { findBaselineEntry, shouldSkipColumn, normalizeValue, isReference, resol
|
|
|
5
5
|
import { resolveContentValue } from '../commands/clone.js';
|
|
6
6
|
import { computeLineDiff, formatDiff } from './diff.js';
|
|
7
7
|
import { parseMetaFilename } from './filenames.js';
|
|
8
|
+
import { parseServerDate } from './timestamps.js';
|
|
8
9
|
import { log } from './logger.js';
|
|
9
10
|
|
|
10
11
|
/**
|
|
@@ -285,10 +286,11 @@ function findOldestBaselineDate(records, baseline) {
|
|
|
285
286
|
* @param {Object} baseline - Loaded baseline from .dbo/.app_baseline.json
|
|
286
287
|
* @param {Object} options - Commander options (options.yes used for auto-accept)
|
|
287
288
|
* @param {string} [appShortName] - App short name for bulk fetch (optional)
|
|
289
|
+
* @param {string} [serverTz] - Server timezone from config (e.g. "America/Chicago")
|
|
288
290
|
* @returns {Promise<boolean|Set<string>>} - true = proceed with all,
|
|
289
291
|
* false = user cancelled entirely, Set<string> = UIDs to skip (proceed with rest)
|
|
290
292
|
*/
|
|
291
|
-
export async function checkToeStepping(records, client, baseline, options, appShortName) {
|
|
293
|
+
export async function checkToeStepping(records, client, baseline, options, appShortName, serverTz) {
|
|
292
294
|
// Build list of records to check (skip new records without UID)
|
|
293
295
|
const requests = [];
|
|
294
296
|
for (const { meta } of records) {
|
|
@@ -350,9 +352,10 @@ export async function checkToeStepping(records, client, baseline, options, appSh
|
|
|
350
352
|
|
|
351
353
|
if (!serverTs || !baselineTs) continue; // missing timestamps — skip safely
|
|
352
354
|
|
|
353
|
-
// Parse both
|
|
354
|
-
|
|
355
|
-
const
|
|
355
|
+
// Parse both dates using server timezone from config — server dates
|
|
356
|
+
// may arrive without Z suffix and represent the server's local time
|
|
357
|
+
const serverDate = parseServerDate(serverTs, serverTz);
|
|
358
|
+
const baselineDate = parseServerDate(baselineTs, serverTz);
|
|
356
359
|
|
|
357
360
|
if (isNaN(serverDate) || isNaN(baselineDate)) continue; // unparseable — skip
|
|
358
361
|
if (serverDate <= baselineDate) continue; // server is same or older — no conflict
|
|
@@ -440,7 +443,7 @@ export async function checkToeStepping(records, client, baseline, options, appSh
|
|
|
440
443
|
*/
|
|
441
444
|
async function showPushDiff(serverEntry, localMeta, metaPath) {
|
|
442
445
|
const metaDir = dirname(metaPath);
|
|
443
|
-
const contentCols = localMeta._contentColumns || [];
|
|
446
|
+
const contentCols = localMeta._companionReferenceColumns || localMeta._contentColumns || [];
|
|
444
447
|
|
|
445
448
|
// Compare content file columns
|
|
446
449
|
for (const col of contentCols) {
|
|
@@ -472,7 +475,7 @@ async function showPushDiff(serverEntry, localMeta, metaPath) {
|
|
|
472
475
|
}
|
|
473
476
|
|
|
474
477
|
// Compare metadata fields
|
|
475
|
-
const skipFields = new Set(['_entity', '_contentColumns', '_mediaFile', 'children', '_pathConfirmed']);
|
|
478
|
+
const skipFields = new Set(['_entity', '_contentColumns', '_companionReferenceColumns', '_mediaFile', 'children', '_pathConfirmed']);
|
|
476
479
|
for (const col of Object.keys(serverEntry)) {
|
|
477
480
|
if (skipFields.has(col)) continue;
|
|
478
481
|
if (contentCols.includes(col)) continue;
|
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import { readdir, readFile, writeFile, rename, access, mkdir } from 'fs/promises';
|
|
1
|
+
import { readdir, readFile, writeFile, rename, access, mkdir, stat, utimes } from 'fs/promises';
|
|
2
2
|
import { join, basename, dirname } from 'path';
|
|
3
3
|
import { log } from '../lib/logger.js';
|
|
4
4
|
import { stripUidFromFilename, hasUidInFilename, isMetadataFile } from '../lib/filenames.js';
|
|
@@ -102,10 +102,13 @@ export default async function run(_options) {
|
|
|
102
102
|
claimedNaturals.add(naturalPath);
|
|
103
103
|
}
|
|
104
104
|
|
|
105
|
-
// Rewrite metadata if @references were updated
|
|
105
|
+
// Rewrite metadata if @references were updated (preserve timestamps
|
|
106
|
+
// so the write doesn't cause false "local changes" during clone/pull)
|
|
106
107
|
if (metaChanged) {
|
|
107
108
|
try {
|
|
109
|
+
const before = await stat(metaPath);
|
|
108
110
|
await writeFile(metaPath, JSON.stringify(meta, null, 2) + '\n');
|
|
111
|
+
await utimes(metaPath, before.atime, before.mtime);
|
|
109
112
|
totalRefsUpdated++;
|
|
110
113
|
} catch { /* non-critical */ }
|
|
111
114
|
}
|
|
@@ -0,0 +1,161 @@
|
|
|
1
|
+
import { readdir, readFile, writeFile, rename, unlink, access } from 'fs/promises';
import { join, basename, dirname, extname } from 'path';
import { log } from '../lib/logger.js';
import { parseMetaFilename, buildMetaFilename } from '../lib/filenames.js';

export const description = 'Fix media collision suffix: rename (media) → _media and fix mismatched metadata filenames';

/**
 * Migration 009 — Fix media metadata/companion files with wrong collision suffix.
 *
 * Two issues addressed:
 *
 * 1. Metadata filename mismatch: media records with collision-resolved names had their
 *    metadata created as "env.js.metadata~uid.json" instead of matching the companion.
 *    Detection: _mediaFile @reference base differs from metadata filename base.
 *
 * 2. (media) → _media rename: the old "(media)" suffix causes zsh glob issues.
 *    Renames both companion files and metadata files, and updates @references.
 *
 * @param {Object} _options - Migration runner options (unused)
 */
export default async function run(_options) {
  const cwd = process.cwd();
  let totalFixed = 0;          // metadata files renamed to match companion names
  let totalOrphansDeleted = 0; // stale duplicate files removed
  let totalParenRenamed = 0;   // (media) → _media renames performed

  const metaFiles = await findAllMetadataFiles(cwd);
  if (metaFiles.length === 0) return;

  for (let metaPath of metaFiles) {
    try {
      let content = JSON.parse(await readFile(metaPath, 'utf8'));
      const filename = basename(metaPath);
      const parsed = parseMetaFilename(filename);
      if (!parsed) continue;

      // ── Phase 1: Rename (media) → _media in companion files and @references ──

      // Check media @reference for (media) pattern
      if (content._entity === 'media' && content._mediaFile) {
        const ref = String(content._mediaFile);
        if (ref.startsWith('@') && ref.includes('(media)')) {
          const oldCompanion = ref.substring(1);
          const ext = extname(oldCompanion);
          const base = basename(oldCompanion, ext);
          const newCompanion = `${base.replace('(media)', '_media')}${ext}`;
          const dir = dirname(metaPath);

          // Rename companion file on disk (or delete old if new already exists)
          const oldCompanionPath = join(dir, oldCompanion);
          const newCompanionPath = join(dir, newCompanion);
          try {
            await access(oldCompanionPath);
            let newExists = false;
            try { await access(newCompanionPath); newExists = true; } catch { /* */ }
            if (newExists) {
              // New file already exists (from re-clone) — delete the old orphan
              await unlink(oldCompanionPath);
              log.dim(`  Deleted old companion: ${oldCompanion}`);
              totalOrphansDeleted++;
            } else {
              await rename(oldCompanionPath, newCompanionPath);
              log.dim(`  ${oldCompanion} → ${newCompanion}`);
            }
          } catch { /* companion doesn't exist or already renamed */ }

          // Update @reference in metadata. writeFile is imported statically at
          // the top — the previous per-write `await import('fs/promises')`
          // dynamic import was redundant.
          content._mediaFile = `@${newCompanion}`;
          await writeFile(metaPath, JSON.stringify(content, null, 2) + '\n');
          totalParenRenamed++;
        }
      }

      // Check content @references for (media) — shouldn't happen but be safe
      if (content._contentColumns) {
        let refsChanged = false;
        for (const col of content._contentColumns) {
          const ref = content[col];
          if (ref && String(ref).startsWith('@') && String(ref).includes('(media)')) {
            const oldName = String(ref).substring(1);
            const ext = extname(oldName);
            const base = basename(oldName, ext);
            content[col] = `@${base.replace('(media)', '_media')}${ext}`;
            refsChanged = true;
          }
        }
        // Write once after the loop — the old code rewrote the whole file for
        // every updated column; the final on-disk content is identical.
        if (refsChanged) {
          await writeFile(metaPath, JSON.stringify(content, null, 2) + '\n');
        }
      }

      // Rename metadata file itself if it contains (media)
      if (parsed.naturalBase.includes('(media)')) {
        const newBase = parsed.naturalBase.replace('(media)', '_media');
        const newMetaFilename = buildMetaFilename(newBase, parsed.uid);
        const newMetaPath = join(dirname(metaPath), newMetaFilename);
        try { await access(newMetaPath); } catch {
          await rename(metaPath, newMetaPath);
          // Fix: this log line previously printed a literal "$(unknown)"
          // instead of the old metadata filename.
          log.dim(`  ${filename} → ${newMetaFilename}`);
          metaPath = newMetaPath; // update for phase 2
          totalParenRenamed++;
        }
        // Re-read content after possible rename
        content = JSON.parse(await readFile(metaPath, 'utf8'));
      }

      // ── Phase 2: Fix metadata filename not matching @reference ──

      if (content._entity !== 'media') continue;
      const mediaRef = content._mediaFile;
      if (!mediaRef || !String(mediaRef).startsWith('@')) continue;

      const refFilename = String(mediaRef).substring(1);
      const currentParsed = parseMetaFilename(basename(metaPath));
      if (!currentParsed) continue;

      // Already correct
      if (currentParsed.naturalBase === refFilename) continue;

      const correctFilename = buildMetaFilename(refFilename, currentParsed.uid);
      const correctPath = join(dirname(metaPath), correctFilename);

      // If correct metadata already exists, this one is an orphan
      try {
        await access(correctPath);
        await unlink(metaPath);
        log.dim(`  Deleted orphan: ${basename(metaPath)}`);
        totalOrphansDeleted++;
        continue;
      } catch { /* correct file doesn't exist — rename */ }

      await rename(metaPath, correctPath);
      log.dim(`  ${basename(metaPath)} → ${correctFilename}`);
      totalFixed++;
    } catch { /* skip unreadable files */ }
  }

  if (totalParenRenamed > 0) {
    log.dim(`  Renamed ${totalParenRenamed} file(s) from (media) to _media`);
  }
  if (totalFixed > 0) {
    log.dim(`  Renamed ${totalFixed} media metadata file(s) to match companion names`);
  }
  if (totalOrphansDeleted > 0) {
    log.dim(`  Deleted ${totalOrphansDeleted} orphaned media metadata file(s)`);
  }
}

const SKIP = new Set(['.dbo', 'node_modules', 'trash', '.git', '.claude']);

/**
 * Recursively collect every "*.metadata~*.json" file under dir,
 * skipping the directories in SKIP. Unreadable directories are ignored.
 *
 * @param {string} dir - Directory to walk
 * @returns {Promise<string[]>} Absolute paths of metadata files
 */
async function findAllMetadataFiles(dir) {
  const results = [];
  try {
    const entries = await readdir(dir, { withFileTypes: true });
    for (const entry of entries) {
      if (SKIP.has(entry.name)) continue;
      const full = join(dir, entry.name);
      if (entry.isDirectory()) {
        results.push(...await findAllMetadataFiles(full));
      } else if (entry.name.includes('.metadata~') && entry.name.endsWith('.json')) {
        results.push(full);
      }
    }
  } catch { /* skip unreadable dirs */ }
  return results;
}
|
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
import { readdir, unlink, access } from 'fs/promises';
import { join, basename } from 'path';
import { log } from '../lib/logger.js';

export const description = 'Delete orphaned (media) companion and metadata files replaced by _media';

/**
 * Migration 010 — Clean up leftover (media) files.
 *
 * Migration 009 renamed (media) → _media but did not delete the old files
 * when the new _media versions already existed. This migration finds any
 * remaining files with "(media)" in their name and deletes them if the
 * corresponding _media version exists.
 *
 * @param {Object} _options - Migration runner options (unused)
 */
export default async function run(_options) {
  const cwd = process.cwd();
  let totalDeleted = 0;

  const parenFiles = await findParenMediaFiles(cwd);
  if (parenFiles.length === 0) return;

  for (const filePath of parenFiles) {
    const filename = basename(filePath);
    // Build the _media equivalent (same directory; join normalizes the '..')
    const newFilename = filename.replace(/\(media\)/g, '_media');
    const newPath = join(filePath, '..', newFilename);

    // Only delete if the _media replacement exists
    try {
      await access(newPath);
      await unlink(filePath);
      // Fix: this log line previously printed a literal "$(unknown)"
      // instead of the deleted file's name.
      log.dim(`  Deleted: ${filename}`);
      totalDeleted++;
    } catch {
      // No _media replacement — leave alone (might be a legitimate filename)
    }
  }

  if (totalDeleted > 0) {
    log.dim(`  Removed ${totalDeleted} orphaned (media) file(s)`);
  }
}

const SKIP = new Set(['.dbo', 'node_modules', 'trash', '.git', '.claude']);

/**
 * Recursively collect every file whose name contains "(media)" under dir,
 * skipping the directories in SKIP. Unreadable directories are ignored.
 *
 * @param {string} dir - Directory to walk
 * @returns {Promise<string[]>} Paths of matching files
 */
async function findParenMediaFiles(dir) {
  const results = [];
  try {
    const entries = await readdir(dir, { withFileTypes: true });
    for (const entry of entries) {
      if (SKIP.has(entry.name)) continue;
      const full = join(dir, entry.name);
      if (entry.isDirectory()) {
        results.push(...await findParenMediaFiles(full));
      } else if (entry.name.includes('(media)')) {
        results.push(full);
      }
    }
  } catch { /* skip unreadable dirs */ }
  return results;
}
|
|
@@ -0,0 +1,120 @@
|
|
|
1
|
+
import { readFile, writeFile, rename, readdir, rm, stat, utimes } from 'fs/promises';
import { join } from 'path';

export const description = 'Rename metadata_templates.json → metadata_schema.json; migrate _contentColumns → _companionReferenceColumns; remove _unsupported dir; retire Extension_* config keys';

/**
 * Migration 011 — schema-driven metadata.
 *
 * Four independent, best-effort steps; each failure is logged and skipped so
 * one bad step never blocks the others. `stat` is now part of the single
 * fs/promises import above (it was previously a duplicate second import
 * from the same module).
 *
 * @param {Object} _options - Migration runner options (unused)
 */
export default async function run(_options) {
  const log = (...args) => console.log(...args);

  // ── Step 1: Rename .dbo/metadata_templates.json → .dbo/metadata_schema.json ──
  try {
    const oldPath = '.dbo/metadata_templates.json';
    const newPath = '.dbo/metadata_schema.json';
    if (await fileExists(oldPath) && !(await fileExists(newPath))) {
      await rename(oldPath, newPath);
      log(`  Renamed .dbo/metadata_templates.json → .dbo/metadata_schema.json`);
    } else if (await fileExists(newPath)) {
      log(`  .dbo/metadata_schema.json already exists — skipping rename`);
    }
  } catch (e) {
    log(`  (skip) metadata_templates rename: ${e.message}`);
  }

  // ── Step 2: Rename _contentColumns → _companionReferenceColumns in all metadata files ──
  try {
    const metaFiles = await findMetadataFiles('.');
    let updated = 0;
    for (const file of metaFiles) {
      try {
        const raw = await readFile(file, 'utf8');
        const obj = JSON.parse(raw);
        if ('_contentColumns' in obj && !('_companionReferenceColumns' in obj)) {
          // Preserve file timestamps so the write doesn't cause false
          // "local changes" detections during the subsequent clone/pull.
          const before = await stat(file);
          obj._companionReferenceColumns = obj._contentColumns;
          delete obj._contentColumns;
          await writeFile(file, JSON.stringify(obj, null, 2) + '\n');
          await utimes(file, before.atime, before.mtime);
          updated++;
        }
      } catch { /* skip malformed files */ }
    }
    if (updated > 0) log(`  Migrated _contentColumns in ${updated} metadata file(s)`);
  } catch (e) {
    log(`  (skip) _contentColumns rename: ${e.message}`);
  }

  // ── Step 3: Move lib/extension/_unsupported/* → lib/extension/ ──
  try {
    const unsupportedDir = 'lib/extension/_unsupported';
    if (await fileExists(unsupportedDir)) {
      const files = await readdir(unsupportedDir);
      let moved = 0;
      for (const file of files) {
        const src = join(unsupportedDir, file);
        const dst = join('lib/extension', file);
        try {
          await rename(src, dst);
          moved++;
        } catch (e) {
          log(`  (warn) Could not move ${file}: ${e.message}`);
        }
      }
      try {
        await rm(unsupportedDir, { recursive: true, force: true });
      } catch { /* ignore */ }
      if (moved > 0) log(`  Moved ${moved} file(s) from lib/extension/_unsupported/ to lib/extension/`);
    }
  } catch (e) {
    log(`  (skip) _unsupported relocation: ${e.message}`);
  }

  // ── Step 4: Remove retired config.json keys ──
  try {
    const configPath = '.dbo/config.json';
    const raw = await readFile(configPath, 'utf8');
    const config = JSON.parse(raw);
    const retiredPattern = /^Extension_.+_(FilenameCol|ContentExtractions)$|^ExtensionFilenameCol$/;
    const keysToRemove = Object.keys(config).filter(k => retiredPattern.test(k));
    if (keysToRemove.length > 0) {
      for (const k of keysToRemove) delete config[k];
      await writeFile(configPath, JSON.stringify(config, null, 2) + '\n');
      log(`  Removed ${keysToRemove.length} retired config key(s): ${keysToRemove.join(', ')}`);
    }
  } catch (e) {
    log(`  (skip) config key retirement: ${e.message}`);
  }
}

/**
 * True if the path exists (file or directory); false on any stat error.
 *
 * @param {string} p - Path to check
 * @returns {Promise<boolean>}
 */
async function fileExists(p) {
  try {
    await stat(p);
    return true;
  } catch {
    return false;
  }
}

/**
 * Recursively collect every "*.metadata~<uid>.json" file under dir,
 * skipping dot-directories, node_modules, and trash.
 *
 * @param {string} dir - Directory to walk
 * @returns {Promise<string[]>} Paths of metadata files
 */
async function findMetadataFiles(dir) {
  const results = [];
  async function walk(d) {
    let entries;
    try {
      entries = await readdir(d, { withFileTypes: true });
    } catch {
      return;
    }
    for (const entry of entries) {
      const full = join(d, entry.name);
      if (entry.isDirectory() && !entry.name.startsWith('.') && entry.name !== 'node_modules' && entry.name !== 'trash') {
        await walk(full);
      } else if (entry.isFile() && /\.metadata~[^.]+\.json$/.test(entry.name)) {
        results.push(full);
      }
    }
  }
  await walk(dir);
  return results;
}
|