@dboio/cli 0.7.2 → 0.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +128 -8
- package/package.json +3 -2
- package/src/commands/add.js +56 -11
- package/src/commands/clone.js +652 -58
- package/src/commands/init.js +19 -3
- package/src/commands/install.js +10 -1
- package/src/commands/push.js +15 -3
- package/src/lib/config.js +101 -0
- package/src/lib/diff.js +67 -13
- package/src/lib/ignore.js +145 -0
- package/src/lib/structure.js +114 -0
- package/src/lib/timestamps.js +31 -9
package/src/commands/clone.js
CHANGED
|
@@ -2,11 +2,11 @@ import { Command } from 'commander';
|
|
|
2
2
|
import { readFile, writeFile, mkdir, access } from 'fs/promises';
|
|
3
3
|
import { join, basename, extname } from 'path';
|
|
4
4
|
import { DboClient } from '../lib/client.js';
|
|
5
|
-
import { loadConfig, updateConfigWithApp, loadClonePlacement, saveClonePlacement, ensureGitignore, saveEntityDirPreference, loadEntityDirPreference, saveEntityContentExtractions, loadEntityContentExtractions, saveAppJsonBaseline, addDeleteEntry, loadCollisionResolutions, saveCollisionResolutions, loadSynchronize, saveAppModifyKey, loadTransactionKeyPreset, saveTransactionKeyPreset, loadOutputFilenamePreference, saveOutputFilenamePreference } from '../lib/config.js';
|
|
6
|
-
import { buildBinHierarchy, resolveBinPath, createDirectories, saveStructureFile, getBinName, findBinByPath, BINS_DIR, DEFAULT_PROJECT_DIRS, ENTITY_DIR_MAP, OUTPUT_ENTITY_MAP, OUTPUT_HIERARCHY_ENTITIES } from '../lib/structure.js';
|
|
5
|
+
import { loadConfig, updateConfigWithApp, loadClonePlacement, saveClonePlacement, ensureGitignore, saveEntityDirPreference, loadEntityDirPreference, saveEntityContentExtractions, loadEntityContentExtractions, saveAppJsonBaseline, addDeleteEntry, loadCollisionResolutions, saveCollisionResolutions, loadSynchronize, saveSynchronize, saveAppModifyKey, loadTransactionKeyPreset, saveTransactionKeyPreset, loadOutputFilenamePreference, saveOutputFilenamePreference, saveCloneSource, loadCloneSource, saveDescriptorFilenamePreference, loadDescriptorFilenamePreference, saveDescriptorContentExtractions, loadDescriptorContentExtractions, saveExtensionDocumentationMDPlacement, loadExtensionDocumentationMDPlacement } from '../lib/config.js';
|
|
6
|
+
import { buildBinHierarchy, resolveBinPath, createDirectories, saveStructureFile, getBinName, findBinByPath, BINS_DIR, DEFAULT_PROJECT_DIRS, ENTITY_DIR_MAP, OUTPUT_ENTITY_MAP, OUTPUT_HIERARCHY_ENTITIES, EXTENSION_DESCRIPTORS_DIR, EXTENSION_UNSUPPORTED_DIR, DOCUMENTATION_DIR, buildDescriptorMapping, saveDescriptorMapping, loadDescriptorMapping, resolveExtensionSubDir } from '../lib/structure.js';
|
|
7
7
|
import { log } from '../lib/logger.js';
|
|
8
|
-
import { setFileTimestamps } from '../lib/timestamps.js';
|
|
9
|
-
import { getLocalSyncTime, isServerNewer, hasLocalModifications, promptChangeDetection, inlineDiffAndMerge } from '../lib/diff.js';
|
|
8
|
+
import { setFileTimestamps, parseServerDate } from '../lib/timestamps.js';
|
|
9
|
+
import { getLocalSyncTime, isServerNewer, hasLocalModifications, promptChangeDetection, inlineDiffAndMerge, isDiffable } from '../lib/diff.js';
|
|
10
10
|
import { checkDomainChange } from '../lib/domain-guard.js';
|
|
11
11
|
|
|
12
12
|
/**
|
|
@@ -422,6 +422,8 @@ export const cloneCommand = new Command('clone')
|
|
|
422
422
|
.argument('[source]', 'Local JSON file path (optional)')
|
|
423
423
|
.option('--app <shortName>', 'App short name to fetch from server')
|
|
424
424
|
.option('-e, --entity <type>', 'Only clone a specific entity type (e.g. output, content, media, extension)')
|
|
425
|
+
.option('--documentation-only', 'When used with -e extension, clone only documentation extensions')
|
|
426
|
+
.option('--descriptor-types <bool>', 'Sort extensions into descriptor sub-directories (default: true)', 'true')
|
|
425
427
|
.option('--force', 'Force re-processing of all files, skip change detection')
|
|
426
428
|
.option('--domain <host>', 'Override domain')
|
|
427
429
|
.option('-y, --yes', 'Auto-accept all prompts')
|
|
@@ -436,26 +438,46 @@ export const cloneCommand = new Command('clone')
|
|
|
436
438
|
});
|
|
437
439
|
|
|
438
440
|
/**
|
|
439
|
-
*
|
|
441
|
+
* Resolve app JSON from a given source (file path, URL, or server fetch).
|
|
442
|
+
* Throws on failure so the caller can retry with a different source.
|
|
440
443
|
*/
|
|
441
|
-
|
|
442
|
-
const config = await loadConfig();
|
|
443
|
-
const effectiveDomain = options.domain || config.domain;
|
|
444
|
-
let appJson;
|
|
445
|
-
|
|
446
|
-
// Step 1: Load the app JSON
|
|
444
|
+
async function resolveAppSource(source, options, config) {
|
|
447
445
|
if (source) {
|
|
448
|
-
|
|
446
|
+
if (source.startsWith('http://') || source.startsWith('https://')) {
|
|
447
|
+
log.info(`Fetching app JSON from ${source}...`);
|
|
448
|
+
const res = await fetch(source);
|
|
449
|
+
if (!res.ok) throw new Error(`HTTP ${res.status} fetching ${source}`);
|
|
450
|
+
return await res.json();
|
|
451
|
+
}
|
|
449
452
|
log.info(`Loading app JSON from ${source}...`);
|
|
450
453
|
const raw = await readFile(source, 'utf8');
|
|
451
|
-
|
|
454
|
+
return JSON.parse(raw);
|
|
452
455
|
} else if (options.app) {
|
|
453
|
-
|
|
454
|
-
appJson = await fetchAppFromServer(options.app, options, config);
|
|
455
|
-
} else if (config.AppShortName) {
|
|
456
|
-
// Use config's AppShortName
|
|
457
|
-
appJson = await fetchAppFromServer(config.AppShortName, options, config);
|
|
456
|
+
return await fetchAppFromServer(options.app, options, config);
|
|
458
457
|
} else {
|
|
458
|
+
// Try stored cloneSource before falling back to server fetch
|
|
459
|
+
const storedSource = await loadCloneSource();
|
|
460
|
+
if (storedSource && storedSource !== 'default') {
|
|
461
|
+
// Stored source is a local file path or URL — reuse it
|
|
462
|
+
if (storedSource.startsWith('http://') || storedSource.startsWith('https://')) {
|
|
463
|
+
log.info(`Fetching app JSON from ${storedSource} (stored source)...`);
|
|
464
|
+
const res = await fetch(storedSource);
|
|
465
|
+
if (!res.ok) throw new Error(`HTTP ${res.status} fetching ${storedSource}`);
|
|
466
|
+
return await res.json();
|
|
467
|
+
}
|
|
468
|
+
if (await fileExists(storedSource)) {
|
|
469
|
+
log.info(`Loading app JSON from ${storedSource} (stored source)...`);
|
|
470
|
+
const raw = await readFile(storedSource, 'utf8');
|
|
471
|
+
return JSON.parse(raw);
|
|
472
|
+
}
|
|
473
|
+
// Stored file no longer exists — fall through to server fetch
|
|
474
|
+
log.dim(` Stored clone source "${storedSource}" not found, trying server...`);
|
|
475
|
+
}
|
|
476
|
+
|
|
477
|
+
if (config.AppShortName) {
|
|
478
|
+
return await fetchAppFromServer(config.AppShortName, options, config);
|
|
479
|
+
}
|
|
480
|
+
|
|
459
481
|
// Prompt
|
|
460
482
|
const inquirer = (await import('inquirer')).default;
|
|
461
483
|
const { choice } = await inquirer.prompt([{
|
|
@@ -474,7 +496,7 @@ export async function performClone(source, options = {}) {
|
|
|
474
496
|
message: 'App short name:',
|
|
475
497
|
validate: v => v.trim() ? true : 'App short name is required',
|
|
476
498
|
}]);
|
|
477
|
-
|
|
499
|
+
return await fetchAppFromServer(appName, options, config);
|
|
478
500
|
} else {
|
|
479
501
|
const { filePath } = await inquirer.prompt([{
|
|
480
502
|
type: 'input', name: 'filePath',
|
|
@@ -482,7 +504,155 @@ export async function performClone(source, options = {}) {
|
|
|
482
504
|
validate: v => v.trim() ? true : 'File path is required',
|
|
483
505
|
}]);
|
|
484
506
|
const raw = await readFile(filePath, 'utf8');
|
|
485
|
-
|
|
507
|
+
return JSON.parse(raw);
|
|
508
|
+
}
|
|
509
|
+
}
|
|
510
|
+
}
|
|
511
|
+
|
|
512
|
+
/**
 * Guard against cloning over un-pushed work.
 * Inspects .dbo/synchronize.json and, when staged items exist, lets the user
 * ignore them, review each one individually, discard everything, or abort so
 * they can run "dbo push" first. With -y the staged items are kept silently.
 */
async function checkPendingSynchronize(options) {
  const sync = await loadSynchronize();
  const counts = {
    delete: (sync.delete || []).length,
    edit: (sync.edit || []).length,
    add: (sync.add || []).length,
  };
  const total = counts.delete + counts.edit + counts.add;
  if (total === 0) return;

  // Summarize what is staged before asking anything.
  log.warn('');
  log.warn(` ⚠ There are ${total} un-pushed staged item(s) in .dbo/synchronize.json:`);
  if (counts.delete > 0) log.warn(` ${counts.delete} pending deletion(s)`);
  if (counts.edit > 0) log.warn(` ${counts.edit} pending edit(s)`);
  if (counts.add > 0) log.warn(` ${counts.add} pending add(s)`);
  log.warn('');

  // Non-interactive mode: keep everything staged and continue.
  if (options.yes) {
    log.dim(' Ignoring staged items (-y flag)');
    return;
  }

  const inquirer = (await import('inquirer')).default;
  const { action } = await inquirer.prompt([{
    type: 'list',
    name: 'action',
    message: 'How would you like to handle the staged items?',
    choices: [
      { name: 'Ignore — keep staged items and continue cloning', value: 'ignore' },
      { name: 'Review — show each item and decide individually', value: 'review' },
      { name: 'Clear — discard all staged items and continue cloning', value: 'clear' },
      { name: 'Abort — stop cloning so you can push first', value: 'abort' },
    ],
  }]);

  switch (action) {
    case 'abort': {
      log.info('Clone aborted. Run "dbo push" to process staged items first.');
      process.exit(0);
      break;
    }
    case 'clear': {
      await saveSynchronize({ delete: [], edit: [], add: [] });
      log.success('Cleared all staged items from synchronize.json');
      return;
    }
    case 'review': {
      const kept = { delete: [], edit: [], add: [] };

      for (const category of ['delete', 'edit', 'add']) {
        for (const item of sync[category] || []) {
          // Best-effort human-readable label for the staged item.
          const label = item.Name || item.UID || item.metaPath || JSON.stringify(item).substring(0, 80);
          const entity = item._entity || item.entity || '';
          const desc = entity ? `[${category}] ${entity}: ${label}` : `[${category}] ${label}`;

          const { keep } = await inquirer.prompt([{
            type: 'confirm',
            name: 'keep',
            message: `Keep staged? ${desc}`,
            default: true,
          }]);

          if (keep) {
            kept[category].push(item);
          } else {
            log.dim(` Discarded: ${desc}`);
          }
        }
      }

      await saveSynchronize(kept);
      const remaining = kept.delete.length + kept.edit.length + kept.add.length;
      if (remaining > 0) {
        log.info(`${remaining} staged item(s) kept`);
      } else {
        log.success('All staged items discarded');
      }
      return;
    }
    default: {
      // action === 'ignore' — leave synchronize.json untouched.
      log.dim(' Keeping staged items');
    }
  }
}
|
|
601
|
+
|
|
602
|
+
/**
|
|
603
|
+
* Main clone workflow. Exported for use by init --clone.
|
|
604
|
+
*/
|
|
605
|
+
export async function performClone(source, options = {}) {
|
|
606
|
+
const config = await loadConfig();
|
|
607
|
+
const effectiveDomain = options.domain || config.domain;
|
|
608
|
+
let appJson;
|
|
609
|
+
|
|
610
|
+
// Step 1: Source mismatch detection
|
|
611
|
+
// Warn when the user provides an explicit source that differs from the stored one.
|
|
612
|
+
const storedCloneSource = await loadCloneSource();
|
|
613
|
+
if (source && storedCloneSource && source !== storedCloneSource) {
|
|
614
|
+
if (!options.force && !options.yes) {
|
|
615
|
+
log.warn('');
|
|
616
|
+
log.warn(` ⚠ This project was previously cloned from: ${storedCloneSource}`);
|
|
617
|
+
log.warn(` Requested source: ${source}`);
|
|
618
|
+
const inquirer = (await import('inquirer')).default;
|
|
619
|
+
const { confirmed } = await inquirer.prompt([{
|
|
620
|
+
type: 'confirm',
|
|
621
|
+
name: 'confirmed',
|
|
622
|
+
message: 'Clone from the new source anyway? This will update the stored clone source.',
|
|
623
|
+
default: false,
|
|
624
|
+
}]);
|
|
625
|
+
if (!confirmed) {
|
|
626
|
+
log.info('Clone aborted.');
|
|
627
|
+
return;
|
|
628
|
+
}
|
|
629
|
+
} else {
|
|
630
|
+
log.warn(` ⚠ Clone source override: "${storedCloneSource}" → "${source}"`);
|
|
631
|
+
}
|
|
632
|
+
}
|
|
633
|
+
|
|
634
|
+
// Step 2: Load the app JSON — retry loop with fallback prompt on failure
|
|
635
|
+
let activeSource = source;
|
|
636
|
+
while (true) {
|
|
637
|
+
try {
|
|
638
|
+
appJson = await resolveAppSource(activeSource, options, config);
|
|
639
|
+
break;
|
|
640
|
+
} catch (err) {
|
|
641
|
+
if (options.yes || !process.stdin.isTTY) {
|
|
642
|
+
throw err;
|
|
643
|
+
}
|
|
644
|
+
log.warn('');
|
|
645
|
+
log.warn(` ⚠ Source did not return expected results: ${err.message}`);
|
|
646
|
+
const inquirer = (await import('inquirer')).default;
|
|
647
|
+
const { fallback } = await inquirer.prompt([{
|
|
648
|
+
type: 'input',
|
|
649
|
+
name: 'fallback',
|
|
650
|
+
message: 'Enter another local file path or URL to retry (or leave empty to abort):',
|
|
651
|
+
}]);
|
|
652
|
+
if (!fallback || !fallback.trim()) {
|
|
653
|
+
throw new Error('Clone aborted: no valid source provided.');
|
|
654
|
+
}
|
|
655
|
+
activeSource = fallback.trim();
|
|
486
656
|
}
|
|
487
657
|
}
|
|
488
658
|
|
|
@@ -502,6 +672,9 @@ export async function performClone(source, options = {}) {
|
|
|
502
672
|
|
|
503
673
|
log.success(`Cloning "${appJson.Name}" (${appJson.ShortName})`);
|
|
504
674
|
|
|
675
|
+
// Check for un-pushed staged items in synchronize.json
|
|
676
|
+
await checkPendingSynchronize(options);
|
|
677
|
+
|
|
505
678
|
// Ensure sensitive files are gitignored
|
|
506
679
|
await ensureGitignore(['.dbo/credentials.json', '.dbo/cookies.txt']);
|
|
507
680
|
|
|
@@ -512,6 +685,7 @@ export async function performClone(source, options = {}) {
|
|
|
512
685
|
AppName: appJson.Name,
|
|
513
686
|
AppShortName: appJson.ShortName,
|
|
514
687
|
});
|
|
688
|
+
await saveCloneSource(activeSource || 'default');
|
|
515
689
|
log.dim(' Updated .dbo/config.json with app metadata');
|
|
516
690
|
|
|
517
691
|
// Detect and store ModifyKey for locked/production apps
|
|
@@ -637,7 +811,13 @@ export async function performClone(source, options = {}) {
|
|
|
637
811
|
// Skip if entity filter is active and this entity doesn't match
|
|
638
812
|
if (entityFilter && !entityFilter.has(entityName)) continue;
|
|
639
813
|
|
|
640
|
-
if (
|
|
814
|
+
if (entityName === 'extension') {
|
|
815
|
+
// Extension entities: descriptor-organized sub-directories
|
|
816
|
+
const refs = await processExtensionEntries(entries, structure, options, serverTz);
|
|
817
|
+
if (refs.length > 0) {
|
|
818
|
+
otherRefs[entityName] = refs;
|
|
819
|
+
}
|
|
820
|
+
} else if (ENTITY_DIR_MAP[entityName]) {
|
|
641
821
|
// Entity types with project directories — process into their directory
|
|
642
822
|
const refs = await processEntityDirEntries(entityName, entries, options, serverTz);
|
|
643
823
|
if (refs.length > 0) {
|
|
@@ -756,16 +936,23 @@ async function resolvePlacementPreferences(appJson, options) {
|
|
|
756
936
|
const answers = await inquirer.prompt(prompts);
|
|
757
937
|
contentPlacement = contentPlacement || answers.contentPlacement || 'bin';
|
|
758
938
|
mediaPlacement = mediaPlacement || answers.mediaPlacement || 'bin';
|
|
939
|
+
}
|
|
759
940
|
|
|
760
|
-
|
|
761
|
-
|
|
762
|
-
|
|
941
|
+
// Resolve defaults for any still-unset values
|
|
942
|
+
contentPlacement = contentPlacement || 'bin';
|
|
943
|
+
mediaPlacement = mediaPlacement || 'bin';
|
|
944
|
+
|
|
945
|
+
// Always persist resolved values — not just when prompts were shown.
|
|
946
|
+
// This ensures defaults are saved even when the app has no content/media yet,
|
|
947
|
+
// so subsequent clones that do have records skip the prompts.
|
|
948
|
+
if (!saved.contentPlacement || !saved.mediaPlacement) {
|
|
949
|
+
await saveClonePlacement({ contentPlacement, mediaPlacement });
|
|
950
|
+
if (prompts.length > 0) {
|
|
951
|
+
log.dim(' Saved placement preferences to .dbo/config.json');
|
|
952
|
+
}
|
|
763
953
|
}
|
|
764
954
|
|
|
765
|
-
return {
|
|
766
|
-
contentPlacement: contentPlacement || 'bin',
|
|
767
|
-
mediaPlacement: mediaPlacement || 'bin',
|
|
768
|
-
};
|
|
955
|
+
return { contentPlacement, mediaPlacement };
|
|
769
956
|
}
|
|
770
957
|
|
|
771
958
|
/**
|
|
@@ -775,10 +962,7 @@ async function fetchAppFromServer(appShortName, options, config) {
|
|
|
775
962
|
const client = new DboClient({ domain: options.domain, verbose: options.verbose });
|
|
776
963
|
log.info(`Fetching app "${appShortName}" from server...`);
|
|
777
964
|
|
|
778
|
-
const result = await client.get(
|
|
779
|
-
'_filter:AppShortName': appShortName,
|
|
780
|
-
'_format': 'json_raw',
|
|
781
|
-
});
|
|
965
|
+
const result = await client.get(`/api/app/object/${appShortName}`);
|
|
782
966
|
|
|
783
967
|
const data = result.payload || result.data;
|
|
784
968
|
const rows = Array.isArray(data) ? data : (data?.Rows || data?.rows || []);
|
|
@@ -1082,11 +1266,16 @@ async function processEntityDirEntries(entityName, entries, options, serverTz) {
|
|
|
1082
1266
|
}
|
|
1083
1267
|
|
|
1084
1268
|
if (bulkAction.value !== 'overwrite_all') {
|
|
1269
|
+
const configWithTz = { ...config, ServerTimezone: serverTz };
|
|
1085
1270
|
const localSyncTime = await getLocalSyncTime(metaPath);
|
|
1086
|
-
const serverNewer = isServerNewer(localSyncTime, record._LastUpdated,
|
|
1271
|
+
const serverNewer = isServerNewer(localSyncTime, record._LastUpdated, configWithTz);
|
|
1272
|
+
const serverDate = parseServerDate(record._LastUpdated, serverTz);
|
|
1087
1273
|
|
|
1088
1274
|
if (serverNewer) {
|
|
1089
|
-
const action = await promptChangeDetection(finalName, record,
|
|
1275
|
+
const action = await promptChangeDetection(finalName, record, configWithTz, {
|
|
1276
|
+
serverDate,
|
|
1277
|
+
localDate: localSyncTime,
|
|
1278
|
+
});
|
|
1090
1279
|
|
|
1091
1280
|
if (action === 'skip') {
|
|
1092
1281
|
log.dim(` Skipped ${finalName}`);
|
|
@@ -1103,14 +1292,18 @@ async function processEntityDirEntries(entityName, entries, options, serverTz) {
|
|
|
1103
1292
|
bulkAction.value = 'overwrite_all';
|
|
1104
1293
|
}
|
|
1105
1294
|
if (action === 'compare') {
|
|
1106
|
-
await inlineDiffAndMerge(record, metaPath,
|
|
1295
|
+
await inlineDiffAndMerge(record, metaPath, configWithTz);
|
|
1107
1296
|
refs.push({ uid: record.UID, metaPath });
|
|
1108
1297
|
continue;
|
|
1109
1298
|
}
|
|
1110
1299
|
} else {
|
|
1111
|
-
const locallyModified = await hasLocalModifications(metaPath,
|
|
1300
|
+
const locallyModified = await hasLocalModifications(metaPath, configWithTz);
|
|
1112
1301
|
if (locallyModified) {
|
|
1113
|
-
const action = await promptChangeDetection(finalName, record,
|
|
1302
|
+
const action = await promptChangeDetection(finalName, record, configWithTz, {
|
|
1303
|
+
localIsNewer: true,
|
|
1304
|
+
serverDate,
|
|
1305
|
+
localDate: localSyncTime,
|
|
1306
|
+
});
|
|
1114
1307
|
|
|
1115
1308
|
if (action === 'skip') {
|
|
1116
1309
|
log.dim(` Kept local: ${finalName}`);
|
|
@@ -1127,7 +1320,7 @@ async function processEntityDirEntries(entityName, entries, options, serverTz) {
|
|
|
1127
1320
|
bulkAction.value = 'overwrite_all';
|
|
1128
1321
|
}
|
|
1129
1322
|
if (action === 'compare') {
|
|
1130
|
-
await inlineDiffAndMerge(record, metaPath,
|
|
1323
|
+
await inlineDiffAndMerge(record, metaPath, configWithTz, { localIsNewer: true });
|
|
1131
1324
|
refs.push({ uid: record.UID, metaPath });
|
|
1132
1325
|
continue;
|
|
1133
1326
|
}
|
|
@@ -1199,6 +1392,378 @@ async function processEntityDirEntries(entityName, entries, options, serverTz) {
|
|
|
1199
1392
|
return refs;
|
|
1200
1393
|
}
|
|
1201
1394
|
|
|
1395
|
+
// ─── Extension Descriptor Sub-directory Processing ────────────────────────
|
|
1396
|
+
|
|
1397
|
+
/**
 * Pre-pass over extension records before any files are written.
 * Derives the descriptor → sub-directory mapping from descriptor_definition
 * entries, creates every mapped sub-directory (plus the always-present
 * Unsupported/ directory), and persists the mapping to .dbo/structure.json.
 *
 * @param {Object[]} extensionEntries
 * @param {Object} structure - Current bin structure (from loadStructureFile)
 * @returns {Promise<Object<string,string>>} descriptor name → directory name
 */
async function buildDescriptorPrePass(extensionEntries, structure) {
  const { mapping, warnings } = buildDescriptorMapping(extensionEntries);

  warnings.forEach((w) => log.warn(` descriptor_definition: ${w}`));

  // Unsupported/ is created unconditionally — even when empty it makes
  // unmappable records visible at a glance.
  await mkdir(EXTENSION_UNSUPPORTED_DIR, { recursive: true });
  log.dim(` ${EXTENSION_UNSUPPORTED_DIR}/`);

  // One sub-directory per distinct mapped descriptor name.
  const uniqueDirs = new Set(Object.values(mapping));
  for (const dirName of uniqueDirs) {
    const fullDir = `${EXTENSION_DESCRIPTORS_DIR}/${dirName}`;
    await mkdir(fullDir, { recursive: true });
    log.dim(` ${fullDir}/`);
  }

  await saveDescriptorMapping(structure, mapping);
  log.dim(` Saved descriptorMapping to .dbo/structure.json`);

  return mapping;
}
|
|
1428
|
+
|
|
1429
|
+
/**
 * Resolve the filename column and base64-content extraction preferences for
 * one extension descriptor group.
 *
 * Filename column: with -y, picks Name → UID → first column without asking;
 * otherwise reuses the saved preference (unless --force) or prompts and saves.
 * Content extraction: scans all records for base64-encoded object values and,
 * per column, either reuses the saved decision or prompts for extract + file
 * extension, persisting any new answers.
 *
 * NOTE(review): with -y, content extraction is skipped entirely — even when
 * saved extraction preferences exist they are not applied. Confirm intended.
 *
 * @param {string} descriptor - e.g. "documentation", "include", "control"
 * @param {Object[]} records - All extension records sharing this descriptor
 * @param {Object} options - CLI options (options.yes, options.force)
 * @returns {Promise<{ filenameCol: string, contentColsToExtract: Array<{col,ext}> }>}
 */
async function resolveDescriptorPreferences(descriptor, records, options) {
  // Candidate columns come from the first record only; 'children' and
  // underscore-prefixed metadata keys are never offered.
  const sampleRecord = records[0];
  const columns = Object.keys(sampleRecord)
    .filter(k => k !== 'children' && !k.startsWith('_'));

  // ── Filename column ──────────────────────────────────────────────────
  let filenameCol;
  const savedCol = await loadDescriptorFilenamePreference(descriptor);

  if (options.yes) {
    // Non-interactive default: prefer Name, then UID, then the first column.
    filenameCol = columns.includes('Name') ? 'Name'
      : columns.includes('UID') ? 'UID' : columns[0];
  } else if (savedCol && !options.force) {
    // Reuse the persisted choice; --force bypasses it to re-prompt.
    filenameCol = savedCol;
    log.dim(` Filename column for "${descriptor}": "${filenameCol}" (saved)`);
  } else {
    const inquirer = (await import('inquirer')).default;
    const defaultCol = columns.includes('Name') ? 'Name'
      : columns.includes('UID') ? 'UID' : columns[0];
    const { col } = await inquirer.prompt([{
      type: 'list', name: 'col',
      message: `Filename column for "${descriptor}" extensions:`,
      choices: columns, default: defaultCol,
    }]);
    filenameCol = col;
    await saveDescriptorFilenamePreference(descriptor, filenameCol);
    log.dim(` Saved filename column for "${descriptor}": "${filenameCol}"`);
  }

  // ── Content extraction ───────────────────────────────────────────────
  const contentColsToExtract = [];
  if (!options.yes) {
    // Collect every distinct column that holds a base64 payload in ANY record,
    // with a short decoded preview snippet for the prompt.
    const base64Cols = [];
    for (const record of records) {
      for (const [key, value] of Object.entries(record)) {
        if (key === 'children' || key.startsWith('_')) continue;
        if (value && typeof value === 'object' && !Array.isArray(value)
          && value.encoding === 'base64' && value.value !== null) {
          if (!base64Cols.find(c => c.col === key)) {
            let snippet = '';
            try {
              const decoded = Buffer.from(value.value, 'base64').toString('utf8');
              snippet = decoded.substring(0, 80)
                .replace(/[\x00-\x1f\x7f]/g, ' ') // strip control chars (incl. \n, \r, \t, ESC)
                .replace(/\s+/g, ' ') // collapse whitespace
                .trim();
              if (decoded.length > 80) snippet += '…';
            } catch {} // non-UTF8/undecodable payloads simply get no preview
            base64Cols.push({ col: key, snippet });
          }
        }
      }
    }

    if (base64Cols.length > 0) {
      // --force discards saved answers so every column is re-asked.
      const savedExtractions = options.force
        ? null
        : await loadDescriptorContentExtractions(descriptor);
      const newPreferences = savedExtractions ? { ...savedExtractions } : {};
      let changed = false;
      const inquirer = (await import('inquirer')).default;

      for (const { col, snippet } of base64Cols) {
        if (savedExtractions) {
          const saved = savedExtractions[col];
          // saved === false means the user previously declined extraction.
          if (saved === false) { log.dim(` Skip "${col}" for "${descriptor}" (saved)`); continue; }
          // A string value is the previously chosen file extension.
          if (typeof saved === 'string') {
            log.dim(` Extracting "${col}" for "${descriptor}" as .${saved} (saved)`);
            contentColsToExtract.push({ col, ext: saved });
            continue;
          }
        }
        const preview = snippet ? ` ("${snippet}")` : '';
        const { extract } = await inquirer.prompt([{
          type: 'confirm', name: 'extract',
          message: `Extract column "${col}" (${descriptor}) as companion file?${preview}`,
          default: true,
        }]);
        if (extract) {
          const guessed = guessExtensionForDescriptor(descriptor, col);
          const { ext } = await inquirer.prompt([{
            type: 'input', name: 'ext',
            message: `File extension for "${col}" (${descriptor}):`,
            default: guessed,
          }]);
          const cleanExt = ext.replace(/^\./, ''); // store without leading dot
          contentColsToExtract.push({ col, ext: cleanExt });
          newPreferences[col] = cleanExt;
        } else {
          newPreferences[col] = false;
        }
        // Only columns that reached a prompt mark the preference set dirty.
        changed = true;
      }
      if (changed) await saveDescriptorContentExtractions(descriptor, newPreferences);
    }
  }

  return { filenameCol, contentColsToExtract };
}
|
|
1537
|
+
|
|
1538
|
+
/**
 * Guess a companion-file extension for a descriptor's content column.
 * Well-known descriptors map to fixed extensions; anything else falls back
 * to a column-name based guess.
 */
function guessExtensionForDescriptor(descriptor, columnName) {
  switch (descriptor) {
    case 'documentation':
      return 'md';
    case 'include':
      return 'html';
    case 'control':
      return 'js';
    default:
      return guessExtensionForColumn(columnName);
  }
}
|
|
1547
|
+
|
|
1548
|
+
/**
 * Resolve where extracted documentation-descriptor MD companion files go.
 * -y short-circuits to 'inline'; otherwise a saved preference is reused
 * (unless --force), or the user is prompted and the answer is persisted.
 * @returns {Promise<'inline'|'root'>}
 */
async function resolveDocumentationPlacement(options) {
  // Non-interactive runs always place MD files alongside the metadata.
  if (options.yes) return 'inline';

  const previous = await loadExtensionDocumentationMDPlacement();
  if (previous && !options.force) {
    log.dim(` Documentation MD placement: ${previous} (saved)`);
    return previous;
  }

  const inquirer = (await import('inquirer')).default;
  const { placement } = await inquirer.prompt([{
    type: 'list', name: 'placement',
    message: 'Where should extracted documentation MD files be placed?',
    choices: [
      { name: '/Documentation/<filename>.md — project root (recommended)', value: 'root' },
      { name: 'Extensions/Documentation/<filename>.md — inline alongside metadata', value: 'inline' },
    ],
    default: 'root',
  }]);

  await saveExtensionDocumentationMDPlacement(placement);
  log.dim(` Saved documentation MD placement: "${placement}"`);
  return placement;
}
|
|
1577
|
+
|
|
1578
|
+
/**
|
|
1579
|
+
* Process extension entity records into descriptor-organized sub-directories.
|
|
1580
|
+
*
|
|
1581
|
+
* @param {Object[]} entries - All extension records from appJson.children.extension
|
|
1582
|
+
* @param {Object} structure - Bin structure from loadStructureFile()
|
|
1583
|
+
* @param {Object} options - CLI options (options.yes, options.force, options.entity)
|
|
1584
|
+
* @param {string} serverTz - Server timezone for timestamp syncing
|
|
1585
|
+
* @returns {Promise<Array<{uid, metaPath}>>}
|
|
1586
|
+
*/
|
|
1587
|
+
async function processExtensionEntries(entries, structure, options, serverTz) {
|
|
1588
|
+
if (!entries || entries.length === 0) return [];
|
|
1589
|
+
|
|
1590
|
+
// --descriptor-types false → skip descriptor sorting, use flat Extensions/ via generic path
|
|
1591
|
+
if (options.descriptorTypes === 'false') {
|
|
1592
|
+
log.info(`Processing ${entries.length} extension record(s) (flat mode)...`);
|
|
1593
|
+
return await processEntityDirEntries('extension', entries, options, serverTz);
|
|
1594
|
+
}
|
|
1595
|
+
|
|
1596
|
+
log.info(`Processing ${entries.length} extension record(s)...`);
|
|
1597
|
+
|
|
1598
|
+
// Step A: Pre-pass — build mapping + create directories
|
|
1599
|
+
const mapping = await buildDescriptorPrePass(entries, structure);
|
|
1600
|
+
|
|
1601
|
+
// Clear documentation preferences when --force is used with --documentation-only
|
|
1602
|
+
if (options.documentationOnly && options.force) {
|
|
1603
|
+
await saveDescriptorFilenamePreference('documentation', null);
|
|
1604
|
+
await saveDescriptorContentExtractions('documentation', null);
|
|
1605
|
+
await saveExtensionDocumentationMDPlacement(null);
|
|
1606
|
+
log.info(' --force: cleared saved documentation preferences');
|
|
1607
|
+
}
|
|
1608
|
+
|
|
1609
|
+
// Step B: Group records by descriptor
|
|
1610
|
+
const groups = new Map(); // descriptor → { dir, records[] }
|
|
1611
|
+
for (const record of entries) {
|
|
1612
|
+
const descriptor = record.Descriptor || '__unsupported__';
|
|
1613
|
+
|
|
1614
|
+
// --documentation-only: skip non-documentation records
|
|
1615
|
+
if (options.documentationOnly && descriptor !== 'documentation') continue;
|
|
1616
|
+
|
|
1617
|
+
if (!groups.has(descriptor)) {
|
|
1618
|
+
groups.set(descriptor, { dir: resolveExtensionSubDir(record, mapping), records: [] });
|
|
1619
|
+
}
|
|
1620
|
+
groups.get(descriptor).records.push(record);
|
|
1621
|
+
}
|
|
1622
|
+
|
|
1623
|
+
// Step C: Resolve preferences per descriptor (prompts fire here, before file writes)
|
|
1624
|
+
const descriptorPrefs = new Map();
|
|
1625
|
+
let docPlacement = 'inline';
|
|
1626
|
+
|
|
1627
|
+
for (const [descriptor, { records }] of groups.entries()) {
|
|
1628
|
+
if (descriptor === '__unsupported__') {
|
|
1629
|
+
descriptorPrefs.set(descriptor, { filenameCol: null, contentColsToExtract: [] });
|
|
1630
|
+
continue;
|
|
1631
|
+
}
|
|
1632
|
+
const prefs = await resolveDescriptorPreferences(descriptor, records, options);
|
|
1633
|
+
descriptorPrefs.set(descriptor, prefs);
|
|
1634
|
+
|
|
1635
|
+
if (descriptor === 'documentation') {
|
|
1636
|
+
docPlacement = await resolveDocumentationPlacement(options);
|
|
1637
|
+
if (docPlacement === 'root') {
|
|
1638
|
+
await mkdir(DOCUMENTATION_DIR, { recursive: true });
|
|
1639
|
+
log.dim(` ${DOCUMENTATION_DIR}/`);
|
|
1640
|
+
}
|
|
1641
|
+
}
|
|
1642
|
+
}
|
|
1643
|
+
|
|
1644
|
+
// Step D: Write files, one group at a time
|
|
1645
|
+
const refs = [];
|
|
1646
|
+
const bulkAction = { value: null };
|
|
1647
|
+
const config = await loadConfig();
|
|
1648
|
+
|
|
1649
|
+
for (const [descriptor, { dir, records }] of groups.entries()) {
|
|
1650
|
+
const { filenameCol, contentColsToExtract } = descriptorPrefs.get(descriptor);
|
|
1651
|
+
const useRootDoc = (descriptor === 'documentation' && docPlacement === 'root');
|
|
1652
|
+
const mdColInfo = useRootDoc ? contentColsToExtract.find(c => c.ext === 'md') : null;
|
|
1653
|
+
|
|
1654
|
+
log.info(`Processing ${records.length} "${descriptor}" extension(s) → ${dir}/`);
|
|
1655
|
+
|
|
1656
|
+
for (const record of records) {
|
|
1657
|
+
// Resolve filename
|
|
1658
|
+
let name;
|
|
1659
|
+
if (filenameCol && record[filenameCol] != null) {
|
|
1660
|
+
name = sanitizeFilename(String(record[filenameCol]));
|
|
1661
|
+
} else if (record.Name) {
|
|
1662
|
+
name = sanitizeFilename(String(record.Name));
|
|
1663
|
+
} else {
|
|
1664
|
+
name = sanitizeFilename(String(record.UID || 'untitled'));
|
|
1665
|
+
}
|
|
1666
|
+
|
|
1667
|
+
const uid = record.UID || 'untitled';
|
|
1668
|
+
const finalName = name === uid ? uid : `${name}.${uid}`;
|
|
1669
|
+
const metaPath = join(dir, `${finalName}.metadata.json`);
|
|
1670
|
+
|
|
1671
|
+
// Change detection — same pattern as processEntityDirEntries()
|
|
1672
|
+
const hasNewExtractions = contentColsToExtract.length > 0;
|
|
1673
|
+
if (await fileExists(metaPath) && !options.yes && !hasNewExtractions) {
|
|
1674
|
+
if (bulkAction.value === 'skip_all') {
|
|
1675
|
+
log.dim(` Skipped ${finalName}`);
|
|
1676
|
+
refs.push({ uid: record.UID, metaPath });
|
|
1677
|
+
continue;
|
|
1678
|
+
}
|
|
1679
|
+
if (bulkAction.value !== 'overwrite_all') {
|
|
1680
|
+
const cfgWithTz = { ...config, ServerTimezone: serverTz };
|
|
1681
|
+
const localSyncTime = await getLocalSyncTime(metaPath);
|
|
1682
|
+
const serverNewer = isServerNewer(localSyncTime, record._LastUpdated, cfgWithTz);
|
|
1683
|
+
const serverDate = parseServerDate(record._LastUpdated, serverTz);
|
|
1684
|
+
|
|
1685
|
+
if (serverNewer) {
|
|
1686
|
+
const action = await promptChangeDetection(finalName, record, cfgWithTz, { serverDate, localDate: localSyncTime });
|
|
1687
|
+
if (action === 'skip') { refs.push({ uid: record.UID, metaPath }); continue; }
|
|
1688
|
+
if (action === 'skip_all') { bulkAction.value = 'skip_all'; refs.push({ uid: record.UID, metaPath }); continue; }
|
|
1689
|
+
if (action === 'overwrite_all') { bulkAction.value = 'overwrite_all'; }
|
|
1690
|
+
if (action === 'compare') { await inlineDiffAndMerge(record, metaPath, cfgWithTz); refs.push({ uid: record.UID, metaPath }); continue; }
|
|
1691
|
+
} else {
|
|
1692
|
+
const localModified = await hasLocalModifications(metaPath, cfgWithTz);
|
|
1693
|
+
if (localModified) {
|
|
1694
|
+
const action = await promptChangeDetection(finalName, record, cfgWithTz, { localIsNewer: true, serverDate, localDate: localSyncTime });
|
|
1695
|
+
if (action === 'skip') { refs.push({ uid: record.UID, metaPath }); continue; }
|
|
1696
|
+
if (action === 'skip_all') { bulkAction.value = 'skip_all'; refs.push({ uid: record.UID, metaPath }); continue; }
|
|
1697
|
+
if (action === 'overwrite_all') { bulkAction.value = 'overwrite_all'; }
|
|
1698
|
+
if (action === 'compare') { await inlineDiffAndMerge(record, metaPath, cfgWithTz, { localIsNewer: true }); refs.push({ uid: record.UID, metaPath }); continue; }
|
|
1699
|
+
} else {
|
|
1700
|
+
log.dim(` Up to date: ${finalName}`);
|
|
1701
|
+
refs.push({ uid: record.UID, metaPath });
|
|
1702
|
+
continue;
|
|
1703
|
+
}
|
|
1704
|
+
}
|
|
1705
|
+
}
|
|
1706
|
+
}
|
|
1707
|
+
|
|
1708
|
+
// Build metadata object
|
|
1709
|
+
const meta = {};
|
|
1710
|
+
const extractedCols = [];
|
|
1711
|
+
|
|
1712
|
+
for (const [key, value] of Object.entries(record)) {
|
|
1713
|
+
if (key === 'children') continue;
|
|
1714
|
+
|
|
1715
|
+
const extractInfo = contentColsToExtract.find(c => c.col === key);
|
|
1716
|
+
if (extractInfo && value && typeof value === 'object'
|
|
1717
|
+
&& value.encoding === 'base64' && value.value !== null) {
|
|
1718
|
+
const decoded = resolveContentValue(value);
|
|
1719
|
+
if (decoded) {
|
|
1720
|
+
let colFilePath, refValue;
|
|
1721
|
+
|
|
1722
|
+
if (mdColInfo && extractInfo.col === mdColInfo.col) {
|
|
1723
|
+
// Root placement: Documentation/<name>.md
|
|
1724
|
+
const docFileName = `${name}.md`;
|
|
1725
|
+
colFilePath = join(DOCUMENTATION_DIR, docFileName);
|
|
1726
|
+
refValue = `@/${DOCUMENTATION_DIR}/${docFileName}`;
|
|
1727
|
+
} else {
|
|
1728
|
+
const colFileName = `${finalName}.${key}.${extractInfo.ext}`;
|
|
1729
|
+
colFilePath = join(dir, colFileName);
|
|
1730
|
+
refValue = `@${colFileName}`;
|
|
1731
|
+
}
|
|
1732
|
+
|
|
1733
|
+
meta[key] = refValue;
|
|
1734
|
+
await writeFile(colFilePath, decoded);
|
|
1735
|
+
extractedCols.push(key);
|
|
1736
|
+
if (serverTz) {
|
|
1737
|
+
try { await setFileTimestamps(colFilePath, record._CreatedOn, record._LastUpdated, serverTz); } catch {}
|
|
1738
|
+
}
|
|
1739
|
+
log.dim(` → ${colFilePath}`);
|
|
1740
|
+
continue;
|
|
1741
|
+
}
|
|
1742
|
+
}
|
|
1743
|
+
|
|
1744
|
+
// Inline or non-extraction columns
|
|
1745
|
+
if (value && typeof value === 'object' && !Array.isArray(value) && value.encoding === 'base64') {
|
|
1746
|
+
meta[key] = resolveContentValue(value);
|
|
1747
|
+
} else {
|
|
1748
|
+
meta[key] = value;
|
|
1749
|
+
}
|
|
1750
|
+
}
|
|
1751
|
+
|
|
1752
|
+
meta._entity = 'extension';
|
|
1753
|
+
if (extractedCols.length > 0) meta._contentColumns = extractedCols;
|
|
1754
|
+
|
|
1755
|
+
await writeFile(metaPath, JSON.stringify(meta, null, 2) + '\n');
|
|
1756
|
+
if (serverTz) {
|
|
1757
|
+
try { await setFileTimestamps(metaPath, record._CreatedOn, record._LastUpdated, serverTz); } catch {}
|
|
1758
|
+
}
|
|
1759
|
+
log.success(`Saved ${metaPath}`);
|
|
1760
|
+
refs.push({ uid: record.UID, metaPath });
|
|
1761
|
+
}
|
|
1762
|
+
}
|
|
1763
|
+
|
|
1764
|
+
return refs;
|
|
1765
|
+
}
|
|
1766
|
+
|
|
1202
1767
|
/**
|
|
1203
1768
|
* Process media entries: download binary files from server + create metadata.
|
|
1204
1769
|
* Media uses Filename (not Name) and files are fetched via /api/media/{uid}.
|
|
@@ -1349,11 +1914,20 @@ async function processMediaEntries(mediaRecords, structure, options, config, app
|
|
|
1349
1914
|
}
|
|
1350
1915
|
|
|
1351
1916
|
if (mediaBulkAction.value !== 'overwrite_all') {
|
|
1917
|
+
// Use explicit serverTz (not config.ServerTimezone) to ensure consistency
|
|
1918
|
+
// with how setFileTimestamps set the mtime — the two must use the same timezone.
|
|
1919
|
+
const configWithTz = { ...config, ServerTimezone: serverTz };
|
|
1352
1920
|
const localSyncTime = await getLocalSyncTime(metaPath);
|
|
1353
|
-
const serverNewer = isServerNewer(localSyncTime, record._LastUpdated,
|
|
1921
|
+
const serverNewer = isServerNewer(localSyncTime, record._LastUpdated, configWithTz);
|
|
1922
|
+
const serverDate = parseServerDate(record._LastUpdated, serverTz);
|
|
1923
|
+
const diffable = isDiffable(ext);
|
|
1354
1924
|
|
|
1355
1925
|
if (serverNewer) {
|
|
1356
|
-
const action = await promptChangeDetection(dedupName, record,
|
|
1926
|
+
const action = await promptChangeDetection(dedupName, record, configWithTz, {
|
|
1927
|
+
diffable,
|
|
1928
|
+
serverDate,
|
|
1929
|
+
localDate: localSyncTime,
|
|
1930
|
+
});
|
|
1357
1931
|
|
|
1358
1932
|
if (action === 'skip') {
|
|
1359
1933
|
log.dim(` Skipped ${finalFilename}`);
|
|
@@ -1371,15 +1945,21 @@ async function processMediaEntries(mediaRecords, structure, options, config, app
|
|
|
1371
1945
|
}
|
|
1372
1946
|
if (action === 'compare') {
|
|
1373
1947
|
// For binary media, show metadata diffs only
|
|
1374
|
-
await inlineDiffAndMerge(record, metaPath,
|
|
1948
|
+
await inlineDiffAndMerge(record, metaPath, configWithTz);
|
|
1375
1949
|
refs.push({ uid: record.UID, metaPath });
|
|
1376
1950
|
continue;
|
|
1377
1951
|
}
|
|
1378
1952
|
} else {
|
|
1379
1953
|
// Server _LastUpdated hasn't changed — check for local modifications
|
|
1380
|
-
const locallyModified = await hasLocalModifications(metaPath,
|
|
1954
|
+
const locallyModified = await hasLocalModifications(metaPath, configWithTz);
|
|
1381
1955
|
if (locallyModified) {
|
|
1382
|
-
const
|
|
1956
|
+
const localDate = localSyncTime; // mtime already fetched above
|
|
1957
|
+
const action = await promptChangeDetection(dedupName, record, configWithTz, {
|
|
1958
|
+
localIsNewer: true,
|
|
1959
|
+
diffable,
|
|
1960
|
+
serverDate,
|
|
1961
|
+
localDate,
|
|
1962
|
+
});
|
|
1383
1963
|
|
|
1384
1964
|
if (action === 'skip') {
|
|
1385
1965
|
log.dim(` Kept local: ${finalFilename}`);
|
|
@@ -1396,7 +1976,7 @@ async function processMediaEntries(mediaRecords, structure, options, config, app
|
|
|
1396
1976
|
mediaBulkAction.value = 'overwrite_all';
|
|
1397
1977
|
}
|
|
1398
1978
|
if (action === 'compare') {
|
|
1399
|
-
await inlineDiffAndMerge(record, metaPath,
|
|
1979
|
+
await inlineDiffAndMerge(record, metaPath, configWithTz, { localIsNewer: true });
|
|
1400
1980
|
refs.push({ uid: record.UID, metaPath });
|
|
1401
1981
|
continue;
|
|
1402
1982
|
}
|
|
@@ -1454,10 +2034,13 @@ async function processMediaEntries(mediaRecords, structure, options, config, app
|
|
|
1454
2034
|
await writeFile(metaPath, JSON.stringify(meta, null, 2) + '\n');
|
|
1455
2035
|
log.dim(` → ${metaPath}`);
|
|
1456
2036
|
|
|
1457
|
-
// Set file timestamps from server dates
|
|
2037
|
+
// Set file timestamps from server dates (independent try-catch so one failure
|
|
2038
|
+
// doesn't prevent the other — metaPath mtime is the sync baseline for comparisons)
|
|
1458
2039
|
if (serverTz && (record._CreatedOn || record._LastUpdated)) {
|
|
1459
2040
|
try {
|
|
1460
2041
|
await setFileTimestamps(filePath, record._CreatedOn, record._LastUpdated, serverTz);
|
|
2042
|
+
} catch { /* non-critical */ }
|
|
2043
|
+
try {
|
|
1461
2044
|
await setFileTimestamps(metaPath, record._CreatedOn, record._LastUpdated, serverTz);
|
|
1462
2045
|
} catch { /* non-critical */ }
|
|
1463
2046
|
}
|
|
@@ -1630,12 +2213,17 @@ async function processRecord(entityName, record, structure, options, usedNames,
|
|
|
1630
2213
|
}
|
|
1631
2214
|
|
|
1632
2215
|
if (bulkAction.value !== 'overwrite_all') {
|
|
1633
|
-
const localSyncTime = await getLocalSyncTime(metaPath);
|
|
1634
2216
|
const config = await loadConfig();
|
|
1635
|
-
const
|
|
2217
|
+
const configWithTz = { ...config, ServerTimezone: serverTz };
|
|
2218
|
+
const localSyncTime = await getLocalSyncTime(metaPath);
|
|
2219
|
+
const serverNewer = isServerNewer(localSyncTime, record._LastUpdated, configWithTz);
|
|
2220
|
+
const serverDate = parseServerDate(record._LastUpdated, serverTz);
|
|
1636
2221
|
|
|
1637
2222
|
if (serverNewer) {
|
|
1638
|
-
const action = await promptChangeDetection(finalName, record,
|
|
2223
|
+
const action = await promptChangeDetection(finalName, record, configWithTz, {
|
|
2224
|
+
serverDate,
|
|
2225
|
+
localDate: localSyncTime,
|
|
2226
|
+
});
|
|
1639
2227
|
|
|
1640
2228
|
if (action === 'skip') {
|
|
1641
2229
|
log.dim(` Skipped ${finalName}.${ext}`);
|
|
@@ -1651,16 +2239,20 @@ async function processRecord(entityName, record, structure, options, usedNames,
|
|
|
1651
2239
|
// Fall through to write
|
|
1652
2240
|
}
|
|
1653
2241
|
if (action === 'compare') {
|
|
1654
|
-
await inlineDiffAndMerge(record, metaPath,
|
|
2242
|
+
await inlineDiffAndMerge(record, metaPath, configWithTz);
|
|
1655
2243
|
return { uid: record.UID, metaPath };
|
|
1656
2244
|
}
|
|
1657
2245
|
// 'overwrite' falls through to normal write
|
|
1658
2246
|
} else {
|
|
1659
2247
|
// Server _LastUpdated hasn't changed since last sync.
|
|
1660
2248
|
// Check if local content files were modified (user edits).
|
|
1661
|
-
const locallyModified = await hasLocalModifications(metaPath,
|
|
2249
|
+
const locallyModified = await hasLocalModifications(metaPath, configWithTz);
|
|
1662
2250
|
if (locallyModified) {
|
|
1663
|
-
const action = await promptChangeDetection(finalName, record,
|
|
2251
|
+
const action = await promptChangeDetection(finalName, record, configWithTz, {
|
|
2252
|
+
localIsNewer: true,
|
|
2253
|
+
serverDate,
|
|
2254
|
+
localDate: localSyncTime,
|
|
2255
|
+
});
|
|
1664
2256
|
|
|
1665
2257
|
if (action === 'skip') {
|
|
1666
2258
|
log.dim(` Kept local: ${finalName}.${ext}`);
|
|
@@ -1675,7 +2267,7 @@ async function processRecord(entityName, record, structure, options, usedNames,
|
|
|
1675
2267
|
bulkAction.value = 'overwrite_all';
|
|
1676
2268
|
}
|
|
1677
2269
|
if (action === 'compare') {
|
|
1678
|
-
await inlineDiffAndMerge(record, metaPath,
|
|
2270
|
+
await inlineDiffAndMerge(record, metaPath, configWithTz, { localIsNewer: true });
|
|
1679
2271
|
return { uid: record.UID, metaPath };
|
|
1680
2272
|
}
|
|
1681
2273
|
// 'overwrite' falls through to normal write
|
|
@@ -2073,6 +2665,7 @@ async function processOutputHierarchy(appJson, structure, options, serverTz) {
|
|
|
2073
2665
|
const refs = [];
|
|
2074
2666
|
const bulkAction = { value: null };
|
|
2075
2667
|
const config = await loadConfig();
|
|
2668
|
+
const configWithTz = { ...config, ServerTimezone: serverTz };
|
|
2076
2669
|
// When --force flag is set, skip change detection and re-process all files
|
|
2077
2670
|
const forceReprocess = !!options.force;
|
|
2078
2671
|
|
|
@@ -2125,21 +2718,22 @@ async function processOutputHierarchy(appJson, structure, options, serverTz) {
|
|
|
2125
2718
|
}
|
|
2126
2719
|
if (bulkAction.value !== 'overwrite_all') {
|
|
2127
2720
|
const localSyncTime = await getLocalSyncTime(rootMetaPath);
|
|
2128
|
-
const serverNewer = isServerNewer(localSyncTime, output._LastUpdated,
|
|
2721
|
+
const serverNewer = isServerNewer(localSyncTime, output._LastUpdated, configWithTz);
|
|
2722
|
+
const serverDate = parseServerDate(output._LastUpdated, serverTz);
|
|
2129
2723
|
if (serverNewer) {
|
|
2130
|
-
const action = await promptChangeDetection(rootBasename, output,
|
|
2724
|
+
const action = await promptChangeDetection(rootBasename, output, configWithTz, { serverDate, localDate: localSyncTime });
|
|
2131
2725
|
if (action === 'skip') { refs.push({ uid: output.UID, metaPath: rootMetaPath }); continue; }
|
|
2132
2726
|
if (action === 'skip_all') { bulkAction.value = 'skip_all'; refs.push({ uid: output.UID, metaPath: rootMetaPath }); continue; }
|
|
2133
2727
|
if (action === 'overwrite_all') { bulkAction.value = 'overwrite_all'; }
|
|
2134
|
-
if (action === 'compare') { await inlineDiffAndMerge(output, rootMetaPath,
|
|
2728
|
+
if (action === 'compare') { await inlineDiffAndMerge(output, rootMetaPath, configWithTz); refs.push({ uid: output.UID, metaPath: rootMetaPath }); continue; }
|
|
2135
2729
|
} else {
|
|
2136
|
-
const locallyModified = await hasLocalModifications(rootMetaPath,
|
|
2730
|
+
const locallyModified = await hasLocalModifications(rootMetaPath, configWithTz);
|
|
2137
2731
|
if (locallyModified) {
|
|
2138
|
-
const action = await promptChangeDetection(rootBasename, output,
|
|
2732
|
+
const action = await promptChangeDetection(rootBasename, output, configWithTz, { localIsNewer: true, serverDate, localDate: localSyncTime });
|
|
2139
2733
|
if (action === 'skip') { refs.push({ uid: output.UID, metaPath: rootMetaPath }); continue; }
|
|
2140
2734
|
if (action === 'skip_all') { bulkAction.value = 'skip_all'; refs.push({ uid: output.UID, metaPath: rootMetaPath }); continue; }
|
|
2141
2735
|
if (action === 'overwrite_all') { bulkAction.value = 'overwrite_all'; }
|
|
2142
|
-
if (action === 'compare') { await inlineDiffAndMerge(output, rootMetaPath,
|
|
2736
|
+
if (action === 'compare') { await inlineDiffAndMerge(output, rootMetaPath, configWithTz, { localIsNewer: true }); refs.push({ uid: output.UID, metaPath: rootMetaPath }); continue; }
|
|
2143
2737
|
} else {
|
|
2144
2738
|
log.dim(` Up to date: ${rootBasename}`);
|
|
2145
2739
|
refs.push({ uid: output.UID, metaPath: rootMetaPath });
|