@dboio/cli 0.7.2 → 0.8.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +153 -11
- package/package.json +3 -2
- package/src/commands/add.js +64 -11
- package/src/commands/clone.js +749 -63
- package/src/commands/init.js +28 -4
- package/src/commands/install.js +10 -1
- package/src/commands/login.js +69 -0
- package/src/commands/push.js +102 -18
- package/src/lib/config.js +101 -0
- package/src/lib/delta.js +14 -1
- package/src/lib/diff.js +71 -15
- package/src/lib/ignore.js +145 -0
- package/src/lib/structure.js +114 -0
- package/src/lib/ticketing.js +6 -3
- package/src/lib/timestamps.js +31 -9
package/src/commands/clone.js
CHANGED
|
@@ -2,11 +2,11 @@ import { Command } from 'commander';
|
|
|
2
2
|
import { readFile, writeFile, mkdir, access } from 'fs/promises';
|
|
3
3
|
import { join, basename, extname } from 'path';
|
|
4
4
|
import { DboClient } from '../lib/client.js';
|
|
5
|
-
import { loadConfig, updateConfigWithApp, loadClonePlacement, saveClonePlacement, ensureGitignore, saveEntityDirPreference, loadEntityDirPreference, saveEntityContentExtractions, loadEntityContentExtractions, saveAppJsonBaseline, addDeleteEntry, loadCollisionResolutions, saveCollisionResolutions, loadSynchronize, saveAppModifyKey, loadTransactionKeyPreset, saveTransactionKeyPreset, loadOutputFilenamePreference, saveOutputFilenamePreference } from '../lib/config.js';
|
|
6
|
-
import { buildBinHierarchy, resolveBinPath, createDirectories, saveStructureFile, getBinName, findBinByPath, BINS_DIR, DEFAULT_PROJECT_DIRS, ENTITY_DIR_MAP, OUTPUT_ENTITY_MAP, OUTPUT_HIERARCHY_ENTITIES } from '../lib/structure.js';
|
|
5
|
+
import { loadConfig, updateConfigWithApp, loadClonePlacement, saveClonePlacement, ensureGitignore, saveEntityDirPreference, loadEntityDirPreference, saveEntityContentExtractions, loadEntityContentExtractions, saveAppJsonBaseline, addDeleteEntry, loadCollisionResolutions, saveCollisionResolutions, loadSynchronize, saveSynchronize, saveAppModifyKey, loadTransactionKeyPreset, saveTransactionKeyPreset, loadOutputFilenamePreference, saveOutputFilenamePreference, saveCloneSource, loadCloneSource, saveDescriptorFilenamePreference, loadDescriptorFilenamePreference, saveDescriptorContentExtractions, loadDescriptorContentExtractions, saveExtensionDocumentationMDPlacement, loadExtensionDocumentationMDPlacement } from '../lib/config.js';
|
|
6
|
+
import { buildBinHierarchy, resolveBinPath, createDirectories, saveStructureFile, getBinName, findBinByPath, BINS_DIR, DEFAULT_PROJECT_DIRS, ENTITY_DIR_MAP, OUTPUT_ENTITY_MAP, OUTPUT_HIERARCHY_ENTITIES, EXTENSION_DESCRIPTORS_DIR, EXTENSION_UNSUPPORTED_DIR, DOCUMENTATION_DIR, buildDescriptorMapping, saveDescriptorMapping, loadDescriptorMapping, resolveExtensionSubDir } from '../lib/structure.js';
|
|
7
7
|
import { log } from '../lib/logger.js';
|
|
8
|
-
import { setFileTimestamps } from '../lib/timestamps.js';
|
|
9
|
-
import { getLocalSyncTime, isServerNewer, hasLocalModifications, promptChangeDetection, inlineDiffAndMerge } from '../lib/diff.js';
|
|
8
|
+
import { setFileTimestamps, parseServerDate } from '../lib/timestamps.js';
|
|
9
|
+
import { getLocalSyncTime, isServerNewer, hasLocalModifications, promptChangeDetection, inlineDiffAndMerge, isDiffable } from '../lib/diff.js';
|
|
10
10
|
import { checkDomainChange } from '../lib/domain-guard.js';
|
|
11
11
|
|
|
12
12
|
/**
|
|
@@ -422,6 +422,8 @@ export const cloneCommand = new Command('clone')
|
|
|
422
422
|
.argument('[source]', 'Local JSON file path (optional)')
|
|
423
423
|
.option('--app <shortName>', 'App short name to fetch from server')
|
|
424
424
|
.option('-e, --entity <type>', 'Only clone a specific entity type (e.g. output, content, media, extension)')
|
|
425
|
+
.option('--documentation-only', 'When used with -e extension, clone only documentation extensions')
|
|
426
|
+
.option('--descriptor-types <bool>', 'Sort extensions into descriptor sub-directories (default: true)', 'true')
|
|
425
427
|
.option('--force', 'Force re-processing of all files, skip change detection')
|
|
426
428
|
.option('--domain <host>', 'Override domain')
|
|
427
429
|
.option('-y, --yes', 'Auto-accept all prompts')
|
|
@@ -436,26 +438,70 @@ export const cloneCommand = new Command('clone')
|
|
|
436
438
|
});
|
|
437
439
|
|
|
438
440
|
/**
|
|
439
|
-
*
|
|
441
|
+
* Resolve app JSON from a given source (file path, URL, or server fetch).
|
|
442
|
+
* Throws on failure so the caller can retry with a different source.
|
|
440
443
|
*/
|
|
441
|
-
|
|
442
|
-
const config = await loadConfig();
|
|
443
|
-
const effectiveDomain = options.domain || config.domain;
|
|
444
|
-
let appJson;
|
|
445
|
-
|
|
446
|
-
// Step 1: Load the app JSON
|
|
444
|
+
async function resolveAppSource(source, options, config) {
|
|
447
445
|
if (source) {
|
|
448
|
-
|
|
446
|
+
if (source.startsWith('http://') || source.startsWith('https://')) {
|
|
447
|
+
const ora = (await import('ora')).default;
|
|
448
|
+
const spinner = ora(`Fetching app JSON from ${source}...`).start();
|
|
449
|
+
let res;
|
|
450
|
+
try {
|
|
451
|
+
res = await fetch(source);
|
|
452
|
+
} catch (err) {
|
|
453
|
+
spinner.fail(`Failed to fetch from ${source}`);
|
|
454
|
+
throw err;
|
|
455
|
+
}
|
|
456
|
+
if (!res.ok) {
|
|
457
|
+
spinner.fail(`HTTP ${res.status} fetching ${source}`);
|
|
458
|
+
throw new Error(`HTTP ${res.status} fetching ${source}`);
|
|
459
|
+
}
|
|
460
|
+
const json = await res.json();
|
|
461
|
+
spinner.succeed('Loaded app JSON');
|
|
462
|
+
return json;
|
|
463
|
+
}
|
|
449
464
|
log.info(`Loading app JSON from ${source}...`);
|
|
450
465
|
const raw = await readFile(source, 'utf8');
|
|
451
|
-
|
|
466
|
+
return JSON.parse(raw);
|
|
452
467
|
} else if (options.app) {
|
|
453
|
-
|
|
454
|
-
appJson = await fetchAppFromServer(options.app, options, config);
|
|
455
|
-
} else if (config.AppShortName) {
|
|
456
|
-
// Use config's AppShortName
|
|
457
|
-
appJson = await fetchAppFromServer(config.AppShortName, options, config);
|
|
468
|
+
return await fetchAppFromServer(options.app, options, config);
|
|
458
469
|
} else {
|
|
470
|
+
// Try stored cloneSource before falling back to server fetch
|
|
471
|
+
const storedSource = await loadCloneSource();
|
|
472
|
+
if (storedSource && storedSource !== 'default') {
|
|
473
|
+
// Stored source is a local file path or URL — reuse it
|
|
474
|
+
if (storedSource.startsWith('http://') || storedSource.startsWith('https://')) {
|
|
475
|
+
const ora = (await import('ora')).default;
|
|
476
|
+
const spinner = ora(`Fetching app JSON from ${storedSource} (stored source)...`).start();
|
|
477
|
+
let res;
|
|
478
|
+
try {
|
|
479
|
+
res = await fetch(storedSource);
|
|
480
|
+
} catch (err) {
|
|
481
|
+
spinner.fail(`Failed to fetch from ${storedSource}`);
|
|
482
|
+
throw err;
|
|
483
|
+
}
|
|
484
|
+
if (!res.ok) {
|
|
485
|
+
spinner.fail(`HTTP ${res.status} fetching ${storedSource}`);
|
|
486
|
+
throw new Error(`HTTP ${res.status} fetching ${storedSource}`);
|
|
487
|
+
}
|
|
488
|
+
const json = await res.json();
|
|
489
|
+
spinner.succeed('Loaded app JSON');
|
|
490
|
+
return json;
|
|
491
|
+
}
|
|
492
|
+
if (await fileExists(storedSource)) {
|
|
493
|
+
log.info(`Loading app JSON from ${storedSource} (stored source)...`);
|
|
494
|
+
const raw = await readFile(storedSource, 'utf8');
|
|
495
|
+
return JSON.parse(raw);
|
|
496
|
+
}
|
|
497
|
+
// Stored file no longer exists — fall through to server fetch
|
|
498
|
+
log.dim(` Stored clone source "${storedSource}" not found, trying server...`);
|
|
499
|
+
}
|
|
500
|
+
|
|
501
|
+
if (config.AppShortName) {
|
|
502
|
+
return await fetchAppFromServer(config.AppShortName, options, config);
|
|
503
|
+
}
|
|
504
|
+
|
|
459
505
|
// Prompt
|
|
460
506
|
const inquirer = (await import('inquirer')).default;
|
|
461
507
|
const { choice } = await inquirer.prompt([{
|
|
@@ -474,7 +520,7 @@ export async function performClone(source, options = {}) {
|
|
|
474
520
|
message: 'App short name:',
|
|
475
521
|
validate: v => v.trim() ? true : 'App short name is required',
|
|
476
522
|
}]);
|
|
477
|
-
|
|
523
|
+
return await fetchAppFromServer(appName, options, config);
|
|
478
524
|
} else {
|
|
479
525
|
const { filePath } = await inquirer.prompt([{
|
|
480
526
|
type: 'input', name: 'filePath',
|
|
@@ -482,7 +528,155 @@ export async function performClone(source, options = {}) {
|
|
|
482
528
|
validate: v => v.trim() ? true : 'File path is required',
|
|
483
529
|
}]);
|
|
484
530
|
const raw = await readFile(filePath, 'utf8');
|
|
485
|
-
|
|
531
|
+
return JSON.parse(raw);
|
|
532
|
+
}
|
|
533
|
+
}
|
|
534
|
+
}
|
|
535
|
+
|
|
536
|
+
/**
|
|
537
|
+
* Check synchronize.json for un-pushed staged items before cloning.
|
|
538
|
+
* Warns the user and offers options: push first, review individually, ignore, or clear.
|
|
539
|
+
*/
|
|
540
|
+
async function checkPendingSynchronize(options) {
|
|
541
|
+
const sync = await loadSynchronize();
|
|
542
|
+
const deleteCount = (sync.delete || []).length;
|
|
543
|
+
const editCount = (sync.edit || []).length;
|
|
544
|
+
const addCount = (sync.add || []).length;
|
|
545
|
+
const totalCount = deleteCount + editCount + addCount;
|
|
546
|
+
|
|
547
|
+
if (totalCount === 0) return;
|
|
548
|
+
|
|
549
|
+
log.warn('');
|
|
550
|
+
log.warn(` ⚠ There are ${totalCount} un-pushed staged item(s) in .dbo/synchronize.json:`);
|
|
551
|
+
if (deleteCount > 0) log.warn(` ${deleteCount} pending deletion(s)`);
|
|
552
|
+
if (editCount > 0) log.warn(` ${editCount} pending edit(s)`);
|
|
553
|
+
if (addCount > 0) log.warn(` ${addCount} pending add(s)`);
|
|
554
|
+
log.warn('');
|
|
555
|
+
|
|
556
|
+
if (options.yes) {
|
|
557
|
+
log.dim(' Ignoring staged items (-y flag)');
|
|
558
|
+
return;
|
|
559
|
+
}
|
|
560
|
+
|
|
561
|
+
const inquirer = (await import('inquirer')).default;
|
|
562
|
+
const { action } = await inquirer.prompt([{
|
|
563
|
+
type: 'list',
|
|
564
|
+
name: 'action',
|
|
565
|
+
message: 'How would you like to handle the staged items?',
|
|
566
|
+
choices: [
|
|
567
|
+
{ name: 'Ignore — keep staged items and continue cloning', value: 'ignore' },
|
|
568
|
+
{ name: 'Review — show each item and decide individually', value: 'review' },
|
|
569
|
+
{ name: 'Clear — discard all staged items and continue cloning', value: 'clear' },
|
|
570
|
+
{ name: 'Abort — stop cloning so you can push first', value: 'abort' },
|
|
571
|
+
],
|
|
572
|
+
}]);
|
|
573
|
+
|
|
574
|
+
if (action === 'abort') {
|
|
575
|
+
log.info('Clone aborted. Run "dbo push" to process staged items first.');
|
|
576
|
+
process.exit(0);
|
|
577
|
+
}
|
|
578
|
+
|
|
579
|
+
if (action === 'clear') {
|
|
580
|
+
await saveSynchronize({ delete: [], edit: [], add: [] });
|
|
581
|
+
log.success('Cleared all staged items from synchronize.json');
|
|
582
|
+
return;
|
|
583
|
+
}
|
|
584
|
+
|
|
585
|
+
if (action === 'review') {
|
|
586
|
+
const kept = { delete: [], edit: [], add: [] };
|
|
587
|
+
|
|
588
|
+
for (const category of ['delete', 'edit', 'add']) {
|
|
589
|
+
const items = sync[category] || [];
|
|
590
|
+
if (items.length === 0) continue;
|
|
591
|
+
|
|
592
|
+
for (const item of items) {
|
|
593
|
+
const label = item.Name || item.UID || item.metaPath || JSON.stringify(item).substring(0, 80);
|
|
594
|
+
const entity = item._entity || item.entity || '';
|
|
595
|
+
const desc = entity ? `[${category}] ${entity}: ${label}` : `[${category}] ${label}`;
|
|
596
|
+
|
|
597
|
+
const { keep } = await inquirer.prompt([{
|
|
598
|
+
type: 'confirm',
|
|
599
|
+
name: 'keep',
|
|
600
|
+
message: `Keep staged? ${desc}`,
|
|
601
|
+
default: true,
|
|
602
|
+
}]);
|
|
603
|
+
|
|
604
|
+
if (keep) {
|
|
605
|
+
kept[category].push(item);
|
|
606
|
+
} else {
|
|
607
|
+
log.dim(` Discarded: ${desc}`);
|
|
608
|
+
}
|
|
609
|
+
}
|
|
610
|
+
}
|
|
611
|
+
|
|
612
|
+
await saveSynchronize(kept);
|
|
613
|
+
const remaining = kept.delete.length + kept.edit.length + kept.add.length;
|
|
614
|
+
if (remaining > 0) {
|
|
615
|
+
log.info(`${remaining} staged item(s) kept`);
|
|
616
|
+
} else {
|
|
617
|
+
log.success('All staged items discarded');
|
|
618
|
+
}
|
|
619
|
+
return;
|
|
620
|
+
}
|
|
621
|
+
|
|
622
|
+
// action === 'ignore' — do nothing, keep staged items
|
|
623
|
+
log.dim(' Keeping staged items');
|
|
624
|
+
}
|
|
625
|
+
|
|
626
|
+
/**
|
|
627
|
+
* Main clone workflow. Exported for use by init --clone.
|
|
628
|
+
*/
|
|
629
|
+
export async function performClone(source, options = {}) {
|
|
630
|
+
const config = await loadConfig();
|
|
631
|
+
const effectiveDomain = options.domain || config.domain;
|
|
632
|
+
let appJson;
|
|
633
|
+
|
|
634
|
+
// Step 1: Source mismatch detection
|
|
635
|
+
// Warn when the user provides an explicit source that differs from the stored one.
|
|
636
|
+
const storedCloneSource = await loadCloneSource();
|
|
637
|
+
if (source && storedCloneSource && source !== storedCloneSource) {
|
|
638
|
+
if (!options.force && !options.yes) {
|
|
639
|
+
log.warn('');
|
|
640
|
+
log.warn(` ⚠ This project was previously cloned from: ${storedCloneSource}`);
|
|
641
|
+
log.warn(` Requested source: ${source}`);
|
|
642
|
+
const inquirer = (await import('inquirer')).default;
|
|
643
|
+
const { confirmed } = await inquirer.prompt([{
|
|
644
|
+
type: 'confirm',
|
|
645
|
+
name: 'confirmed',
|
|
646
|
+
message: 'Clone from the new source anyway? This will update the stored clone source.',
|
|
647
|
+
default: false,
|
|
648
|
+
}]);
|
|
649
|
+
if (!confirmed) {
|
|
650
|
+
log.info('Clone aborted.');
|
|
651
|
+
return;
|
|
652
|
+
}
|
|
653
|
+
} else {
|
|
654
|
+
log.warn(` ⚠ Clone source override: "${storedCloneSource}" → "${source}"`);
|
|
655
|
+
}
|
|
656
|
+
}
|
|
657
|
+
|
|
658
|
+
// Step 2: Load the app JSON — retry loop with fallback prompt on failure
|
|
659
|
+
let activeSource = source;
|
|
660
|
+
while (true) {
|
|
661
|
+
try {
|
|
662
|
+
appJson = await resolveAppSource(activeSource, options, config);
|
|
663
|
+
break;
|
|
664
|
+
} catch (err) {
|
|
665
|
+
if (options.yes || !process.stdin.isTTY) {
|
|
666
|
+
throw err;
|
|
667
|
+
}
|
|
668
|
+
log.warn('');
|
|
669
|
+
log.warn(` ⚠ Source did not return expected results: ${err.message}`);
|
|
670
|
+
const inquirer = (await import('inquirer')).default;
|
|
671
|
+
const { fallback } = await inquirer.prompt([{
|
|
672
|
+
type: 'input',
|
|
673
|
+
name: 'fallback',
|
|
674
|
+
message: 'Enter another local file path or URL to retry (or leave empty to abort):',
|
|
675
|
+
}]);
|
|
676
|
+
if (!fallback || !fallback.trim()) {
|
|
677
|
+
throw new Error('Clone aborted: no valid source provided.');
|
|
678
|
+
}
|
|
679
|
+
activeSource = fallback.trim();
|
|
486
680
|
}
|
|
487
681
|
}
|
|
488
682
|
|
|
@@ -502,6 +696,9 @@ export async function performClone(source, options = {}) {
|
|
|
502
696
|
|
|
503
697
|
log.success(`Cloning "${appJson.Name}" (${appJson.ShortName})`);
|
|
504
698
|
|
|
699
|
+
// Check for un-pushed staged items in synchronize.json
|
|
700
|
+
await checkPendingSynchronize(options);
|
|
701
|
+
|
|
505
702
|
// Ensure sensitive files are gitignored
|
|
506
703
|
await ensureGitignore(['.dbo/credentials.json', '.dbo/cookies.txt']);
|
|
507
704
|
|
|
@@ -512,6 +709,7 @@ export async function performClone(source, options = {}) {
|
|
|
512
709
|
AppName: appJson.Name,
|
|
513
710
|
AppShortName: appJson.ShortName,
|
|
514
711
|
});
|
|
712
|
+
await saveCloneSource(activeSource || 'default');
|
|
515
713
|
log.dim(' Updated .dbo/config.json with app metadata');
|
|
516
714
|
|
|
517
715
|
// Detect and store ModifyKey for locked/production apps
|
|
@@ -637,7 +835,13 @@ export async function performClone(source, options = {}) {
|
|
|
637
835
|
// Skip if entity filter is active and this entity doesn't match
|
|
638
836
|
if (entityFilter && !entityFilter.has(entityName)) continue;
|
|
639
837
|
|
|
640
|
-
if (
|
|
838
|
+
if (entityName === 'extension') {
|
|
839
|
+
// Extension entities: descriptor-organized sub-directories
|
|
840
|
+
const refs = await processExtensionEntries(entries, structure, options, serverTz);
|
|
841
|
+
if (refs.length > 0) {
|
|
842
|
+
otherRefs[entityName] = refs;
|
|
843
|
+
}
|
|
844
|
+
} else if (ENTITY_DIR_MAP[entityName]) {
|
|
641
845
|
// Entity types with project directories — process into their directory
|
|
642
846
|
const refs = await processEntityDirEntries(entityName, entries, options, serverTz);
|
|
643
847
|
if (refs.length > 0) {
|
|
@@ -756,16 +960,23 @@ async function resolvePlacementPreferences(appJson, options) {
|
|
|
756
960
|
const answers = await inquirer.prompt(prompts);
|
|
757
961
|
contentPlacement = contentPlacement || answers.contentPlacement || 'bin';
|
|
758
962
|
mediaPlacement = mediaPlacement || answers.mediaPlacement || 'bin';
|
|
963
|
+
}
|
|
964
|
+
|
|
965
|
+
// Resolve defaults for any still-unset values
|
|
966
|
+
contentPlacement = contentPlacement || 'bin';
|
|
967
|
+
mediaPlacement = mediaPlacement || 'bin';
|
|
759
968
|
|
|
760
|
-
|
|
761
|
-
|
|
762
|
-
|
|
969
|
+
// Always persist resolved values — not just when prompts were shown.
|
|
970
|
+
// This ensures defaults are saved even when the app has no content/media yet,
|
|
971
|
+
// so subsequent clones that do have records skip the prompts.
|
|
972
|
+
if (!saved.contentPlacement || !saved.mediaPlacement) {
|
|
973
|
+
await saveClonePlacement({ contentPlacement, mediaPlacement });
|
|
974
|
+
if (prompts.length > 0) {
|
|
975
|
+
log.dim(' Saved placement preferences to .dbo/config.json');
|
|
976
|
+
}
|
|
763
977
|
}
|
|
764
978
|
|
|
765
|
-
return {
|
|
766
|
-
contentPlacement: contentPlacement || 'bin',
|
|
767
|
-
mediaPlacement: mediaPlacement || 'bin',
|
|
768
|
-
};
|
|
979
|
+
return { contentPlacement, mediaPlacement };
|
|
769
980
|
}
|
|
770
981
|
|
|
771
982
|
/**
|
|
@@ -773,22 +984,45 @@ async function resolvePlacementPreferences(appJson, options) {
|
|
|
773
984
|
*/
|
|
774
985
|
async function fetchAppFromServer(appShortName, options, config) {
|
|
775
986
|
const client = new DboClient({ domain: options.domain, verbose: options.verbose });
|
|
776
|
-
log.info(`Fetching app "${appShortName}" from server...`);
|
|
777
987
|
|
|
778
|
-
const
|
|
779
|
-
|
|
780
|
-
|
|
781
|
-
|
|
988
|
+
const ora = (await import('ora')).default;
|
|
989
|
+
const spinner = ora(`Fetching app "${appShortName}" from server...`).start();
|
|
990
|
+
|
|
991
|
+
let result;
|
|
992
|
+
try {
|
|
993
|
+
result = await client.get(`/api/app/object/${appShortName}`);
|
|
994
|
+
} catch (err) {
|
|
995
|
+
spinner.fail(`Failed to fetch app "${appShortName}"`);
|
|
996
|
+
throw err;
|
|
997
|
+
}
|
|
782
998
|
|
|
783
999
|
const data = result.payload || result.data;
|
|
784
|
-
const rows = Array.isArray(data) ? data : (data?.Rows || data?.rows || []);
|
|
785
1000
|
|
|
786
|
-
|
|
1001
|
+
// Handle all response shapes:
|
|
1002
|
+
// 1. Array of rows: [{ UID, ShortName, ... }]
|
|
1003
|
+
// 2. Object with Rows key: { Rows: [...] }
|
|
1004
|
+
// 3. Single app object: { UID, ShortName, children, ... }
|
|
1005
|
+
let appRecord;
|
|
1006
|
+
if (Array.isArray(data)) {
|
|
1007
|
+
appRecord = data.length > 0 ? data[0] : null;
|
|
1008
|
+
} else if (data?.Rows?.length > 0) {
|
|
1009
|
+
appRecord = data.Rows[0];
|
|
1010
|
+
} else if (data?.rows?.length > 0) {
|
|
1011
|
+
appRecord = data.rows[0];
|
|
1012
|
+
} else if (data && typeof data === 'object' && (data.UID || data.ShortName)) {
|
|
1013
|
+
// Single app object returned directly as payload
|
|
1014
|
+
appRecord = data;
|
|
1015
|
+
} else {
|
|
1016
|
+
appRecord = null;
|
|
1017
|
+
}
|
|
1018
|
+
|
|
1019
|
+
if (!appRecord) {
|
|
1020
|
+
spinner.fail(`No app found with ShortName "${appShortName}"`);
|
|
787
1021
|
throw new Error(`No app found with ShortName "${appShortName}"`);
|
|
788
1022
|
}
|
|
789
1023
|
|
|
790
|
-
|
|
791
|
-
return
|
|
1024
|
+
spinner.succeed(`Found app on server`);
|
|
1025
|
+
return appRecord;
|
|
792
1026
|
}
|
|
793
1027
|
|
|
794
1028
|
/**
|
|
@@ -1082,11 +1316,16 @@ async function processEntityDirEntries(entityName, entries, options, serverTz) {
|
|
|
1082
1316
|
}
|
|
1083
1317
|
|
|
1084
1318
|
if (bulkAction.value !== 'overwrite_all') {
|
|
1319
|
+
const configWithTz = { ...config, ServerTimezone: serverTz };
|
|
1085
1320
|
const localSyncTime = await getLocalSyncTime(metaPath);
|
|
1086
|
-
const serverNewer = isServerNewer(localSyncTime, record._LastUpdated,
|
|
1321
|
+
const serverNewer = isServerNewer(localSyncTime, record._LastUpdated, configWithTz);
|
|
1322
|
+
const serverDate = parseServerDate(record._LastUpdated, serverTz);
|
|
1087
1323
|
|
|
1088
1324
|
if (serverNewer) {
|
|
1089
|
-
const action = await promptChangeDetection(finalName, record,
|
|
1325
|
+
const action = await promptChangeDetection(finalName, record, configWithTz, {
|
|
1326
|
+
serverDate,
|
|
1327
|
+
localDate: localSyncTime,
|
|
1328
|
+
});
|
|
1090
1329
|
|
|
1091
1330
|
if (action === 'skip') {
|
|
1092
1331
|
log.dim(` Skipped ${finalName}`);
|
|
@@ -1103,14 +1342,18 @@ async function processEntityDirEntries(entityName, entries, options, serverTz) {
|
|
|
1103
1342
|
bulkAction.value = 'overwrite_all';
|
|
1104
1343
|
}
|
|
1105
1344
|
if (action === 'compare') {
|
|
1106
|
-
await inlineDiffAndMerge(record, metaPath,
|
|
1345
|
+
await inlineDiffAndMerge(record, metaPath, configWithTz);
|
|
1107
1346
|
refs.push({ uid: record.UID, metaPath });
|
|
1108
1347
|
continue;
|
|
1109
1348
|
}
|
|
1110
1349
|
} else {
|
|
1111
|
-
const locallyModified = await hasLocalModifications(metaPath,
|
|
1350
|
+
const locallyModified = await hasLocalModifications(metaPath, configWithTz);
|
|
1112
1351
|
if (locallyModified) {
|
|
1113
|
-
const action = await promptChangeDetection(finalName, record,
|
|
1352
|
+
const action = await promptChangeDetection(finalName, record, configWithTz, {
|
|
1353
|
+
localIsNewer: true,
|
|
1354
|
+
serverDate,
|
|
1355
|
+
localDate: localSyncTime,
|
|
1356
|
+
});
|
|
1114
1357
|
|
|
1115
1358
|
if (action === 'skip') {
|
|
1116
1359
|
log.dim(` Kept local: ${finalName}`);
|
|
@@ -1127,7 +1370,7 @@ async function processEntityDirEntries(entityName, entries, options, serverTz) {
|
|
|
1127
1370
|
bulkAction.value = 'overwrite_all';
|
|
1128
1371
|
}
|
|
1129
1372
|
if (action === 'compare') {
|
|
1130
|
-
await inlineDiffAndMerge(record, metaPath,
|
|
1373
|
+
await inlineDiffAndMerge(record, metaPath, configWithTz, { localIsNewer: true });
|
|
1131
1374
|
refs.push({ uid: record.UID, metaPath });
|
|
1132
1375
|
continue;
|
|
1133
1376
|
}
|
|
@@ -1199,6 +1442,378 @@ async function processEntityDirEntries(entityName, entries, options, serverTz) {
|
|
|
1199
1442
|
return refs;
|
|
1200
1443
|
}
|
|
1201
1444
|
|
|
1445
|
+
// ─── Extension Descriptor Sub-directory Processing ────────────────────────
|
|
1446
|
+
|
|
1447
|
+
/**
|
|
1448
|
+
* Scan extension records for descriptor_definition entries.
|
|
1449
|
+
* Builds the mapping, persists to structure.json, always creates Extensions/Unsupported/,
|
|
1450
|
+
* and creates sub-directories for every mapped descriptor.
|
|
1451
|
+
* Returns the mapping object.
|
|
1452
|
+
*
|
|
1453
|
+
* @param {Object[]} extensionEntries
|
|
1454
|
+
* @param {Object} structure - Current bin structure (from loadStructureFile)
|
|
1455
|
+
* @returns {Promise<Object<string,string>>}
|
|
1456
|
+
*/
|
|
1457
|
+
async function buildDescriptorPrePass(extensionEntries, structure) {
|
|
1458
|
+
const { mapping, warnings } = buildDescriptorMapping(extensionEntries);
|
|
1459
|
+
|
|
1460
|
+
for (const w of warnings) log.warn(` descriptor_definition: ${w}`);
|
|
1461
|
+
|
|
1462
|
+
// Always create Unsupported/ — even if empty, users see at a glance what couldn't be mapped
|
|
1463
|
+
await mkdir(EXTENSION_UNSUPPORTED_DIR, { recursive: true });
|
|
1464
|
+
log.dim(` ${EXTENSION_UNSUPPORTED_DIR}/`);
|
|
1465
|
+
|
|
1466
|
+
// Create one sub-directory per mapped descriptor name
|
|
1467
|
+
for (const dirName of new Set(Object.values(mapping))) {
|
|
1468
|
+
const fullDir = `${EXTENSION_DESCRIPTORS_DIR}/${dirName}`;
|
|
1469
|
+
await mkdir(fullDir, { recursive: true });
|
|
1470
|
+
log.dim(` ${fullDir}/`);
|
|
1471
|
+
}
|
|
1472
|
+
|
|
1473
|
+
await saveDescriptorMapping(structure, mapping);
|
|
1474
|
+
log.dim(` Saved descriptorMapping to .dbo/structure.json`);
|
|
1475
|
+
|
|
1476
|
+
return mapping;
|
|
1477
|
+
}
|
|
1478
|
+
|
|
1479
|
+
/**
|
|
1480
|
+
* Resolve filename column and content extraction preferences for one descriptor.
|
|
1481
|
+
* Prompts the user on first use; saves to config.json; respects -y and --force.
|
|
1482
|
+
*
|
|
1483
|
+
* @param {string} descriptor - e.g. "documentation", "include", "control"
|
|
1484
|
+
* @param {Object[]} records - All extension records sharing this descriptor
|
|
1485
|
+
* @param {Object} options - CLI options (options.yes, options.force)
|
|
1486
|
+
* @returns {Promise<{ filenameCol: string, contentColsToExtract: Array<{col,ext}> }>}
|
|
1487
|
+
*/
|
|
1488
|
+
async function resolveDescriptorPreferences(descriptor, records, options) {
|
|
1489
|
+
const sampleRecord = records[0];
|
|
1490
|
+
const columns = Object.keys(sampleRecord)
|
|
1491
|
+
.filter(k => k !== 'children' && !k.startsWith('_'));
|
|
1492
|
+
|
|
1493
|
+
// ── Filename column ──────────────────────────────────────────────────
|
|
1494
|
+
let filenameCol;
|
|
1495
|
+
const savedCol = await loadDescriptorFilenamePreference(descriptor);
|
|
1496
|
+
|
|
1497
|
+
if (options.yes) {
|
|
1498
|
+
filenameCol = columns.includes('Name') ? 'Name'
|
|
1499
|
+
: columns.includes('UID') ? 'UID' : columns[0];
|
|
1500
|
+
} else if (savedCol && !options.force) {
|
|
1501
|
+
filenameCol = savedCol;
|
|
1502
|
+
log.dim(` Filename column for "${descriptor}": "${filenameCol}" (saved)`);
|
|
1503
|
+
} else {
|
|
1504
|
+
const inquirer = (await import('inquirer')).default;
|
|
1505
|
+
const defaultCol = columns.includes('Name') ? 'Name'
|
|
1506
|
+
: columns.includes('UID') ? 'UID' : columns[0];
|
|
1507
|
+
const { col } = await inquirer.prompt([{
|
|
1508
|
+
type: 'list', name: 'col',
|
|
1509
|
+
message: `Filename column for "${descriptor}" extensions:`,
|
|
1510
|
+
choices: columns, default: defaultCol,
|
|
1511
|
+
}]);
|
|
1512
|
+
filenameCol = col;
|
|
1513
|
+
await saveDescriptorFilenamePreference(descriptor, filenameCol);
|
|
1514
|
+
log.dim(` Saved filename column for "${descriptor}": "${filenameCol}"`);
|
|
1515
|
+
}
|
|
1516
|
+
|
|
1517
|
+
// ── Content extraction ───────────────────────────────────────────────
|
|
1518
|
+
const contentColsToExtract = [];
|
|
1519
|
+
if (!options.yes) {
|
|
1520
|
+
const base64Cols = [];
|
|
1521
|
+
for (const record of records) {
|
|
1522
|
+
for (const [key, value] of Object.entries(record)) {
|
|
1523
|
+
if (key === 'children' || key.startsWith('_')) continue;
|
|
1524
|
+
if (value && typeof value === 'object' && !Array.isArray(value)
|
|
1525
|
+
&& value.encoding === 'base64' && value.value !== null) {
|
|
1526
|
+
if (!base64Cols.find(c => c.col === key)) {
|
|
1527
|
+
let snippet = '';
|
|
1528
|
+
try {
|
|
1529
|
+
const decoded = Buffer.from(value.value, 'base64').toString('utf8');
|
|
1530
|
+
snippet = decoded.substring(0, 80)
|
|
1531
|
+
.replace(/[\x00-\x1f\x7f]/g, ' ') // strip control chars (incl. \n, \r, \t, ESC)
|
|
1532
|
+
.replace(/\s+/g, ' ') // collapse whitespace
|
|
1533
|
+
.trim();
|
|
1534
|
+
if (decoded.length > 80) snippet += '…';
|
|
1535
|
+
} catch {}
|
|
1536
|
+
base64Cols.push({ col: key, snippet });
|
|
1537
|
+
}
|
|
1538
|
+
}
|
|
1539
|
+
}
|
|
1540
|
+
}
|
|
1541
|
+
|
|
1542
|
+
if (base64Cols.length > 0) {
|
|
1543
|
+
const savedExtractions = options.force
|
|
1544
|
+
? null
|
|
1545
|
+
: await loadDescriptorContentExtractions(descriptor);
|
|
1546
|
+
const newPreferences = savedExtractions ? { ...savedExtractions } : {};
|
|
1547
|
+
let changed = false;
|
|
1548
|
+
const inquirer = (await import('inquirer')).default;
|
|
1549
|
+
|
|
1550
|
+
for (const { col, snippet } of base64Cols) {
|
|
1551
|
+
if (savedExtractions) {
|
|
1552
|
+
const saved = savedExtractions[col];
|
|
1553
|
+
if (saved === false) { log.dim(` Skip "${col}" for "${descriptor}" (saved)`); continue; }
|
|
1554
|
+
if (typeof saved === 'string') {
|
|
1555
|
+
log.dim(` Extracting "${col}" for "${descriptor}" as .${saved} (saved)`);
|
|
1556
|
+
contentColsToExtract.push({ col, ext: saved });
|
|
1557
|
+
continue;
|
|
1558
|
+
}
|
|
1559
|
+
}
|
|
1560
|
+
const preview = snippet ? ` ("${snippet}")` : '';
|
|
1561
|
+
const { extract } = await inquirer.prompt([{
|
|
1562
|
+
type: 'confirm', name: 'extract',
|
|
1563
|
+
message: `Extract column "${col}" (${descriptor}) as companion file?${preview}`,
|
|
1564
|
+
default: true,
|
|
1565
|
+
}]);
|
|
1566
|
+
if (extract) {
|
|
1567
|
+
const guessed = guessExtensionForDescriptor(descriptor, col);
|
|
1568
|
+
const { ext } = await inquirer.prompt([{
|
|
1569
|
+
type: 'input', name: 'ext',
|
|
1570
|
+
message: `File extension for "${col}" (${descriptor}):`,
|
|
1571
|
+
default: guessed,
|
|
1572
|
+
}]);
|
|
1573
|
+
const cleanExt = ext.replace(/^\./, '');
|
|
1574
|
+
contentColsToExtract.push({ col, ext: cleanExt });
|
|
1575
|
+
newPreferences[col] = cleanExt;
|
|
1576
|
+
} else {
|
|
1577
|
+
newPreferences[col] = false;
|
|
1578
|
+
}
|
|
1579
|
+
changed = true;
|
|
1580
|
+
}
|
|
1581
|
+
if (changed) await saveDescriptorContentExtractions(descriptor, newPreferences);
|
|
1582
|
+
}
|
|
1583
|
+
}
|
|
1584
|
+
|
|
1585
|
+
return { filenameCol, contentColsToExtract };
|
|
1586
|
+
}
|
|
1587
|
+
|
|
1588
|
+
/**
|
|
1589
|
+
* Guess a file extension based on descriptor name, with column-name fallback.
|
|
1590
|
+
*/
|
|
1591
|
+
function guessExtensionForDescriptor(descriptor, columnName) {
|
|
1592
|
+
if (descriptor === 'documentation') return 'md';
|
|
1593
|
+
if (descriptor === 'include') return 'html';
|
|
1594
|
+
if (descriptor === 'control') return 'js';
|
|
1595
|
+
return guessExtensionForColumn(columnName);
|
|
1596
|
+
}
|
|
1597
|
+
|
|
1598
|
+
/**
|
|
1599
|
+
* Resolve placement for documentation descriptor MD companion files.
|
|
1600
|
+
* Prompts on first use; persists to config.json; respects -y and --force.
|
|
1601
|
+
* @returns {Promise<'inline'|'root'>}
|
|
1602
|
+
*/
|
|
1603
|
+
async function resolveDocumentationPlacement(options) {
|
|
1604
|
+
if (options.yes) return 'inline';
|
|
1605
|
+
|
|
1606
|
+
const saved = await loadExtensionDocumentationMDPlacement();
|
|
1607
|
+
if (saved && !options.force) {
|
|
1608
|
+
log.dim(` Documentation MD placement: ${saved} (saved)`);
|
|
1609
|
+
return saved;
|
|
1610
|
+
}
|
|
1611
|
+
|
|
1612
|
+
const inquirer = (await import('inquirer')).default;
|
|
1613
|
+
const { placement } = await inquirer.prompt([{
|
|
1614
|
+
type: 'list', name: 'placement',
|
|
1615
|
+
message: 'Where should extracted documentation MD files be placed?',
|
|
1616
|
+
choices: [
|
|
1617
|
+
{ name: '/Documentation/<filename>.md — project root (recommended)', value: 'root' },
|
|
1618
|
+
{ name: 'Extensions/Documentation/<filename>.md — inline alongside metadata', value: 'inline' },
|
|
1619
|
+
],
|
|
1620
|
+
default: 'root',
|
|
1621
|
+
}]);
|
|
1622
|
+
|
|
1623
|
+
await saveExtensionDocumentationMDPlacement(placement);
|
|
1624
|
+
log.dim(` Saved documentation MD placement: "${placement}"`);
|
|
1625
|
+
return placement;
|
|
1626
|
+
}
|
|
1627
|
+
|
|
1628
|
+
/**
 * Process extension entity records into descriptor-organized sub-directories.
 *
 * Pipeline: (A) pre-pass builds the descriptor→directory mapping and creates
 * directories; (B) records are grouped by their Descriptor column; (C) filename
 * and content-extraction preferences are resolved per descriptor (any prompts
 * fire here, before a single file is written); (D) files are written group by
 * group with per-record change detection.
 *
 * @param {Object[]} entries - All extension records from appJson.children.extension
 * @param {Object} structure - Bin structure from loadStructureFile()
 * @param {Object} options - CLI options (options.yes, options.force, options.entity)
 * @param {string} serverTz - Server timezone for timestamp syncing
 * @returns {Promise<Array<{uid, metaPath}>>}
 */
async function processExtensionEntries(entries, structure, options, serverTz) {
  if (!entries || entries.length === 0) return [];

  // --descriptor-types false → skip descriptor sorting, use flat Extensions/ via generic path
  if (options.descriptorTypes === 'false') {
    log.info(`Processing ${entries.length} extension record(s) (flat mode)...`);
    return await processEntityDirEntries('extension', entries, options, serverTz);
  }

  log.info(`Processing ${entries.length} extension record(s)...`);

  // Step A: Pre-pass — build mapping + create directories
  const mapping = await buildDescriptorPrePass(entries, structure);

  // Clear documentation preferences when --force is used with --documentation-only
  // so the prompts below fire again instead of reusing saved answers.
  if (options.documentationOnly && options.force) {
    await saveDescriptorFilenamePreference('documentation', null);
    await saveDescriptorContentExtractions('documentation', null);
    await saveExtensionDocumentationMDPlacement(null);
    log.info(' --force: cleared saved documentation preferences');
  }

  // Step B: Group records by descriptor
  const groups = new Map(); // descriptor → { dir, records[] }
  for (const record of entries) {
    // Records with no Descriptor are bucketed under a sentinel key and get
    // no filename/extraction preferences (see Step C).
    const descriptor = record.Descriptor || '__unsupported__';

    // --documentation-only: skip non-documentation records
    if (options.documentationOnly && descriptor !== 'documentation') continue;

    if (!groups.has(descriptor)) {
      // NOTE: the group directory is resolved from the FIRST record seen for
      // this descriptor — assumes all records of a descriptor map to one dir.
      groups.set(descriptor, { dir: resolveExtensionSubDir(record, mapping), records: [] });
    }
    groups.get(descriptor).records.push(record);
  }

  // Step C: Resolve preferences per descriptor (prompts fire here, before file writes)
  const descriptorPrefs = new Map();
  let docPlacement = 'inline';

  for (const [descriptor, { records }] of groups.entries()) {
    if (descriptor === '__unsupported__') {
      // Sentinel bucket: no filename column, nothing extracted to companion files.
      descriptorPrefs.set(descriptor, { filenameCol: null, contentColsToExtract: [] });
      continue;
    }
    const prefs = await resolveDescriptorPreferences(descriptor, records, options);
    descriptorPrefs.set(descriptor, prefs);

    if (descriptor === 'documentation') {
      // Documentation MD companions may live at the project root instead of
      // inline next to the metadata; ensure the root dir exists up front.
      docPlacement = await resolveDocumentationPlacement(options);
      if (docPlacement === 'root') {
        await mkdir(DOCUMENTATION_DIR, { recursive: true });
        log.dim(` ${DOCUMENTATION_DIR}/`);
      }
    }
  }

  // Step D: Write files, one group at a time
  const refs = [];
  // bulkAction carries 'skip_all' / 'overwrite_all' across ALL groups once the
  // user picks a bulk answer in a change-detection prompt.
  const bulkAction = { value: null };
  const config = await loadConfig();

  for (const [descriptor, { dir, records }] of groups.entries()) {
    const { filenameCol, contentColsToExtract } = descriptorPrefs.get(descriptor);
    const useRootDoc = (descriptor === 'documentation' && docPlacement === 'root');
    // The single md-extension extraction column (if any) is the one redirected
    // to the root Documentation/ dir; all other columns stay inline.
    const mdColInfo = useRootDoc ? contentColsToExtract.find(c => c.ext === 'md') : null;

    log.info(`Processing ${records.length} "${descriptor}" extension(s) → ${dir}/`);

    for (const record of records) {
      // Resolve filename: preferred column → Name → UID fallback.
      let name;
      if (filenameCol && record[filenameCol] != null) {
        name = sanitizeFilename(String(record[filenameCol]));
      } else if (record.Name) {
        name = sanitizeFilename(String(record.Name));
      } else {
        name = sanitizeFilename(String(record.UID || 'untitled'));
      }

      const uid = record.UID || 'untitled';
      // Append the UID to keep filenames unique unless the name IS the UID.
      const finalName = name === uid ? uid : `${name}.${uid}`;
      const metaPath = join(dir, `${finalName}.metadata.json`);

      // Change detection — same pattern as processEntityDirEntries()
      // NOTE(review): hasNewExtractions is true whenever ANY extraction columns
      // are configured for this descriptor, which bypasses change detection
      // entirely (existing files are overwritten without prompting) — confirm
      // this is intended on re-runs with saved extraction preferences.
      const hasNewExtractions = contentColsToExtract.length > 0;
      if (await fileExists(metaPath) && !options.yes && !hasNewExtractions) {
        if (bulkAction.value === 'skip_all') {
          log.dim(` Skipped ${finalName}`);
          refs.push({ uid: record.UID, metaPath });
          continue;
        }
        if (bulkAction.value !== 'overwrite_all') {
          // Pin the server timezone explicitly so timestamp comparisons match
          // how setFileTimestamps wrote the mtimes.
          const cfgWithTz = { ...config, ServerTimezone: serverTz };
          const localSyncTime = await getLocalSyncTime(metaPath);
          const serverNewer = isServerNewer(localSyncTime, record._LastUpdated, cfgWithTz);
          const serverDate = parseServerDate(record._LastUpdated, serverTz);

          if (serverNewer) {
            // Server changed since last sync: ask skip / overwrite / compare,
            // with bulk variants applying to all remaining records.
            const action = await promptChangeDetection(finalName, record, cfgWithTz, { serverDate, localDate: localSyncTime });
            if (action === 'skip') { refs.push({ uid: record.UID, metaPath }); continue; }
            if (action === 'skip_all') { bulkAction.value = 'skip_all'; refs.push({ uid: record.UID, metaPath }); continue; }
            if (action === 'overwrite_all') { bulkAction.value = 'overwrite_all'; }
            if (action === 'compare') { await inlineDiffAndMerge(record, metaPath, cfgWithTz); refs.push({ uid: record.UID, metaPath }); continue; }
            // 'overwrite' (and 'overwrite_all') fall through to the write below.
          } else {
            // Server unchanged — check whether the user edited local files.
            const localModified = await hasLocalModifications(metaPath, cfgWithTz);
            if (localModified) {
              const action = await promptChangeDetection(finalName, record, cfgWithTz, { localIsNewer: true, serverDate, localDate: localSyncTime });
              if (action === 'skip') { refs.push({ uid: record.UID, metaPath }); continue; }
              if (action === 'skip_all') { bulkAction.value = 'skip_all'; refs.push({ uid: record.UID, metaPath }); continue; }
              if (action === 'overwrite_all') { bulkAction.value = 'overwrite_all'; }
              if (action === 'compare') { await inlineDiffAndMerge(record, metaPath, cfgWithTz, { localIsNewer: true }); refs.push({ uid: record.UID, metaPath }); continue; }
            } else {
              // Neither side changed: record the ref but write nothing.
              log.dim(` Up to date: ${finalName}`);
              refs.push({ uid: record.UID, metaPath });
              continue;
            }
          }
        }
      }

      // Build metadata object: every record column except 'children', with
      // extracted columns replaced by @-references to companion files.
      const meta = {};
      const extractedCols = [];

      for (const [key, value] of Object.entries(record)) {
        if (key === 'children') continue;

        const extractInfo = contentColsToExtract.find(c => c.col === key);
        if (extractInfo && value && typeof value === 'object'
            && value.encoding === 'base64' && value.value !== null) {
          const decoded = resolveContentValue(value);
          if (decoded) {
            let colFilePath, refValue;

            if (mdColInfo && extractInfo.col === mdColInfo.col) {
              // Root placement: Documentation/<name>.md
              // NOTE(review): uses `name` (no UID suffix) — two records sharing
              // a name would collide in Documentation/; confirm acceptable.
              // The `@/` prefix appears to mark a project-root-relative ref,
              // vs `@` for a sibling file — verify against the push reader.
              const docFileName = `${name}.md`;
              colFilePath = join(DOCUMENTATION_DIR, docFileName);
              refValue = `@/${DOCUMENTATION_DIR}/${docFileName}`;
            } else {
              const colFileName = `${finalName}.${key}.${extractInfo.ext}`;
              colFilePath = join(dir, colFileName);
              refValue = `@${colFileName}`;
            }

            meta[key] = refValue;
            await writeFile(colFilePath, decoded);
            extractedCols.push(key);
            if (serverTz) {
              // Best effort: a timestamp failure must not abort the clone.
              try { await setFileTimestamps(colFilePath, record._CreatedOn, record._LastUpdated, serverTz); } catch {}
            }
            log.dim(` → ${colFilePath}`);
            continue;
          }
        }

        // Inline or non-extraction columns: decode base64 payloads in place,
        // copy everything else verbatim.
        if (value && typeof value === 'object' && !Array.isArray(value) && value.encoding === 'base64') {
          meta[key] = resolveContentValue(value);
        } else {
          meta[key] = value;
        }
      }

      // Bookkeeping fields consumed by push/diff tooling.
      meta._entity = 'extension';
      if (extractedCols.length > 0) meta._contentColumns = extractedCols;

      await writeFile(metaPath, JSON.stringify(meta, null, 2) + '\n');
      if (serverTz) {
        // metaPath mtime is the sync baseline for later comparisons; best effort.
        try { await setFileTimestamps(metaPath, record._CreatedOn, record._LastUpdated, serverTz); } catch {}
      }
      log.success(`Saved ${metaPath}`);
      refs.push({ uid: record.UID, metaPath });
    }
  }

  return refs;
}
|
|
1816
|
+
|
|
1202
1817
|
/**
|
|
1203
1818
|
* Process media entries: download binary files from server + create metadata.
|
|
1204
1819
|
* Media uses Filename (not Name) and files are fetched via /api/media/{uid}.
|
|
@@ -1349,11 +1964,20 @@ async function processMediaEntries(mediaRecords, structure, options, config, app
|
|
|
1349
1964
|
}
|
|
1350
1965
|
|
|
1351
1966
|
if (mediaBulkAction.value !== 'overwrite_all') {
|
|
1967
|
+
// Use explicit serverTz (not config.ServerTimezone) to ensure consistency
|
|
1968
|
+
// with how setFileTimestamps set the mtime — the two must use the same timezone.
|
|
1969
|
+
const configWithTz = { ...config, ServerTimezone: serverTz };
|
|
1352
1970
|
const localSyncTime = await getLocalSyncTime(metaPath);
|
|
1353
|
-
const serverNewer = isServerNewer(localSyncTime, record._LastUpdated,
|
|
1971
|
+
const serverNewer = isServerNewer(localSyncTime, record._LastUpdated, configWithTz);
|
|
1972
|
+
const serverDate = parseServerDate(record._LastUpdated, serverTz);
|
|
1973
|
+
const diffable = isDiffable(ext);
|
|
1354
1974
|
|
|
1355
1975
|
if (serverNewer) {
|
|
1356
|
-
const action = await promptChangeDetection(dedupName, record,
|
|
1976
|
+
const action = await promptChangeDetection(dedupName, record, configWithTz, {
|
|
1977
|
+
diffable,
|
|
1978
|
+
serverDate,
|
|
1979
|
+
localDate: localSyncTime,
|
|
1980
|
+
});
|
|
1357
1981
|
|
|
1358
1982
|
if (action === 'skip') {
|
|
1359
1983
|
log.dim(` Skipped ${finalFilename}`);
|
|
@@ -1371,15 +1995,21 @@ async function processMediaEntries(mediaRecords, structure, options, config, app
|
|
|
1371
1995
|
}
|
|
1372
1996
|
if (action === 'compare') {
|
|
1373
1997
|
// For binary media, show metadata diffs only
|
|
1374
|
-
await inlineDiffAndMerge(record, metaPath,
|
|
1998
|
+
await inlineDiffAndMerge(record, metaPath, configWithTz);
|
|
1375
1999
|
refs.push({ uid: record.UID, metaPath });
|
|
1376
2000
|
continue;
|
|
1377
2001
|
}
|
|
1378
2002
|
} else {
|
|
1379
2003
|
// Server _LastUpdated hasn't changed — check for local modifications
|
|
1380
|
-
const locallyModified = await hasLocalModifications(metaPath,
|
|
2004
|
+
const locallyModified = await hasLocalModifications(metaPath, configWithTz);
|
|
1381
2005
|
if (locallyModified) {
|
|
1382
|
-
const
|
|
2006
|
+
const localDate = localSyncTime; // mtime already fetched above
|
|
2007
|
+
const action = await promptChangeDetection(dedupName, record, configWithTz, {
|
|
2008
|
+
localIsNewer: true,
|
|
2009
|
+
diffable,
|
|
2010
|
+
serverDate,
|
|
2011
|
+
localDate,
|
|
2012
|
+
});
|
|
1383
2013
|
|
|
1384
2014
|
if (action === 'skip') {
|
|
1385
2015
|
log.dim(` Kept local: ${finalFilename}`);
|
|
@@ -1396,7 +2026,7 @@ async function processMediaEntries(mediaRecords, structure, options, config, app
|
|
|
1396
2026
|
mediaBulkAction.value = 'overwrite_all';
|
|
1397
2027
|
}
|
|
1398
2028
|
if (action === 'compare') {
|
|
1399
|
-
await inlineDiffAndMerge(record, metaPath,
|
|
2029
|
+
await inlineDiffAndMerge(record, metaPath, configWithTz, { localIsNewer: true });
|
|
1400
2030
|
refs.push({ uid: record.UID, metaPath });
|
|
1401
2031
|
continue;
|
|
1402
2032
|
}
|
|
@@ -1454,10 +2084,13 @@ async function processMediaEntries(mediaRecords, structure, options, config, app
|
|
|
1454
2084
|
await writeFile(metaPath, JSON.stringify(meta, null, 2) + '\n');
|
|
1455
2085
|
log.dim(` → ${metaPath}`);
|
|
1456
2086
|
|
|
1457
|
-
// Set file timestamps from server dates
|
|
2087
|
+
// Set file timestamps from server dates (independent try-catch so one failure
|
|
2088
|
+
// doesn't prevent the other — metaPath mtime is the sync baseline for comparisons)
|
|
1458
2089
|
if (serverTz && (record._CreatedOn || record._LastUpdated)) {
|
|
1459
2090
|
try {
|
|
1460
2091
|
await setFileTimestamps(filePath, record._CreatedOn, record._LastUpdated, serverTz);
|
|
2092
|
+
} catch { /* non-critical */ }
|
|
2093
|
+
try {
|
|
1461
2094
|
await setFileTimestamps(metaPath, record._CreatedOn, record._LastUpdated, serverTz);
|
|
1462
2095
|
} catch { /* non-critical */ }
|
|
1463
2096
|
}
|
|
@@ -1531,6 +2164,41 @@ async function processRecord(entityName, record, structure, options, usedNames,
|
|
|
1531
2164
|
}
|
|
1532
2165
|
// If still no extension, ext remains '' (no extension)
|
|
1533
2166
|
|
|
2167
|
+
// If no extension determined and Content column has data, prompt user to choose one
|
|
2168
|
+
if (!ext && !options.yes && record.Content) {
|
|
2169
|
+
const cv = record.Content;
|
|
2170
|
+
const hasContentData = cv && (
|
|
2171
|
+
(typeof cv === 'object' && cv.value !== null && cv.value !== undefined) ||
|
|
2172
|
+
(typeof cv === 'string' && cv.length > 0)
|
|
2173
|
+
);
|
|
2174
|
+
if (hasContentData) {
|
|
2175
|
+
// Decode a snippet for preview
|
|
2176
|
+
let snippet = '';
|
|
2177
|
+
try {
|
|
2178
|
+
const decoded = resolveContentValue(cv);
|
|
2179
|
+
if (decoded) {
|
|
2180
|
+
snippet = decoded.substring(0, 80).replace(/\n/g, ' ').replace(/\s+/g, ' ').trim();
|
|
2181
|
+
if (decoded.length > 80) snippet += '...';
|
|
2182
|
+
}
|
|
2183
|
+
} catch { /* ignore decode errors */ }
|
|
2184
|
+
const preview = snippet ? ` (${snippet})` : '';
|
|
2185
|
+
const VALID_CONTENT_EXTENSIONS = ['css', 'js', 'html', 'xml', 'txt', 'md', 'cs', 'json', 'sql'];
|
|
2186
|
+
const inquirer = (await import('inquirer')).default;
|
|
2187
|
+
const { chosenExt } = await inquirer.prompt([{
|
|
2188
|
+
type: 'list',
|
|
2189
|
+
name: 'chosenExt',
|
|
2190
|
+
message: `No extension found for "${record.Name || record.UID}". Choose a file extension for the Content:${preview}`,
|
|
2191
|
+
choices: [
|
|
2192
|
+
...VALID_CONTENT_EXTENSIONS.map(e => ({ name: `.${e}`, value: e })),
|
|
2193
|
+
{ name: 'No extension (skip)', value: '' },
|
|
2194
|
+
],
|
|
2195
|
+
}]);
|
|
2196
|
+
if (chosenExt) {
|
|
2197
|
+
ext = chosenExt;
|
|
2198
|
+
}
|
|
2199
|
+
}
|
|
2200
|
+
}
|
|
2201
|
+
|
|
1534
2202
|
// Avoid double extension: if name already ends with .ext, strip it
|
|
1535
2203
|
if (ext) {
|
|
1536
2204
|
const extWithDot = `.${ext}`;
|
|
@@ -1630,12 +2298,17 @@ async function processRecord(entityName, record, structure, options, usedNames,
|
|
|
1630
2298
|
}
|
|
1631
2299
|
|
|
1632
2300
|
if (bulkAction.value !== 'overwrite_all') {
|
|
1633
|
-
const localSyncTime = await getLocalSyncTime(metaPath);
|
|
1634
2301
|
const config = await loadConfig();
|
|
1635
|
-
const
|
|
2302
|
+
const configWithTz = { ...config, ServerTimezone: serverTz };
|
|
2303
|
+
const localSyncTime = await getLocalSyncTime(metaPath);
|
|
2304
|
+
const serverNewer = isServerNewer(localSyncTime, record._LastUpdated, configWithTz);
|
|
2305
|
+
const serverDate = parseServerDate(record._LastUpdated, serverTz);
|
|
1636
2306
|
|
|
1637
2307
|
if (serverNewer) {
|
|
1638
|
-
const action = await promptChangeDetection(finalName, record,
|
|
2308
|
+
const action = await promptChangeDetection(finalName, record, configWithTz, {
|
|
2309
|
+
serverDate,
|
|
2310
|
+
localDate: localSyncTime,
|
|
2311
|
+
});
|
|
1639
2312
|
|
|
1640
2313
|
if (action === 'skip') {
|
|
1641
2314
|
log.dim(` Skipped ${finalName}.${ext}`);
|
|
@@ -1651,16 +2324,20 @@ async function processRecord(entityName, record, structure, options, usedNames,
|
|
|
1651
2324
|
// Fall through to write
|
|
1652
2325
|
}
|
|
1653
2326
|
if (action === 'compare') {
|
|
1654
|
-
await inlineDiffAndMerge(record, metaPath,
|
|
2327
|
+
await inlineDiffAndMerge(record, metaPath, configWithTz);
|
|
1655
2328
|
return { uid: record.UID, metaPath };
|
|
1656
2329
|
}
|
|
1657
2330
|
// 'overwrite' falls through to normal write
|
|
1658
2331
|
} else {
|
|
1659
2332
|
// Server _LastUpdated hasn't changed since last sync.
|
|
1660
2333
|
// Check if local content files were modified (user edits).
|
|
1661
|
-
const locallyModified = await hasLocalModifications(metaPath,
|
|
2334
|
+
const locallyModified = await hasLocalModifications(metaPath, configWithTz);
|
|
1662
2335
|
if (locallyModified) {
|
|
1663
|
-
const action = await promptChangeDetection(finalName, record,
|
|
2336
|
+
const action = await promptChangeDetection(finalName, record, configWithTz, {
|
|
2337
|
+
localIsNewer: true,
|
|
2338
|
+
serverDate,
|
|
2339
|
+
localDate: localSyncTime,
|
|
2340
|
+
});
|
|
1664
2341
|
|
|
1665
2342
|
if (action === 'skip') {
|
|
1666
2343
|
log.dim(` Kept local: ${finalName}.${ext}`);
|
|
@@ -1675,7 +2352,7 @@ async function processRecord(entityName, record, structure, options, usedNames,
|
|
|
1675
2352
|
bulkAction.value = 'overwrite_all';
|
|
1676
2353
|
}
|
|
1677
2354
|
if (action === 'compare') {
|
|
1678
|
-
await inlineDiffAndMerge(record, metaPath,
|
|
2355
|
+
await inlineDiffAndMerge(record, metaPath, configWithTz, { localIsNewer: true });
|
|
1679
2356
|
return { uid: record.UID, metaPath };
|
|
1680
2357
|
}
|
|
1681
2358
|
// 'overwrite' falls through to normal write
|
|
@@ -1728,6 +2405,13 @@ async function processRecord(entityName, record, structure, options, usedNames,
|
|
|
1728
2405
|
meta._contentColumns = ['Content'];
|
|
1729
2406
|
}
|
|
1730
2407
|
|
|
2408
|
+
// If the extension picker chose an extension (record.Extension was null),
|
|
2409
|
+
// set it in metadata only — not in the record — so the baseline preserves
|
|
2410
|
+
// the server's null and push detects the change.
|
|
2411
|
+
if (ext && !record.Extension) {
|
|
2412
|
+
meta.Extension = ext;
|
|
2413
|
+
}
|
|
2414
|
+
|
|
1731
2415
|
await writeFile(metaPath, JSON.stringify(meta, null, 2) + '\n');
|
|
1732
2416
|
log.dim(` → ${metaPath}`);
|
|
1733
2417
|
|
|
@@ -2073,6 +2757,7 @@ async function processOutputHierarchy(appJson, structure, options, serverTz) {
|
|
|
2073
2757
|
const refs = [];
|
|
2074
2758
|
const bulkAction = { value: null };
|
|
2075
2759
|
const config = await loadConfig();
|
|
2760
|
+
const configWithTz = { ...config, ServerTimezone: serverTz };
|
|
2076
2761
|
// When --force flag is set, skip change detection and re-process all files
|
|
2077
2762
|
const forceReprocess = !!options.force;
|
|
2078
2763
|
|
|
@@ -2125,21 +2810,22 @@ async function processOutputHierarchy(appJson, structure, options, serverTz) {
|
|
|
2125
2810
|
}
|
|
2126
2811
|
if (bulkAction.value !== 'overwrite_all') {
|
|
2127
2812
|
const localSyncTime = await getLocalSyncTime(rootMetaPath);
|
|
2128
|
-
const serverNewer = isServerNewer(localSyncTime, output._LastUpdated,
|
|
2813
|
+
const serverNewer = isServerNewer(localSyncTime, output._LastUpdated, configWithTz);
|
|
2814
|
+
const serverDate = parseServerDate(output._LastUpdated, serverTz);
|
|
2129
2815
|
if (serverNewer) {
|
|
2130
|
-
const action = await promptChangeDetection(rootBasename, output,
|
|
2816
|
+
const action = await promptChangeDetection(rootBasename, output, configWithTz, { serverDate, localDate: localSyncTime });
|
|
2131
2817
|
if (action === 'skip') { refs.push({ uid: output.UID, metaPath: rootMetaPath }); continue; }
|
|
2132
2818
|
if (action === 'skip_all') { bulkAction.value = 'skip_all'; refs.push({ uid: output.UID, metaPath: rootMetaPath }); continue; }
|
|
2133
2819
|
if (action === 'overwrite_all') { bulkAction.value = 'overwrite_all'; }
|
|
2134
|
-
if (action === 'compare') { await inlineDiffAndMerge(output, rootMetaPath,
|
|
2820
|
+
if (action === 'compare') { await inlineDiffAndMerge(output, rootMetaPath, configWithTz); refs.push({ uid: output.UID, metaPath: rootMetaPath }); continue; }
|
|
2135
2821
|
} else {
|
|
2136
|
-
const locallyModified = await hasLocalModifications(rootMetaPath,
|
|
2822
|
+
const locallyModified = await hasLocalModifications(rootMetaPath, configWithTz);
|
|
2137
2823
|
if (locallyModified) {
|
|
2138
|
-
const action = await promptChangeDetection(rootBasename, output,
|
|
2824
|
+
const action = await promptChangeDetection(rootBasename, output, configWithTz, { localIsNewer: true, serverDate, localDate: localSyncTime });
|
|
2139
2825
|
if (action === 'skip') { refs.push({ uid: output.UID, metaPath: rootMetaPath }); continue; }
|
|
2140
2826
|
if (action === 'skip_all') { bulkAction.value = 'skip_all'; refs.push({ uid: output.UID, metaPath: rootMetaPath }); continue; }
|
|
2141
2827
|
if (action === 'overwrite_all') { bulkAction.value = 'overwrite_all'; }
|
|
2142
|
-
if (action === 'compare') { await inlineDiffAndMerge(output, rootMetaPath,
|
|
2828
|
+
if (action === 'compare') { await inlineDiffAndMerge(output, rootMetaPath, configWithTz, { localIsNewer: true }); refs.push({ uid: output.UID, metaPath: rootMetaPath }); continue; }
|
|
2143
2829
|
} else {
|
|
2144
2830
|
log.dim(` Up to date: ${rootBasename}`);
|
|
2145
2831
|
refs.push({ uid: output.UID, metaPath: rootMetaPath });
|