@dboio/cli 0.6.14 → 0.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +313 -8
- package/package.json +3 -2
- package/src/commands/add.js +56 -11
- package/src/commands/clone.js +1294 -75
- package/src/commands/content.js +1 -1
- package/src/commands/deploy.js +1 -1
- package/src/commands/init.js +39 -4
- package/src/commands/install.js +10 -1
- package/src/commands/login.js +4 -9
- package/src/commands/output.js +56 -3
- package/src/commands/pull.js +3 -3
- package/src/commands/push.js +24 -12
- package/src/lib/config.js +175 -8
- package/src/lib/diff.js +72 -14
- package/src/lib/ignore.js +145 -0
- package/src/lib/input-parser.js +87 -38
- package/src/lib/structure.js +130 -0
- package/src/lib/timestamps.js +31 -9
package/src/commands/clone.js
CHANGED
|
@@ -2,11 +2,11 @@ import { Command } from 'commander';
|
|
|
2
2
|
import { readFile, writeFile, mkdir, access } from 'fs/promises';
|
|
3
3
|
import { join, basename, extname } from 'path';
|
|
4
4
|
import { DboClient } from '../lib/client.js';
|
|
5
|
-
import { loadConfig, updateConfigWithApp, loadClonePlacement, saveClonePlacement, ensureGitignore, saveEntityDirPreference, loadEntityDirPreference, saveEntityContentExtractions, loadEntityContentExtractions, saveAppJsonBaseline, addDeleteEntry, loadCollisionResolutions, saveCollisionResolutions, loadSynchronize, saveAppModifyKey, loadTransactionKeyPreset, saveTransactionKeyPreset } from '../lib/config.js';
|
|
6
|
-
import { buildBinHierarchy, resolveBinPath, createDirectories, saveStructureFile, getBinName, findBinByPath, BINS_DIR, DEFAULT_PROJECT_DIRS, ENTITY_DIR_MAP } from '../lib/structure.js';
|
|
5
|
+
import { loadConfig, updateConfigWithApp, loadClonePlacement, saveClonePlacement, ensureGitignore, saveEntityDirPreference, loadEntityDirPreference, saveEntityContentExtractions, loadEntityContentExtractions, saveAppJsonBaseline, addDeleteEntry, loadCollisionResolutions, saveCollisionResolutions, loadSynchronize, saveSynchronize, saveAppModifyKey, loadTransactionKeyPreset, saveTransactionKeyPreset, loadOutputFilenamePreference, saveOutputFilenamePreference, saveCloneSource, loadCloneSource, saveDescriptorFilenamePreference, loadDescriptorFilenamePreference, saveDescriptorContentExtractions, loadDescriptorContentExtractions, saveExtensionDocumentationMDPlacement, loadExtensionDocumentationMDPlacement } from '../lib/config.js';
|
|
6
|
+
import { buildBinHierarchy, resolveBinPath, createDirectories, saveStructureFile, getBinName, findBinByPath, BINS_DIR, DEFAULT_PROJECT_DIRS, ENTITY_DIR_MAP, OUTPUT_ENTITY_MAP, OUTPUT_HIERARCHY_ENTITIES, EXTENSION_DESCRIPTORS_DIR, EXTENSION_UNSUPPORTED_DIR, DOCUMENTATION_DIR, buildDescriptorMapping, saveDescriptorMapping, loadDescriptorMapping, resolveExtensionSubDir } from '../lib/structure.js';
|
|
7
7
|
import { log } from '../lib/logger.js';
|
|
8
|
-
import { setFileTimestamps } from '../lib/timestamps.js';
|
|
9
|
-
import { getLocalSyncTime, isServerNewer, hasLocalModifications, promptChangeDetection, inlineDiffAndMerge } from '../lib/diff.js';
|
|
8
|
+
import { setFileTimestamps, parseServerDate } from '../lib/timestamps.js';
|
|
9
|
+
import { getLocalSyncTime, isServerNewer, hasLocalModifications, promptChangeDetection, inlineDiffAndMerge, isDiffable } from '../lib/diff.js';
|
|
10
10
|
import { checkDomainChange } from '../lib/domain-guard.js';
|
|
11
11
|
|
|
12
12
|
/**
|
|
@@ -421,6 +421,10 @@ export const cloneCommand = new Command('clone')
|
|
|
421
421
|
.description('Clone an app from DBO.io to a local project structure')
|
|
422
422
|
.argument('[source]', 'Local JSON file path (optional)')
|
|
423
423
|
.option('--app <shortName>', 'App short name to fetch from server')
|
|
424
|
+
.option('-e, --entity <type>', 'Only clone a specific entity type (e.g. output, content, media, extension)')
|
|
425
|
+
.option('--documentation-only', 'When used with -e extension, clone only documentation extensions')
|
|
426
|
+
.option('--descriptor-types <bool>', 'Sort extensions into descriptor sub-directories (default: true)', 'true')
|
|
427
|
+
.option('--force', 'Force re-processing of all files, skip change detection')
|
|
424
428
|
.option('--domain <host>', 'Override domain')
|
|
425
429
|
.option('-y, --yes', 'Auto-accept all prompts')
|
|
426
430
|
.option('-v, --verbose', 'Show HTTP request details')
|
|
@@ -434,26 +438,46 @@ export const cloneCommand = new Command('clone')
|
|
|
434
438
|
});
|
|
435
439
|
|
|
436
440
|
/**
|
|
437
|
-
*
|
|
441
|
+
* Resolve app JSON from a given source (file path, URL, or server fetch).
|
|
442
|
+
* Throws on failure so the caller can retry with a different source.
|
|
438
443
|
*/
|
|
439
|
-
|
|
440
|
-
const config = await loadConfig();
|
|
441
|
-
const effectiveDomain = options.domain || config.domain;
|
|
442
|
-
let appJson;
|
|
443
|
-
|
|
444
|
-
// Step 1: Load the app JSON
|
|
444
|
+
async function resolveAppSource(source, options, config) {
|
|
445
445
|
if (source) {
|
|
446
|
-
|
|
446
|
+
if (source.startsWith('http://') || source.startsWith('https://')) {
|
|
447
|
+
log.info(`Fetching app JSON from ${source}...`);
|
|
448
|
+
const res = await fetch(source);
|
|
449
|
+
if (!res.ok) throw new Error(`HTTP ${res.status} fetching ${source}`);
|
|
450
|
+
return await res.json();
|
|
451
|
+
}
|
|
447
452
|
log.info(`Loading app JSON from ${source}...`);
|
|
448
453
|
const raw = await readFile(source, 'utf8');
|
|
449
|
-
|
|
454
|
+
return JSON.parse(raw);
|
|
450
455
|
} else if (options.app) {
|
|
451
|
-
|
|
452
|
-
appJson = await fetchAppFromServer(options.app, options, config);
|
|
453
|
-
} else if (config.AppShortName) {
|
|
454
|
-
// Use config's AppShortName
|
|
455
|
-
appJson = await fetchAppFromServer(config.AppShortName, options, config);
|
|
456
|
+
return await fetchAppFromServer(options.app, options, config);
|
|
456
457
|
} else {
|
|
458
|
+
// Try stored cloneSource before falling back to server fetch
|
|
459
|
+
const storedSource = await loadCloneSource();
|
|
460
|
+
if (storedSource && storedSource !== 'default') {
|
|
461
|
+
// Stored source is a local file path or URL — reuse it
|
|
462
|
+
if (storedSource.startsWith('http://') || storedSource.startsWith('https://')) {
|
|
463
|
+
log.info(`Fetching app JSON from ${storedSource} (stored source)...`);
|
|
464
|
+
const res = await fetch(storedSource);
|
|
465
|
+
if (!res.ok) throw new Error(`HTTP ${res.status} fetching ${storedSource}`);
|
|
466
|
+
return await res.json();
|
|
467
|
+
}
|
|
468
|
+
if (await fileExists(storedSource)) {
|
|
469
|
+
log.info(`Loading app JSON from ${storedSource} (stored source)...`);
|
|
470
|
+
const raw = await readFile(storedSource, 'utf8');
|
|
471
|
+
return JSON.parse(raw);
|
|
472
|
+
}
|
|
473
|
+
// Stored file no longer exists — fall through to server fetch
|
|
474
|
+
log.dim(` Stored clone source "${storedSource}" not found, trying server...`);
|
|
475
|
+
}
|
|
476
|
+
|
|
477
|
+
if (config.AppShortName) {
|
|
478
|
+
return await fetchAppFromServer(config.AppShortName, options, config);
|
|
479
|
+
}
|
|
480
|
+
|
|
457
481
|
// Prompt
|
|
458
482
|
const inquirer = (await import('inquirer')).default;
|
|
459
483
|
const { choice } = await inquirer.prompt([{
|
|
@@ -472,7 +496,7 @@ export async function performClone(source, options = {}) {
|
|
|
472
496
|
message: 'App short name:',
|
|
473
497
|
validate: v => v.trim() ? true : 'App short name is required',
|
|
474
498
|
}]);
|
|
475
|
-
|
|
499
|
+
return await fetchAppFromServer(appName, options, config);
|
|
476
500
|
} else {
|
|
477
501
|
const { filePath } = await inquirer.prompt([{
|
|
478
502
|
type: 'input', name: 'filePath',
|
|
@@ -480,7 +504,155 @@ export async function performClone(source, options = {}) {
|
|
|
480
504
|
validate: v => v.trim() ? true : 'File path is required',
|
|
481
505
|
}]);
|
|
482
506
|
const raw = await readFile(filePath, 'utf8');
|
|
483
|
-
|
|
507
|
+
return JSON.parse(raw);
|
|
508
|
+
}
|
|
509
|
+
}
|
|
510
|
+
}
|
|
511
|
+
|
|
512
|
+
/**
|
|
513
|
+
* Check synchronize.json for un-pushed staged items before cloning.
|
|
514
|
+
* Warns the user and offers options: push first, review individually, ignore, or clear.
|
|
515
|
+
*/
|
|
516
|
+
async function checkPendingSynchronize(options) {
|
|
517
|
+
const sync = await loadSynchronize();
|
|
518
|
+
const deleteCount = (sync.delete || []).length;
|
|
519
|
+
const editCount = (sync.edit || []).length;
|
|
520
|
+
const addCount = (sync.add || []).length;
|
|
521
|
+
const totalCount = deleteCount + editCount + addCount;
|
|
522
|
+
|
|
523
|
+
if (totalCount === 0) return;
|
|
524
|
+
|
|
525
|
+
log.warn('');
|
|
526
|
+
log.warn(` ⚠ There are ${totalCount} un-pushed staged item(s) in .dbo/synchronize.json:`);
|
|
527
|
+
if (deleteCount > 0) log.warn(` ${deleteCount} pending deletion(s)`);
|
|
528
|
+
if (editCount > 0) log.warn(` ${editCount} pending edit(s)`);
|
|
529
|
+
if (addCount > 0) log.warn(` ${addCount} pending add(s)`);
|
|
530
|
+
log.warn('');
|
|
531
|
+
|
|
532
|
+
if (options.yes) {
|
|
533
|
+
log.dim(' Ignoring staged items (-y flag)');
|
|
534
|
+
return;
|
|
535
|
+
}
|
|
536
|
+
|
|
537
|
+
const inquirer = (await import('inquirer')).default;
|
|
538
|
+
const { action } = await inquirer.prompt([{
|
|
539
|
+
type: 'list',
|
|
540
|
+
name: 'action',
|
|
541
|
+
message: 'How would you like to handle the staged items?',
|
|
542
|
+
choices: [
|
|
543
|
+
{ name: 'Ignore — keep staged items and continue cloning', value: 'ignore' },
|
|
544
|
+
{ name: 'Review — show each item and decide individually', value: 'review' },
|
|
545
|
+
{ name: 'Clear — discard all staged items and continue cloning', value: 'clear' },
|
|
546
|
+
{ name: 'Abort — stop cloning so you can push first', value: 'abort' },
|
|
547
|
+
],
|
|
548
|
+
}]);
|
|
549
|
+
|
|
550
|
+
if (action === 'abort') {
|
|
551
|
+
log.info('Clone aborted. Run "dbo push" to process staged items first.');
|
|
552
|
+
process.exit(0);
|
|
553
|
+
}
|
|
554
|
+
|
|
555
|
+
if (action === 'clear') {
|
|
556
|
+
await saveSynchronize({ delete: [], edit: [], add: [] });
|
|
557
|
+
log.success('Cleared all staged items from synchronize.json');
|
|
558
|
+
return;
|
|
559
|
+
}
|
|
560
|
+
|
|
561
|
+
if (action === 'review') {
|
|
562
|
+
const kept = { delete: [], edit: [], add: [] };
|
|
563
|
+
|
|
564
|
+
for (const category of ['delete', 'edit', 'add']) {
|
|
565
|
+
const items = sync[category] || [];
|
|
566
|
+
if (items.length === 0) continue;
|
|
567
|
+
|
|
568
|
+
for (const item of items) {
|
|
569
|
+
const label = item.Name || item.UID || item.metaPath || JSON.stringify(item).substring(0, 80);
|
|
570
|
+
const entity = item._entity || item.entity || '';
|
|
571
|
+
const desc = entity ? `[${category}] ${entity}: ${label}` : `[${category}] ${label}`;
|
|
572
|
+
|
|
573
|
+
const { keep } = await inquirer.prompt([{
|
|
574
|
+
type: 'confirm',
|
|
575
|
+
name: 'keep',
|
|
576
|
+
message: `Keep staged? ${desc}`,
|
|
577
|
+
default: true,
|
|
578
|
+
}]);
|
|
579
|
+
|
|
580
|
+
if (keep) {
|
|
581
|
+
kept[category].push(item);
|
|
582
|
+
} else {
|
|
583
|
+
log.dim(` Discarded: ${desc}`);
|
|
584
|
+
}
|
|
585
|
+
}
|
|
586
|
+
}
|
|
587
|
+
|
|
588
|
+
await saveSynchronize(kept);
|
|
589
|
+
const remaining = kept.delete.length + kept.edit.length + kept.add.length;
|
|
590
|
+
if (remaining > 0) {
|
|
591
|
+
log.info(`${remaining} staged item(s) kept`);
|
|
592
|
+
} else {
|
|
593
|
+
log.success('All staged items discarded');
|
|
594
|
+
}
|
|
595
|
+
return;
|
|
596
|
+
}
|
|
597
|
+
|
|
598
|
+
// action === 'ignore' — do nothing, keep staged items
|
|
599
|
+
log.dim(' Keeping staged items');
|
|
600
|
+
}
|
|
601
|
+
|
|
602
|
+
/**
|
|
603
|
+
* Main clone workflow. Exported for use by init --clone.
|
|
604
|
+
*/
|
|
605
|
+
export async function performClone(source, options = {}) {
|
|
606
|
+
const config = await loadConfig();
|
|
607
|
+
const effectiveDomain = options.domain || config.domain;
|
|
608
|
+
let appJson;
|
|
609
|
+
|
|
610
|
+
// Step 1: Source mismatch detection
|
|
611
|
+
// Warn when the user provides an explicit source that differs from the stored one.
|
|
612
|
+
const storedCloneSource = await loadCloneSource();
|
|
613
|
+
if (source && storedCloneSource && source !== storedCloneSource) {
|
|
614
|
+
if (!options.force && !options.yes) {
|
|
615
|
+
log.warn('');
|
|
616
|
+
log.warn(` ⚠ This project was previously cloned from: ${storedCloneSource}`);
|
|
617
|
+
log.warn(` Requested source: ${source}`);
|
|
618
|
+
const inquirer = (await import('inquirer')).default;
|
|
619
|
+
const { confirmed } = await inquirer.prompt([{
|
|
620
|
+
type: 'confirm',
|
|
621
|
+
name: 'confirmed',
|
|
622
|
+
message: 'Clone from the new source anyway? This will update the stored clone source.',
|
|
623
|
+
default: false,
|
|
624
|
+
}]);
|
|
625
|
+
if (!confirmed) {
|
|
626
|
+
log.info('Clone aborted.');
|
|
627
|
+
return;
|
|
628
|
+
}
|
|
629
|
+
} else {
|
|
630
|
+
log.warn(` ⚠ Clone source override: "${storedCloneSource}" → "${source}"`);
|
|
631
|
+
}
|
|
632
|
+
}
|
|
633
|
+
|
|
634
|
+
// Step 2: Load the app JSON — retry loop with fallback prompt on failure
|
|
635
|
+
let activeSource = source;
|
|
636
|
+
while (true) {
|
|
637
|
+
try {
|
|
638
|
+
appJson = await resolveAppSource(activeSource, options, config);
|
|
639
|
+
break;
|
|
640
|
+
} catch (err) {
|
|
641
|
+
if (options.yes || !process.stdin.isTTY) {
|
|
642
|
+
throw err;
|
|
643
|
+
}
|
|
644
|
+
log.warn('');
|
|
645
|
+
log.warn(` ⚠ Source did not return expected results: ${err.message}`);
|
|
646
|
+
const inquirer = (await import('inquirer')).default;
|
|
647
|
+
const { fallback } = await inquirer.prompt([{
|
|
648
|
+
type: 'input',
|
|
649
|
+
name: 'fallback',
|
|
650
|
+
message: 'Enter another local file path or URL to retry (or leave empty to abort):',
|
|
651
|
+
}]);
|
|
652
|
+
if (!fallback || !fallback.trim()) {
|
|
653
|
+
throw new Error('Clone aborted: no valid source provided.');
|
|
654
|
+
}
|
|
655
|
+
activeSource = fallback.trim();
|
|
484
656
|
}
|
|
485
657
|
}
|
|
486
658
|
|
|
@@ -500,6 +672,9 @@ export async function performClone(source, options = {}) {
|
|
|
500
672
|
|
|
501
673
|
log.success(`Cloning "${appJson.Name}" (${appJson.ShortName})`);
|
|
502
674
|
|
|
675
|
+
// Check for un-pushed staged items in synchronize.json
|
|
676
|
+
await checkPendingSynchronize(options);
|
|
677
|
+
|
|
503
678
|
// Ensure sensitive files are gitignored
|
|
504
679
|
await ensureGitignore(['.dbo/credentials.json', '.dbo/cookies.txt']);
|
|
505
680
|
|
|
@@ -510,6 +685,7 @@ export async function performClone(source, options = {}) {
|
|
|
510
685
|
AppName: appJson.Name,
|
|
511
686
|
AppShortName: appJson.ShortName,
|
|
512
687
|
});
|
|
688
|
+
await saveCloneSource(activeSource || 'default');
|
|
513
689
|
log.dim(' Updated .dbo/config.json with app metadata');
|
|
514
690
|
|
|
515
691
|
// Detect and store ModifyKey for locked/production apps
|
|
@@ -573,39 +749,75 @@ export async function performClone(source, options = {}) {
|
|
|
573
749
|
log.dim(` Set ServerTimezone to ${serverTz} in .dbo/config.json`);
|
|
574
750
|
}
|
|
575
751
|
|
|
576
|
-
//
|
|
577
|
-
|
|
578
|
-
const
|
|
579
|
-
|
|
752
|
+
// Resolve --entity filter: which entity types to process
|
|
753
|
+
// "output" expands to all OUTPUT_HIERARCHY_ENTITIES, others are matched directly
|
|
754
|
+
const entityFilter = resolveEntityFilter(options.entity);
|
|
755
|
+
if (entityFilter) {
|
|
756
|
+
log.info(`Entity filter: only processing ${options.entity}`);
|
|
757
|
+
}
|
|
758
|
+
|
|
759
|
+
// Step 4c: Detect and resolve file path collisions (skip in entity-filter mode)
|
|
760
|
+
let toDeleteUIDs = new Set();
|
|
761
|
+
if (!entityFilter) {
|
|
762
|
+
log.info('Scanning for file path collisions...');
|
|
763
|
+
const fileRegistry = await buildFileRegistry(appJson, structure, placementPrefs);
|
|
764
|
+
toDeleteUIDs = await resolveCollisions(fileRegistry, options);
|
|
580
765
|
|
|
581
|
-
|
|
582
|
-
|
|
766
|
+
if (toDeleteUIDs.size > 0) {
|
|
767
|
+
await stageCollisionDeletions(toDeleteUIDs, appJson, options);
|
|
768
|
+
}
|
|
583
769
|
}
|
|
584
770
|
|
|
585
771
|
// Step 5: Process content → files + metadata (skip rejected records)
|
|
586
|
-
|
|
587
|
-
|
|
588
|
-
|
|
589
|
-
|
|
590
|
-
|
|
591
|
-
|
|
592
|
-
|
|
593
|
-
|
|
772
|
+
let contentRefs = [];
|
|
773
|
+
if (!entityFilter || entityFilter.has('content')) {
|
|
774
|
+
contentRefs = await processContentEntries(
|
|
775
|
+
appJson.children.content || [],
|
|
776
|
+
structure,
|
|
777
|
+
options,
|
|
778
|
+
placementPrefs.contentPlacement,
|
|
779
|
+
serverTz,
|
|
780
|
+
toDeleteUIDs,
|
|
781
|
+
);
|
|
782
|
+
}
|
|
594
783
|
|
|
595
784
|
// Step 5b: Process media → download binary files + metadata (skip rejected records)
|
|
596
785
|
let mediaRefs = [];
|
|
597
|
-
|
|
598
|
-
|
|
599
|
-
|
|
786
|
+
if (!entityFilter || entityFilter.has('media')) {
|
|
787
|
+
const mediaEntries = appJson.children.media || [];
|
|
788
|
+
if (mediaEntries.length > 0) {
|
|
789
|
+
mediaRefs = await processMediaEntries(mediaEntries, structure, options, config, appJson.ShortName, placementPrefs.mediaPlacement, serverTz, toDeleteUIDs);
|
|
790
|
+
}
|
|
600
791
|
}
|
|
601
792
|
|
|
602
|
-
// Step
|
|
793
|
+
// Step 5c: Process output hierarchy
|
|
603
794
|
const otherRefs = {};
|
|
795
|
+
if (!entityFilter || entityFilter.has('output')) {
|
|
796
|
+
const outputEntries = appJson.children.output || [];
|
|
797
|
+
if (outputEntries.length > 0) {
|
|
798
|
+
const outputRefs = await processOutputHierarchy(appJson, structure, options, serverTz);
|
|
799
|
+
if (outputRefs.length > 0) {
|
|
800
|
+
otherRefs.output = outputRefs;
|
|
801
|
+
}
|
|
802
|
+
}
|
|
803
|
+
}
|
|
804
|
+
|
|
805
|
+
// Step 6: Process other entities (not output hierarchy, not bin, not content, not media)
|
|
604
806
|
for (const [entityName, entries] of Object.entries(appJson.children)) {
|
|
605
|
-
if (['bin', 'content', 'output', '
|
|
807
|
+
if (['bin', 'content', 'output', 'output_value', 'output_value_filter',
|
|
808
|
+
'output_value_entity_column_rel', 'media'].includes(entityName)) continue;
|
|
606
809
|
if (!Array.isArray(entries)) continue;
|
|
607
810
|
|
|
608
|
-
if
|
|
811
|
+
// Skip if entity filter is active and this entity doesn't match
|
|
812
|
+
if (entityFilter && !entityFilter.has(entityName)) continue;
|
|
813
|
+
|
|
814
|
+
if (entityName === 'extension') {
|
|
815
|
+
// Extension entities: descriptor-organized sub-directories
|
|
816
|
+
const refs = await processExtensionEntries(entries, structure, options, serverTz);
|
|
817
|
+
if (refs.length > 0) {
|
|
818
|
+
otherRefs[entityName] = refs;
|
|
819
|
+
}
|
|
820
|
+
} else if (ENTITY_DIR_MAP[entityName]) {
|
|
609
821
|
// Entity types with project directories — process into their directory
|
|
610
822
|
const refs = await processEntityDirEntries(entityName, entries, options, serverTz);
|
|
611
823
|
if (refs.length > 0) {
|
|
@@ -628,18 +840,43 @@ export async function performClone(source, options = {}) {
|
|
|
628
840
|
// Step 7: Save app.json with references
|
|
629
841
|
await saveAppJson(appJson, contentRefs, otherRefs, effectiveDomain);
|
|
630
842
|
|
|
631
|
-
// Step 8: Create .app.json baseline for delta tracking
|
|
632
|
-
|
|
843
|
+
// Step 8: Create .app.json baseline for delta tracking (skip in entity-filter mode to avoid overwriting)
|
|
844
|
+
if (!entityFilter) {
|
|
845
|
+
await saveBaselineFile(appJson);
|
|
846
|
+
}
|
|
633
847
|
|
|
634
848
|
// Step 9: Ensure .app.json is in .gitignore
|
|
635
849
|
await ensureGitignore(['.app.json']);
|
|
636
850
|
|
|
637
851
|
log.plain('');
|
|
638
|
-
log.success('Clone complete!');
|
|
852
|
+
log.success(entityFilter ? `Clone complete! (filtered: ${options.entity})` : 'Clone complete!');
|
|
639
853
|
log.dim(' app.json saved to project root');
|
|
640
854
|
log.dim(' Run "dbo login" to authenticate, then "dbo push" to deploy changes');
|
|
641
855
|
}
|
|
642
856
|
|
|
857
|
+
/**
|
|
858
|
+
* Resolve --entity filter into a Set of entity keys to process.
|
|
859
|
+
* Returns null if no filter (process everything), or a Set of entity key strings.
|
|
860
|
+
*
|
|
861
|
+
* "output" expands to all OUTPUT_HIERARCHY_ENTITIES.
|
|
862
|
+
* Entity-dir names (e.g. "extension", "site") are matched directly.
|
|
863
|
+
* Documentation aliases are also accepted (e.g. "column" → "output_value").
|
|
864
|
+
*/
|
|
865
|
+
function resolveEntityFilter(entityArg) {
|
|
866
|
+
if (!entityArg) return null;
|
|
867
|
+
|
|
868
|
+
const input = entityArg.toLowerCase().trim();
|
|
869
|
+
|
|
870
|
+
// "output" or any output sub-entity → process all output hierarchy entities
|
|
871
|
+
const docToPhysical = { column: 'output_value', filter: 'output_value_filter', join: 'output_value_entity_column_rel' };
|
|
872
|
+
if (input === 'output' || OUTPUT_HIERARCHY_ENTITIES.includes(input) || docToPhysical[input]) {
|
|
873
|
+
return new Set(OUTPUT_HIERARCHY_ENTITIES.concat(['output']));
|
|
874
|
+
}
|
|
875
|
+
|
|
876
|
+
// Direct match on known entity types
|
|
877
|
+
return new Set([input]);
|
|
878
|
+
}
|
|
879
|
+
|
|
643
880
|
/**
|
|
644
881
|
* Resolve placement preferences from config or prompt the user.
|
|
645
882
|
* Returns { contentPlacement, mediaPlacement } where values are 'path'|'bin'|'ask'|null
|
|
@@ -699,16 +936,23 @@ async function resolvePlacementPreferences(appJson, options) {
|
|
|
699
936
|
const answers = await inquirer.prompt(prompts);
|
|
700
937
|
contentPlacement = contentPlacement || answers.contentPlacement || 'bin';
|
|
701
938
|
mediaPlacement = mediaPlacement || answers.mediaPlacement || 'bin';
|
|
939
|
+
}
|
|
940
|
+
|
|
941
|
+
// Resolve defaults for any still-unset values
|
|
942
|
+
contentPlacement = contentPlacement || 'bin';
|
|
943
|
+
mediaPlacement = mediaPlacement || 'bin';
|
|
702
944
|
|
|
703
|
-
|
|
704
|
-
|
|
705
|
-
|
|
945
|
+
// Always persist resolved values — not just when prompts were shown.
|
|
946
|
+
// This ensures defaults are saved even when the app has no content/media yet,
|
|
947
|
+
// so subsequent clones that do have records skip the prompts.
|
|
948
|
+
if (!saved.contentPlacement || !saved.mediaPlacement) {
|
|
949
|
+
await saveClonePlacement({ contentPlacement, mediaPlacement });
|
|
950
|
+
if (prompts.length > 0) {
|
|
951
|
+
log.dim(' Saved placement preferences to .dbo/config.json');
|
|
952
|
+
}
|
|
706
953
|
}
|
|
707
954
|
|
|
708
|
-
return {
|
|
709
|
-
contentPlacement: contentPlacement || 'bin',
|
|
710
|
-
mediaPlacement: mediaPlacement || 'bin',
|
|
711
|
-
};
|
|
955
|
+
return { contentPlacement, mediaPlacement };
|
|
712
956
|
}
|
|
713
957
|
|
|
714
958
|
/**
|
|
@@ -718,10 +962,7 @@ async function fetchAppFromServer(appShortName, options, config) {
|
|
|
718
962
|
const client = new DboClient({ domain: options.domain, verbose: options.verbose });
|
|
719
963
|
log.info(`Fetching app "${appShortName}" from server...`);
|
|
720
964
|
|
|
721
|
-
const result = await client.get(
|
|
722
|
-
'_filter:AppShortName': appShortName,
|
|
723
|
-
'_format': 'json_raw',
|
|
724
|
-
});
|
|
965
|
+
const result = await client.get(`/api/app/object/${appShortName}`);
|
|
725
966
|
|
|
726
967
|
const data = result.payload || result.data;
|
|
727
968
|
const rows = Array.isArray(data) ? data : (data?.Rows || data?.rows || []);
|
|
@@ -1025,11 +1266,16 @@ async function processEntityDirEntries(entityName, entries, options, serverTz) {
|
|
|
1025
1266
|
}
|
|
1026
1267
|
|
|
1027
1268
|
if (bulkAction.value !== 'overwrite_all') {
|
|
1269
|
+
const configWithTz = { ...config, ServerTimezone: serverTz };
|
|
1028
1270
|
const localSyncTime = await getLocalSyncTime(metaPath);
|
|
1029
|
-
const serverNewer = isServerNewer(localSyncTime, record._LastUpdated,
|
|
1271
|
+
const serverNewer = isServerNewer(localSyncTime, record._LastUpdated, configWithTz);
|
|
1272
|
+
const serverDate = parseServerDate(record._LastUpdated, serverTz);
|
|
1030
1273
|
|
|
1031
1274
|
if (serverNewer) {
|
|
1032
|
-
const action = await promptChangeDetection(finalName, record,
|
|
1275
|
+
const action = await promptChangeDetection(finalName, record, configWithTz, {
|
|
1276
|
+
serverDate,
|
|
1277
|
+
localDate: localSyncTime,
|
|
1278
|
+
});
|
|
1033
1279
|
|
|
1034
1280
|
if (action === 'skip') {
|
|
1035
1281
|
log.dim(` Skipped ${finalName}`);
|
|
@@ -1046,14 +1292,18 @@ async function processEntityDirEntries(entityName, entries, options, serverTz) {
|
|
|
1046
1292
|
bulkAction.value = 'overwrite_all';
|
|
1047
1293
|
}
|
|
1048
1294
|
if (action === 'compare') {
|
|
1049
|
-
await inlineDiffAndMerge(record, metaPath,
|
|
1295
|
+
await inlineDiffAndMerge(record, metaPath, configWithTz);
|
|
1050
1296
|
refs.push({ uid: record.UID, metaPath });
|
|
1051
1297
|
continue;
|
|
1052
1298
|
}
|
|
1053
1299
|
} else {
|
|
1054
|
-
const locallyModified = await hasLocalModifications(metaPath,
|
|
1300
|
+
const locallyModified = await hasLocalModifications(metaPath, configWithTz);
|
|
1055
1301
|
if (locallyModified) {
|
|
1056
|
-
const action = await promptChangeDetection(finalName, record,
|
|
1302
|
+
const action = await promptChangeDetection(finalName, record, configWithTz, {
|
|
1303
|
+
localIsNewer: true,
|
|
1304
|
+
serverDate,
|
|
1305
|
+
localDate: localSyncTime,
|
|
1306
|
+
});
|
|
1057
1307
|
|
|
1058
1308
|
if (action === 'skip') {
|
|
1059
1309
|
log.dim(` Kept local: ${finalName}`);
|
|
@@ -1070,7 +1320,7 @@ async function processEntityDirEntries(entityName, entries, options, serverTz) {
|
|
|
1070
1320
|
bulkAction.value = 'overwrite_all';
|
|
1071
1321
|
}
|
|
1072
1322
|
if (action === 'compare') {
|
|
1073
|
-
await inlineDiffAndMerge(record, metaPath,
|
|
1323
|
+
await inlineDiffAndMerge(record, metaPath, configWithTz, { localIsNewer: true });
|
|
1074
1324
|
refs.push({ uid: record.UID, metaPath });
|
|
1075
1325
|
continue;
|
|
1076
1326
|
}
|
|
@@ -1142,6 +1392,378 @@ async function processEntityDirEntries(entityName, entries, options, serverTz) {
|
|
|
1142
1392
|
return refs;
|
|
1143
1393
|
}
|
|
1144
1394
|
|
|
1395
|
+
// ─── Extension Descriptor Sub-directory Processing ────────────────────────
|
|
1396
|
+
|
|
1397
|
+
/**
|
|
1398
|
+
* Scan extension records for descriptor_definition entries.
|
|
1399
|
+
* Builds the mapping, persists to structure.json, always creates Extensions/Unsupported/,
|
|
1400
|
+
* and creates sub-directories for every mapped descriptor.
|
|
1401
|
+
* Returns the mapping object.
|
|
1402
|
+
*
|
|
1403
|
+
* @param {Object[]} extensionEntries
|
|
1404
|
+
* @param {Object} structure - Current bin structure (from loadStructureFile)
|
|
1405
|
+
* @returns {Promise<Object<string,string>>}
|
|
1406
|
+
*/
|
|
1407
|
+
async function buildDescriptorPrePass(extensionEntries, structure) {
|
|
1408
|
+
const { mapping, warnings } = buildDescriptorMapping(extensionEntries);
|
|
1409
|
+
|
|
1410
|
+
for (const w of warnings) log.warn(` descriptor_definition: ${w}`);
|
|
1411
|
+
|
|
1412
|
+
// Always create Unsupported/ — even if empty, users see at a glance what couldn't be mapped
|
|
1413
|
+
await mkdir(EXTENSION_UNSUPPORTED_DIR, { recursive: true });
|
|
1414
|
+
log.dim(` ${EXTENSION_UNSUPPORTED_DIR}/`);
|
|
1415
|
+
|
|
1416
|
+
// Create one sub-directory per mapped descriptor name
|
|
1417
|
+
for (const dirName of new Set(Object.values(mapping))) {
|
|
1418
|
+
const fullDir = `${EXTENSION_DESCRIPTORS_DIR}/${dirName}`;
|
|
1419
|
+
await mkdir(fullDir, { recursive: true });
|
|
1420
|
+
log.dim(` ${fullDir}/`);
|
|
1421
|
+
}
|
|
1422
|
+
|
|
1423
|
+
await saveDescriptorMapping(structure, mapping);
|
|
1424
|
+
log.dim(` Saved descriptorMapping to .dbo/structure.json`);
|
|
1425
|
+
|
|
1426
|
+
return mapping;
|
|
1427
|
+
}
|
|
1428
|
+
|
|
1429
|
+
/**
|
|
1430
|
+
* Resolve filename column and content extraction preferences for one descriptor.
|
|
1431
|
+
* Prompts the user on first use; saves to config.json; respects -y and --force.
|
|
1432
|
+
*
|
|
1433
|
+
* @param {string} descriptor - e.g. "documentation", "include", "control"
|
|
1434
|
+
* @param {Object[]} records - All extension records sharing this descriptor
|
|
1435
|
+
* @param {Object} options - CLI options (options.yes, options.force)
|
|
1436
|
+
* @returns {Promise<{ filenameCol: string, contentColsToExtract: Array<{col,ext}> }>}
|
|
1437
|
+
*/
|
|
1438
|
+
async function resolveDescriptorPreferences(descriptor, records, options) {
|
|
1439
|
+
const sampleRecord = records[0];
|
|
1440
|
+
const columns = Object.keys(sampleRecord)
|
|
1441
|
+
.filter(k => k !== 'children' && !k.startsWith('_'));
|
|
1442
|
+
|
|
1443
|
+
// ── Filename column ──────────────────────────────────────────────────
|
|
1444
|
+
let filenameCol;
|
|
1445
|
+
const savedCol = await loadDescriptorFilenamePreference(descriptor);
|
|
1446
|
+
|
|
1447
|
+
if (options.yes) {
|
|
1448
|
+
filenameCol = columns.includes('Name') ? 'Name'
|
|
1449
|
+
: columns.includes('UID') ? 'UID' : columns[0];
|
|
1450
|
+
} else if (savedCol && !options.force) {
|
|
1451
|
+
filenameCol = savedCol;
|
|
1452
|
+
log.dim(` Filename column for "${descriptor}": "${filenameCol}" (saved)`);
|
|
1453
|
+
} else {
|
|
1454
|
+
const inquirer = (await import('inquirer')).default;
|
|
1455
|
+
const defaultCol = columns.includes('Name') ? 'Name'
|
|
1456
|
+
: columns.includes('UID') ? 'UID' : columns[0];
|
|
1457
|
+
const { col } = await inquirer.prompt([{
|
|
1458
|
+
type: 'list', name: 'col',
|
|
1459
|
+
message: `Filename column for "${descriptor}" extensions:`,
|
|
1460
|
+
choices: columns, default: defaultCol,
|
|
1461
|
+
}]);
|
|
1462
|
+
filenameCol = col;
|
|
1463
|
+
await saveDescriptorFilenamePreference(descriptor, filenameCol);
|
|
1464
|
+
log.dim(` Saved filename column for "${descriptor}": "${filenameCol}"`);
|
|
1465
|
+
}
|
|
1466
|
+
|
|
1467
|
+
// ── Content extraction ───────────────────────────────────────────────
|
|
1468
|
+
const contentColsToExtract = [];
|
|
1469
|
+
if (!options.yes) {
|
|
1470
|
+
const base64Cols = [];
|
|
1471
|
+
for (const record of records) {
|
|
1472
|
+
for (const [key, value] of Object.entries(record)) {
|
|
1473
|
+
if (key === 'children' || key.startsWith('_')) continue;
|
|
1474
|
+
if (value && typeof value === 'object' && !Array.isArray(value)
|
|
1475
|
+
&& value.encoding === 'base64' && value.value !== null) {
|
|
1476
|
+
if (!base64Cols.find(c => c.col === key)) {
|
|
1477
|
+
let snippet = '';
|
|
1478
|
+
try {
|
|
1479
|
+
const decoded = Buffer.from(value.value, 'base64').toString('utf8');
|
|
1480
|
+
snippet = decoded.substring(0, 80)
|
|
1481
|
+
.replace(/[\x00-\x1f\x7f]/g, ' ') // strip control chars (incl. \n, \r, \t, ESC)
|
|
1482
|
+
.replace(/\s+/g, ' ') // collapse whitespace
|
|
1483
|
+
.trim();
|
|
1484
|
+
if (decoded.length > 80) snippet += '…';
|
|
1485
|
+
} catch {}
|
|
1486
|
+
base64Cols.push({ col: key, snippet });
|
|
1487
|
+
}
|
|
1488
|
+
}
|
|
1489
|
+
}
|
|
1490
|
+
}
|
|
1491
|
+
|
|
1492
|
+
if (base64Cols.length > 0) {
|
|
1493
|
+
const savedExtractions = options.force
|
|
1494
|
+
? null
|
|
1495
|
+
: await loadDescriptorContentExtractions(descriptor);
|
|
1496
|
+
const newPreferences = savedExtractions ? { ...savedExtractions } : {};
|
|
1497
|
+
let changed = false;
|
|
1498
|
+
const inquirer = (await import('inquirer')).default;
|
|
1499
|
+
|
|
1500
|
+
for (const { col, snippet } of base64Cols) {
|
|
1501
|
+
if (savedExtractions) {
|
|
1502
|
+
const saved = savedExtractions[col];
|
|
1503
|
+
if (saved === false) { log.dim(` Skip "${col}" for "${descriptor}" (saved)`); continue; }
|
|
1504
|
+
if (typeof saved === 'string') {
|
|
1505
|
+
log.dim(` Extracting "${col}" for "${descriptor}" as .${saved} (saved)`);
|
|
1506
|
+
contentColsToExtract.push({ col, ext: saved });
|
|
1507
|
+
continue;
|
|
1508
|
+
}
|
|
1509
|
+
}
|
|
1510
|
+
const preview = snippet ? ` ("${snippet}")` : '';
|
|
1511
|
+
const { extract } = await inquirer.prompt([{
|
|
1512
|
+
type: 'confirm', name: 'extract',
|
|
1513
|
+
message: `Extract column "${col}" (${descriptor}) as companion file?${preview}`,
|
|
1514
|
+
default: true,
|
|
1515
|
+
}]);
|
|
1516
|
+
if (extract) {
|
|
1517
|
+
const guessed = guessExtensionForDescriptor(descriptor, col);
|
|
1518
|
+
const { ext } = await inquirer.prompt([{
|
|
1519
|
+
type: 'input', name: 'ext',
|
|
1520
|
+
message: `File extension for "${col}" (${descriptor}):`,
|
|
1521
|
+
default: guessed,
|
|
1522
|
+
}]);
|
|
1523
|
+
const cleanExt = ext.replace(/^\./, '');
|
|
1524
|
+
contentColsToExtract.push({ col, ext: cleanExt });
|
|
1525
|
+
newPreferences[col] = cleanExt;
|
|
1526
|
+
} else {
|
|
1527
|
+
newPreferences[col] = false;
|
|
1528
|
+
}
|
|
1529
|
+
changed = true;
|
|
1530
|
+
}
|
|
1531
|
+
if (changed) await saveDescriptorContentExtractions(descriptor, newPreferences);
|
|
1532
|
+
}
|
|
1533
|
+
}
|
|
1534
|
+
|
|
1535
|
+
return { filenameCol, contentColsToExtract };
|
|
1536
|
+
}
|
|
1537
|
+
|
|
1538
|
+
/**
 * Guess a file extension based on descriptor name, with column-name fallback.
 *
 * @param {string} descriptor - Extension descriptor type (e.g. 'documentation')
 * @param {string} columnName - Column name used for the fallback heuristic
 * @returns {string} File extension without the leading dot
 */
function guessExtensionForDescriptor(descriptor, columnName) {
  // Known descriptors carry a fixed extension; anything else falls back
  // to the generic column-name based guess.
  const extByDescriptor = new Map([
    ['documentation', 'md'],
    ['include', 'html'],
    ['control', 'js'],
  ]);
  const known = extByDescriptor.get(descriptor);
  return known !== undefined ? known : guessExtensionForColumn(columnName);
}
|
|
1547
|
+
|
|
1548
|
+
/**
 * Resolve placement for documentation descriptor MD companion files.
 * Prompts on first use; persists to config.json; respects -y and --force.
 *
 * @param {Object} options - CLI options (options.yes, options.force)
 * @returns {Promise<'inline'|'root'>}
 */
async function resolveDocumentationPlacement(options) {
  // Non-interactive mode never prompts: keep MD next to the metadata files.
  if (options.yes) return 'inline';

  // Reuse a previously saved choice unless --force asks to re-decide.
  const stored = await loadExtensionDocumentationMDPlacement();
  if (stored && !options.force) {
    log.dim(` Documentation MD placement: ${stored} (saved)`);
    return stored;
  }

  const { default: inquirer } = await import('inquirer');
  const answers = await inquirer.prompt([{
    type: 'list', name: 'placement',
    message: 'Where should extracted documentation MD files be placed?',
    choices: [
      { name: '/Documentation/<filename>.md — project root (recommended)', value: 'root' },
      { name: 'Extensions/Documentation/<filename>.md — inline alongside metadata', value: 'inline' },
    ],
    default: 'root',
  }]);

  // Persist so subsequent clones of this app skip the prompt.
  await saveExtensionDocumentationMDPlacement(answers.placement);
  log.dim(` Saved documentation MD placement: "${answers.placement}"`);
  return answers.placement;
}
|
|
1577
|
+
|
|
1578
|
+
/**
 * Process extension entity records into descriptor-organized sub-directories.
 *
 * Flow: (A) pre-pass builds the descriptor→directory mapping and creates the
 * directories, (B) records are grouped by Descriptor, (C) per-descriptor
 * filename/extraction preferences are resolved up front (so all interactive
 * prompts fire before any file is written), and (D) each group is written to
 * disk with change detection against previously cloned files.
 *
 * @param {Object[]} entries - All extension records from appJson.children.extension
 * @param {Object} structure - Bin structure from loadStructureFile()
 * @param {Object} options - CLI options (options.yes, options.force, options.entity)
 * @param {string} serverTz - Server timezone for timestamp syncing
 * @returns {Promise<Array<{uid, metaPath}>>}
 */
async function processExtensionEntries(entries, structure, options, serverTz) {
  if (!entries || entries.length === 0) return [];

  // --descriptor-types false → skip descriptor sorting, use flat Extensions/ via generic path
  if (options.descriptorTypes === 'false') {
    log.info(`Processing ${entries.length} extension record(s) (flat mode)...`);
    return await processEntityDirEntries('extension', entries, options, serverTz);
  }

  log.info(`Processing ${entries.length} extension record(s)...`);

  // Step A: Pre-pass — build mapping + create directories
  const mapping = await buildDescriptorPrePass(entries, structure);

  // Clear documentation preferences when --force is used with --documentation-only
  if (options.documentationOnly && options.force) {
    await saveDescriptorFilenamePreference('documentation', null);
    await saveDescriptorContentExtractions('documentation', null);
    await saveExtensionDocumentationMDPlacement(null);
    log.info(' --force: cleared saved documentation preferences');
  }

  // Step B: Group records by descriptor
  const groups = new Map(); // descriptor → { dir, records[] }
  for (const record of entries) {
    // Records without a Descriptor are bucketed separately under a sentinel key.
    const descriptor = record.Descriptor || '__unsupported__';

    // --documentation-only: skip non-documentation records
    if (options.documentationOnly && descriptor !== 'documentation') continue;

    if (!groups.has(descriptor)) {
      groups.set(descriptor, { dir: resolveExtensionSubDir(record, mapping), records: [] });
    }
    groups.get(descriptor).records.push(record);
  }

  // Step C: Resolve preferences per descriptor (prompts fire here, before file writes)
  const descriptorPrefs = new Map();
  let docPlacement = 'inline';

  for (const [descriptor, { records }] of groups.entries()) {
    // Unsupported records get no filename column and no extractions.
    if (descriptor === '__unsupported__') {
      descriptorPrefs.set(descriptor, { filenameCol: null, contentColsToExtract: [] });
      continue;
    }
    const prefs = await resolveDescriptorPreferences(descriptor, records, options);
    descriptorPrefs.set(descriptor, prefs);

    // Documentation additionally chooses root vs inline MD placement and
    // pre-creates the root directory when needed.
    if (descriptor === 'documentation') {
      docPlacement = await resolveDocumentationPlacement(options);
      if (docPlacement === 'root') {
        await mkdir(DOCUMENTATION_DIR, { recursive: true });
        log.dim(` ${DOCUMENTATION_DIR}/`);
      }
    }
  }

  // Step D: Write files, one group at a time
  const refs = [];
  // Sticky bulk choice ('skip_all' / 'overwrite_all') shared across all groups.
  const bulkAction = { value: null };
  const config = await loadConfig();

  for (const [descriptor, { dir, records }] of groups.entries()) {
    const { filenameCol, contentColsToExtract } = descriptorPrefs.get(descriptor);
    const useRootDoc = (descriptor === 'documentation' && docPlacement === 'root');
    // The MD column (if any) is redirected to the root Documentation dir.
    const mdColInfo = useRootDoc ? contentColsToExtract.find(c => c.ext === 'md') : null;

    log.info(`Processing ${records.length} "${descriptor}" extension(s) → ${dir}/`);

    for (const record of records) {
      // Resolve filename: preferred column → Name → UID → 'untitled'.
      let name;
      if (filenameCol && record[filenameCol] != null) {
        name = sanitizeFilename(String(record[filenameCol]));
      } else if (record.Name) {
        name = sanitizeFilename(String(record.Name));
      } else {
        name = sanitizeFilename(String(record.UID || 'untitled'));
      }

      // Filenames carry a '.<uid>' suffix unless the name already IS the uid.
      const uid = record.UID || 'untitled';
      const finalName = name === uid ? uid : `${name}.${uid}`;
      const metaPath = join(dir, `${finalName}.metadata.json`);

      // Change detection — same pattern as processEntityDirEntries()
      // NOTE(review): change detection is bypassed when new extractions were
      // just enabled — presumably to force companion files to be written; confirm.
      const hasNewExtractions = contentColsToExtract.length > 0;
      if (await fileExists(metaPath) && !options.yes && !hasNewExtractions) {
        if (bulkAction.value === 'skip_all') {
          log.dim(` Skipped ${finalName}`);
          refs.push({ uid: record.UID, metaPath });
          continue;
        }
        if (bulkAction.value !== 'overwrite_all') {
          // Inject the explicit server timezone so timestamp comparisons use
          // the same zone that setFileTimestamps wrote with.
          const cfgWithTz = { ...config, ServerTimezone: serverTz };
          const localSyncTime = await getLocalSyncTime(metaPath);
          const serverNewer = isServerNewer(localSyncTime, record._LastUpdated, cfgWithTz);
          const serverDate = parseServerDate(record._LastUpdated, serverTz);

          if (serverNewer) {
            // Server changed since last sync — ask the user what to do.
            const action = await promptChangeDetection(finalName, record, cfgWithTz, { serverDate, localDate: localSyncTime });
            if (action === 'skip') { refs.push({ uid: record.UID, metaPath }); continue; }
            if (action === 'skip_all') { bulkAction.value = 'skip_all'; refs.push({ uid: record.UID, metaPath }); continue; }
            if (action === 'overwrite_all') { bulkAction.value = 'overwrite_all'; }
            if (action === 'compare') { await inlineDiffAndMerge(record, metaPath, cfgWithTz); refs.push({ uid: record.UID, metaPath }); continue; }
          } else {
            // Server unchanged — check whether the user edited files locally.
            const localModified = await hasLocalModifications(metaPath, cfgWithTz);
            if (localModified) {
              const action = await promptChangeDetection(finalName, record, cfgWithTz, { localIsNewer: true, serverDate, localDate: localSyncTime });
              if (action === 'skip') { refs.push({ uid: record.UID, metaPath }); continue; }
              if (action === 'skip_all') { bulkAction.value = 'skip_all'; refs.push({ uid: record.UID, metaPath }); continue; }
              if (action === 'overwrite_all') { bulkAction.value = 'overwrite_all'; }
              if (action === 'compare') { await inlineDiffAndMerge(record, metaPath, cfgWithTz, { localIsNewer: true }); refs.push({ uid: record.UID, metaPath }); continue; }
            } else {
              // Nothing changed on either side — keep the existing files.
              log.dim(` Up to date: ${finalName}`);
              refs.push({ uid: record.UID, metaPath });
              continue;
            }
          }
        }
      }

      // Build metadata object
      const meta = {};
      const extractedCols = [];

      for (const [key, value] of Object.entries(record)) {
        if (key === 'children') continue;

        // Columns chosen for extraction become companion files, referenced
        // from the metadata via an '@'-prefixed path.
        const extractInfo = contentColsToExtract.find(c => c.col === key);
        if (extractInfo && value && typeof value === 'object'
            && value.encoding === 'base64' && value.value !== null) {
          const decoded = resolveContentValue(value);
          if (decoded) {
            let colFilePath, refValue;

            if (mdColInfo && extractInfo.col === mdColInfo.col) {
              // Root placement: Documentation/<name>.md
              const docFileName = `${name}.md`;
              colFilePath = join(DOCUMENTATION_DIR, docFileName);
              refValue = `@/${DOCUMENTATION_DIR}/${docFileName}`;
            } else {
              const colFileName = `${finalName}.${key}.${extractInfo.ext}`;
              colFilePath = join(dir, colFileName);
              refValue = `@${colFileName}`;
            }

            meta[key] = refValue;
            await writeFile(colFilePath, decoded);
            extractedCols.push(key);
            if (serverTz) {
              // Best-effort timestamp sync on the companion file; non-critical.
              try { await setFileTimestamps(colFilePath, record._CreatedOn, record._LastUpdated, serverTz); } catch {}
            }
            log.dim(` → ${colFilePath}`);
            continue;
          }
        }

        // Inline or non-extraction columns
        if (value && typeof value === 'object' && !Array.isArray(value) && value.encoding === 'base64') {
          meta[key] = resolveContentValue(value);
        } else {
          meta[key] = value;
        }
      }

      // Tag the metadata with its entity type and the extracted column list
      // so push/pull can rebuild the record later.
      meta._entity = 'extension';
      if (extractedCols.length > 0) meta._contentColumns = extractedCols;

      await writeFile(metaPath, JSON.stringify(meta, null, 2) + '\n');
      if (serverTz) {
        // Best-effort: metaPath mtime is used as the sync baseline above.
        try { await setFileTimestamps(metaPath, record._CreatedOn, record._LastUpdated, serverTz); } catch {}
      }
      log.success(`Saved ${metaPath}`);
      refs.push({ uid: record.UID, metaPath });
    }
  }

  return refs;
}
|
|
1766
|
+
|
|
1145
1767
|
/**
|
|
1146
1768
|
* Process media entries: download binary files from server + create metadata.
|
|
1147
1769
|
* Media uses Filename (not Name) and files are fetched via /api/media/{uid}.
|
|
@@ -1292,11 +1914,20 @@ async function processMediaEntries(mediaRecords, structure, options, config, app
|
|
|
1292
1914
|
}
|
|
1293
1915
|
|
|
1294
1916
|
if (mediaBulkAction.value !== 'overwrite_all') {
|
|
1917
|
+
// Use explicit serverTz (not config.ServerTimezone) to ensure consistency
|
|
1918
|
+
// with how setFileTimestamps set the mtime — the two must use the same timezone.
|
|
1919
|
+
const configWithTz = { ...config, ServerTimezone: serverTz };
|
|
1295
1920
|
const localSyncTime = await getLocalSyncTime(metaPath);
|
|
1296
|
-
const serverNewer = isServerNewer(localSyncTime, record._LastUpdated,
|
|
1921
|
+
const serverNewer = isServerNewer(localSyncTime, record._LastUpdated, configWithTz);
|
|
1922
|
+
const serverDate = parseServerDate(record._LastUpdated, serverTz);
|
|
1923
|
+
const diffable = isDiffable(ext);
|
|
1297
1924
|
|
|
1298
1925
|
if (serverNewer) {
|
|
1299
|
-
const action = await promptChangeDetection(dedupName, record,
|
|
1926
|
+
const action = await promptChangeDetection(dedupName, record, configWithTz, {
|
|
1927
|
+
diffable,
|
|
1928
|
+
serverDate,
|
|
1929
|
+
localDate: localSyncTime,
|
|
1930
|
+
});
|
|
1300
1931
|
|
|
1301
1932
|
if (action === 'skip') {
|
|
1302
1933
|
log.dim(` Skipped ${finalFilename}`);
|
|
@@ -1314,15 +1945,21 @@ async function processMediaEntries(mediaRecords, structure, options, config, app
|
|
|
1314
1945
|
}
|
|
1315
1946
|
if (action === 'compare') {
|
|
1316
1947
|
// For binary media, show metadata diffs only
|
|
1317
|
-
await inlineDiffAndMerge(record, metaPath,
|
|
1948
|
+
await inlineDiffAndMerge(record, metaPath, configWithTz);
|
|
1318
1949
|
refs.push({ uid: record.UID, metaPath });
|
|
1319
1950
|
continue;
|
|
1320
1951
|
}
|
|
1321
1952
|
} else {
|
|
1322
1953
|
// Server _LastUpdated hasn't changed — check for local modifications
|
|
1323
|
-
const locallyModified = await hasLocalModifications(metaPath,
|
|
1954
|
+
const locallyModified = await hasLocalModifications(metaPath, configWithTz);
|
|
1324
1955
|
if (locallyModified) {
|
|
1325
|
-
const
|
|
1956
|
+
const localDate = localSyncTime; // mtime already fetched above
|
|
1957
|
+
const action = await promptChangeDetection(dedupName, record, configWithTz, {
|
|
1958
|
+
localIsNewer: true,
|
|
1959
|
+
diffable,
|
|
1960
|
+
serverDate,
|
|
1961
|
+
localDate,
|
|
1962
|
+
});
|
|
1326
1963
|
|
|
1327
1964
|
if (action === 'skip') {
|
|
1328
1965
|
log.dim(` Kept local: ${finalFilename}`);
|
|
@@ -1339,7 +1976,7 @@ async function processMediaEntries(mediaRecords, structure, options, config, app
|
|
|
1339
1976
|
mediaBulkAction.value = 'overwrite_all';
|
|
1340
1977
|
}
|
|
1341
1978
|
if (action === 'compare') {
|
|
1342
|
-
await inlineDiffAndMerge(record, metaPath,
|
|
1979
|
+
await inlineDiffAndMerge(record, metaPath, configWithTz, { localIsNewer: true });
|
|
1343
1980
|
refs.push({ uid: record.UID, metaPath });
|
|
1344
1981
|
continue;
|
|
1345
1982
|
}
|
|
@@ -1397,10 +2034,13 @@ async function processMediaEntries(mediaRecords, structure, options, config, app
|
|
|
1397
2034
|
await writeFile(metaPath, JSON.stringify(meta, null, 2) + '\n');
|
|
1398
2035
|
log.dim(` → ${metaPath}`);
|
|
1399
2036
|
|
|
1400
|
-
// Set file timestamps from server dates
|
|
2037
|
+
// Set file timestamps from server dates (independent try-catch so one failure
|
|
2038
|
+
// doesn't prevent the other — metaPath mtime is the sync baseline for comparisons)
|
|
1401
2039
|
if (serverTz && (record._CreatedOn || record._LastUpdated)) {
|
|
1402
2040
|
try {
|
|
1403
2041
|
await setFileTimestamps(filePath, record._CreatedOn, record._LastUpdated, serverTz);
|
|
2042
|
+
} catch { /* non-critical */ }
|
|
2043
|
+
try {
|
|
1404
2044
|
await setFileTimestamps(metaPath, record._CreatedOn, record._LastUpdated, serverTz);
|
|
1405
2045
|
} catch { /* non-critical */ }
|
|
1406
2046
|
}
|
|
@@ -1573,12 +2213,17 @@ async function processRecord(entityName, record, structure, options, usedNames,
|
|
|
1573
2213
|
}
|
|
1574
2214
|
|
|
1575
2215
|
if (bulkAction.value !== 'overwrite_all') {
|
|
1576
|
-
const localSyncTime = await getLocalSyncTime(metaPath);
|
|
1577
2216
|
const config = await loadConfig();
|
|
1578
|
-
const
|
|
2217
|
+
const configWithTz = { ...config, ServerTimezone: serverTz };
|
|
2218
|
+
const localSyncTime = await getLocalSyncTime(metaPath);
|
|
2219
|
+
const serverNewer = isServerNewer(localSyncTime, record._LastUpdated, configWithTz);
|
|
2220
|
+
const serverDate = parseServerDate(record._LastUpdated, serverTz);
|
|
1579
2221
|
|
|
1580
2222
|
if (serverNewer) {
|
|
1581
|
-
const action = await promptChangeDetection(finalName, record,
|
|
2223
|
+
const action = await promptChangeDetection(finalName, record, configWithTz, {
|
|
2224
|
+
serverDate,
|
|
2225
|
+
localDate: localSyncTime,
|
|
2226
|
+
});
|
|
1582
2227
|
|
|
1583
2228
|
if (action === 'skip') {
|
|
1584
2229
|
log.dim(` Skipped ${finalName}.${ext}`);
|
|
@@ -1594,16 +2239,20 @@ async function processRecord(entityName, record, structure, options, usedNames,
|
|
|
1594
2239
|
// Fall through to write
|
|
1595
2240
|
}
|
|
1596
2241
|
if (action === 'compare') {
|
|
1597
|
-
await inlineDiffAndMerge(record, metaPath,
|
|
2242
|
+
await inlineDiffAndMerge(record, metaPath, configWithTz);
|
|
1598
2243
|
return { uid: record.UID, metaPath };
|
|
1599
2244
|
}
|
|
1600
2245
|
// 'overwrite' falls through to normal write
|
|
1601
2246
|
} else {
|
|
1602
2247
|
// Server _LastUpdated hasn't changed since last sync.
|
|
1603
2248
|
// Check if local content files were modified (user edits).
|
|
1604
|
-
const locallyModified = await hasLocalModifications(metaPath,
|
|
2249
|
+
const locallyModified = await hasLocalModifications(metaPath, configWithTz);
|
|
1605
2250
|
if (locallyModified) {
|
|
1606
|
-
const action = await promptChangeDetection(finalName, record,
|
|
2251
|
+
const action = await promptChangeDetection(finalName, record, configWithTz, {
|
|
2252
|
+
localIsNewer: true,
|
|
2253
|
+
serverDate,
|
|
2254
|
+
localDate: localSyncTime,
|
|
2255
|
+
});
|
|
1607
2256
|
|
|
1608
2257
|
if (action === 'skip') {
|
|
1609
2258
|
log.dim(` Kept local: ${finalName}.${ext}`);
|
|
@@ -1618,7 +2267,7 @@ async function processRecord(entityName, record, structure, options, usedNames,
|
|
|
1618
2267
|
bulkAction.value = 'overwrite_all';
|
|
1619
2268
|
}
|
|
1620
2269
|
if (action === 'compare') {
|
|
1621
|
-
await inlineDiffAndMerge(record, metaPath,
|
|
2270
|
+
await inlineDiffAndMerge(record, metaPath, configWithTz, { localIsNewer: true });
|
|
1622
2271
|
return { uid: record.UID, metaPath };
|
|
1623
2272
|
}
|
|
1624
2273
|
// 'overwrite' falls through to normal write
|
|
@@ -1700,6 +2349,570 @@ export function guessExtensionForColumn(columnName) {
|
|
|
1700
2349
|
return 'txt';
|
|
1701
2350
|
}
|
|
1702
2351
|
|
|
2352
|
+
// ─── Output Hierarchy Processing ──────────────────────────────────────────
|
|
2353
|
+
|
|
2354
|
+
/**
 * Build a tree structure from flat output entity arrays.
 * Groups columns, joins, and filters under their parent output records.
 *
 * @param {Object} appJson - The full app JSON with children arrays
 * @returns {Array} - Array of output root nodes with nested _children
 */
export function buildOutputHierarchyTree(appJson) {
  const outputRecords = appJson.children.output || [];
  const columnRecords = appJson.children.output_value || [];
  const filterRecords = appJson.children.output_value_filter || [];
  const joinRecords = appJson.children.output_value_entity_column_rel || [];

  if (outputRecords.length === 0) return [];

  // Index every entity by its numeric ID so parent lookups are O(1).
  const outputIndex = new Map(outputRecords.map(
    (rec) => [rec.OutputID || rec._id, { ...rec, _children: { column: [], join: [], filter: [] } }]
  ));
  const columnIndex = new Map(columnRecords.map(
    (rec) => [rec.OutputValueID || rec._id, { ...rec, _children: { filter: [] } }]
  ));
  const joinIndex = new Map(joinRecords.map(
    (rec) => [rec.OutputValueEntityColumnRelID || rec._id, { ...rec, _children: { column: [] } }]
  ));

  // A filter hangs off a column (OutputValueID) when possible, otherwise
  // directly off its output (OutputID).
  for (const filterRec of filterRecords) {
    if (filterRec.OutputValueID) {
      const columnParent = columnIndex.get(filterRec.OutputValueID);
      if (columnParent) {
        columnParent._children.filter.push({ ...filterRec });
        continue;
      }
    }
    if (filterRec.OutputID) {
      const outputParent = outputIndex.get(filterRec.OutputID);
      if (outputParent) outputParent._children.filter.push({ ...filterRec });
    }
  }

  // A column hangs off a join (OutputValueEntityColumnRelID) when possible,
  // otherwise directly off its output (OutputID).
  for (const columnNode of columnIndex.values()) {
    if (columnNode.OutputValueEntityColumnRelID) {
      const joinParent = joinIndex.get(columnNode.OutputValueEntityColumnRelID);
      if (joinParent) {
        joinParent._children.column.push(columnNode);
        continue;
      }
    }
    if (columnNode.OutputID) {
      const outputParent = outputIndex.get(columnNode.OutputID);
      if (outputParent) outputParent._children.column.push(columnNode);
    }
  }

  // Joins always attach directly to their output (via OutputID).
  for (const joinNode of joinIndex.values()) {
    if (joinNode.OutputID) {
      const outputParent = outputIndex.get(joinNode.OutputID);
      if (outputParent) outputParent._children.join.push(joinNode);
    }
  }

  // Order siblings by OrderNumber; Array#sort is stable, so ties keep
  // their original relative order.
  const byOrderNumber = (a, b) => (a.OrderNumber || 0) - (b.OrderNumber || 0);

  for (const outputNode of outputIndex.values()) {
    const kids = outputNode._children;
    kids.column.sort(byOrderNumber);
    kids.join.sort(byOrderNumber);
    kids.filter.sort(byOrderNumber);

    for (const columnNode of kids.column) {
      columnNode._children.filter.sort(byOrderNumber);
    }
    for (const joinNode of kids.join) {
      joinNode._children.column.sort(byOrderNumber);
      for (const columnNode of joinNode._children.column) {
        columnNode._children.filter.sort(byOrderNumber);
      }
    }
  }

  return [...outputIndex.values()];
}
|
|
2449
|
+
|
|
2450
|
+
/**
 * Resolve filename column preferences for each output entity type.
 * Loads from config, prompts if needed, saves choices.
 *
 * @param {Object} appJson - The full app JSON
 * @param {Object} options - CLI options
 * @returns {Object} - { output: 'Name', output_value: 'Title', ... }
 */
async function resolveOutputFilenameColumns(appJson, options) {
  // Preferred column per entity, plus an ordered fallback chain used when
  // the preferred column is absent from the actual records.
  const defaultColumn = {
    output: 'Name',
    output_value: 'Title',
    output_value_filter: 'ShortName',
    output_value_entity_column_rel: 'UID',
  };
  const fallbackChain = {
    output: ['Name', 'Title', 'OrderNumber', 'UID'],
    output_value: ['Title', 'Name', 'OrderNumber', 'UID'],
    output_value_filter: ['ShortName', 'Name', 'UID'],
    output_value_entity_column_rel: ['UID'],
  };

  const chosen = {};

  for (const entityKey of OUTPUT_HIERARCHY_ENTITIES) {
    // A previously saved preference always wins.
    const saved = await loadOutputFilenamePreference(entityKey);
    if (saved) {
      chosen[entityKey] = saved;
      continue;
    }

    // Non-interactive mode: take the default and persist it.
    if (options.yes) {
      chosen[entityKey] = defaultColumn[entityKey];
      await saveOutputFilenamePreference(entityKey, defaultColumn[entityKey]);
      continue;
    }

    // With no records we cannot list real columns — fall back silently
    // (and don't persist, so a later clone with data can still prompt).
    const records = appJson.children[entityKey] || [];
    if (records.length === 0) {
      chosen[entityKey] = defaultColumn[entityKey];
      continue;
    }

    const candidateCols = Object.keys(records[0])
      .filter((k) => k !== 'children' && !k.startsWith('_'));

    // Pick the first fallback that actually exists as the prompt default.
    const firstExisting = fallbackChain[entityKey].find((fb) => candidateCols.includes(fb));
    const promptDefault = firstExisting !== undefined ? firstExisting : defaultColumn[entityKey];

    const inquirer = (await import('inquirer')).default;
    const { col } = await inquirer.prompt([{
      type: 'list',
      name: 'col',
      message: `Which column should be used as the filename for ${OUTPUT_ENTITY_MAP[entityKey]} (${entityKey}) records?`,
      choices: candidateCols,
      default: promptDefault,
    }]);

    chosen[entityKey] = col;
    await saveOutputFilenamePreference(entityKey, col);
    log.dim(` Saved filename column preference for ${entityKey}`);
  }

  return chosen;
}
|
|
2521
|
+
|
|
2522
|
+
/**
 * Build a filename for an output hierarchy entity.
 * Uses dot-separated hierarchical naming: _output~<name>~<uid>.column~<name>~<uid>.json
 *
 * @param {string} entityType - Documentation name: 'output', 'column', 'join', 'filter'
 * @param {Object} node - The entity record
 * @param {string} filenameCol - Column to use for the name portion
 * @param {string[]} parentChain - Array of parent segments: ['_output~name~uid', 'join~name~uid', ...]
 * @returns {string} - Base filename without extension
 */
export function buildOutputFilename(entityType, node, filenameCol, parentChain = []) {
  const uid = node.UID || '';
  const rawValue = node[filenameCol];
  const cleanName = rawValue ? sanitizeFilename(String(rawValue)) : '';

  // Segment shape is <type>~<name>~<uid>, collapsing to <type>~<uid> when
  // the name is empty or would merely duplicate the UID.
  const core = (!cleanName || cleanName === uid)
    ? `${entityType}~${uid}`
    : `${entityType}~${cleanName}~${uid}`;

  // The root output segment is flagged with a leading underscore.
  const segment = entityType === 'output' ? `_${core}` : core;

  return [...parentChain, segment].join('.');
}
|
|
2554
|
+
|
|
2555
|
+
/**
 * Parse an output hierarchy filename back into entity relationships.
 *
 * @param {string} filename - e.g. "_output~name~uid.column~name~uid.filter~name~uid.json"
 * @returns {Object} - { segments: [{entity, name, uid}], rootOutputUid, entityType, uid }
 */
export function parseOutputHierarchyFile(filename) {
  // Drop a trailing .json extension, if present.
  const base = filename.endsWith('.json')
    ? filename.substring(0, filename.length - 5)
    : filename;

  // Names may themselves contain '.', so a naive dot-split is not enough.
  // Split on '.' and glue a piece back onto the previous segment whenever
  // it does not open with a recognized entity-type prefix
  // (_output~ / output~ / column~ / join~ / filter~).
  const entityPrefix = /^(output|column|join|filter)~/;
  const rawSegments = [];
  let pending = null;

  for (const piece of base.split('.')) {
    if (entityPrefix.test(piece.replace(/^_/, ''))) {
      // New segment starts here; flush the one we were accumulating.
      if (pending !== null) rawSegments.push(pending);
      pending = piece;
    } else if (pending !== null) {
      // Continuation of the previous segment's name.
      pending = `${pending}.${piece}`;
    } else {
      pending = piece;
    }
  }
  if (pending !== null) rawSegments.push(pending);

  // Decode each raw segment: <type>~<name>~<uid> or <type>~<uid>.
  const segments = [];
  for (const raw of rawSegments) {
    const body = raw.replace(/^_/, '');
    const typeEnd = body.indexOf('~');
    if (typeEnd < 0) continue;

    const entity = body.substring(0, typeEnd);
    const tail = body.substring(typeEnd + 1);

    // The UID is everything after the LAST tilde; with only one tilde the
    // whole tail is the UID and there is no name.
    const uidStart = tail.lastIndexOf('~');
    if (uidStart >= 0) {
      segments.push({ entity, name: tail.substring(0, uidStart), uid: tail.substring(uidStart + 1) });
    } else {
      segments.push({ entity, name: null, uid: tail });
    }
  }

  if (segments.length === 0) return null;

  const leaf = segments[segments.length - 1];
  const parent = segments.length > 1 ? segments[segments.length - 2] : null;

  // Map documentation name back to physical table name.
  const docToPhysical = {
    output: 'output',
    column: 'output_value',
    filter: 'output_value_filter',
    join: 'output_value_entity_column_rel',
  };

  return {
    segments,
    rootOutputUid: segments[0].uid,
    entityType: leaf.entity,
    physicalEntity: docToPhysical[leaf.entity] || leaf.entity,
    uid: leaf.uid,
    parentEntity: parent ? parent.entity : null,
    parentUid: parent ? parent.uid : null,
  };
}
|
|
2645
|
+
|
|
2646
|
+
/**
 * Main orchestrator: process output hierarchy entities during clone.
 * Builds the output tree, resolves per-entity filename columns, runs
 * change detection against local files, and writes one JSON file per
 * output / column / join / filter node (plus companion .sql files for
 * CustomSQL).
 *
 * @param {Object} appJson - The full app JSON
 * @param {Object} structure - Bin hierarchy structure (binId -> { name, fullPath, ... })
 * @param {Object} options - CLI options (reads .yes and .force here)
 * @param {string} serverTz - Server timezone, used for timestamp comparisons
 * @returns {Array} - Array of { uid, metaPath } for app.json reference replacement
 */
async function processOutputHierarchy(appJson, structure, options, serverTz) {
  const tree = buildOutputHierarchyTree(appJson);
  if (tree.length === 0) return [];

  log.info(`Processing ${tree.length} output record(s) with hierarchy...`);

  // Resolve filename columns for each entity type
  const filenameCols = await resolveOutputFilenameColumns(appJson, options);

  const refs = [];
  // Sticky bulk choice ('skip_all' / 'overwrite_all') carried across outputs,
  // boxed in an object so nested branches can update it.
  const bulkAction = { value: null };
  const config = await loadConfig();
  const configWithTz = { ...config, ServerTimezone: serverTz };
  // When --force flag is set, skip change detection and re-process all files
  const forceReprocess = !!options.force;

  for (const output of tree) {
    // Resolve bin directory for this output
    let binDir = null;
    let chosenBinId = null;
    if (output.BinID && structure[output.BinID]) {
      binDir = resolveBinPath(output.BinID, structure);
    }

    if (!binDir) {
      // No BinID — prompt or default
      if (!options.yes) {
        const inquirer = (await import('inquirer')).default;
        const binChoices = Object.entries(structure).map(([id, entry]) => ({
          name: `${entry.name} (${entry.fullPath})`,
          value: id,
        }));

        if (binChoices.length > 0) {
          const { binId } = await inquirer.prompt([{
            type: 'list',
            name: 'binId',
            message: `Output "${output.Name || output.UID}" has no BinID. Which bin should it go in?`,
            choices: binChoices,
          }]);
          chosenBinId = Number(binId);
          binDir = resolveBinPath(chosenBinId, structure);
        } else {
          // No bins exist at all — fall back to the flat bins directory.
          binDir = BINS_DIR;
        }
      } else {
        // Non-interactive (--yes): default without prompting.
        binDir = BINS_DIR;
      }
    }

    await mkdir(binDir, { recursive: true });

    // Build root output filename
    const rootBasename = buildOutputFilename('output', output, filenameCols.output);
    const rootMetaPath = join(binDir, `${rootBasename}.json`);

    // Change detection for existing files (skipped entirely when --force
    // re-processing or in non-interactive --yes mode).
    if (await fileExists(rootMetaPath) && !options.yes && !forceReprocess) {
      if (bulkAction.value === 'skip_all') {
        log.dim(` Skipped ${rootBasename}`);
        refs.push({ uid: output.UID, metaPath: rootMetaPath });
        continue;
      }
      if (bulkAction.value !== 'overwrite_all') {
        const localSyncTime = await getLocalSyncTime(rootMetaPath);
        const serverNewer = isServerNewer(localSyncTime, output._LastUpdated, configWithTz);
        const serverDate = parseServerDate(output._LastUpdated, serverTz);
        if (serverNewer) {
          // Server copy is newer than the last sync — ask how to resolve.
          const action = await promptChangeDetection(rootBasename, output, configWithTz, { serverDate, localDate: localSyncTime });
          if (action === 'skip') { refs.push({ uid: output.UID, metaPath: rootMetaPath }); continue; }
          if (action === 'skip_all') { bulkAction.value = 'skip_all'; refs.push({ uid: output.UID, metaPath: rootMetaPath }); continue; }
          if (action === 'overwrite_all') { bulkAction.value = 'overwrite_all'; }
          if (action === 'compare') { await inlineDiffAndMerge(output, rootMetaPath, configWithTz); refs.push({ uid: output.UID, metaPath: rootMetaPath }); continue; }
          // 'overwrite' (and 'overwrite_all') fall through to the write below.
        } else {
          const locallyModified = await hasLocalModifications(rootMetaPath, configWithTz);
          if (locallyModified) {
            // Local file was edited since the last sync — same prompt, flagged localIsNewer.
            const action = await promptChangeDetection(rootBasename, output, configWithTz, { localIsNewer: true, serverDate, localDate: localSyncTime });
            if (action === 'skip') { refs.push({ uid: output.UID, metaPath: rootMetaPath }); continue; }
            if (action === 'skip_all') { bulkAction.value = 'skip_all'; refs.push({ uid: output.UID, metaPath: rootMetaPath }); continue; }
            if (action === 'overwrite_all') { bulkAction.value = 'overwrite_all'; }
            if (action === 'compare') { await inlineDiffAndMerge(output, rootMetaPath, configWithTz, { localIsNewer: true }); refs.push({ uid: output.UID, metaPath: rootMetaPath }); continue; }
          } else {
            // Neither side changed — nothing to do for this output.
            log.dim(` Up to date: ${rootBasename}`);
            refs.push({ uid: output.UID, metaPath: rootMetaPath });
            continue;
          }
        }
      }
    }

    // Collect child file references for root JSON
    const childRefs = { column: [], join: [], filter: [] };

    // Helper to build a child filename segment: <type>~<name>~<uid>,
    // collapsing to <type>~<uid> when the name is empty or equals the UID.
    const childSegment = (type, node, col) => {
      const uid = node.UID || '';
      const rawName = node[col];
      const name = rawName ? sanitizeFilename(String(rawName)) : '';
      return (!name || name === uid) ? `${type}~${uid}` : `${type}~${name}~${uid}`;
    };

    // Process all children depth-first
    // Direct filters on output
    for (const filter of output._children.filter) {
      const filterName = `${rootBasename}.${childSegment('filter', filter, filenameCols.output_value_filter)}`;
      const filterPath = join(binDir, `${filterName}.json`);
      await writeOutputEntityFile(filter, 'output_value_filter', filterPath, serverTz);
      childRefs.filter.push(`@${filterPath}`);
    }

    // Direct columns on output
    for (const col of output._children.column) {
      const colSeg = childSegment('column', col, filenameCols.output_value);
      const colName = `${rootBasename}.${colSeg}`;
      const colPath = join(binDir, `${colName}.json`);
      await writeOutputEntityFile(col, 'output_value', colPath, serverTz);
      childRefs.column.push(`@${colPath}`);

      // Filters under this column
      for (const filter of col._children.filter) {
        const filterName = `${colName}.${childSegment('filter', filter, filenameCols.output_value_filter)}`;
        const filterPath = join(binDir, `${filterName}.json`);
        await writeOutputEntityFile(filter, 'output_value_filter', filterPath, serverTz);
        childRefs.filter.push(`@${filterPath}`);
      }
    }

    // Joins on output
    for (const j of output._children.join) {
      const joinSeg = childSegment('join', j, filenameCols.output_value_entity_column_rel);
      const joinName = `${rootBasename}.${joinSeg}`;
      const joinPath = join(binDir, `${joinName}.json`);
      await writeOutputEntityFile(j, 'output_value_entity_column_rel', joinPath, serverTz);
      childRefs.join.push(`@${joinPath}`);

      // Columns under this join
      for (const col of j._children.column) {
        const joinColName = `${joinName}.${childSegment('column', col, filenameCols.output_value)}`;
        const joinColPath = join(binDir, `${joinColName}.json`);
        await writeOutputEntityFile(col, 'output_value', joinColPath, serverTz);
        childRefs.column.push(`@${joinColPath}`);

        // Filters under this join→column
        for (const filter of col._children.filter) {
          const filterName = `${joinColName}.${childSegment('filter', filter, filenameCols.output_value_filter)}`;
          const filterPath = join(binDir, `${filterName}.json`);
          await writeOutputEntityFile(filter, 'output_value_filter', filterPath, serverTz);
          childRefs.filter.push(`@${filterPath}`);
        }
      }
    }

    // Write root output JSON with child references
    const rootMeta = {};
    const rootContentColumns = [];
    for (const [key, value] of Object.entries(output)) {
      if (key === '_children') continue;

      // Always extract CustomSQL to companion .sql file
      if (key === 'CustomSQL') {
        const decoded = resolveContentValue(value);
        const sqlContent = (decoded && decoded.trim()) ? decoded : '';
        const sqlFilePath = rootMetaPath.replace(/\.json$/, '.CustomSQL.sql');
        await writeFile(sqlFilePath, sqlContent);
        rootMeta[key] = `@${basename(sqlFilePath)}`;
        rootContentColumns.push('CustomSQL');
        if (serverTz && (output._CreatedOn || output._LastUpdated)) {
          try { await setFileTimestamps(sqlFilePath, output._CreatedOn, output._LastUpdated, serverTz); } catch { /* non-critical */ }
        }
        log.dim(` → ${sqlFilePath}`);
        continue;
      }

      // Decode base64-wrapped values ({ encoding: 'base64', ... }) inline.
      if (value && typeof value === 'object' && !Array.isArray(value) && value.encoding === 'base64') {
        rootMeta[key] = resolveContentValue(value);
      } else {
        rootMeta[key] = value;
      }
    }
    rootMeta._entity = 'output';
    if (rootContentColumns.length > 0) {
      rootMeta._contentColumns = rootContentColumns;
    }
    rootMeta.children = childRefs;

    // If user chose a bin for a BinID-less output, store it and mark as modified
    if (chosenBinId) {
      rootMeta.BinID = chosenBinId;
      log.dim(` Set BinID=${chosenBinId} on "${output.Name || output.UID}" (staged for next push)`);
    }

    await writeFile(rootMetaPath, JSON.stringify(rootMeta, null, 2) + '\n');
    log.success(`Saved ${rootMetaPath}`);

    // Set file timestamps to server's _LastUpdated so diff detection works.
    // Skip when chosenBinId is set — keep mtime at "now" so push detects the local edit.
    if (!chosenBinId && serverTz && (output._CreatedOn || output._LastUpdated)) {
      try {
        await setFileTimestamps(rootMetaPath, output._CreatedOn, output._LastUpdated, serverTz);
      } catch { /* non-critical */ }
    }

    refs.push({ uid: output.UID, metaPath: rootMetaPath });
  }

  return refs;
}
|
|
2863
|
+
|
|
2864
|
+
/**
 * Write a single output hierarchy entity file (column, join, or filter).
 * Handles CustomSQL extraction to companion .sql files.
 *
 * Side effects, in order: writes the companion .sql (when CustomSQL is
 * present), then the entity JSON, stamping each with the server's
 * created/updated timestamps when a server timezone is available.
 */
async function writeOutputEntityFile(node, physicalEntity, filePath, serverTz) {
  const record = {};
  const extractedCols = [];

  // Best-effort: copy the server's created/updated times onto a written file.
  const stampTimestamps = async (targetPath) => {
    if (!serverTz || (!node._CreatedOn && !node._LastUpdated)) return;
    try {
      await setFileTimestamps(targetPath, node._CreatedOn, node._LastUpdated, serverTz);
    } catch { /* non-critical */ }
  };

  // True for { encoding: 'base64', ... } wrapper objects (not arrays).
  const isBase64Wrapper = (v) =>
    Boolean(v) && typeof v === 'object' && !Array.isArray(v) && v.encoding === 'base64';

  for (const [field, raw] of Object.entries(node)) {
    // Internal tree structure — never serialized.
    if (field === '_children') continue;

    if (field === 'CustomSQL') {
      // Always extract CustomSQL to companion .sql file
      const decoded = resolveContentValue(raw);
      const sqlFilePath = filePath.replace(/\.json$/, '.CustomSQL.sql');
      await writeFile(sqlFilePath, decoded && decoded.trim() ? decoded : '');
      record[field] = `@${basename(sqlFilePath)}`;
      extractedCols.push('CustomSQL');

      await stampTimestamps(sqlFilePath);
      log.dim(` → ${sqlFilePath}`);
      continue;
    }

    // Decode other base64 columns inline
    record[field] = isBase64Wrapper(raw) ? resolveContentValue(raw) : raw;
  }

  record._entity = physicalEntity;
  if (extractedCols.length > 0) {
    record._contentColumns = extractedCols;
  }

  await writeFile(filePath, JSON.stringify(record, null, 2) + '\n');
  log.dim(` → ${filePath}`);

  await stampTimestamps(filePath);
}
|
|
2915
|
+
|
|
1703
2916
|
/**
|
|
1704
2917
|
* Save app.json to project root with @ references replacing processed entries.
|
|
1705
2918
|
*/
|
|
@@ -1731,7 +2944,13 @@ async function saveAppJson(appJson, contentRefs, otherRefs, domain) {
|
|
|
1731
2944
|
}
|
|
1732
2945
|
|
|
1733
2946
|
// Bins stay as-is (directory structure, no metadata files)
|
|
1734
|
-
|
|
2947
|
+
|
|
2948
|
+
// Remove sub-entity arrays that are now embedded in output hierarchy files
|
|
2949
|
+
if (otherRefs.output) {
|
|
2950
|
+
delete output.children.output_value;
|
|
2951
|
+
delete output.children.output_value_filter;
|
|
2952
|
+
delete output.children.output_value_entity_column_rel;
|
|
2953
|
+
}
|
|
1735
2954
|
|
|
1736
2955
|
await writeFile('app.json', JSON.stringify(output, null, 2) + '\n');
|
|
1737
2956
|
}
|