@dboio/cli 0.6.13 → 0.7.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +201 -4
- package/package.json +1 -1
- package/src/commands/clone.js +664 -27
- package/src/commands/content.js +1 -1
- package/src/commands/deploy.js +1 -1
- package/src/commands/init.js +67 -3
- package/src/commands/login.js +4 -9
- package/src/commands/output.js +56 -3
- package/src/commands/pull.js +3 -3
- package/src/commands/push.js +9 -9
- package/src/commands/status.js +0 -1
- package/src/lib/config.js +74 -8
- package/src/lib/diff.js +5 -1
- package/src/lib/domain-guard.js +95 -0
- package/src/lib/input-parser.js +87 -38
- package/src/lib/scaffold.js +62 -0
- package/src/lib/structure.js +16 -0
package/src/commands/clone.js
CHANGED
|
@@ -2,11 +2,12 @@ import { Command } from 'commander';
|
|
|
2
2
|
import { readFile, writeFile, mkdir, access } from 'fs/promises';
|
|
3
3
|
import { join, basename, extname } from 'path';
|
|
4
4
|
import { DboClient } from '../lib/client.js';
|
|
5
|
-
import { loadConfig, updateConfigWithApp, loadClonePlacement, saveClonePlacement, ensureGitignore, saveEntityDirPreference, loadEntityDirPreference, saveEntityContentExtractions, loadEntityContentExtractions, saveAppJsonBaseline, addDeleteEntry, loadCollisionResolutions, saveCollisionResolutions, loadSynchronize, saveAppModifyKey, loadTransactionKeyPreset, saveTransactionKeyPreset } from '../lib/config.js';
|
|
6
|
-
import { buildBinHierarchy, resolveBinPath, createDirectories, saveStructureFile, getBinName, findBinByPath, BINS_DIR, DEFAULT_PROJECT_DIRS, ENTITY_DIR_MAP } from '../lib/structure.js';
|
|
5
|
+
import { loadConfig, updateConfigWithApp, loadClonePlacement, saveClonePlacement, ensureGitignore, saveEntityDirPreference, loadEntityDirPreference, saveEntityContentExtractions, loadEntityContentExtractions, saveAppJsonBaseline, addDeleteEntry, loadCollisionResolutions, saveCollisionResolutions, loadSynchronize, saveAppModifyKey, loadTransactionKeyPreset, saveTransactionKeyPreset, loadOutputFilenamePreference, saveOutputFilenamePreference } from '../lib/config.js';
|
|
6
|
+
import { buildBinHierarchy, resolveBinPath, createDirectories, saveStructureFile, getBinName, findBinByPath, BINS_DIR, DEFAULT_PROJECT_DIRS, ENTITY_DIR_MAP, OUTPUT_ENTITY_MAP, OUTPUT_HIERARCHY_ENTITIES } from '../lib/structure.js';
|
|
7
7
|
import { log } from '../lib/logger.js';
|
|
8
8
|
import { setFileTimestamps } from '../lib/timestamps.js';
|
|
9
9
|
import { getLocalSyncTime, isServerNewer, hasLocalModifications, promptChangeDetection, inlineDiffAndMerge } from '../lib/diff.js';
|
|
10
|
+
import { checkDomainChange } from '../lib/domain-guard.js';
|
|
10
11
|
|
|
11
12
|
/**
|
|
12
13
|
* Resolve a column value that may be base64-encoded.
|
|
@@ -420,6 +421,8 @@ export const cloneCommand = new Command('clone')
|
|
|
420
421
|
.description('Clone an app from DBO.io to a local project structure')
|
|
421
422
|
.argument('[source]', 'Local JSON file path (optional)')
|
|
422
423
|
.option('--app <shortName>', 'App short name to fetch from server')
|
|
424
|
+
.option('-e, --entity <type>', 'Only clone a specific entity type (e.g. output, content, media, extension)')
|
|
425
|
+
.option('--force', 'Force re-processing of all files, skip change detection')
|
|
423
426
|
.option('--domain <host>', 'Override domain')
|
|
424
427
|
.option('-y, --yes', 'Auto-accept all prompts')
|
|
425
428
|
.option('-v, --verbose', 'Show HTTP request details')
|
|
@@ -437,6 +440,7 @@ export const cloneCommand = new Command('clone')
|
|
|
437
440
|
*/
|
|
438
441
|
export async function performClone(source, options = {}) {
|
|
439
442
|
const config = await loadConfig();
|
|
443
|
+
const effectiveDomain = options.domain || config.domain;
|
|
440
444
|
let appJson;
|
|
441
445
|
|
|
442
446
|
// Step 1: Load the app JSON
|
|
@@ -487,6 +491,15 @@ export async function performClone(source, options = {}) {
|
|
|
487
491
|
throw new Error('Invalid app JSON: missing UID or children');
|
|
488
492
|
}
|
|
489
493
|
|
|
494
|
+
// Domain change detection
|
|
495
|
+
if (effectiveDomain) {
|
|
496
|
+
const { changed, proceed } = await checkDomainChange(effectiveDomain, options);
|
|
497
|
+
if (changed && !proceed) {
|
|
498
|
+
log.info('Clone aborted: domain change denied.');
|
|
499
|
+
return;
|
|
500
|
+
}
|
|
501
|
+
}
|
|
502
|
+
|
|
490
503
|
log.success(`Cloning "${appJson.Name}" (${appJson.ShortName})`);
|
|
491
504
|
|
|
492
505
|
// Ensure sensitive files are gitignored
|
|
@@ -562,38 +575,68 @@ export async function performClone(source, options = {}) {
|
|
|
562
575
|
log.dim(` Set ServerTimezone to ${serverTz} in .dbo/config.json`);
|
|
563
576
|
}
|
|
564
577
|
|
|
565
|
-
//
|
|
566
|
-
|
|
567
|
-
const
|
|
568
|
-
|
|
578
|
+
// Resolve --entity filter: which entity types to process
|
|
579
|
+
// "output" expands to all OUTPUT_HIERARCHY_ENTITIES, others are matched directly
|
|
580
|
+
const entityFilter = resolveEntityFilter(options.entity);
|
|
581
|
+
if (entityFilter) {
|
|
582
|
+
log.info(`Entity filter: only processing ${options.entity}`);
|
|
583
|
+
}
|
|
569
584
|
|
|
570
|
-
|
|
571
|
-
|
|
585
|
+
// Step 4c: Detect and resolve file path collisions (skip in entity-filter mode)
|
|
586
|
+
let toDeleteUIDs = new Set();
|
|
587
|
+
if (!entityFilter) {
|
|
588
|
+
log.info('Scanning for file path collisions...');
|
|
589
|
+
const fileRegistry = await buildFileRegistry(appJson, structure, placementPrefs);
|
|
590
|
+
toDeleteUIDs = await resolveCollisions(fileRegistry, options);
|
|
591
|
+
|
|
592
|
+
if (toDeleteUIDs.size > 0) {
|
|
593
|
+
await stageCollisionDeletions(toDeleteUIDs, appJson, options);
|
|
594
|
+
}
|
|
572
595
|
}
|
|
573
596
|
|
|
574
597
|
// Step 5: Process content → files + metadata (skip rejected records)
|
|
575
|
-
|
|
576
|
-
|
|
577
|
-
|
|
578
|
-
|
|
579
|
-
|
|
580
|
-
|
|
581
|
-
|
|
582
|
-
|
|
598
|
+
let contentRefs = [];
|
|
599
|
+
if (!entityFilter || entityFilter.has('content')) {
|
|
600
|
+
contentRefs = await processContentEntries(
|
|
601
|
+
appJson.children.content || [],
|
|
602
|
+
structure,
|
|
603
|
+
options,
|
|
604
|
+
placementPrefs.contentPlacement,
|
|
605
|
+
serverTz,
|
|
606
|
+
toDeleteUIDs,
|
|
607
|
+
);
|
|
608
|
+
}
|
|
583
609
|
|
|
584
610
|
// Step 5b: Process media → download binary files + metadata (skip rejected records)
|
|
585
611
|
let mediaRefs = [];
|
|
586
|
-
|
|
587
|
-
|
|
588
|
-
|
|
612
|
+
if (!entityFilter || entityFilter.has('media')) {
|
|
613
|
+
const mediaEntries = appJson.children.media || [];
|
|
614
|
+
if (mediaEntries.length > 0) {
|
|
615
|
+
mediaRefs = await processMediaEntries(mediaEntries, structure, options, config, appJson.ShortName, placementPrefs.mediaPlacement, serverTz, toDeleteUIDs);
|
|
616
|
+
}
|
|
589
617
|
}
|
|
590
618
|
|
|
591
|
-
// Step
|
|
619
|
+
// Step 5c: Process output hierarchy
|
|
592
620
|
const otherRefs = {};
|
|
621
|
+
if (!entityFilter || entityFilter.has('output')) {
|
|
622
|
+
const outputEntries = appJson.children.output || [];
|
|
623
|
+
if (outputEntries.length > 0) {
|
|
624
|
+
const outputRefs = await processOutputHierarchy(appJson, structure, options, serverTz);
|
|
625
|
+
if (outputRefs.length > 0) {
|
|
626
|
+
otherRefs.output = outputRefs;
|
|
627
|
+
}
|
|
628
|
+
}
|
|
629
|
+
}
|
|
630
|
+
|
|
631
|
+
// Step 6: Process other entities (not output hierarchy, not bin, not content, not media)
|
|
593
632
|
for (const [entityName, entries] of Object.entries(appJson.children)) {
|
|
594
|
-
if (['bin', 'content', 'output', '
|
|
633
|
+
if (['bin', 'content', 'output', 'output_value', 'output_value_filter',
|
|
634
|
+
'output_value_entity_column_rel', 'media'].includes(entityName)) continue;
|
|
595
635
|
if (!Array.isArray(entries)) continue;
|
|
596
636
|
|
|
637
|
+
// Skip if entity filter is active and this entity doesn't match
|
|
638
|
+
if (entityFilter && !entityFilter.has(entityName)) continue;
|
|
639
|
+
|
|
597
640
|
if (ENTITY_DIR_MAP[entityName]) {
|
|
598
641
|
// Entity types with project directories — process into their directory
|
|
599
642
|
const refs = await processEntityDirEntries(entityName, entries, options, serverTz);
|
|
@@ -615,20 +658,45 @@ export async function performClone(source, options = {}) {
|
|
|
615
658
|
}
|
|
616
659
|
|
|
617
660
|
// Step 7: Save app.json with references
|
|
618
|
-
await saveAppJson(appJson, contentRefs, otherRefs);
|
|
661
|
+
await saveAppJson(appJson, contentRefs, otherRefs, effectiveDomain);
|
|
619
662
|
|
|
620
|
-
// Step 8: Create .app.json baseline for delta tracking
|
|
621
|
-
|
|
663
|
+
// Step 8: Create .app.json baseline for delta tracking (skip in entity-filter mode to avoid overwriting)
|
|
664
|
+
if (!entityFilter) {
|
|
665
|
+
await saveBaselineFile(appJson);
|
|
666
|
+
}
|
|
622
667
|
|
|
623
668
|
// Step 9: Ensure .app.json is in .gitignore
|
|
624
669
|
await ensureGitignore(['.app.json']);
|
|
625
670
|
|
|
626
671
|
log.plain('');
|
|
627
|
-
log.success('Clone complete!');
|
|
672
|
+
log.success(entityFilter ? `Clone complete! (filtered: ${options.entity})` : 'Clone complete!');
|
|
628
673
|
log.dim(' app.json saved to project root');
|
|
629
674
|
log.dim(' Run "dbo login" to authenticate, then "dbo push" to deploy changes');
|
|
630
675
|
}
|
|
631
676
|
|
|
677
|
+
/**
|
|
678
|
+
* Resolve --entity filter into a Set of entity keys to process.
|
|
679
|
+
* Returns null if no filter (process everything), or a Set of entity key strings.
|
|
680
|
+
*
|
|
681
|
+
* "output" expands to all OUTPUT_HIERARCHY_ENTITIES.
|
|
682
|
+
* Entity-dir names (e.g. "extension", "site") are matched directly.
|
|
683
|
+
* Documentation aliases are also accepted (e.g. "column" → "output_value").
|
|
684
|
+
*/
|
|
685
|
+
function resolveEntityFilter(entityArg) {
|
|
686
|
+
if (!entityArg) return null;
|
|
687
|
+
|
|
688
|
+
const input = entityArg.toLowerCase().trim();
|
|
689
|
+
|
|
690
|
+
// "output" or any output sub-entity → process all output hierarchy entities
|
|
691
|
+
const docToPhysical = { column: 'output_value', filter: 'output_value_filter', join: 'output_value_entity_column_rel' };
|
|
692
|
+
if (input === 'output' || OUTPUT_HIERARCHY_ENTITIES.includes(input) || docToPhysical[input]) {
|
|
693
|
+
return new Set(OUTPUT_HIERARCHY_ENTITIES.concat(['output']));
|
|
694
|
+
}
|
|
695
|
+
|
|
696
|
+
// Direct match on known entity types
|
|
697
|
+
return new Set([input]);
|
|
698
|
+
}
|
|
699
|
+
|
|
632
700
|
/**
|
|
633
701
|
* Resolve placement preferences from config or prompt the user.
|
|
634
702
|
* Returns { contentPlacement, mediaPlacement } where values are 'path'|'bin'|'ask'|null
|
|
@@ -1689,11 +1757,574 @@ export function guessExtensionForColumn(columnName) {
|
|
|
1689
1757
|
return 'txt';
|
|
1690
1758
|
}
|
|
1691
1759
|
|
|
1760
|
+
// ─── Output Hierarchy Processing ──────────────────────────────────────────
|
|
1761
|
+
|
|
1762
|
+
/**
|
|
1763
|
+
* Build a tree structure from flat output entity arrays.
|
|
1764
|
+
* Groups columns, joins, and filters under their parent output records.
|
|
1765
|
+
*
|
|
1766
|
+
* @param {Object} appJson - The full app JSON with children arrays
|
|
1767
|
+
* @returns {Array} - Array of output root nodes with nested _children
|
|
1768
|
+
*/
|
|
1769
|
+
export function buildOutputHierarchyTree(appJson) {
|
|
1770
|
+
const outputs = appJson.children.output || [];
|
|
1771
|
+
const columns = appJson.children.output_value || [];
|
|
1772
|
+
const filters = appJson.children.output_value_filter || [];
|
|
1773
|
+
const joins = appJson.children.output_value_entity_column_rel || [];
|
|
1774
|
+
|
|
1775
|
+
if (outputs.length === 0) return [];
|
|
1776
|
+
|
|
1777
|
+
// Index all entities by their numeric ID for O(1) lookups
|
|
1778
|
+
const outputById = new Map();
|
|
1779
|
+
const columnById = new Map();
|
|
1780
|
+
const joinById = new Map();
|
|
1781
|
+
|
|
1782
|
+
for (const o of outputs) {
|
|
1783
|
+
outputById.set(o.OutputID || o._id, { ...o, _children: { column: [], join: [], filter: [] } });
|
|
1784
|
+
}
|
|
1785
|
+
for (const c of columns) {
|
|
1786
|
+
columnById.set(c.OutputValueID || c._id, { ...c, _children: { filter: [] } });
|
|
1787
|
+
}
|
|
1788
|
+
for (const j of joins) {
|
|
1789
|
+
joinById.set(j.OutputValueEntityColumnRelID || j._id, { ...j, _children: { column: [] } });
|
|
1790
|
+
}
|
|
1791
|
+
|
|
1792
|
+
// Attach filters: to column (via OutputValueID) or directly to output (via OutputID)
|
|
1793
|
+
for (const f of filters) {
|
|
1794
|
+
if (f.OutputValueID) {
|
|
1795
|
+
const parent = columnById.get(f.OutputValueID);
|
|
1796
|
+
if (parent) {
|
|
1797
|
+
parent._children.filter.push({ ...f });
|
|
1798
|
+
continue;
|
|
1799
|
+
}
|
|
1800
|
+
}
|
|
1801
|
+
if (f.OutputID) {
|
|
1802
|
+
const parent = outputById.get(f.OutputID);
|
|
1803
|
+
if (parent) {
|
|
1804
|
+
parent._children.filter.push({ ...f });
|
|
1805
|
+
}
|
|
1806
|
+
}
|
|
1807
|
+
}
|
|
1808
|
+
|
|
1809
|
+
// Attach columns: to join (via OutputValueEntityColumnRelID) or directly to output (via OutputID)
|
|
1810
|
+
for (const [, col] of columnById) {
|
|
1811
|
+
if (col.OutputValueEntityColumnRelID) {
|
|
1812
|
+
const parent = joinById.get(col.OutputValueEntityColumnRelID);
|
|
1813
|
+
if (parent) {
|
|
1814
|
+
parent._children.column.push(col);
|
|
1815
|
+
continue;
|
|
1816
|
+
}
|
|
1817
|
+
}
|
|
1818
|
+
if (col.OutputID) {
|
|
1819
|
+
const parent = outputById.get(col.OutputID);
|
|
1820
|
+
if (parent) {
|
|
1821
|
+
parent._children.column.push(col);
|
|
1822
|
+
}
|
|
1823
|
+
}
|
|
1824
|
+
}
|
|
1825
|
+
|
|
1826
|
+
// Attach joins to outputs (via OutputID)
|
|
1827
|
+
for (const [, j] of joinById) {
|
|
1828
|
+
if (j.OutputID) {
|
|
1829
|
+
const parent = outputById.get(j.OutputID);
|
|
1830
|
+
if (parent) {
|
|
1831
|
+
parent._children.join.push(j);
|
|
1832
|
+
}
|
|
1833
|
+
}
|
|
1834
|
+
}
|
|
1835
|
+
|
|
1836
|
+
// Sort children by OrderNumber (ties broken by array index stability)
|
|
1837
|
+
const sortByOrder = (a, b) => (a.OrderNumber || 0) - (b.OrderNumber || 0);
|
|
1838
|
+
|
|
1839
|
+
for (const [, output] of outputById) {
|
|
1840
|
+
output._children.column.sort(sortByOrder);
|
|
1841
|
+
output._children.join.sort(sortByOrder);
|
|
1842
|
+
output._children.filter.sort(sortByOrder);
|
|
1843
|
+
|
|
1844
|
+
for (const col of output._children.column) {
|
|
1845
|
+
col._children.filter.sort(sortByOrder);
|
|
1846
|
+
}
|
|
1847
|
+
for (const j of output._children.join) {
|
|
1848
|
+
j._children.column.sort(sortByOrder);
|
|
1849
|
+
for (const col of j._children.column) {
|
|
1850
|
+
col._children.filter.sort(sortByOrder);
|
|
1851
|
+
}
|
|
1852
|
+
}
|
|
1853
|
+
}
|
|
1854
|
+
|
|
1855
|
+
return Array.from(outputById.values());
|
|
1856
|
+
}
|
|
1857
|
+
|
|
1858
|
+
/**
|
|
1859
|
+
* Resolve filename column preferences for each output entity type.
|
|
1860
|
+
* Loads from config, prompts if needed, saves choices.
|
|
1861
|
+
*
|
|
1862
|
+
* @param {Object} appJson - The full app JSON
|
|
1863
|
+
* @param {Object} options - CLI options
|
|
1864
|
+
* @returns {Object} - { output: 'Name', output_value: 'Title', ... }
|
|
1865
|
+
*/
|
|
1866
|
+
async function resolveOutputFilenameColumns(appJson, options) {
|
|
1867
|
+
const defaults = {
|
|
1868
|
+
output: 'Name',
|
|
1869
|
+
output_value: 'Title',
|
|
1870
|
+
output_value_filter: 'ShortName',
|
|
1871
|
+
output_value_entity_column_rel: 'UID',
|
|
1872
|
+
};
|
|
1873
|
+
const fallbacks = {
|
|
1874
|
+
output: ['Name', 'Title', 'OrderNumber', 'UID'],
|
|
1875
|
+
output_value: ['Title', 'Name', 'OrderNumber', 'UID'],
|
|
1876
|
+
output_value_filter: ['ShortName', 'Name', 'UID'],
|
|
1877
|
+
output_value_entity_column_rel: ['UID'],
|
|
1878
|
+
};
|
|
1879
|
+
const docNames = OUTPUT_ENTITY_MAP;
|
|
1880
|
+
const result = {};
|
|
1881
|
+
|
|
1882
|
+
for (const entityKey of OUTPUT_HIERARCHY_ENTITIES) {
|
|
1883
|
+
// Check saved preference
|
|
1884
|
+
const saved = await loadOutputFilenamePreference(entityKey);
|
|
1885
|
+
if (saved) {
|
|
1886
|
+
result[entityKey] = saved;
|
|
1887
|
+
continue;
|
|
1888
|
+
}
|
|
1889
|
+
|
|
1890
|
+
// In -y mode use defaults
|
|
1891
|
+
if (options.yes) {
|
|
1892
|
+
result[entityKey] = defaults[entityKey];
|
|
1893
|
+
await saveOutputFilenamePreference(entityKey, defaults[entityKey]);
|
|
1894
|
+
continue;
|
|
1895
|
+
}
|
|
1896
|
+
|
|
1897
|
+
// Find a sample record to get available columns
|
|
1898
|
+
const records = appJson.children[entityKey] || [];
|
|
1899
|
+
if (records.length === 0) {
|
|
1900
|
+
result[entityKey] = defaults[entityKey];
|
|
1901
|
+
continue;
|
|
1902
|
+
}
|
|
1903
|
+
|
|
1904
|
+
const sampleRecord = records[0];
|
|
1905
|
+
const columns = Object.keys(sampleRecord).filter(k => k !== 'children' && !k.startsWith('_'));
|
|
1906
|
+
|
|
1907
|
+
// Find best default from fallback chain
|
|
1908
|
+
let defaultCol = defaults[entityKey];
|
|
1909
|
+
for (const fb of fallbacks[entityKey]) {
|
|
1910
|
+
if (columns.includes(fb)) { defaultCol = fb; break; }
|
|
1911
|
+
}
|
|
1912
|
+
|
|
1913
|
+
const inquirer = (await import('inquirer')).default;
|
|
1914
|
+
const { col } = await inquirer.prompt([{
|
|
1915
|
+
type: 'list',
|
|
1916
|
+
name: 'col',
|
|
1917
|
+
message: `Which column should be used as the filename for ${docNames[entityKey]} (${entityKey}) records?`,
|
|
1918
|
+
choices: columns,
|
|
1919
|
+
default: defaultCol,
|
|
1920
|
+
}]);
|
|
1921
|
+
|
|
1922
|
+
result[entityKey] = col;
|
|
1923
|
+
await saveOutputFilenamePreference(entityKey, col);
|
|
1924
|
+
log.dim(` Saved filename column preference for ${entityKey}`);
|
|
1925
|
+
}
|
|
1926
|
+
|
|
1927
|
+
return result;
|
|
1928
|
+
}
|
|
1929
|
+
|
|
1930
|
+
/**
|
|
1931
|
+
* Build a filename for an output hierarchy entity.
|
|
1932
|
+
* Uses dot-separated hierarchical naming: _output~<name>~<uid>.column~<name>~<uid>.json
|
|
1933
|
+
*
|
|
1934
|
+
* @param {string} entityType - Documentation name: 'output', 'column', 'join', 'filter'
|
|
1935
|
+
* @param {Object} node - The entity record
|
|
1936
|
+
* @param {string} filenameCol - Column to use for the name portion
|
|
1937
|
+
* @param {string[]} parentChain - Array of parent segments: ['_output~name~uid', 'join~name~uid', ...]
|
|
1938
|
+
* @returns {string} - Base filename without extension
|
|
1939
|
+
*/
|
|
1940
|
+
export function buildOutputFilename(entityType, node, filenameCol, parentChain = []) {
|
|
1941
|
+
const uid = node.UID || '';
|
|
1942
|
+
const rawName = node[filenameCol];
|
|
1943
|
+
const name = rawName ? sanitizeFilename(String(rawName)) : '';
|
|
1944
|
+
|
|
1945
|
+
// Build this entity's segment: <type>~<name>~<uid>
|
|
1946
|
+
// If filenameCol IS the UID, don't double-append it
|
|
1947
|
+
let segment;
|
|
1948
|
+
if (!name || name === uid) {
|
|
1949
|
+
segment = `${entityType}~${uid}`;
|
|
1950
|
+
} else {
|
|
1951
|
+
segment = `${entityType}~${name}~${uid}`;
|
|
1952
|
+
}
|
|
1953
|
+
|
|
1954
|
+
// Root output gets _ prefix
|
|
1955
|
+
if (entityType === 'output') {
|
|
1956
|
+
segment = `_${segment}`;
|
|
1957
|
+
}
|
|
1958
|
+
|
|
1959
|
+
const allSegments = [...parentChain, segment];
|
|
1960
|
+
return allSegments.join('.');
|
|
1961
|
+
}
|
|
1962
|
+
|
|
1963
|
+
/**
|
|
1964
|
+
* Parse an output hierarchy filename back into entity relationships.
|
|
1965
|
+
*
|
|
1966
|
+
* @param {string} filename - e.g. "_output~name~uid.column~name~uid.filter~name~uid.json"
|
|
1967
|
+
* @returns {Object} - { segments: [{entity, name, uid}], rootOutputUid, entityType, uid }
|
|
1968
|
+
*/
|
|
1969
|
+
export function parseOutputHierarchyFile(filename) {
|
|
1970
|
+
// Strip .json extension
|
|
1971
|
+
let base = filename;
|
|
1972
|
+
if (base.endsWith('.json')) base = base.substring(0, base.length - 5);
|
|
1973
|
+
|
|
1974
|
+
// Split into segments by finding entity type boundaries
|
|
1975
|
+
// Entity types are: _output~, output~, column~, join~, filter~
|
|
1976
|
+
const parts = [];
|
|
1977
|
+
|
|
1978
|
+
// First, split by '.' but we need to be careful since names can contain '.'
|
|
1979
|
+
// Strategy: find entity-type prefixed segments
|
|
1980
|
+
// Split on '.' then re-join segments that don't start with an entity type
|
|
1981
|
+
const dotParts = base.split('.');
|
|
1982
|
+
let currentSegment = null;
|
|
1983
|
+
|
|
1984
|
+
for (const part of dotParts) {
|
|
1985
|
+
// Check if this part starts with an entity type prefix
|
|
1986
|
+
const stripped = part.replace(/^_/, '');
|
|
1987
|
+
const match = stripped.match(/^(output|column|join|filter)~/);
|
|
1988
|
+
|
|
1989
|
+
if (match) {
|
|
1990
|
+
if (currentSegment !== null) {
|
|
1991
|
+
parts.push(currentSegment);
|
|
1992
|
+
}
|
|
1993
|
+
currentSegment = part;
|
|
1994
|
+
} else if (currentSegment !== null) {
|
|
1995
|
+
// This part belongs to the previous segment (name contained '.')
|
|
1996
|
+
currentSegment += '.' + part;
|
|
1997
|
+
} else {
|
|
1998
|
+
currentSegment = part;
|
|
1999
|
+
}
|
|
2000
|
+
}
|
|
2001
|
+
if (currentSegment !== null) {
|
|
2002
|
+
parts.push(currentSegment);
|
|
2003
|
+
}
|
|
2004
|
+
|
|
2005
|
+
// Parse each segment: <type>~<name>~<uid> or <type>~<uid>
|
|
2006
|
+
const segments = [];
|
|
2007
|
+
for (const part of parts) {
|
|
2008
|
+
const stripped = part.replace(/^_/, '');
|
|
2009
|
+
const firstTilde = stripped.indexOf('~');
|
|
2010
|
+
if (firstTilde < 0) continue;
|
|
2011
|
+
|
|
2012
|
+
const entityType = stripped.substring(0, firstTilde);
|
|
2013
|
+
const rest = stripped.substring(firstTilde + 1);
|
|
2014
|
+
|
|
2015
|
+
// The UID is the last ~-separated value (UIDs are alphanumeric, typically 8+ chars)
|
|
2016
|
+
const lastTilde = rest.lastIndexOf('~');
|
|
2017
|
+
let name, uid;
|
|
2018
|
+
if (lastTilde >= 0) {
|
|
2019
|
+
name = rest.substring(0, lastTilde);
|
|
2020
|
+
uid = rest.substring(lastTilde + 1);
|
|
2021
|
+
} else {
|
|
2022
|
+
// No second tilde — the entire rest is the UID
|
|
2023
|
+
name = null;
|
|
2024
|
+
uid = rest;
|
|
2025
|
+
}
|
|
2026
|
+
|
|
2027
|
+
segments.push({ entity: entityType, name, uid });
|
|
2028
|
+
}
|
|
2029
|
+
|
|
2030
|
+
if (segments.length === 0) return null;
|
|
2031
|
+
|
|
2032
|
+
const last = segments[segments.length - 1];
|
|
2033
|
+
const root = segments[0];
|
|
2034
|
+
|
|
2035
|
+
// Map documentation name back to physical table name
|
|
2036
|
+
const docToPhysical = {
|
|
2037
|
+
output: 'output',
|
|
2038
|
+
column: 'output_value',
|
|
2039
|
+
filter: 'output_value_filter',
|
|
2040
|
+
join: 'output_value_entity_column_rel',
|
|
2041
|
+
};
|
|
2042
|
+
|
|
2043
|
+
return {
|
|
2044
|
+
segments,
|
|
2045
|
+
rootOutputUid: root.uid,
|
|
2046
|
+
entityType: last.entity,
|
|
2047
|
+
physicalEntity: docToPhysical[last.entity] || last.entity,
|
|
2048
|
+
uid: last.uid,
|
|
2049
|
+
parentEntity: segments.length > 1 ? segments[segments.length - 2].entity : null,
|
|
2050
|
+
parentUid: segments.length > 1 ? segments[segments.length - 2].uid : null,
|
|
2051
|
+
};
|
|
2052
|
+
}
|
|
2053
|
+
|
|
2054
|
+
/**
|
|
2055
|
+
* Main orchestrator: process output hierarchy entities during clone.
|
|
2056
|
+
* Builds tree, resolves filenames, writes hierarchy files.
|
|
2057
|
+
*
|
|
2058
|
+
* @param {Object} appJson - The full app JSON
|
|
2059
|
+
* @param {Object} structure - Bin hierarchy structure
|
|
2060
|
+
* @param {Object} options - CLI options
|
|
2061
|
+
* @param {string} serverTz - Server timezone
|
|
2062
|
+
* @returns {Array} - Array of { uid, metaPath } for app.json reference replacement
|
|
2063
|
+
*/
|
|
2064
|
+
async function processOutputHierarchy(appJson, structure, options, serverTz) {
|
|
2065
|
+
const tree = buildOutputHierarchyTree(appJson);
|
|
2066
|
+
if (tree.length === 0) return [];
|
|
2067
|
+
|
|
2068
|
+
log.info(`Processing ${tree.length} output record(s) with hierarchy...`);
|
|
2069
|
+
|
|
2070
|
+
// Resolve filename columns for each entity type
|
|
2071
|
+
const filenameCols = await resolveOutputFilenameColumns(appJson, options);
|
|
2072
|
+
|
|
2073
|
+
const refs = [];
|
|
2074
|
+
const bulkAction = { value: null };
|
|
2075
|
+
const config = await loadConfig();
|
|
2076
|
+
// When --force flag is set, skip change detection and re-process all files
|
|
2077
|
+
const forceReprocess = !!options.force;
|
|
2078
|
+
|
|
2079
|
+
for (const output of tree) {
|
|
2080
|
+
// Resolve bin directory for this output
|
|
2081
|
+
let binDir = null;
|
|
2082
|
+
let chosenBinId = null;
|
|
2083
|
+
if (output.BinID && structure[output.BinID]) {
|
|
2084
|
+
binDir = resolveBinPath(output.BinID, structure);
|
|
2085
|
+
}
|
|
2086
|
+
|
|
2087
|
+
if (!binDir) {
|
|
2088
|
+
// No BinID — prompt or default
|
|
2089
|
+
if (!options.yes) {
|
|
2090
|
+
const inquirer = (await import('inquirer')).default;
|
|
2091
|
+
const binChoices = Object.entries(structure).map(([id, entry]) => ({
|
|
2092
|
+
name: `${entry.name} (${entry.fullPath})`,
|
|
2093
|
+
value: id,
|
|
2094
|
+
}));
|
|
2095
|
+
|
|
2096
|
+
if (binChoices.length > 0) {
|
|
2097
|
+
const { binId } = await inquirer.prompt([{
|
|
2098
|
+
type: 'list',
|
|
2099
|
+
name: 'binId',
|
|
2100
|
+
message: `Output "${output.Name || output.UID}" has no BinID. Which bin should it go in?`,
|
|
2101
|
+
choices: binChoices,
|
|
2102
|
+
}]);
|
|
2103
|
+
chosenBinId = Number(binId);
|
|
2104
|
+
binDir = resolveBinPath(chosenBinId, structure);
|
|
2105
|
+
} else {
|
|
2106
|
+
binDir = BINS_DIR;
|
|
2107
|
+
}
|
|
2108
|
+
} else {
|
|
2109
|
+
binDir = BINS_DIR;
|
|
2110
|
+
}
|
|
2111
|
+
}
|
|
2112
|
+
|
|
2113
|
+
await mkdir(binDir, { recursive: true });
|
|
2114
|
+
|
|
2115
|
+
// Build root output filename
|
|
2116
|
+
const rootBasename = buildOutputFilename('output', output, filenameCols.output);
|
|
2117
|
+
const rootMetaPath = join(binDir, `${rootBasename}.json`);
|
|
2118
|
+
|
|
2119
|
+
// Change detection for existing files (skip when --entity forces re-processing)
|
|
2120
|
+
if (await fileExists(rootMetaPath) && !options.yes && !forceReprocess) {
|
|
2121
|
+
if (bulkAction.value === 'skip_all') {
|
|
2122
|
+
log.dim(` Skipped ${rootBasename}`);
|
|
2123
|
+
refs.push({ uid: output.UID, metaPath: rootMetaPath });
|
|
2124
|
+
continue;
|
|
2125
|
+
}
|
|
2126
|
+
if (bulkAction.value !== 'overwrite_all') {
|
|
2127
|
+
const localSyncTime = await getLocalSyncTime(rootMetaPath);
|
|
2128
|
+
const serverNewer = isServerNewer(localSyncTime, output._LastUpdated, config);
|
|
2129
|
+
if (serverNewer) {
|
|
2130
|
+
const action = await promptChangeDetection(rootBasename, output, config);
|
|
2131
|
+
if (action === 'skip') { refs.push({ uid: output.UID, metaPath: rootMetaPath }); continue; }
|
|
2132
|
+
if (action === 'skip_all') { bulkAction.value = 'skip_all'; refs.push({ uid: output.UID, metaPath: rootMetaPath }); continue; }
|
|
2133
|
+
if (action === 'overwrite_all') { bulkAction.value = 'overwrite_all'; }
|
|
2134
|
+
if (action === 'compare') { await inlineDiffAndMerge(output, rootMetaPath, config); refs.push({ uid: output.UID, metaPath: rootMetaPath }); continue; }
|
|
2135
|
+
} else {
|
|
2136
|
+
const locallyModified = await hasLocalModifications(rootMetaPath, config);
|
|
2137
|
+
if (locallyModified) {
|
|
2138
|
+
const action = await promptChangeDetection(rootBasename, output, config, { localIsNewer: true });
|
|
2139
|
+
if (action === 'skip') { refs.push({ uid: output.UID, metaPath: rootMetaPath }); continue; }
|
|
2140
|
+
if (action === 'skip_all') { bulkAction.value = 'skip_all'; refs.push({ uid: output.UID, metaPath: rootMetaPath }); continue; }
|
|
2141
|
+
if (action === 'overwrite_all') { bulkAction.value = 'overwrite_all'; }
|
|
2142
|
+
if (action === 'compare') { await inlineDiffAndMerge(output, rootMetaPath, config, { localIsNewer: true }); refs.push({ uid: output.UID, metaPath: rootMetaPath }); continue; }
|
|
2143
|
+
} else {
|
|
2144
|
+
log.dim(` Up to date: ${rootBasename}`);
|
|
2145
|
+
refs.push({ uid: output.UID, metaPath: rootMetaPath });
|
|
2146
|
+
continue;
|
|
2147
|
+
}
|
|
2148
|
+
}
|
|
2149
|
+
}
|
|
2150
|
+
}
|
|
2151
|
+
|
|
2152
|
+
// Collect child file references for root JSON
|
|
2153
|
+
const childRefs = { column: [], join: [], filter: [] };
|
|
2154
|
+
|
|
2155
|
+
// Helper to build a child filename segment
|
|
2156
|
+
const childSegment = (type, node, col) => {
|
|
2157
|
+
const uid = node.UID || '';
|
|
2158
|
+
const rawName = node[col];
|
|
2159
|
+
const name = rawName ? sanitizeFilename(String(rawName)) : '';
|
|
2160
|
+
return (!name || name === uid) ? `${type}~${uid}` : `${type}~${name}~${uid}`;
|
|
2161
|
+
};
|
|
2162
|
+
|
|
2163
|
+
// Process all children depth-first
|
|
2164
|
+
// Direct filters on output
|
|
2165
|
+
for (const filter of output._children.filter) {
|
|
2166
|
+
const filterName = `${rootBasename}.${childSegment('filter', filter, filenameCols.output_value_filter)}`;
|
|
2167
|
+
const filterPath = join(binDir, `${filterName}.json`);
|
|
2168
|
+
await writeOutputEntityFile(filter, 'output_value_filter', filterPath, serverTz);
|
|
2169
|
+
childRefs.filter.push(`@${filterPath}`);
|
|
2170
|
+
}
|
|
2171
|
+
|
|
2172
|
+
// Direct columns on output
|
|
2173
|
+
for (const col of output._children.column) {
|
|
2174
|
+
const colSeg = childSegment('column', col, filenameCols.output_value);
|
|
2175
|
+
const colName = `${rootBasename}.${colSeg}`;
|
|
2176
|
+
const colPath = join(binDir, `${colName}.json`);
|
|
2177
|
+
await writeOutputEntityFile(col, 'output_value', colPath, serverTz);
|
|
2178
|
+
childRefs.column.push(`@${colPath}`);
|
|
2179
|
+
|
|
2180
|
+
// Filters under this column
|
|
2181
|
+
for (const filter of col._children.filter) {
|
|
2182
|
+
const filterName = `${colName}.${childSegment('filter', filter, filenameCols.output_value_filter)}`;
|
|
2183
|
+
const filterPath = join(binDir, `${filterName}.json`);
|
|
2184
|
+
await writeOutputEntityFile(filter, 'output_value_filter', filterPath, serverTz);
|
|
2185
|
+
childRefs.filter.push(`@${filterPath}`);
|
|
2186
|
+
}
|
|
2187
|
+
}
|
|
2188
|
+
|
|
2189
|
+
// Joins on output
|
|
2190
|
+
for (const j of output._children.join) {
|
|
2191
|
+
const joinSeg = childSegment('join', j, filenameCols.output_value_entity_column_rel);
|
|
2192
|
+
const joinName = `${rootBasename}.${joinSeg}`;
|
|
2193
|
+
const joinPath = join(binDir, `${joinName}.json`);
|
|
2194
|
+
await writeOutputEntityFile(j, 'output_value_entity_column_rel', joinPath, serverTz);
|
|
2195
|
+
childRefs.join.push(`@${joinPath}`);
|
|
2196
|
+
|
|
2197
|
+
// Columns under this join
|
|
2198
|
+
for (const col of j._children.column) {
|
|
2199
|
+
const joinColName = `${joinName}.${childSegment('column', col, filenameCols.output_value)}`;
|
|
2200
|
+
const joinColPath = join(binDir, `${joinColName}.json`);
|
|
2201
|
+
await writeOutputEntityFile(col, 'output_value', joinColPath, serverTz);
|
|
2202
|
+
childRefs.column.push(`@${joinColPath}`);
|
|
2203
|
+
|
|
2204
|
+
// Filters under this join→column
|
|
2205
|
+
for (const filter of col._children.filter) {
|
|
2206
|
+
const filterName = `${joinColName}.${childSegment('filter', filter, filenameCols.output_value_filter)}`;
|
|
2207
|
+
const filterPath = join(binDir, `${filterName}.json`);
|
|
2208
|
+
await writeOutputEntityFile(filter, 'output_value_filter', filterPath, serverTz);
|
|
2209
|
+
childRefs.filter.push(`@${filterPath}`);
|
|
2210
|
+
}
|
|
2211
|
+
}
|
|
2212
|
+
}
|
|
2213
|
+
|
|
2214
|
+
// Write root output JSON with child references
|
|
2215
|
+
const rootMeta = {};
|
|
2216
|
+
const rootContentColumns = [];
|
|
2217
|
+
for (const [key, value] of Object.entries(output)) {
|
|
2218
|
+
if (key === '_children') continue;
|
|
2219
|
+
|
|
2220
|
+
// Always extract CustomSQL to companion .sql file
|
|
2221
|
+
if (key === 'CustomSQL') {
|
|
2222
|
+
const decoded = resolveContentValue(value);
|
|
2223
|
+
const sqlContent = (decoded && decoded.trim()) ? decoded : '';
|
|
2224
|
+
const sqlFilePath = rootMetaPath.replace(/\.json$/, '.CustomSQL.sql');
|
|
2225
|
+
await writeFile(sqlFilePath, sqlContent);
|
|
2226
|
+
rootMeta[key] = `@${basename(sqlFilePath)}`;
|
|
2227
|
+
rootContentColumns.push('CustomSQL');
|
|
2228
|
+
if (serverTz && (output._CreatedOn || output._LastUpdated)) {
|
|
2229
|
+
try { await setFileTimestamps(sqlFilePath, output._CreatedOn, output._LastUpdated, serverTz); } catch { /* non-critical */ }
|
|
2230
|
+
}
|
|
2231
|
+
log.dim(` → ${sqlFilePath}`);
|
|
2232
|
+
continue;
|
|
2233
|
+
}
|
|
2234
|
+
|
|
2235
|
+
if (value && typeof value === 'object' && !Array.isArray(value) && value.encoding === 'base64') {
|
|
2236
|
+
rootMeta[key] = resolveContentValue(value);
|
|
2237
|
+
} else {
|
|
2238
|
+
rootMeta[key] = value;
|
|
2239
|
+
}
|
|
2240
|
+
}
|
|
2241
|
+
rootMeta._entity = 'output';
|
|
2242
|
+
if (rootContentColumns.length > 0) {
|
|
2243
|
+
rootMeta._contentColumns = rootContentColumns;
|
|
2244
|
+
}
|
|
2245
|
+
rootMeta.children = childRefs;
|
|
2246
|
+
|
|
2247
|
+
// If user chose a bin for a BinID-less output, store it and mark as modified
|
|
2248
|
+
if (chosenBinId) {
|
|
2249
|
+
rootMeta.BinID = chosenBinId;
|
|
2250
|
+
log.dim(` Set BinID=${chosenBinId} on "${output.Name || output.UID}" (staged for next push)`);
|
|
2251
|
+
}
|
|
2252
|
+
|
|
2253
|
+
await writeFile(rootMetaPath, JSON.stringify(rootMeta, null, 2) + '\n');
|
|
2254
|
+
log.success(`Saved ${rootMetaPath}`);
|
|
2255
|
+
|
|
2256
|
+
// Set file timestamps to server's _LastUpdated so diff detection works.
|
|
2257
|
+
// Skip when chosenBinId is set — keep mtime at "now" so push detects the local edit.
|
|
2258
|
+
if (!chosenBinId && serverTz && (output._CreatedOn || output._LastUpdated)) {
|
|
2259
|
+
try {
|
|
2260
|
+
await setFileTimestamps(rootMetaPath, output._CreatedOn, output._LastUpdated, serverTz);
|
|
2261
|
+
} catch { /* non-critical */ }
|
|
2262
|
+
}
|
|
2263
|
+
|
|
2264
|
+
refs.push({ uid: output.UID, metaPath: rootMetaPath });
|
|
2265
|
+
}
|
|
2266
|
+
|
|
2267
|
+
return refs;
|
|
2268
|
+
}
|
|
2269
|
+
|
|
2270
|
+
/**
 * Serialize one node of the output hierarchy (column, join, or filter) to a
 * standalone JSON metadata file.
 *
 * CustomSQL is never inlined: it is written to a companion `<name>.CustomSQL.sql`
 * file next to the JSON and referenced via an `@<filename>` pointer, and the
 * column name is recorded in `_contentColumns`. Other base64-wrapped values are
 * decoded inline. When a server timezone is supplied, the written files' mtimes
 * are aligned to the node's `_CreatedOn`/`_LastUpdated` (best-effort) so later
 * diff detection works.
 *
 * @param {object} node           Entity data; `_children` is skipped, `_CreatedOn`/`_LastUpdated` drive timestamp syncing
 * @param {string} physicalEntity Physical entity name stored as `_entity` in the JSON
 * @param {string} filePath       Destination `.json` path
 * @param {string} serverTz       Server timezone; falsy disables timestamp syncing
 */
async function writeOutputEntityFile(node, physicalEntity, filePath, serverTz) {
  const meta = {};
  const contentColumns = [];

  // Best-effort mtime alignment; failures are non-fatal by design.
  const syncTimestamps = async (targetPath) => {
    if (!(serverTz && (node._CreatedOn || node._LastUpdated))) return;
    try {
      await setFileTimestamps(targetPath, node._CreatedOn, node._LastUpdated, serverTz);
    } catch { /* non-critical */ }
  };

  for (const [key, value] of Object.entries(node)) {
    if (key === '_children') continue;

    if (key === 'CustomSQL') {
      // CustomSQL always lives in a companion .sql file, referenced by @filename.
      const decoded = resolveContentValue(value);
      const sqlFilePath = filePath.replace(/\.json$/, '.CustomSQL.sql');
      await writeFile(sqlFilePath, (decoded && decoded.trim()) ? decoded : '');
      meta[key] = `@${basename(sqlFilePath)}`;
      contentColumns.push('CustomSQL');
      await syncTimestamps(sqlFilePath);
      log.dim(`    → ${sqlFilePath}`);
      continue;
    }

    // Base64-wrapped objects are decoded inline; everything else passes through.
    const isBase64Wrapper =
      value && typeof value === 'object' && !Array.isArray(value) && value.encoding === 'base64';
    meta[key] = isBase64Wrapper ? resolveContentValue(value) : value;
  }

  meta._entity = physicalEntity;
  if (contentColumns.length > 0) {
    meta._contentColumns = contentColumns;
  }

  await writeFile(filePath, JSON.stringify(meta, null, 2) + '\n');
  log.dim(`    → ${filePath}`);

  await syncTimestamps(filePath);
}
|
|
2321
|
+
|
|
1692
2322
|
/**
|
|
1693
2323
|
* Save app.json to project root with @ references replacing processed entries.
|
|
1694
2324
|
*/
|
|
1695
|
-
async function saveAppJson(appJson, contentRefs, otherRefs) {
|
|
2325
|
+
async function saveAppJson(appJson, contentRefs, otherRefs, domain) {
|
|
1696
2326
|
const output = { ...appJson };
|
|
2327
|
+
if (domain) output._domain = domain;
|
|
1697
2328
|
output.children = { ...appJson.children };
|
|
1698
2329
|
|
|
1699
2330
|
// Replace content array with references
|
|
@@ -1719,7 +2350,13 @@ async function saveAppJson(appJson, contentRefs, otherRefs) {
|
|
|
1719
2350
|
}
|
|
1720
2351
|
|
|
1721
2352
|
// Bins stay as-is (directory structure, no metadata files)
|
|
1722
|
-
|
|
2353
|
+
|
|
2354
|
+
// Remove sub-entity arrays that are now embedded in output hierarchy files
|
|
2355
|
+
if (otherRefs.output) {
|
|
2356
|
+
delete output.children.output_value;
|
|
2357
|
+
delete output.children.output_value_filter;
|
|
2358
|
+
delete output.children.output_value_entity_column_rel;
|
|
2359
|
+
}
|
|
1723
2360
|
|
|
1724
2361
|
await writeFile('app.json', JSON.stringify(output, null, 2) + '\n');
|
|
1725
2362
|
}
|