@dboio/cli 0.9.6 → 0.10.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +38 -1
- package/bin/dbo.js +2 -0
- package/package.json +1 -1
- package/src/commands/add.js +46 -0
- package/src/commands/clone.js +560 -246
- package/src/commands/init.js +30 -32
- package/src/commands/pull.js +264 -87
- package/src/commands/push.js +502 -57
- package/src/commands/rm.js +1 -1
- package/src/commands/sync.js +68 -0
- package/src/lib/config.js +49 -8
- package/src/lib/delta.js +115 -28
- package/src/lib/diff.js +9 -3
- package/src/lib/folder-icon.js +120 -0
- package/src/lib/ignore.js +1 -1
- package/src/lib/input-parser.js +37 -10
- package/src/lib/scaffold.js +82 -2
- package/src/lib/structure.js +2 -0
package/src/commands/clone.js
CHANGED
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
import { Command } from 'commander';
|
|
2
|
-
import { readFile, writeFile, mkdir, access } from 'fs/promises';
|
|
3
|
-
import { join, basename, extname } from 'path';
|
|
2
|
+
import { readFile, writeFile, mkdir, access, readdir, rename } from 'fs/promises';
|
|
3
|
+
import { join, basename, extname, dirname } from 'path';
|
|
4
|
+
import { fileURLToPath } from 'url';
|
|
4
5
|
import { DboClient } from '../lib/client.js';
|
|
5
6
|
import { loadConfig, updateConfigWithApp, loadClonePlacement, saveClonePlacement, ensureGitignore, saveEntityDirPreference, loadEntityDirPreference, saveEntityContentExtractions, loadEntityContentExtractions, saveAppJsonBaseline, addDeleteEntry, loadCollisionResolutions, saveCollisionResolutions, loadSynchronize, saveSynchronize, saveAppModifyKey, loadTransactionKeyPreset, saveTransactionKeyPreset, loadOutputFilenamePreference, saveOutputFilenamePreference, saveCloneSource, loadCloneSource, saveDescriptorFilenamePreference, loadDescriptorFilenamePreference, saveDescriptorContentExtractions, loadDescriptorContentExtractions, saveExtensionDocumentationMDPlacement, loadExtensionDocumentationMDPlacement } from '../lib/config.js';
|
|
6
7
|
import { buildBinHierarchy, resolveBinPath, createDirectories, saveStructureFile, getBinName, findBinByPath, BINS_DIR, DEFAULT_PROJECT_DIRS, ENTITY_DIR_NAMES, OUTPUT_ENTITY_MAP, OUTPUT_HIERARCHY_ENTITIES, EXTENSION_DESCRIPTORS_DIR, EXTENSION_UNSUPPORTED_DIR, DOCUMENTATION_DIR, buildDescriptorMapping, saveDescriptorMapping, loadDescriptorMapping, resolveExtensionSubDir } from '../lib/structure.js';
|
|
@@ -9,6 +10,7 @@ import { buildUidFilename, detectLegacyDotUid } from '../lib/filenames.js';
|
|
|
9
10
|
import { setFileTimestamps, parseServerDate } from '../lib/timestamps.js';
|
|
10
11
|
import { getLocalSyncTime, isServerNewer, hasLocalModifications, promptChangeDetection, inlineDiffAndMerge, isDiffable } from '../lib/diff.js';
|
|
11
12
|
import { checkDomainChange } from '../lib/domain-guard.js';
|
|
13
|
+
import { applyTrashIcon, ensureTrashIcon } from '../lib/folder-icon.js';
|
|
12
14
|
import { loadMetadataTemplates, saveMetadataTemplates, getTemplateCols, setTemplateCols, buildTemplateFromCloneRecord } from '../lib/metadata-templates.js';
|
|
13
15
|
|
|
14
16
|
/**
|
|
@@ -93,7 +95,7 @@ function resolvePathToBinsDir(pathValue, structure) {
|
|
|
93
95
|
* Extract path components for content/generic records (read-only, no file writes).
|
|
94
96
|
* Replicates logic from processRecord() for collision detection.
|
|
95
97
|
*/
|
|
96
|
-
function resolveRecordPaths(entityName, record, structure, placementPref) {
|
|
98
|
+
export function resolveRecordPaths(entityName, record, structure, placementPref) {
|
|
97
99
|
let name = sanitizeFilename(String(record.Name || record.UID || 'untitled'));
|
|
98
100
|
|
|
99
101
|
// Determine extension (priority: Extension field > Name > Path)
|
|
@@ -152,7 +154,7 @@ function resolveRecordPaths(entityName, record, structure, placementPref) {
|
|
|
152
154
|
* Extract path components for media records.
|
|
153
155
|
* Replicates logic from processMediaEntries() for collision detection.
|
|
154
156
|
*/
|
|
155
|
-
function resolveMediaPaths(record, structure, placementPref) {
|
|
157
|
+
export function resolveMediaPaths(record, structure, placementPref) {
|
|
156
158
|
const filename = record.Filename || `${record.Name || record.UID}.${(record.Extension || 'bin').toLowerCase()}`;
|
|
157
159
|
const name = sanitizeFilename(filename.replace(/\.[^.]+$/, ''));
|
|
158
160
|
const ext = (record.Extension || 'bin').toLowerCase();
|
|
@@ -191,7 +193,7 @@ function resolveMediaPaths(record, structure, placementPref) {
|
|
|
191
193
|
* Extract path components for entity-dir records.
|
|
192
194
|
* Simplified from processEntityDirEntries() for collision detection.
|
|
193
195
|
*/
|
|
194
|
-
function resolveEntityDirPaths(entityName, record, dirName) {
|
|
196
|
+
export function resolveEntityDirPaths(entityName, record, dirName) {
|
|
195
197
|
let name;
|
|
196
198
|
if (entityName === 'app_version' && record.Number) {
|
|
197
199
|
name = sanitizeFilename(String(record.Number));
|
|
@@ -644,10 +646,10 @@ export async function performClone(source, options = {}) {
|
|
|
644
646
|
const effectiveDomain = options.domain || config.domain;
|
|
645
647
|
let appJson;
|
|
646
648
|
|
|
647
|
-
// Step 1: Source mismatch detection
|
|
649
|
+
// Step 1: Source mismatch detection (skip in pull mode)
|
|
648
650
|
// Warn when the user provides an explicit source that differs from the stored one.
|
|
649
|
-
const storedCloneSource = await loadCloneSource();
|
|
650
|
-
if (source && storedCloneSource && source !== storedCloneSource) {
|
|
651
|
+
const storedCloneSource = options.pullMode ? null : await loadCloneSource();
|
|
652
|
+
if (!options.pullMode && source && storedCloneSource && source !== storedCloneSource) {
|
|
651
653
|
if (!options.force && !options.yes) {
|
|
652
654
|
log.warn('');
|
|
653
655
|
log.warn(` ⚠ This project was previously cloned from: ${storedCloneSource}`);
|
|
@@ -707,95 +709,132 @@ export async function performClone(source, options = {}) {
|
|
|
707
709
|
}
|
|
708
710
|
}
|
|
709
711
|
|
|
710
|
-
log.success(
|
|
712
|
+
log.success(`${options.pullMode ? 'Pulling' : 'Cloning'} "${appJson.Name}" (${appJson.ShortName})`);
|
|
711
713
|
|
|
712
|
-
// Check for un-pushed staged items in synchronize.json
|
|
713
|
-
|
|
714
|
+
// Check for un-pushed staged items in synchronize.json (skip in pull mode)
|
|
715
|
+
if (!options.pullMode) {
|
|
716
|
+
await checkPendingSynchronize(options);
|
|
717
|
+
}
|
|
714
718
|
|
|
715
719
|
// Ensure sensitive files are gitignored
|
|
716
|
-
await ensureGitignore(['.dbo/credentials.json', '.dbo/cookies.txt']);
|
|
717
|
-
|
|
718
|
-
// Step 2: Update .dbo/config.json
|
|
719
|
-
|
|
720
|
-
|
|
721
|
-
|
|
722
|
-
|
|
723
|
-
|
|
724
|
-
|
|
725
|
-
|
|
726
|
-
|
|
727
|
-
|
|
728
|
-
|
|
729
|
-
|
|
730
|
-
|
|
731
|
-
if (
|
|
732
|
-
|
|
733
|
-
|
|
734
|
-
|
|
735
|
-
|
|
736
|
-
|
|
737
|
-
|
|
738
|
-
|
|
739
|
-
const existingPreset = await loadTransactionKeyPreset();
|
|
740
|
-
if (!existingPreset) {
|
|
741
|
-
if (options.yes || !process.stdin.isTTY) {
|
|
742
|
-
await saveTransactionKeyPreset('RowUID');
|
|
743
|
-
log.dim(' TransactionKeyPreset: RowUID (default)');
|
|
744
|
-
} else {
|
|
745
|
-
const inquirer = (await import('inquirer')).default;
|
|
746
|
-
const { preset } = await inquirer.prompt([{
|
|
747
|
-
type: 'list',
|
|
748
|
-
name: 'preset',
|
|
749
|
-
message: 'Which row key should the CLI use when building input expressions?',
|
|
750
|
-
choices: [
|
|
751
|
-
{ name: 'RowUID (recommended — stable across domains)', value: 'RowUID' },
|
|
752
|
-
{ name: 'RowID (numeric IDs)', value: 'RowID' },
|
|
753
|
-
],
|
|
754
|
-
}]);
|
|
755
|
-
await saveTransactionKeyPreset(preset);
|
|
756
|
-
log.dim(` TransactionKeyPreset: ${preset}`);
|
|
720
|
+
await ensureGitignore(['.dbo/credentials.json', '.dbo/cookies.txt', '.dbo/.app_baseline.json']);
|
|
721
|
+
|
|
722
|
+
// Step 2: Update .dbo/config.json (skip in pull mode — config already set)
|
|
723
|
+
if (!options.pullMode) {
|
|
724
|
+
await updateConfigWithApp({
|
|
725
|
+
AppID: appJson.AppID,
|
|
726
|
+
AppUID: appJson.UID,
|
|
727
|
+
AppName: appJson.Name,
|
|
728
|
+
AppShortName: appJson.ShortName,
|
|
729
|
+
});
|
|
730
|
+
await saveCloneSource(activeSource || 'default');
|
|
731
|
+
log.dim(' Updated .dbo/config.json with app metadata');
|
|
732
|
+
}
|
|
733
|
+
|
|
734
|
+
// Detect and store ModifyKey for locked/production apps (skip in pull mode)
|
|
735
|
+
if (!options.pullMode) {
|
|
736
|
+
const modifyKey = appJson.ModifyKey || null;
|
|
737
|
+
await saveAppModifyKey(modifyKey);
|
|
738
|
+
if (modifyKey) {
|
|
739
|
+
log.warn('');
|
|
740
|
+
log.warn(' ⚠ This app has a ModifyKey set (production/locked mode).');
|
|
741
|
+
log.warn(' You will be prompted to enter the ModifyKey before any push, input, add, content deploy, or deploy command.');
|
|
742
|
+
log.warn('');
|
|
757
743
|
}
|
|
758
744
|
}
|
|
759
745
|
|
|
760
|
-
//
|
|
761
|
-
|
|
746
|
+
// Prompt for TransactionKeyPreset if not already set (skip in pull mode)
|
|
747
|
+
if (!options.pullMode) {
|
|
748
|
+
const existingPreset = await loadTransactionKeyPreset();
|
|
749
|
+
if (!existingPreset) {
|
|
750
|
+
if (options.yes || !process.stdin.isTTY) {
|
|
751
|
+
await saveTransactionKeyPreset('RowUID');
|
|
752
|
+
log.dim(' TransactionKeyPreset: RowUID (default)');
|
|
753
|
+
} else {
|
|
754
|
+
const inquirer = (await import('inquirer')).default;
|
|
755
|
+
const { preset } = await inquirer.prompt([{
|
|
756
|
+
type: 'list',
|
|
757
|
+
name: 'preset',
|
|
758
|
+
message: 'Which row key should the CLI use when building input expressions?',
|
|
759
|
+
choices: [
|
|
760
|
+
{ name: 'RowUID (recommended — stable across domains)', value: 'RowUID' },
|
|
761
|
+
{ name: 'RowID (numeric IDs)', value: 'RowID' },
|
|
762
|
+
],
|
|
763
|
+
}]);
|
|
764
|
+
await saveTransactionKeyPreset(preset);
|
|
765
|
+
log.dim(` TransactionKeyPreset: ${preset}`);
|
|
766
|
+
}
|
|
767
|
+
}
|
|
768
|
+
}
|
|
762
769
|
|
|
763
|
-
// Step
|
|
764
|
-
|
|
765
|
-
await
|
|
770
|
+
// Step 3: Update package.json (skip in pull mode)
|
|
771
|
+
if (!options.pullMode) {
|
|
772
|
+
await updatePackageJson(appJson, config);
|
|
766
773
|
}
|
|
767
774
|
|
|
775
|
+
// Step 4: Create default project directories + bin structure
|
|
768
776
|
const bins = appJson.children.bin || [];
|
|
769
777
|
const structure = buildBinHierarchy(bins, appJson.AppID);
|
|
770
|
-
|
|
771
|
-
|
|
772
|
-
|
|
773
|
-
|
|
774
|
-
|
|
775
|
-
|
|
776
|
-
|
|
777
|
-
|
|
778
|
-
|
|
779
|
-
|
|
780
|
-
|
|
781
|
-
|
|
782
|
-
|
|
783
|
-
|
|
784
|
-
|
|
785
|
-
|
|
786
|
-
|
|
787
|
-
|
|
788
|
-
|
|
789
|
-
|
|
790
|
-
'Src': 'src',
|
|
791
|
-
};
|
|
792
|
-
for (const [oldName, newName] of Object.entries(LEGACY_DIR_MAP)) {
|
|
793
|
-
try {
|
|
794
|
-
await access(join(process.cwd(), oldName));
|
|
795
|
-
log.warn(`Legacy directory detected: "${oldName}/" — rename it to "${newName}/" for the new convention.`);
|
|
796
|
-
} catch {
|
|
797
|
-
// does not exist — no warning needed
|
|
778
|
+
|
|
779
|
+
if (!options.pullMode) {
|
|
780
|
+
for (const dir of DEFAULT_PROJECT_DIRS) {
|
|
781
|
+
await mkdir(dir, { recursive: true });
|
|
782
|
+
}
|
|
783
|
+
|
|
784
|
+
// Create media sub-directories for this app:
|
|
785
|
+
// media/<ShortName>/app/ — app-level media assets
|
|
786
|
+
// media/<ShortName>/user/ — user-uploaded media
|
|
787
|
+
const appShortName = appJson.ShortName;
|
|
788
|
+
const mediaSubs = [];
|
|
789
|
+
if (appShortName) {
|
|
790
|
+
const mediaDirs = [
|
|
791
|
+
`media/${appShortName}/app`,
|
|
792
|
+
`media/${appShortName}/user`,
|
|
793
|
+
];
|
|
794
|
+
for (const sub of mediaDirs) {
|
|
795
|
+
await mkdir(sub, { recursive: true });
|
|
796
|
+
mediaSubs.push(sub);
|
|
797
|
+
}
|
|
798
798
|
}
|
|
799
|
+
|
|
800
|
+
// Best-effort: apply trash icon
|
|
801
|
+
await applyTrashIcon(join(process.cwd(), 'trash'));
|
|
802
|
+
|
|
803
|
+
const createdDirs = await createDirectories(structure);
|
|
804
|
+
await saveStructureFile(structure);
|
|
805
|
+
|
|
806
|
+
const totalDirs = DEFAULT_PROJECT_DIRS.length + mediaSubs.length + createdDirs.length;
|
|
807
|
+
log.success(`Created ${totalDirs} director${totalDirs === 1 ? 'y' : 'ies'}`);
|
|
808
|
+
for (const d of DEFAULT_PROJECT_DIRS) log.dim(` ${d}/`);
|
|
809
|
+
for (const d of mediaSubs) log.dim(` ${d}/`);
|
|
810
|
+
for (const d of createdDirs) log.dim(` ${d}/`);
|
|
811
|
+
|
|
812
|
+
// Warn about legacy mixed-case directories from pre-0.9.1
|
|
813
|
+
const LEGACY_DIR_MAP = {
|
|
814
|
+
'Bins': 'bins',
|
|
815
|
+
'Automations': 'automation',
|
|
816
|
+
'App Versions': 'app_version',
|
|
817
|
+
'Documentation': 'docs',
|
|
818
|
+
'Sites': 'site',
|
|
819
|
+
'Extensions': 'extension',
|
|
820
|
+
'Data Sources': 'data_source',
|
|
821
|
+
'Groups': 'group',
|
|
822
|
+
'Integrations': 'integration',
|
|
823
|
+
'Trash': 'trash',
|
|
824
|
+
'Src': 'src',
|
|
825
|
+
};
|
|
826
|
+
for (const [oldName, newName] of Object.entries(LEGACY_DIR_MAP)) {
|
|
827
|
+
try {
|
|
828
|
+
await access(join(process.cwd(), oldName));
|
|
829
|
+
log.warn(`Legacy directory detected: "${oldName}/" — rename it to "${newName}/" for the new convention.`);
|
|
830
|
+
} catch {
|
|
831
|
+
// does not exist — no warning needed
|
|
832
|
+
}
|
|
833
|
+
}
|
|
834
|
+
} else {
|
|
835
|
+
// Pull mode: reuse existing structure, just ensure dirs exist for new bins
|
|
836
|
+
await createDirectories(structure);
|
|
837
|
+
await saveStructureFile(structure);
|
|
799
838
|
}
|
|
800
839
|
|
|
801
840
|
// Step 4b: Determine placement preferences (from config or prompt)
|
|
@@ -816,9 +855,9 @@ export async function performClone(source, options = {}) {
|
|
|
816
855
|
log.info(`Entity filter: only processing ${options.entity}`);
|
|
817
856
|
}
|
|
818
857
|
|
|
819
|
-
// Step 4c: Detect and resolve file path collisions (skip in entity-filter mode)
|
|
858
|
+
// Step 4c: Detect and resolve file path collisions (skip in pull mode and entity-filter mode)
|
|
820
859
|
let toDeleteUIDs = new Set();
|
|
821
|
-
if (!entityFilter) {
|
|
860
|
+
if (!options.pullMode && !entityFilter) {
|
|
822
861
|
log.info('Scanning for file path collisions...');
|
|
823
862
|
const fileRegistry = await buildFileRegistry(appJson, structure, placementPrefs);
|
|
824
863
|
toDeleteUIDs = await resolveCollisions(fileRegistry, options);
|
|
@@ -841,6 +880,11 @@ export async function performClone(source, options = {}) {
|
|
|
841
880
|
);
|
|
842
881
|
}
|
|
843
882
|
|
|
883
|
+
// Step 5a: Write manifest.json to project root (from server content or resolved template)
|
|
884
|
+
if (!entityFilter || entityFilter.has('content')) {
|
|
885
|
+
await writeManifestJson(appJson, contentRefs);
|
|
886
|
+
}
|
|
887
|
+
|
|
844
888
|
// Step 5b: Process media → download binary files + metadata (skip rejected records)
|
|
845
889
|
let mediaRefs = [];
|
|
846
890
|
if (!entityFilter || entityFilter.has('media')) {
|
|
@@ -900,18 +944,18 @@ export async function performClone(source, options = {}) {
|
|
|
900
944
|
// Step 7: Save app.json with references
|
|
901
945
|
await saveAppJson(appJson, contentRefs, otherRefs, effectiveDomain);
|
|
902
946
|
|
|
903
|
-
// Step 8: Create .
|
|
947
|
+
// Step 8: Create .dbo/.app_baseline.json baseline for delta tracking (skip in entity-filter mode to avoid overwriting)
|
|
904
948
|
if (!entityFilter) {
|
|
905
949
|
await saveBaselineFile(appJson);
|
|
906
950
|
}
|
|
907
951
|
|
|
908
|
-
// Step 9: Ensure .app.json is in .gitignore
|
|
909
|
-
await ensureGitignore(['.app.json']);
|
|
910
|
-
|
|
911
952
|
log.plain('');
|
|
912
|
-
|
|
953
|
+
const verb = options.pullMode ? 'Pull' : 'Clone';
|
|
954
|
+
log.success(entityFilter ? `${verb} complete! (filtered: ${options.entity})` : `${verb} complete!`);
|
|
913
955
|
log.dim(' app.json saved to project root');
|
|
914
|
-
|
|
956
|
+
if (!options.pullMode) {
|
|
957
|
+
log.dim(' Run "dbo login" to authenticate, then "dbo push" to deploy changes');
|
|
958
|
+
}
|
|
915
959
|
}
|
|
916
960
|
|
|
917
961
|
/**
|
|
@@ -922,7 +966,7 @@ export async function performClone(source, options = {}) {
|
|
|
922
966
|
* Entity-dir names (e.g. "extension", "site") are matched directly.
|
|
923
967
|
* Documentation aliases are also accepted (e.g. "column" → "output_value").
|
|
924
968
|
*/
|
|
925
|
-
function resolveEntityFilter(entityArg) {
|
|
969
|
+
export function resolveEntityFilter(entityArg) {
|
|
926
970
|
if (!entityArg) return null;
|
|
927
971
|
|
|
928
972
|
const input = entityArg.toLowerCase().trim();
|
|
@@ -947,6 +991,14 @@ async function resolvePlacementPreferences(appJson, options) {
|
|
|
947
991
|
let contentPlacement = saved.contentPlacement;
|
|
948
992
|
let mediaPlacement = saved.mediaPlacement;
|
|
949
993
|
|
|
994
|
+
// Pull mode: use saved preferences or default to 'bin', no prompts
|
|
995
|
+
if (options.pullMode) {
|
|
996
|
+
return {
|
|
997
|
+
contentPlacement: contentPlacement || 'bin',
|
|
998
|
+
mediaPlacement: mediaPlacement || 'bin',
|
|
999
|
+
};
|
|
1000
|
+
}
|
|
1001
|
+
|
|
950
1002
|
// --media-placement flag takes precedence over saved config
|
|
951
1003
|
if (options.mediaPlacement) {
|
|
952
1004
|
mediaPlacement = options.mediaPlacement === 'fullpath' ? 'fullpath' : 'bin';
|
|
@@ -1015,6 +1067,8 @@ async function resolvePlacementPreferences(appJson, options) {
|
|
|
1015
1067
|
|
|
1016
1068
|
/**
|
|
1017
1069
|
* Fetch app JSON from the server by AppShortName.
|
|
1070
|
+
* Distinguishes between authentication failures (expired session) and
|
|
1071
|
+
* genuine "app not found" responses, offering re-login when appropriate.
|
|
1018
1072
|
*/
|
|
1019
1073
|
async function fetchAppFromServer(appShortName, options, config) {
|
|
1020
1074
|
const client = new DboClient({ domain: options.domain, verbose: options.verbose });
|
|
@@ -1030,6 +1084,58 @@ async function fetchAppFromServer(appShortName, options, config) {
|
|
|
1030
1084
|
throw err;
|
|
1031
1085
|
}
|
|
1032
1086
|
|
|
1087
|
+
// Check for authentication / session errors before parsing app data.
|
|
1088
|
+
// The server may return HTTP 401/403 or a 200 envelope with Successful=false
|
|
1089
|
+
// and messages containing user identity patterns.
|
|
1090
|
+
const AUTH_PATTERNS = ['LoggedInUser_UID', 'LoggedInUserID', 'CurrentUserID', 'UserID', 'not authenticated', 'session expired', 'login required'];
|
|
1091
|
+
const messages = result.messages || [];
|
|
1092
|
+
const allMsgText = messages.filter(m => typeof m === 'string').join(' ');
|
|
1093
|
+
const isAuthError = !result.ok && (result.status === 401 || result.status === 403)
|
|
1094
|
+
|| (!result.successful && AUTH_PATTERNS.some(p => allMsgText.includes(p)));
|
|
1095
|
+
|
|
1096
|
+
if (isAuthError) {
|
|
1097
|
+
spinner.fail('Session expired or not authenticated');
|
|
1098
|
+
log.warn('Your session appears to have expired.');
|
|
1099
|
+
if (allMsgText) log.dim(` Server: ${allMsgText.substring(0, 200)}`);
|
|
1100
|
+
|
|
1101
|
+
// Offer re-login
|
|
1102
|
+
if (process.stdin.isTTY) {
|
|
1103
|
+
const inquirer = (await import('inquirer')).default;
|
|
1104
|
+
const { action } = await inquirer.prompt([{
|
|
1105
|
+
type: 'list',
|
|
1106
|
+
name: 'action',
|
|
1107
|
+
message: 'How would you like to proceed?',
|
|
1108
|
+
choices: [
|
|
1109
|
+
{ name: 'Re-login now (recommended)', value: 'relogin' },
|
|
1110
|
+
{ name: 'Abort', value: 'abort' },
|
|
1111
|
+
],
|
|
1112
|
+
}]);
|
|
1113
|
+
|
|
1114
|
+
if (action === 'relogin') {
|
|
1115
|
+
const { performLogin } = await import('./login.js');
|
|
1116
|
+
await performLogin(options.domain || config.domain);
|
|
1117
|
+
log.info('Retrying app fetch...');
|
|
1118
|
+
return fetchAppFromServer(appShortName, options, config);
|
|
1119
|
+
}
|
|
1120
|
+
} else {
|
|
1121
|
+
log.dim(' Run "dbo login" to authenticate, then retry.');
|
|
1122
|
+
}
|
|
1123
|
+
throw new Error('Authentication required. Run "dbo login" first.');
|
|
1124
|
+
}
|
|
1125
|
+
|
|
1126
|
+
// Check for non-auth server errors (500, envelope Successful=false, etc.)
|
|
1127
|
+
if (!result.ok && result.status >= 500) {
|
|
1128
|
+
spinner.fail(`Server error (HTTP ${result.status})`);
|
|
1129
|
+
if (allMsgText) log.dim(` Server: ${allMsgText.substring(0, 200)}`);
|
|
1130
|
+
throw new Error(`Server error (HTTP ${result.status}) fetching app "${appShortName}"`);
|
|
1131
|
+
}
|
|
1132
|
+
|
|
1133
|
+
if (!result.successful && allMsgText) {
|
|
1134
|
+
spinner.fail(`Server returned an error`);
|
|
1135
|
+
log.warn(` ${allMsgText.substring(0, 300)}`);
|
|
1136
|
+
throw new Error(`Server error fetching app "${appShortName}": ${allMsgText.substring(0, 200)}`);
|
|
1137
|
+
}
|
|
1138
|
+
|
|
1033
1139
|
const data = result.payload || result.data;
|
|
1034
1140
|
|
|
1035
1141
|
// Handle all response shapes:
|
|
@@ -1100,6 +1206,21 @@ async function updatePackageJson(appJson, config) {
|
|
|
1100
1206
|
changed = true;
|
|
1101
1207
|
}
|
|
1102
1208
|
|
|
1209
|
+
// Add @dboio/cli to devDependencies with current CLI version
|
|
1210
|
+
if (!pkg.devDependencies || !pkg.devDependencies['@dboio/cli']) {
|
|
1211
|
+
try {
|
|
1212
|
+
const cliRoot = join(dirname(fileURLToPath(import.meta.url)), '..', '..');
|
|
1213
|
+
const cliPkg = JSON.parse(await readFile(join(cliRoot, 'package.json'), 'utf8'));
|
|
1214
|
+
if (cliPkg.version) {
|
|
1215
|
+
if (!pkg.devDependencies) pkg.devDependencies = {};
|
|
1216
|
+
pkg.devDependencies['@dboio/cli'] = `^${cliPkg.version}`;
|
|
1217
|
+
changed = true;
|
|
1218
|
+
}
|
|
1219
|
+
} catch {
|
|
1220
|
+
// Could not read CLI version — skip
|
|
1221
|
+
}
|
|
1222
|
+
}
|
|
1223
|
+
|
|
1103
1224
|
if (changed) {
|
|
1104
1225
|
await writeFile(pkgPath, JSON.stringify(pkg, null, 2) + '\n');
|
|
1105
1226
|
log.dim(' Updated package.json with app metadata');
|
|
@@ -1555,6 +1676,20 @@ async function buildDescriptorPrePass(extensionEntries, structure) {
|
|
|
1555
1676
|
log.dim(` ${fullDir}/`);
|
|
1556
1677
|
}
|
|
1557
1678
|
|
|
1679
|
+
// Create directories for descriptors not in the mapping but with a non-empty value
|
|
1680
|
+
const unmappedDescriptors = new Set();
|
|
1681
|
+
for (const rec of extensionEntries) {
|
|
1682
|
+
const d = rec.Descriptor;
|
|
1683
|
+
if (d && d !== 'descriptor_definition' && !mapping[d]) {
|
|
1684
|
+
unmappedDescriptors.add(d);
|
|
1685
|
+
}
|
|
1686
|
+
}
|
|
1687
|
+
for (const dirName of unmappedDescriptors) {
|
|
1688
|
+
const fullDir = `${EXTENSION_DESCRIPTORS_DIR}/${dirName}`;
|
|
1689
|
+
await mkdir(fullDir, { recursive: true });
|
|
1690
|
+
log.dim(` ${fullDir}/`);
|
|
1691
|
+
}
|
|
1692
|
+
|
|
1558
1693
|
await saveDescriptorMapping(structure, mapping);
|
|
1559
1694
|
log.dim(` Saved descriptorMapping to .dbo/structure.json`);
|
|
1560
1695
|
|
|
@@ -1961,6 +2096,42 @@ async function processMediaEntries(mediaRecords, structure, options, config, app
|
|
|
1961
2096
|
// Track stale records (404s) for cleanup prompt
|
|
1962
2097
|
const staleRecords = [];
|
|
1963
2098
|
|
|
2099
|
+
// Pre-scan: determine which media files actually need downloading
|
|
2100
|
+
// (new files or files with newer server timestamps)
|
|
2101
|
+
const needsDownload = [];
|
|
2102
|
+
const upToDateRefs = [];
|
|
2103
|
+
|
|
2104
|
+
for (const record of mediaRecords) {
|
|
2105
|
+
if (skipUIDs.has(record.UID)) continue;
|
|
2106
|
+
|
|
2107
|
+
const { metaPath: scanMetaPath } = resolveMediaPaths(record, structure, mediaPlacement === 'fullpath' ? 'path' : mediaPlacement === 'bin' ? 'bin' : null);
|
|
2108
|
+
const scanExists = await fileExists(scanMetaPath);
|
|
2109
|
+
|
|
2110
|
+
if (!scanExists) {
|
|
2111
|
+
// New file — always needs download
|
|
2112
|
+
needsDownload.push(record);
|
|
2113
|
+
} else if (options.force) {
|
|
2114
|
+
// Force mode — re-download everything
|
|
2115
|
+
needsDownload.push(record);
|
|
2116
|
+
} else {
|
|
2117
|
+
// Existing file — check if server is newer
|
|
2118
|
+
const configWithTz = { ...config, ServerTimezone: serverTz };
|
|
2119
|
+
const localSyncTime = await getLocalSyncTime(scanMetaPath);
|
|
2120
|
+
const serverNewer = isServerNewer(localSyncTime, record._LastUpdated, configWithTz);
|
|
2121
|
+
if (serverNewer) {
|
|
2122
|
+
needsDownload.push(record);
|
|
2123
|
+
} else {
|
|
2124
|
+
// Up to date — still need ref for app.json
|
|
2125
|
+
upToDateRefs.push({ uid: record.UID, metaPath: scanMetaPath });
|
|
2126
|
+
}
|
|
2127
|
+
}
|
|
2128
|
+
}
|
|
2129
|
+
|
|
2130
|
+
if (needsDownload.length === 0) {
|
|
2131
|
+
log.dim(` All ${mediaRecords.length} media file(s) up to date`);
|
|
2132
|
+
return upToDateRefs;
|
|
2133
|
+
}
|
|
2134
|
+
|
|
1964
2135
|
// Determine if we can download (need a server connection)
|
|
1965
2136
|
let canDownload = false;
|
|
1966
2137
|
let client = null;
|
|
@@ -1970,7 +2141,7 @@ async function processMediaEntries(mediaRecords, structure, options, config, app
|
|
|
1970
2141
|
const { download } = await inquirer.prompt([{
|
|
1971
2142
|
type: 'confirm',
|
|
1972
2143
|
name: 'download',
|
|
1973
|
-
message: `${
|
|
2144
|
+
message: `${needsDownload.length} media file(s) need to be downloaded (${mediaRecords.length - needsDownload.length} up to date). Attempt download now?`,
|
|
1974
2145
|
default: true,
|
|
1975
2146
|
}]);
|
|
1976
2147
|
canDownload = download;
|
|
@@ -1989,8 +2160,8 @@ async function processMediaEntries(mediaRecords, structure, options, config, app
|
|
|
1989
2160
|
}
|
|
1990
2161
|
|
|
1991
2162
|
if (!canDownload) {
|
|
1992
|
-
log.warn(`Skipping ${
|
|
1993
|
-
return
|
|
2163
|
+
log.warn(`Skipping ${needsDownload.length} media file(s) — download not attempted`);
|
|
2164
|
+
return upToDateRefs;
|
|
1994
2165
|
}
|
|
1995
2166
|
|
|
1996
2167
|
const refs = [];
|
|
@@ -2229,7 +2400,7 @@ async function processMediaEntries(mediaRecords, structure, options, config, app
|
|
|
2229
2400
|
refs.push({ uid: record.UID, metaPath });
|
|
2230
2401
|
}
|
|
2231
2402
|
|
|
2232
|
-
log.info(`Media: ${downloaded} downloaded, ${failed} failed`);
|
|
2403
|
+
log.info(`Media: ${downloaded} downloaded, ${failed} failed, ${upToDateRefs.length} up to date`);
|
|
2233
2404
|
|
|
2234
2405
|
// Prompt for stale record cleanup
|
|
2235
2406
|
if (staleRecords.length > 0 && !options.yes) {
|
|
@@ -2265,7 +2436,7 @@ async function processMediaEntries(mediaRecords, structure, options, config, app
|
|
|
2265
2436
|
log.info(`Non-interactive mode: skipping stale cleanup for ${staleRecords.length} record(s)`);
|
|
2266
2437
|
}
|
|
2267
2438
|
|
|
2268
|
-
return refs;
|
|
2439
|
+
return [...upToDateRefs, ...refs];
|
|
2269
2440
|
}
|
|
2270
2441
|
|
|
2271
2442
|
/**
|
|
@@ -2533,13 +2704,9 @@ async function processRecord(entityName, record, structure, options, usedNames,
|
|
|
2533
2704
|
meta._contentColumns = ['Content'];
|
|
2534
2705
|
}
|
|
2535
2706
|
|
|
2536
|
-
//
|
|
2537
|
-
//
|
|
2538
|
-
// detection from flagging
|
|
2539
|
-
if (ext && !record.Extension) {
|
|
2540
|
-
meta.Extension = ext;
|
|
2541
|
-
record.Extension = ext;
|
|
2542
|
-
}
|
|
2707
|
+
// Extension was derived from Name/Path for local filename purposes only.
|
|
2708
|
+
// Do NOT write it to metadata or baseline when the server doesn't have it —
|
|
2709
|
+
// this prevents delta detection from flagging a false change on push.
|
|
2543
2710
|
|
|
2544
2711
|
await writeFile(metaPath, JSON.stringify(meta, null, 2) + '\n');
|
|
2545
2712
|
log.dim(` → ${metaPath}`);
|
|
@@ -2793,6 +2960,184 @@ export function buildOutputFilename(entityType, node, filenameCol, parentChain =
|
|
|
2793
2960
|
return allSegments.join('.');
|
|
2794
2961
|
}
|
|
2795
2962
|
|
|
2963
|
+
// ─── Inline Output Helpers ─────────────────────────────────────────────────
|
|
2964
|
+
|
|
2965
|
+
const INLINE_DOC_NAMES = { output_value: 'column', output_value_filter: 'filter', output_value_entity_column_rel: 'join' };
|
|
2966
|
+
const INLINE_DOC_TO_PHYSICAL = { column: 'output_value', join: 'output_value_entity_column_rel', filter: 'output_value_filter' };
|
|
2967
|
+
const INLINE_DOC_KEYS = ['column', 'join', 'filter'];
|
|
2968
|
+
|
|
2969
|
+
/**
|
|
2970
|
+
* Build the companion file stem for a child entity within a root output file.
|
|
2971
|
+
* e.g. root stem "_output~Sales~abc", entity "output_value", uid "col1"
|
|
2972
|
+
* → "_output~Sales~abc.column~col1"
|
|
2973
|
+
*
|
|
2974
|
+
* @param {string} rootStem - Root output file stem (no extension)
|
|
2975
|
+
* @param {string} physicalEntity - Physical entity name ('output_value', etc.)
|
|
2976
|
+
* @param {string} uid - Child entity UID
|
|
2977
|
+
* @param {string} [parentChainStem] - Already-built ancestor stem (for nested children)
|
|
2978
|
+
* @returns {string}
|
|
2979
|
+
*/
|
|
2980
|
+
export function getChildCompanionStem(rootStem, physicalEntity, uid, parentChainStem = rootStem) {
|
|
2981
|
+
const docName = INLINE_DOC_NAMES[physicalEntity] || physicalEntity;
|
|
2982
|
+
return `${parentChainStem}.${docName}~${uid}`;
|
|
2983
|
+
}
|
|
2984
|
+
|
|
2985
|
+
/**
|
|
2986
|
+
* Extract CustomSQL as a companion .sql file if rules require it.
|
|
2987
|
+
* Rules:
|
|
2988
|
+
* 1. Type === 'CustomSQL' → always extract (even empty)
|
|
2989
|
+
* 2. Type !== 'CustomSQL' AND CustomSQL non-empty decoded value → extract
|
|
2990
|
+
* 3. Otherwise → store "" inline; no file
|
|
2991
|
+
*
|
|
2992
|
+
* Mutates entityObj.CustomSQL to the @basename reference when extracted.
|
|
2993
|
+
* Returns the companion filename (without directory) if written, else null.
|
|
2994
|
+
*
|
|
2995
|
+
* @param {Object} entityObj - The entity object (mutated in place)
|
|
2996
|
+
* @param {string} companionStem - Stem for the companion file (no extension)
|
|
2997
|
+
* @param {string} outputDir - Directory where the root output JSON lives
|
|
2998
|
+
* @param {string} serverTz - Server timezone for timestamp syncing
|
|
2999
|
+
* @returns {Promise<string|null>} - Companion filename or null
|
|
3000
|
+
*/
|
|
3001
|
+
async function extractCustomSqlIfNeeded(entityObj, companionStem, outputDir, serverTz) {
  // Decode the (possibly base64-wrapped) server value up front.
  const decodedSql = resolveContentValue(entityObj.CustomSQL) ?? '';
  const sqlHasText = typeof decodedSql === 'string' && decodedSql.trim().length > 0;
  const typeIsCustomSql = entityObj.Type === 'CustomSQL';

  // Rule 3: neither a CustomSQL-typed entity nor real content → store "" inline, no file.
  if (!typeIsCustomSql && !sqlHasText) {
    entityObj.CustomSQL = '';
    return null;
  }

  // Rules 1/2: write the companion .sql file and point CustomSQL at it.
  const fileName = `${companionStem}.CustomSQL.sql`;
  const filePath = join(outputDir, fileName);
  await writeFile(filePath, sqlHasText ? decodedSql : '', 'utf8');
  entityObj.CustomSQL = `@${fileName}`;

  // Record CustomSQL as an extracted content column (idempotent).
  entityObj._contentColumns = entityObj._contentColumns || [];
  if (!entityObj._contentColumns.includes('CustomSQL')) {
    entityObj._contentColumns.push('CustomSQL');
  }

  // Best-effort: mirror the server timestamps onto the companion file.
  if (serverTz && (entityObj._CreatedOn || entityObj._LastUpdated)) {
    try {
      await setFileTimestamps(filePath, entityObj._CreatedOn, entityObj._LastUpdated, serverTz);
    } catch { /* non-critical */ }
  }

  log.dim(` → ${filePath}`);
  return fileName;
}
|
|
3033
|
+
|
|
3034
|
+
/**
 * Recursively build the inline `children` object for a parent output entity.
 *
 * Mutates parentObj in place: sets parentObj.children = { column: [], join: [], filter: [] }
 * and pushes a cleaned copy of each child (tree-internal `_children` stripped,
 * base64 values decoded, `_entity` stamped with the physical entity name) into
 * the matching doc-key array. Child CustomSQL values are extracted to companion
 * .sql files via extractCustomSqlIfNeeded, and the walk recurses into each
 * child's own `_children` (e.g. join→column, column→filter).
 *
 * Each child object retains `_entity` set to the physical entity name
 * (output_value, output_value_entity_column_rel, output_value_filter)
 * so that push can route submissions correctly.
 *
 * @param {Object} parentObj - The entity object to populate (mutated in place)
 * @param {Object} node - Tree node from buildOutputHierarchyTree (has _children)
 * @param {string} rootStem - Root output file stem (e.g. "_output~Sales~abc")
 * @param {string} outputDir - Directory where root output JSON lives
 * @param {string} serverTz - Server timezone (passed through for timestamp syncing)
 * @param {string} [parentStem] - Ancestor stem for compound companion naming; defaults to rootStem
 * @returns {Promise<string[]>} - Basenames of all companion .sql files written
 */
async function buildInlineOutputChildren(parentObj, node, rootStem, outputDir, serverTz, parentStem = rootStem) {
  const companionFiles = [];
  const nodeChildren = node._children || {};

  // Always create the children object with all three doc keys, even when empty,
  // so the on-disk format is uniform for every entity.
  parentObj.children = { column: [], join: [], filter: [] };

  for (const docKey of INLINE_DOC_KEYS) {
    const entityArray = nodeChildren[docKey];
    const physicalKey = INLINE_DOC_TO_PHYSICAL[docKey];

    if (!Array.isArray(entityArray) || entityArray.length === 0) continue;

    for (const child of entityArray) {
      // Build a clean copy without tree-internal fields (the original `child`
      // keeps its _children so we can recurse below).
      const childObj = { ...child };
      delete childObj._children;

      // Decode any base64-wrapped values inline. CustomSQL is skipped here
      // because extractCustomSqlIfNeeded owns its decode/extract rules.
      for (const [key, value] of Object.entries(childObj)) {
        if (key === 'CustomSQL') continue; // handled by extractCustomSqlIfNeeded
        if (value && typeof value === 'object' && !Array.isArray(value) && value.encoding === 'base64') {
          childObj[key] = resolveContentValue(value);
        }
      }

      // Ensure _entity is set to the physical entity name (for push routing).
      childObj._entity = physicalKey;

      // Compute the compound companion stem for this child.
      const childStem = getChildCompanionStem(rootStem, physicalKey, child.UID, parentStem);

      // Extract CustomSQL to a companion .sql file when required.
      const companionFile = await extractCustomSqlIfNeeded(childObj, childStem, outputDir, serverTz);
      if (companionFile) companionFiles.push(companionFile);

      // Recurse into the child's _children (e.g. join→column, column→filter)
      // only when at least one doc-key array is non-empty.
      if (child._children && Object.keys(child._children).some(k => child._children[k]?.length > 0)) {
        const gcFiles = await buildInlineOutputChildren(childObj, child, rootStem, outputDir, serverTz, childStem);
        companionFiles.push(...gcFiles);
      } else {
        // Leaf node: still set an empty children object for format uniformity.
        childObj.children = { column: [], join: [], filter: [] };
      }

      parentObj.children[docKey].push(childObj);
    }
  }

  return companionFiles;
}
|
|
3102
|
+
|
|
3103
|
+
/**
 * Move orphaned old-format child output .json files into /trash.
 * Old format: _output~name~uid.column~name~uid.json — i.e. files sharing the
 * root stem that contain a .column~, .join~, or .filter~ segment.
 *
 * Best-effort: unreadable directories and failed renames are ignored.
 *
 * @param {string} outputDir - Directory containing output files
 * @param {string} rootStem - Root output file stem (e.g. "_output~Sales~abc")
 */
async function trashOrphanedChildFiles(outputDir, rootStem) {
  let dirEntries;
  try {
    dirEntries = await readdir(outputDir);
  } catch {
    return; // directory missing/unreadable — nothing to clean up
  }

  const trashDir = join(process.cwd(), 'trash');
  let movedAny = false;

  // Old-format child file: shares the root stem, is .json (not a .CustomSQL
  // companion), and carries a .column~/.join~/.filter~ segment.
  const isOldChildFile = (name) =>
    name.startsWith(`${rootStem}.`) &&
    name.endsWith('.json') &&
    !name.includes('.CustomSQL.') &&
    /\.(column|join|filter)~/.test(name);

  for (const entry of dirEntries) {
    if (!isOldChildFile(entry)) continue;

    // Create /trash lazily, only once we actually have something to move.
    if (!movedAny) {
      await mkdir(trashDir, { recursive: true });
      movedAny = true;
    }
    try {
      await rename(join(outputDir, entry), join(trashDir, entry));
      log.dim(` Trashed orphaned child file: ${entry}`);
    } catch { /* non-critical */ }
  }

  // Re-apply trash icon if files were moved (self-heals after user clears trash)
  if (movedAny) {
    await ensureTrashIcon(trashDir);
  }
}
|
|
3138
|
+
|
|
3139
|
+
// ─── Filename Parsing ──────────────────────────────────────────────────────
|
|
3140
|
+
|
|
2796
3141
|
/**
|
|
2797
3142
|
* Parse an output hierarchy filename back into entity relationships.
|
|
2798
3143
|
*
|
|
@@ -2886,7 +3231,11 @@ export function parseOutputHierarchyFile(filename) {
|
|
|
2886
3231
|
|
|
2887
3232
|
/**
|
|
2888
3233
|
* Main orchestrator: process output hierarchy entities during clone.
|
|
2889
|
-
* Builds tree, resolves filenames, writes
|
|
3234
|
+
* Builds tree, resolves filenames, writes single-file inline format.
|
|
3235
|
+
*
|
|
3236
|
+
* Each root output produces exactly one .json file with all children
|
|
3237
|
+
* embedded inline under children: { column: [], join: [], filter: [] }.
|
|
3238
|
+
* Companion .sql files are extracted per CustomSQL rules.
|
|
2890
3239
|
*
|
|
2891
3240
|
* @param {Object} appJson - The full app JSON
|
|
2892
3241
|
* @param {Object} structure - Bin hierarchy structure
|
|
@@ -2950,8 +3299,29 @@ async function processOutputHierarchy(appJson, structure, options, serverTz) {
|
|
|
2950
3299
|
const rootBasename = buildOutputFilename('output', output, filenameCols.output);
|
|
2951
3300
|
const rootMetaPath = join(binDir, `${rootBasename}.json`);
|
|
2952
3301
|
|
|
3302
|
+
// Detect old-format files that need migration to inline children format.
|
|
3303
|
+
// Old format: children.column/join/filter contain @reference strings to separate files.
|
|
3304
|
+
// New format: children contain inline entity objects directly.
|
|
3305
|
+
let needsFormatMigration = false;
|
|
3306
|
+
if (await fileExists(rootMetaPath)) {
|
|
3307
|
+
try {
|
|
3308
|
+
const existingMeta = JSON.parse(await readFile(rootMetaPath, 'utf8'));
|
|
3309
|
+
if (existingMeta.children) {
|
|
3310
|
+
const allRefs = [
|
|
3311
|
+
...(existingMeta.children.column || []),
|
|
3312
|
+
...(existingMeta.children.join || []),
|
|
3313
|
+
...(existingMeta.children.filter || []),
|
|
3314
|
+
];
|
|
3315
|
+
needsFormatMigration = allRefs.some(ref => typeof ref === 'string' && ref.startsWith('@'));
|
|
3316
|
+
}
|
|
3317
|
+
} catch { /* read error — will be overwritten */ }
|
|
3318
|
+
if (needsFormatMigration) {
|
|
3319
|
+
log.info(` Migrating ${rootBasename} to inline children format...`);
|
|
3320
|
+
}
|
|
3321
|
+
}
|
|
3322
|
+
|
|
2953
3323
|
// Change detection for existing files (skip when --entity forces re-processing)
|
|
2954
|
-
if (await fileExists(rootMetaPath) && !options.yes && !forceReprocess) {
|
|
3324
|
+
if (await fileExists(rootMetaPath) && !options.yes && !forceReprocess && !needsFormatMigration) {
|
|
2955
3325
|
if (bulkAction.value === 'skip_all') {
|
|
2956
3326
|
log.dim(` Skipped ${rootBasename}`);
|
|
2957
3327
|
refs.push({ uid: output.UID, metaPath: rootMetaPath });
|
|
@@ -2984,94 +3354,11 @@ async function processOutputHierarchy(appJson, structure, options, serverTz) {
|
|
|
2984
3354
|
}
|
|
2985
3355
|
}
|
|
2986
3356
|
|
|
2987
|
-
//
|
|
2988
|
-
const childRefs = { column: [], join: [], filter: [] };
|
|
2989
|
-
|
|
2990
|
-
// Helper to build a child filename segment
|
|
2991
|
-
const childSegment = (type, node, col) => {
|
|
2992
|
-
const uid = node.UID || '';
|
|
2993
|
-
const rawName = node[col];
|
|
2994
|
-
const name = rawName ? sanitizeFilename(String(rawName)) : '';
|
|
2995
|
-
return (!name || name === uid) ? `${type}~${uid}` : `${type}~${name}~${uid}`;
|
|
2996
|
-
};
|
|
2997
|
-
|
|
2998
|
-
// Process all children depth-first
|
|
2999
|
-
// Direct filters on output
|
|
3000
|
-
for (const filter of output._children.filter) {
|
|
3001
|
-
const filterName = `${rootBasename}.${childSegment('filter', filter, filenameCols.output_value_filter)}`;
|
|
3002
|
-
const filterPath = join(binDir, `${filterName}.json`);
|
|
3003
|
-
await writeOutputEntityFile(filter, 'output_value_filter', filterPath, serverTz);
|
|
3004
|
-
childRefs.filter.push(`@${filterPath}`);
|
|
3005
|
-
}
|
|
3006
|
-
|
|
3007
|
-
// Direct columns on output
|
|
3008
|
-
for (const col of output._children.column) {
|
|
3009
|
-
const colSeg = childSegment('column', col, filenameCols.output_value);
|
|
3010
|
-
const colName = `${rootBasename}.${colSeg}`;
|
|
3011
|
-
const colPath = join(binDir, `${colName}.json`);
|
|
3012
|
-
await writeOutputEntityFile(col, 'output_value', colPath, serverTz);
|
|
3013
|
-
childRefs.column.push(`@${colPath}`);
|
|
3014
|
-
|
|
3015
|
-
// Filters under this column
|
|
3016
|
-
for (const filter of col._children.filter) {
|
|
3017
|
-
const filterName = `${colName}.${childSegment('filter', filter, filenameCols.output_value_filter)}`;
|
|
3018
|
-
const filterPath = join(binDir, `${filterName}.json`);
|
|
3019
|
-
await writeOutputEntityFile(filter, 'output_value_filter', filterPath, serverTz);
|
|
3020
|
-
childRefs.filter.push(`@${filterPath}`);
|
|
3021
|
-
}
|
|
3022
|
-
}
|
|
3023
|
-
|
|
3024
|
-
// Joins on output
|
|
3025
|
-
for (const j of output._children.join) {
|
|
3026
|
-
const joinSeg = childSegment('join', j, filenameCols.output_value_entity_column_rel);
|
|
3027
|
-
const joinName = `${rootBasename}.${joinSeg}`;
|
|
3028
|
-
const joinPath = join(binDir, `${joinName}.json`);
|
|
3029
|
-
await writeOutputEntityFile(j, 'output_value_entity_column_rel', joinPath, serverTz);
|
|
3030
|
-
childRefs.join.push(`@${joinPath}`);
|
|
3031
|
-
|
|
3032
|
-
// Columns under this join
|
|
3033
|
-
for (const col of j._children.column) {
|
|
3034
|
-
const joinColName = `${joinName}.${childSegment('column', col, filenameCols.output_value)}`;
|
|
3035
|
-
const joinColPath = join(binDir, `${joinColName}.json`);
|
|
3036
|
-
await writeOutputEntityFile(col, 'output_value', joinColPath, serverTz);
|
|
3037
|
-
childRefs.column.push(`@${joinColPath}`);
|
|
3038
|
-
|
|
3039
|
-
// Filters under this join→column
|
|
3040
|
-
for (const filter of col._children.filter) {
|
|
3041
|
-
const filterName = `${joinColName}.${childSegment('filter', filter, filenameCols.output_value_filter)}`;
|
|
3042
|
-
const filterPath = join(binDir, `${filterName}.json`);
|
|
3043
|
-
await writeOutputEntityFile(filter, 'output_value_filter', filterPath, serverTz);
|
|
3044
|
-
childRefs.filter.push(`@${filterPath}`);
|
|
3045
|
-
}
|
|
3046
|
-
}
|
|
3047
|
-
}
|
|
3048
|
-
|
|
3049
|
-
// Write root output JSON with child references
|
|
3357
|
+
// Build clean root object (strip tree-internal fields)
|
|
3050
3358
|
const rootMeta = {};
|
|
3051
|
-
const rootContentColumns = [];
|
|
3052
3359
|
for (const [key, value] of Object.entries(output)) {
|
|
3053
3360
|
if (key === '_children') continue;
|
|
3054
|
-
|
|
3055
|
-
// Extract CustomSQL to companion .sql file when Type is CustomSQL (even if empty)
|
|
3056
|
-
// or when the column has actual content
|
|
3057
|
-
if (key === 'CustomSQL') {
|
|
3058
|
-
const decoded = resolveContentValue(value);
|
|
3059
|
-
const hasContent = decoded && decoded.trim();
|
|
3060
|
-
if (output.Type === 'CustomSQL' || hasContent) {
|
|
3061
|
-
const sqlFilePath = rootMetaPath.replace(/\.json$/, '.CustomSQL.sql');
|
|
3062
|
-
await writeFile(sqlFilePath, hasContent ? decoded : '');
|
|
3063
|
-
rootMeta[key] = `@${basename(sqlFilePath)}`;
|
|
3064
|
-
rootContentColumns.push('CustomSQL');
|
|
3065
|
-
if (serverTz && (output._CreatedOn || output._LastUpdated)) {
|
|
3066
|
-
try { await setFileTimestamps(sqlFilePath, output._CreatedOn, output._LastUpdated, serverTz); } catch { /* non-critical */ }
|
|
3067
|
-
}
|
|
3068
|
-
log.dim(` → ${sqlFilePath}`);
|
|
3069
|
-
continue;
|
|
3070
|
-
}
|
|
3071
|
-
// Not CustomSQL type and empty — store inline
|
|
3072
|
-
rootMeta[key] = '';
|
|
3073
|
-
continue;
|
|
3074
|
-
}
|
|
3361
|
+
if (key === 'CustomSQL') continue; // handled by extractCustomSqlIfNeeded below
|
|
3075
3362
|
|
|
3076
3363
|
if (value && typeof value === 'object' && !Array.isArray(value) && value.encoding === 'base64') {
|
|
3077
3364
|
rootMeta[key] = resolveContentValue(value);
|
|
@@ -3080,10 +3367,16 @@ async function processOutputHierarchy(appJson, structure, options, serverTz) {
|
|
|
3080
3367
|
}
|
|
3081
3368
|
}
|
|
3082
3369
|
rootMeta._entity = 'output';
|
|
3083
|
-
|
|
3084
|
-
|
|
3085
|
-
|
|
3086
|
-
|
|
3370
|
+
|
|
3371
|
+
// Copy raw CustomSQL for extraction helper
|
|
3372
|
+
rootMeta.CustomSQL = output.CustomSQL;
|
|
3373
|
+
|
|
3374
|
+
// Extract CustomSQL on root (rules 1/2/3)
|
|
3375
|
+
await extractCustomSqlIfNeeded(rootMeta, rootBasename, binDir, serverTz);
|
|
3376
|
+
|
|
3377
|
+
// Embed all children under rootMeta.children = { column, join, filter }
|
|
3378
|
+
await buildInlineOutputChildren(rootMeta, output, rootBasename, binDir, serverTz);
|
|
3379
|
+
// rootMeta now has .children = { column: [...], join: [...], filter: [...] }
|
|
3087
3380
|
|
|
3088
3381
|
// If user chose a bin for a BinID-less output, store it and mark as modified
|
|
3089
3382
|
if (chosenBinId) {
|
|
@@ -3094,6 +3387,9 @@ async function processOutputHierarchy(appJson, structure, options, serverTz) {
|
|
|
3094
3387
|
await writeFile(rootMetaPath, JSON.stringify(rootMeta, null, 2) + '\n');
|
|
3095
3388
|
log.success(`Saved ${rootMetaPath}`);
|
|
3096
3389
|
|
|
3390
|
+
// Move orphaned old-format child .json files to /trash
|
|
3391
|
+
await trashOrphanedChildFiles(binDir, rootBasename);
|
|
3392
|
+
|
|
3097
3393
|
// Set file timestamps to server's _LastUpdated so diff detection works.
|
|
3098
3394
|
// Skip when chosenBinId is set — keep mtime at "now" so push detects the local edit.
|
|
3099
3395
|
if (!chosenBinId && serverTz && (output._CreatedOn || output._LastUpdated)) {
|
|
@@ -3109,61 +3405,79 @@ async function processOutputHierarchy(appJson, structure, options, serverTz) {
|
|
|
3109
3405
|
}
|
|
3110
3406
|
|
|
3111
3407
|
/**
|
|
3112
|
-
* Write
|
|
3113
|
-
*
|
|
3408
|
+
* Write manifest.json to project root.
|
|
3409
|
+
* If a manifest content record was cloned from the server, use its Content value.
|
|
3410
|
+
* Otherwise, generate from appJson values (empty strings for missing fields).
|
|
3114
3411
|
*/
|
|
3115
|
-
async function
|
|
3116
|
-
|
|
3117
|
-
const
|
|
3118
|
-
|
|
3119
|
-
|
|
3120
|
-
|
|
3121
|
-
|
|
3122
|
-
|
|
3123
|
-
|
|
3124
|
-
|
|
3125
|
-
|
|
3126
|
-
|
|
3127
|
-
|
|
3128
|
-
|
|
3129
|
-
|
|
3130
|
-
|
|
3131
|
-
|
|
3132
|
-
|
|
3133
|
-
|
|
3134
|
-
|
|
3135
|
-
|
|
3136
|
-
|
|
3412
|
+
/**
 * Write manifest.json to the project root.
 * Prefers a manifest content record cloned from the server (Name starting with
 * "manifest", Extension "json"); otherwise generates a PWA manifest from
 * appJson values, with empty strings for missing fields.
 *
 * @param {Object} appJson - The full app JSON
 * @param {Array<{metaPath: string}>} contentRefs - Cloned content record refs
 */
async function writeManifestJson(appJson, contentRefs) {
  // 1. Prefer a manifest content record cloned from the server.
  for (const ref of contentRefs) {
    let meta;
    try {
      meta = JSON.parse(await readFile(ref.metaPath, 'utf8'));
    } catch {
      continue; // unreadable/invalid metadata — try the next ref
    }

    const recordName = (meta.Name || '').toLowerCase();
    const recordExt = (meta.Extension || '').toLowerCase();
    if (!recordName.startsWith('manifest') || recordExt !== 'json') continue;

    // Found the manifest record — copy its content file to the project root.
    const contentRef = meta.Content;
    if (contentRef && String(contentRef).startsWith('@')) {
      const refFile = String(contentRef).substring(1);
      // Leading "/" means project-root-relative; otherwise relative to the meta file.
      const contentPath = refFile.startsWith('/')
        ? join(process.cwd(), refFile)
        : join(dirname(ref.metaPath), refFile);
      try {
        const content = await readFile(contentPath, 'utf8');
        await writeFile('manifest.json', content);
        log.dim(' manifest.json written to project root (from server content)');
      } catch (err) {
        log.warn(` Could not write manifest.json from server content: ${err.message}`);
      }
    }
    return;
  }

  // 2. No manifest content record — generate one from appJson values.
  const shortName = appJson.ShortName || '';
  const appName = appJson.Name || '';
  const description = appJson.Description || '';

  // Pull background_color from the widget-descriptor extension matching ShortName.
  let bgColor = '#ffffff';
  if (shortName) {
    for (const ext of appJson.children?.extension || []) {
      const descriptor = resolveContentValue(ext.Descriptor) || ext.Descriptor;
      const string1 = resolveContentValue(ext.String1) || ext.String1;
      if (descriptor === 'widget' && string1 === shortName) {
        bgColor = resolveContentValue(ext.String4) || ext.String4 || bgColor;
        break;
      }
    }
  }

  const manifest = {
    name: appName,
    short_name: shortName,
    description,
    orientation: 'portrait',
    start_url: shortName ? `/app/${shortName}/ui/` : '',
    lang: 'en',
    scope: shortName ? `/app/${shortName}/ui/` : '',
    display_override: ['window-control-overlay', 'minimal-ui'],
    display: 'standalone',
    background_color: bgColor,
    theme_color: '#000000',
    id: shortName,
    screenshots: [],
    ios: {},
    icons: [],
  };

  await writeFile('manifest.json', JSON.stringify(manifest, null, 2) + '\n');
  log.dim(' manifest.json generated at project root (from app.json values)');
}
|
|
3168
3482
|
|
|
3169
3483
|
/**
|
|
@@ -3209,7 +3523,7 @@ async function saveAppJson(appJson, contentRefs, otherRefs, domain) {
|
|
|
3209
3523
|
}
|
|
3210
3524
|
|
|
3211
3525
|
/**
|
|
3212
|
-
* Save .
|
|
3526
|
+
* Save .dbo/.app_baseline.json baseline file with decoded base64 values.
|
|
3213
3527
|
* This file tracks the server state for delta detection.
|
|
3214
3528
|
*/
|
|
3215
3529
|
async function saveBaselineFile(appJson) {
|
|
@@ -3222,14 +3536,14 @@ async function saveBaselineFile(appJson) {
|
|
|
3222
3536
|
// Save to .app.json
|
|
3223
3537
|
await saveAppJsonBaseline(baseline);
|
|
3224
3538
|
|
|
3225
|
-
log.dim(' .
|
|
3539
|
+
log.dim(' .dbo/.app_baseline.json baseline created (system-managed, do not edit)');
|
|
3226
3540
|
}
|
|
3227
3541
|
|
|
3228
3542
|
/**
|
|
3229
3543
|
* Recursively decode base64 fields in an object or array.
|
|
3230
3544
|
* Modifies the input object in-place.
|
|
3231
3545
|
*/
|
|
3232
|
-
function decodeBase64Fields(obj) {
|
|
3546
|
+
export function decodeBase64Fields(obj) {
|
|
3233
3547
|
if (!obj || typeof obj !== 'object') {
|
|
3234
3548
|
return;
|
|
3235
3549
|
}
|