@dboio/cli 0.17.0 → 0.19.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. package/README.md +111 -85
  2. package/package.json +1 -1
  3. package/plugins/claude/dbo/docs/dbo-cli-readme.md +111 -85
  4. package/src/commands/build.js +3 -3
  5. package/src/commands/clone.js +205 -71
  6. package/src/commands/deploy.js +3 -3
  7. package/src/commands/init.js +11 -11
  8. package/src/commands/install.js +3 -3
  9. package/src/commands/login.js +2 -2
  10. package/src/commands/mv.js +15 -15
  11. package/src/commands/pull.js +1 -1
  12. package/src/commands/push.js +193 -14
  13. package/src/commands/rm.js +2 -2
  14. package/src/commands/run.js +4 -4
  15. package/src/commands/status.js +1 -1
  16. package/src/commands/sync.js +2 -2
  17. package/src/lib/config.js +186 -135
  18. package/src/lib/delta.js +119 -17
  19. package/src/lib/dependencies.js +51 -24
  20. package/src/lib/deploy-config.js +4 -4
  21. package/src/lib/domain-guard.js +8 -9
  22. package/src/lib/filenames.js +12 -1
  23. package/src/lib/ignore.js +2 -3
  24. package/src/lib/insert.js +1 -1
  25. package/src/lib/metadata-schema.js +14 -20
  26. package/src/lib/metadata-templates.js +4 -4
  27. package/src/lib/migrations.js +1 -1
  28. package/src/lib/modify-key.js +1 -1
  29. package/src/lib/scaffold.js +5 -12
  30. package/src/lib/schema.js +67 -37
  31. package/src/lib/structure.js +6 -6
  32. package/src/lib/tagging.js +2 -2
  33. package/src/lib/ticketing.js +3 -7
  34. package/src/lib/toe-stepping.js +5 -5
  35. package/src/lib/transaction-key.js +1 -1
  36. package/src/migrations/004-rename-output-files.js +2 -2
  37. package/src/migrations/005-rename-output-metadata.js +2 -2
  38. package/src/migrations/006-remove-uid-companion-filenames.js +1 -1
  39. package/src/migrations/007-natural-entity-companion-filenames.js +1 -1
  40. package/src/migrations/008-metadata-uid-in-suffix.js +1 -1
  41. package/src/migrations/009-fix-media-collision-metadata-names.js +1 -1
  42. package/src/migrations/010-delete-paren-media-orphans.js +1 -1
  43. package/src/migrations/012-project-dir-restructure.js +211 -0
@@ -13,10 +13,17 @@ import { loadIgnore } from '../lib/ignore.js';
13
13
  import { checkDomainChange } from '../lib/domain-guard.js';
14
14
  import { applyTrashIcon, ensureTrashIcon, tagProjectFiles } from '../lib/tagging.js';
15
15
  import { loadMetadataSchema, saveMetadataSchema, getTemplateCols, setTemplateCols, buildTemplateFromCloneRecord, generateMetadataFromSchema, parseReferenceExpression, mergeDescriptorSchemaFromDependencies } from '../lib/metadata-schema.js';
16
- import { fetchSchema, loadSchema, saveSchema, isSchemaStale, SCHEMA_FILE } from '../lib/schema.js';
16
+ import { fetchSchema, loadSchema, saveSchema, isSchemaStale } from '../lib/schema.js';
17
+ import { appMetadataPath } from '../lib/config.js';
17
18
  import { runPendingMigrations } from '../lib/migrations.js';
18
19
  import { upsertDeployEntry } from '../lib/deploy-config.js';
19
20
  import { syncDependencies, parseDependenciesColumn } from '../lib/dependencies.js';
21
+ import { sep } from 'path';
22
+
23
+ /** True when cwd is inside app_dependencies/ (dependency checkout clone). */
24
+ function isDependencyCheckout() {
25
+ return process.cwd().includes(`${sep}app_dependencies${sep}`);
26
+ }
20
27
  import { mergeDependencies } from '../lib/config.js';
21
28
 
22
29
  /**
@@ -34,6 +41,32 @@ export function resolveContentValue(value) {
34
41
  return value !== null && value !== undefined ? String(value) : null;
35
42
  }
36
43
 
44
+ /**
45
+ * Embed a server children object into metadata, decoding any base64 field values.
46
+ * Used by processEntityDirEntries() and processExtensionEntries().
47
+ *
48
+ * @param {Object} childrenObj - Server-side children: { entity_column: [...], ... }
49
+ * @returns {Object} - Decoded children object safe to write to metadata
50
+ */
51
+ export function embedEntityChildren(childrenObj) {
52
+ const result = {};
53
+ for (const [childEntityName, childArray] of Object.entries(childrenObj)) {
54
+ if (!Array.isArray(childArray)) continue;
55
+ result[childEntityName] = childArray.map(child => {
56
+ const embedded = {};
57
+ for (const [k, v] of Object.entries(child)) {
58
+ if (v && typeof v === 'object' && !Array.isArray(v) && v.encoding === 'base64') {
59
+ embedded[k] = resolveContentValue(v) ?? '';
60
+ } else {
61
+ embedded[k] = v;
62
+ }
63
+ }
64
+ return embedded;
65
+ });
66
+ }
67
+ return result;
68
+ }
69
+
37
70
  export function sanitizeFilename(name) {
38
71
  return name.replace(/[/\\?%*:|"<>]/g, '-').replace(/\s+/g, '-').substring(0, 200);
39
72
  }
@@ -491,6 +524,49 @@ async function detectAndRenameLegacyCompanions(metaPath, meta) {
491
524
  return metaChanged;
492
525
  }
493
526
 
527
+ /**
528
+ * Clean up double/triple-metadata files (e.g., "app.metadata.metadata~uid.json")
529
+ * caused by an older bug where buildMetaFilename received a base already containing ".metadata".
530
+ * Scans lib/ directories for files matching the pattern and removes them.
531
+ */
532
+ async function cleanDoubleMetadataFiles() {
533
+ const libDir = join(process.cwd(), 'lib');
534
+ if (!await fileExists(libDir)) return;
535
+
536
+ const trashDir = join(process.cwd(), 'trash');
537
+ let cleaned = 0;
538
+
539
+ async function scan(dir) {
540
+ let entries;
541
+ try { entries = await readdir(dir, { withFileTypes: true }); } catch { return; }
542
+
543
+ for (const entry of entries) {
544
+ if (entry.name.startsWith('.')) continue;
545
+ const full = join(dir, entry.name);
546
+
547
+ if (entry.isDirectory()) {
548
+ if (['node_modules', 'trash', '.git', '.app', 'app_dependencies'].includes(entry.name)) continue;
549
+ await scan(full);
550
+ continue;
551
+ }
552
+
553
+ // Detect double+ .metadata pattern: "name.metadata.metadata~uid.json" or "name.metadata.metadata.metadata~uid.json"
554
+ if (/\.metadata\.metadata[.~]/.test(entry.name)) {
555
+ try {
556
+ await mkdir(trashDir, { recursive: true });
557
+ await rename(full, join(trashDir, entry.name));
558
+ cleaned++;
559
+ } catch { /* non-critical */ }
560
+ }
561
+ }
562
+ }
563
+
564
+ await scan(libDir);
565
+ if (cleaned > 0) {
566
+ log.dim(` Cleaned ${cleaned} duplicate metadata file(s) → trash/`);
567
+ }
568
+ }
569
+
494
570
  /**
495
571
  * Scan all directories under Bins/ for orphaned legacy ~UID companion files
496
572
  * that no metadata @reference points to, and move them to trash/.
@@ -793,7 +869,7 @@ async function stageCollisionDeletions(toDelete, appJson, options) {
793
869
  }
794
870
 
795
871
  if (staged > 0) {
796
- log.success(`${staged} record(s) staged in .dbo/synchronize.json`);
872
+ log.success(`${staged} record(s) staged in .app/synchronize.json`);
797
873
  log.dim(' Run "dbo push" to delete from server');
798
874
  }
799
875
  }
@@ -935,7 +1011,7 @@ async function checkPendingSynchronize(options) {
935
1011
  if (totalCount === 0) return;
936
1012
 
937
1013
  log.warn('');
938
- log.warn(` ⚠ There are ${totalCount} un-pushed staged item(s) in .dbo/synchronize.json:`);
1014
+ log.warn(` ⚠ There are ${totalCount} un-pushed staged item(s) in .app/synchronize.json:`);
939
1015
  if (deleteCount > 0) log.warn(` ${deleteCount} pending deletion(s)`);
940
1016
  if (editCount > 0) log.warn(` ${editCount} pending edit(s)`);
941
1017
  if (addCount > 0) log.warn(` ${addCount} pending add(s)`);
@@ -1025,7 +1101,7 @@ export async function performClone(source, options = {}) {
1025
1101
  if (!shouldFetch && schema) {
1026
1102
  try {
1027
1103
  shouldFetch = await isSchemaStale({ domain: effectiveDomain, verbose: options.verbose });
1028
- if (shouldFetch) log.dim(` Server schema is newer — refreshing schema.json`);
1104
+ if (shouldFetch) log.dim(` Server schema is newer — refreshing _system dependency baseline`);
1029
1105
  } catch {
1030
1106
  // Can't check — continue with local schema
1031
1107
  }
@@ -1034,7 +1110,7 @@ export async function performClone(source, options = {}) {
1034
1110
  try {
1035
1111
  schema = await fetchSchema({ domain: effectiveDomain, verbose: options.verbose });
1036
1112
  await saveSchema(schema);
1037
- log.dim(` Saved schema.json`);
1113
+ log.dim(` Refreshed _system dependency baseline`);
1038
1114
  } catch (err) {
1039
1115
  if (!schema) log.warn(` Could not fetch schema: ${err.message}`);
1040
1116
  // Continue with stale schema or null
@@ -1118,10 +1194,12 @@ export async function performClone(source, options = {}) {
1118
1194
  await checkPendingSynchronize(options);
1119
1195
  }
1120
1196
 
1121
- // Ensure sensitive files are gitignored
1122
- await ensureGitignore(['.dbo/credentials.json', '.dbo/cookies.txt', '.dbo/.app_baseline.json']);
1197
+ // Ensure sensitive files are gitignored (skip for dependency checkouts — not user projects)
1198
+ if (!isDependencyCheckout()) {
1199
+ await ensureGitignore(['.app/credentials.json', '.app/cookies.txt', '.app/ticketing.local.json', '.app/scripts.local.json', '.app/errors.log', 'app_dependencies/']);
1200
+ }
1123
1201
 
1124
- // Step 2: Update .dbo/config.json (skip in pull mode — config already set)
1202
+ // Step 2: Update .app/config.json (skip in pull mode — config already set)
1125
1203
  if (!options.pullMode) {
1126
1204
  await updateConfigWithApp({
1127
1205
  AppID: appJson.AppID,
@@ -1130,9 +1208,9 @@ export async function performClone(source, options = {}) {
1130
1208
  AppShortName: appJson.ShortName,
1131
1209
  });
1132
1210
  await saveCloneSource(activeSource || 'default');
1133
- log.dim(' Updated .dbo/config.json with app metadata');
1211
+ log.dim(' Updated .app/config.json with app metadata');
1134
1212
 
1135
- // Merge Dependencies from app.json into .dbo/config.json
1213
+ // Merge Dependencies into .app/config.json
1136
1214
  // Always ensure at least ["_system"] is persisted
1137
1215
  const fromApp = parseDependenciesColumn(appJson.Dependencies);
1138
1216
  if (fromApp.length > 0) {
@@ -1164,8 +1242,8 @@ export async function performClone(source, options = {}) {
1164
1242
  }
1165
1243
  }
1166
1244
 
1167
- // Step 3: Update package.json (skip in pull mode)
1168
- if (!options.pullMode) {
1245
+ // Step 3: Update package.json (skip in pull mode and dependency checkouts)
1246
+ if (!options.pullMode && !isDependencyCheckout()) {
1169
1247
  await updatePackageJson(appJson, config);
1170
1248
  }
1171
1249
 
@@ -1183,7 +1261,7 @@ export async function performClone(source, options = {}) {
1183
1261
  force: explicitDeps ? true : options.force,
1184
1262
  schema: options.schema,
1185
1263
  verbose: options.verbose,
1186
- systemSchemaPath: join(process.cwd(), SCHEMA_FILE),
1264
+ systemSchemaPath: join(process.cwd(), 'app_dependencies', '_system', '.app', '_system.json'),
1187
1265
  only: explicitDeps || undefined,
1188
1266
  });
1189
1267
  } catch (err) {
@@ -1209,19 +1287,28 @@ export async function performClone(source, options = {}) {
1209
1287
  const structure = buildBinHierarchy(bins, appJson.AppID);
1210
1288
 
1211
1289
  if (!options.pullMode) {
1212
- for (const dir of SCAFFOLD_DIRS) {
1290
+ // Inside app_dependencies/: skip development-only scaffold dirs (src, test, trash)
1291
+ const isDependencyCheckout = process.cwd().includes(`${sep}app_dependencies${sep}`);
1292
+ const DEP_SKIP_DIRS = new Set(['src', 'test', 'trash']);
1293
+ const dirsToScaffold = isDependencyCheckout
1294
+ ? SCAFFOLD_DIRS.filter(d => !DEP_SKIP_DIRS.has(d))
1295
+ : SCAFFOLD_DIRS;
1296
+
1297
+ for (const dir of dirsToScaffold) {
1213
1298
  await mkdir(dir, { recursive: true });
1214
1299
  }
1215
1300
 
1216
1301
  // Best-effort: apply trash icon
1217
- await applyTrashIcon(join(process.cwd(), 'trash'));
1302
+ if (!isDependencyCheckout) {
1303
+ await applyTrashIcon(join(process.cwd(), 'trash'));
1304
+ }
1218
1305
 
1219
1306
  const createdDirs = await createDirectories(structure);
1220
1307
  await saveStructureFile(structure);
1221
1308
 
1222
- const totalDirs = SCAFFOLD_DIRS.length + createdDirs.length;
1309
+ const totalDirs = dirsToScaffold.length + createdDirs.length;
1223
1310
  log.success(`Created ${totalDirs} director${totalDirs === 1 ? 'y' : 'ies'}`);
1224
- for (const d of SCAFFOLD_DIRS) log.dim(` ${d}/`);
1311
+ for (const d of dirsToScaffold) log.dim(` ${d}/`);
1225
1312
  for (const d of createdDirs) log.dim(` ${d}/`);
1226
1313
 
1227
1314
  // Warn about legacy root-level entity directories
@@ -1250,7 +1337,7 @@ export async function performClone(source, options = {}) {
1250
1337
  if (!serverTz || serverTz === 'UTC') {
1251
1338
  serverTz = 'America/Los_Angeles';
1252
1339
  await updateConfigWithApp({ ServerTimezone: serverTz });
1253
- log.dim(` Set ServerTimezone to ${serverTz} in .dbo/config.json`);
1340
+ log.dim(` Set ServerTimezone to ${serverTz} in .app/config.json`);
1254
1341
  }
1255
1342
 
1256
1343
  // Resolve --entity filter: which entity types to process
@@ -1300,8 +1387,8 @@ export async function performClone(source, options = {}) {
1300
1387
  );
1301
1388
  }
1302
1389
 
1303
- // Step 5a: Write manifest.json to project root (from server content or resolved template)
1304
- if (!entityFilter || entityFilter.has('content')) {
1390
+ // Step 5a: Write manifest.json to project root (skip for dependency checkouts)
1391
+ if ((!entityFilter || entityFilter.has('content')) && !isDependencyCheckout()) {
1305
1392
  await writeManifestJson(appJson, contentRefs);
1306
1393
  }
1307
1394
 
@@ -1358,7 +1445,7 @@ export async function performClone(source, options = {}) {
1358
1445
  // Step 7: Save app.json with references
1359
1446
  await saveAppJson(appJson, contentRefs, otherRefs, effectiveDomain);
1360
1447
 
1361
- // Step 8: Create .dbo/.app_baseline.json baseline for delta tracking (skip in entity-filter mode to avoid overwriting)
1448
+ // Step 8: Create .app/<shortName>.json baseline for delta tracking (skip in entity-filter mode to avoid overwriting)
1362
1449
  if (!entityFilter) {
1363
1450
  await saveBaselineFile(appJson);
1364
1451
  resetBaselineCache(); // invalidate so next operation reloads the fresh baseline
@@ -1374,13 +1461,17 @@ export async function performClone(source, options = {}) {
1374
1461
  // Step 9: Trash orphaned legacy ~UID companion files that no metadata references
1375
1462
  await trashOrphanedLegacyCompanions();
1376
1463
 
1464
+ // Step 9b: Clean up double-metadata files (e.g., "app.metadata.metadata~uid.json")
1465
+ // caused by an older bug where buildMetaFilename received a base already containing ".metadata"
1466
+ await cleanDoubleMetadataFiles();
1467
+
1377
1468
  // Step 10: Tag project files with sync status (best-effort, non-blocking)
1378
1469
  tagProjectFiles({ verbose: false }).catch(() => {});
1379
1470
 
1380
1471
  log.plain('');
1381
1472
  const verb = options.pullMode ? 'Pull' : 'Clone';
1382
1473
  log.success(entityFilter ? `${verb} complete! (filtered: ${options.entity})` : `${verb} complete!`);
1383
- log.dim(' app.json saved to project root');
1474
+ log.dim(' App metadata saved to .app/');
1384
1475
  if (!options.pullMode) {
1385
1476
  log.dim(' Run "dbo login" to authenticate, then "dbo push" to deploy changes');
1386
1477
  }
@@ -1546,7 +1637,24 @@ async function fetchAppFromServer(appShortName, options, config) {
1546
1637
  throw new Error(`No app found with ShortName "${appShortName}"`);
1547
1638
  }
1548
1639
 
1549
- spinner.succeed(`Found app on server`);
1640
+ // Heuristic: detect sparse responses — may indicate expired session or
1641
+ // limited permissions on the target app. Warn but proceed: the server may
1642
+ // intentionally scope /api/app/object responses by user security.
1643
+ const children = appRecord.children || {};
1644
+ const childKeys = Object.keys(children);
1645
+ const hasContentOrMedia = children.content?.length > 0 || children.media?.length > 0 || children.bin?.length > 0;
1646
+ if (childKeys.length > 0 && !hasContentOrMedia) {
1647
+ const totalRecords = childKeys.reduce((sum, k) => sum + (Array.isArray(children[k]) ? children[k].length : 0), 0);
1648
+ if (totalRecords <= 5) {
1649
+ spinner.warn(`App "${appShortName}" returned sparse data (${totalRecords} record(s), no content/media/bins)`);
1650
+ log.warn(' If data is missing, check permissions or run "dbo login" and re-clone.');
1651
+ } else {
1652
+ spinner.succeed(`Found app on server`);
1653
+ }
1654
+ } else {
1655
+ spinner.succeed(`Found app on server`);
1656
+ }
1657
+
1550
1658
  return appRecord;
1551
1659
  }
1552
1660
 
@@ -1967,7 +2075,14 @@ async function processEntityDirEntries(entityName, entries, options, serverTz) {
1967
2075
  const extractedContentCols = [];
1968
2076
 
1969
2077
  for (const [key, value] of Object.entries(record)) {
1970
- if (key === 'children') continue;
2078
+ if (key === 'children') {
2079
+ // Embed children inline with base64 decoding (entity_column, security_column, etc.)
2080
+ if (value && typeof value === 'object' && !Array.isArray(value)
2081
+ && Object.keys(value).length > 0) {
2082
+ meta.children = embedEntityChildren(value);
2083
+ }
2084
+ continue;
2085
+ }
1971
2086
 
1972
2087
  // Check if this column should be extracted as a companion file
1973
2088
  const extractInfo = contentColsToExtract.find(c => c.col === key);
@@ -2105,7 +2220,7 @@ async function buildDescriptorPrePass(extensionEntries, structure, metadataSchem
2105
2220
  }
2106
2221
 
2107
2222
  await saveDescriptorMapping(structure, mapping);
2108
- log.dim(` Saved descriptorMapping to .dbo/structure.json`);
2223
+ log.dim(` Saved descriptorMapping to .app/directories.json`);
2109
2224
 
2110
2225
  // Parse form-control-code from descriptor_definition records → populate metadata_schema.json
2111
2226
  const descriptorDefs = extensionEntries.filter(r =>
@@ -2545,7 +2660,14 @@ async function processExtensionEntries(entries, structure, options, serverTz) {
2545
2660
  const extractedCols = [];
2546
2661
 
2547
2662
  for (const [key, value] of Object.entries(record)) {
2548
- if (key === 'children') continue;
2663
+ if (key === 'children') {
2664
+ // Embed children inline with base64 decoding
2665
+ if (value && typeof value === 'object' && !Array.isArray(value)
2666
+ && Object.keys(value).length > 0) {
2667
+ meta.children = embedEntityChildren(value);
2668
+ }
2669
+ continue;
2670
+ }
2549
2671
 
2550
2672
  const companionRef = companionRefs.find(r => r.column.toLowerCase() === key.toLowerCase());
2551
2673
  if (companionRef) {
@@ -2878,7 +3000,7 @@ async function processMediaEntries(mediaRecords, structure, options, config, app
2878
3000
  log.dim(` Error: ${err.message}`);
2879
3001
  }
2880
3002
 
2881
- // Append to .dbo/errors.log
3003
+ // Append to .app/errors.log
2882
3004
  await appendErrorLog({
2883
3005
  timestamp: new Date().toISOString(),
2884
3006
  command: 'clone',
@@ -2957,7 +3079,7 @@ async function processMediaEntries(mediaRecords, structure, options, config, app
2957
3079
  });
2958
3080
  log.dim(` Staged: ${stale.filename}`);
2959
3081
  }
2960
- log.success('Stale media records staged in .dbo/synchronize.json');
3082
+ log.success('Stale media records staged in .app/synchronize.json');
2961
3083
  log.dim(' Run "dbo push" to delete from server');
2962
3084
  }
2963
3085
 
@@ -3024,7 +3146,9 @@ async function processRecord(entityName, record, structure, options, usedNames,
3024
3146
  }
3025
3147
 
3026
3148
  // If no extension determined and Content column has data, prompt user to choose one
3027
- if (!ext && !options.yes && record.Content) {
3149
+ // Only prompt when --configure is set; otherwise skip silently (companion extraction
3150
+ // should not surprise the user with interactive prompts during normal clone/pull)
3151
+ if (!ext && options.configure && !options.yes && record.Content) {
3028
3152
  const cv = record.Content;
3029
3153
  const hasContentData = cv && (
3030
3154
  (typeof cv === 'object' && cv.value !== null && cv.value !== undefined) ||
@@ -3574,27 +3698,23 @@ async function extractCustomSqlIfNeeded(entityObj, companionStem, outputDir, ser
3574
3698
 
3575
3699
  /**
3576
3700
  * Recursively build a children object for a parent entity.
3577
- * Mutates parentObj to set parentObj.children = { column: [], join: [], filter: [] }.
3578
- * Returns companionFiles: string[] of written companion file basenames.
3701
+ * Mutates parentObj to set parentObj.children keyed by physical entity names:
3702
+ * { output_value: [], output_value_filter: [], output_value_entity_column_rel: [] }
3579
3703
  *
3580
3704
  * Each child object retains _entity set to the physical entity name
3581
- * (output_value, output_value_entity_column_rel, output_value_filter)
3582
3705
  * so that push can route submissions correctly.
3583
3706
  *
3707
+ * Child CustomSQL values are decoded inline as strings — only the root output's
3708
+ * CustomSQL is extracted as a companion .sql file (done by the caller).
3709
+ *
3584
3710
  * @param {Object} parentObj - The entity object to populate (mutated in place)
3585
3711
  * @param {Object} node - Tree node from buildOutputHierarchyTree (has _children)
3586
- * @param {string} rootStem - Root output file stem (e.g. "Sales~abc")
3587
- * @param {string} outputDir - Directory where root output JSON lives
3588
- * @param {string} serverTz - Server timezone
3589
- * @param {string} [parentStem] - Ancestor stem for compound companion naming
3590
- * @returns {Promise<string[]>} - Array of written companion file basenames
3591
3712
  */
3592
- async function buildInlineOutputChildren(parentObj, node, rootStem, outputDir, serverTz, parentStem = rootStem) {
3593
- const companionFiles = [];
3713
+ export function buildInlineOutputChildren(parentObj, node) {
3594
3714
  const nodeChildren = node._children || {};
3595
3715
 
3596
- // Always create children object with all three doc keys
3597
- parentObj.children = { column: [], join: [], filter: [] };
3716
+ // Use physical entity names as children keys (not doc aliases)
3717
+ parentObj.children = { output_value: [], output_value_filter: [], output_value_entity_column_rel: [] };
3598
3718
 
3599
3719
  for (const docKey of INLINE_DOC_KEYS) {
3600
3720
  const entityArray = nodeChildren[docKey];
@@ -3602,44 +3722,32 @@ async function buildInlineOutputChildren(parentObj, node, rootStem, outputDir, s
3602
3722
 
3603
3723
  if (!Array.isArray(entityArray) || entityArray.length === 0) continue;
3604
3724
 
3605
- for (let childIdx = 0; childIdx < entityArray.length; childIdx++) {
3606
- const child = entityArray[childIdx];
3725
+ for (const child of entityArray) {
3607
3726
  // Build a clean copy without tree-internal fields
3608
3727
  const childObj = { ...child };
3609
3728
  delete childObj._children;
3610
3729
 
3611
- // Decode any base64 values
3730
+ // Decode all base64 fields inline (including CustomSQL — only root output extracts SQL files)
3612
3731
  for (const [key, value] of Object.entries(childObj)) {
3613
- if (key === 'CustomSQL') continue; // handled by extractCustomSqlIfNeeded
3614
3732
  if (value && typeof value === 'object' && !Array.isArray(value) && value.encoding === 'base64') {
3615
- childObj[key] = resolveContentValue(value);
3733
+ childObj[key] = resolveContentValue(value) ?? '';
3616
3734
  }
3617
3735
  }
3618
3736
 
3619
3737
  // Ensure _entity is set to physical entity name (for push routing)
3620
3738
  childObj._entity = physicalKey;
3621
3739
 
3622
- // Compute companion stem for this child (index-based, not UID-based)
3623
- const childStem = getChildCompanionStem(rootStem, physicalKey, childIdx, parentStem);
3624
-
3625
- // Extract CustomSQL if needed
3626
- const companionFile = await extractCustomSqlIfNeeded(childObj, childStem, outputDir, serverTz);
3627
- if (companionFile) companionFiles.push(companionFile);
3628
-
3629
3740
  // Recurse into child's _children (e.g. join→column, column→filter)
3630
3741
  if (child._children && Object.keys(child._children).some(k => child._children[k]?.length > 0)) {
3631
- const gcFiles = await buildInlineOutputChildren(childObj, child, rootStem, outputDir, serverTz, childStem);
3632
- companionFiles.push(...gcFiles);
3742
+ buildInlineOutputChildren(childObj, child);
3633
3743
  } else {
3634
3744
  // Leaf node: still set empty children
3635
- childObj.children = { column: [], join: [], filter: [] };
3745
+ childObj.children = { output_value: [], output_value_filter: [], output_value_entity_column_rel: [] };
3636
3746
  }
3637
3747
 
3638
- parentObj.children[docKey].push(childObj);
3748
+ parentObj.children[physicalKey].push(childObj);
3639
3749
  }
3640
3750
  }
3641
-
3642
- return companionFiles;
3643
3751
  }
3644
3752
 
3645
3753
  /**
@@ -3661,8 +3769,8 @@ async function trashOrphanedChildFiles(outputDir, rootStem) {
3661
3769
  for (const f of files) {
3662
3770
  const matchesCurrent = f.startsWith(`${rootStem}.`);
3663
3771
  const matchesLegacy = f.startsWith(`${legacyStem}.`);
3664
- if ((matchesCurrent || matchesLegacy) && /\.(column|join|filter)~/.test(f)) {
3665
- // Old child file or legacy CustomSQL companion — trash it
3772
+ if ((matchesCurrent || matchesLegacy) && /\.(column|join|filter)(~|-\d+\.|\.CustomSQL)/.test(f)) {
3773
+ // Old child file, legacy child JSON, or old per-child CustomSQL companion — trash it
3666
3774
  if (!trashCreated) {
3667
3775
  await mkdir(trashDir, { recursive: true });
3668
3776
  trashCreated = true;
@@ -3814,6 +3922,26 @@ async function processOutputHierarchy(appJson, structure, options, serverTz) {
3814
3922
  // Resolve filename columns for each entity type
3815
3923
  const filenameCols = await resolveOutputFilenameColumns(appJson, options);
3816
3924
 
3925
+ // Detect companion filename collisions: multiple outputs that produce the same
3926
+ // rootBasename + binDir. When collisions exist, use UID-qualified companion stems
3927
+ // so each output's .CustomSQL.sql doesn't overwrite the others.
3928
+ const companionKey = (output) => {
3929
+ let binDir = BINS_DIR;
3930
+ if (output.BinID && structure[output.BinID]) {
3931
+ binDir = resolveBinPath(output.BinID, structure);
3932
+ }
3933
+ const base = buildOutputFilename('output', output, filenameCols.output);
3934
+ return `${binDir}/${base}`;
3935
+ };
3936
+ const companionKeyCounts = new Map();
3937
+ for (const output of tree) {
3938
+ const key = companionKey(output);
3939
+ companionKeyCounts.set(key, (companionKeyCounts.get(key) || 0) + 1);
3940
+ }
3941
+ const collidingCompanionKeys = new Set(
3942
+ [...companionKeyCounts.entries()].filter(([, count]) => count > 1).map(([key]) => key)
3943
+ );
3944
+
3817
3945
  const refs = [];
3818
3946
  const bulkAction = { value: null };
3819
3947
  const config = await loadConfig();
@@ -3924,11 +4052,16 @@ async function processOutputHierarchy(appJson, structure, options, serverTz) {
3924
4052
  // Copy raw CustomSQL for extraction helper
3925
4053
  rootMeta.CustomSQL = output.CustomSQL;
3926
4054
 
4055
+ // When multiple outputs share the same name+bin, qualify the companion stem
4056
+ // with the UID so each gets its own .CustomSQL.sql file.
4057
+ const isCollision = collidingCompanionKeys.has(`${binDir}/${rootBasename}`);
4058
+ const companionStem = isCollision ? `${rootBasename}~${rootUid}` : rootBasename;
4059
+
3927
4060
  // Extract CustomSQL on root (rules 1/2/3)
3928
- await extractCustomSqlIfNeeded(rootMeta, rootBasename, binDir, serverTz);
4061
+ await extractCustomSqlIfNeeded(rootMeta, companionStem, binDir, serverTz);
3929
4062
 
3930
4063
  // Embed all children under rootMeta.children = { column, join, filter }
3931
- await buildInlineOutputChildren(rootMeta, output, rootBasename, binDir, serverTz);
4064
+ buildInlineOutputChildren(rootMeta, output);
3932
4065
  // rootMeta now has .children = { column: [...], join: [...], filter: [...] }
3933
4066
 
3934
4067
  await writeFile(rootMetaPath, JSON.stringify(rootMeta, null, 2) + '\n');
@@ -4030,11 +4163,11 @@ async function writeManifestJson(appJson, contentRefs) {
4030
4163
  };
4031
4164
 
4032
4165
  await writeFile('manifest.json', JSON.stringify(manifest, null, 2) + '\n');
4033
- log.dim(' manifest.json generated at project root (from app.json values)');
4166
+ log.dim(' manifest.json generated at project root (from app metadata)');
4034
4167
  }
4035
4168
 
4036
4169
  /**
4037
- * Save app.json to project root with @ references replacing processed entries.
4170
+ * Save app metadata to project root with @ references replacing processed entries.
4038
4171
  */
4039
4172
  async function saveAppJson(appJson, contentRefs, otherRefs, domain) {
4040
4173
  const output = { ...appJson };
@@ -4072,11 +4205,12 @@ async function saveAppJson(appJson, contentRefs, otherRefs, domain) {
4072
4205
  delete output.children.output_value_entity_column_rel;
4073
4206
  }
4074
4207
 
4075
- await writeFile('app.json', JSON.stringify(output, null, 2) + '\n');
4208
+ const metaPath = await appMetadataPath();
4209
+ await writeFile(metaPath, JSON.stringify(output, null, 2) + '\n');
4076
4210
  }
4077
4211
 
4078
4212
  /**
4079
- * Save .dbo/.app_baseline.json baseline file with decoded base64 values.
4213
+ * Save .app/<shortName>.json baseline file with decoded base64 values.
4080
4214
  * This file tracks the server state for delta detection.
4081
4215
  */
4082
4216
  async function saveBaselineFile(appJson) {
@@ -4086,10 +4220,10 @@ async function saveBaselineFile(appJson) {
4086
4220
  // Recursively decode all base64 fields
4087
4221
  decodeBase64Fields(baseline);
4088
4222
 
4089
- // Save to .app.json
4223
+ // Save baseline
4090
4224
  await saveAppJsonBaseline(baseline);
4091
4225
 
4092
- log.dim(' .dbo/.app_baseline.json baseline created (system-managed, do not edit)');
4226
+ log.dim(' .app/ baseline created (system-managed, do not edit)');
4093
4227
  }
4094
4228
 
4095
4229
  /**
@@ -4125,10 +4259,10 @@ export function decodeBase64Fields(obj) {
4125
4259
 
4126
4260
  // ── Error log ─────────────────────────────────────────────────────────────
4127
4261
 
4128
- const ERROR_LOG_PATH = join('.dbo', 'errors.log');
4262
+ const ERROR_LOG_PATH = join('.app', 'errors.log');
4129
4263
 
4130
4264
  /**
4131
- * Append a structured error entry to .dbo/errors.log.
4265
+ * Append a structured error entry to .app/errors.log.
4132
4266
  * Creates the file if absent. Each entry is one JSON line (JSONL format).
4133
4267
  */
4134
4268
  async function appendErrorLog(entry) {
@@ -9,12 +9,12 @@ import { resolveTransactionKey } from '../lib/transaction-key.js';
9
9
  import { log } from '../lib/logger.js';
10
10
  import { runPendingMigrations } from '../lib/migrations.js';
11
11
 
12
- const MANIFEST_FILE = '.dbo/deploy_config.json';
12
+ const MANIFEST_FILE = '.app/deploy_config.json';
13
13
  const LEGACY_MANIFEST_FILE = 'dbo.deploy.json';
14
14
 
15
15
  export const deployCommand = new Command('deploy')
16
16
  .description('Deploy files to DBO.io using a manifest or direct arguments')
17
- .argument('[name]', 'Deployment name from .dbo/deploy_config.json (e.g., css:colors)')
17
+ .argument('[name]', 'Deployment name from .app/deploy_config.json (e.g., css:colors)')
18
18
  .option('--all', 'Deploy all entries in the manifest')
19
19
  .option('-C, --confirm <value>', 'Commit: true (default) or false', 'true')
20
20
  .option('--ticket <id>', 'Override ticket ID')
@@ -29,7 +29,7 @@ export const deployCommand = new Command('deploy')
29
29
  await runPendingMigrations(options);
30
30
  const client = new DboClient({ domain: options.domain, verbose: options.verbose });
31
31
 
32
- // Load manifest — try .dbo/deploy_config.json first, fall back to legacy dbo.deploy.json
32
+ // Load manifest — try .app/deploy_config.json first, fall back to legacy dbo.deploy.json
33
33
  let manifest;
34
34
  let manifestSource;
35
35
  try {
@@ -9,7 +9,7 @@ import { log } from '../lib/logger.js';
9
9
  import { checkDomainChange, writeAppJsonDomain } from '../lib/domain-guard.js';
10
10
  import { performLogin } from './login.js';
11
11
  import { runPendingMigrations } from '../lib/migrations.js';
12
- import { fetchSchema, saveSchema, SCHEMA_FILE } from '../lib/schema.js';
12
+ import { fetchSchema, saveSchema } from '../lib/schema.js';
13
13
  import { loadMetadataSchema, saveMetadataSchema, generateMetadataFromSchema } from '../lib/metadata-schema.js';
14
14
  import { syncDependencies } from '../lib/dependencies.js';
15
15
  import { mergeDependencies } from '../lib/config.js';
@@ -108,7 +108,7 @@ export const initCommand = new Command('init')
108
108
  }
109
109
 
110
110
  // Ensure sensitive files are gitignored
111
- await ensureGitignore(['.dbo/credentials.json', '.dbo/cookies.txt', '.dbo/config.local.json', '.dbo/ticketing.local.json', '.dbo/scripts.local.json', '.dbo/errors.log', 'trash/', 'Icon\\r', 'schema.json', '.dbo/dependencies/']);
111
+ await ensureGitignore(['.app/credentials.json', '.app/cookies.txt', '.app/ticketing.local.json', '.app/scripts.local.json', '.app/errors.log', 'trash/', 'Icon\\r', 'app_dependencies/']);
112
112
 
113
113
  const createdIgnore = await createDboignore();
114
114
  if (createdIgnore) log.dim(' Created .dboignore');
@@ -122,19 +122,19 @@ export const initCommand = new Command('init')
122
122
 
123
123
  // Create empty scripts.json and scripts.local.json if they don't exist
124
124
  const emptyScripts = JSON.stringify({ scripts: {}, targets: {}, entities: {} }, null, 2) + '\n';
125
- const dboDir = join(process.cwd(), '.dbo');
126
- const scriptsPath = join(dboDir, 'scripts.json');
127
- const scriptsLocalPath = join(dboDir, 'scripts.local.json');
125
+ const appDir = join(process.cwd(), '.app');
126
+ const scriptsPath = join(appDir, 'scripts.json');
127
+ const scriptsLocalPath = join(appDir, 'scripts.local.json');
128
128
  try { await access(scriptsPath); } catch {
129
129
  await writeFile(scriptsPath, emptyScripts);
130
- log.dim(' Created .dbo/scripts.json');
130
+ log.dim(' Created .app/scripts.json');
131
131
  }
132
132
  try { await access(scriptsLocalPath); } catch {
133
133
  await writeFile(scriptsLocalPath, emptyScripts);
134
- log.dim(' Created .dbo/scripts.local.json');
134
+ log.dim(' Created .app/scripts.local.json');
135
135
  }
136
136
 
137
- log.success(`Initialized .dbo/ for ${domain}`);
137
+ log.success(`Initialized .app/ for ${domain}`);
138
138
 
139
139
  // Authenticate early so the session is ready for subsequent operations
140
140
  if (!options.nonInteractive && username) {
@@ -145,12 +145,12 @@ export const initCommand = new Command('init')
145
145
  try {
146
146
  const schemaData = await fetchSchema({ domain, verbose: options.verbose });
147
147
  await saveSchema(schemaData);
148
- log.dim(` Saved ${SCHEMA_FILE}`);
148
+ log.dim(` Refreshed _system dependency baseline`);
149
149
 
150
150
  const existing = await loadMetadataSchema();
151
151
  const updated = generateMetadataFromSchema(schemaData, existing ?? {});
152
152
  await saveMetadataSchema(updated);
153
- log.dim(` Updated .dbo/metadata_schema.json`);
153
+ log.dim(` Updated metadata schema`);
154
154
  } catch (err) {
155
155
  log.warn(` Could not fetch schema (${err.message}) — run 'dbo clone --schema' after login.`);
156
156
  }
@@ -168,7 +168,7 @@ export const initCommand = new Command('init')
168
168
  domain,
169
169
  force: explicitDeps ? true : undefined,
170
170
  verbose: options.verbose,
171
- systemSchemaPath: join(process.cwd(), SCHEMA_FILE),
171
+ systemSchemaPath: join(process.cwd(), 'app_dependencies', '_system', '.app', '_system.json'),
172
172
  only: explicitDeps || undefined,
173
173
  });
174
174
  } catch (err) {