@dboio/cli 0.5.1 → 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -31,6 +31,8 @@ npm link
31
31
  npx @dboio/cli <command>
32
32
  ```
33
33
 
34
+ > **Shorthand:** You can use `dbo i` as a shortcut for `dbo install` (similar to `npm i`).
35
+
34
36
  ### Requirements
35
37
 
36
38
  - **Node.js 18+** (uses native `fetch` and `FormData`)
@@ -307,6 +309,51 @@ project/
307
309
  media/operator/app/... # ← FullPath placement (when MediaPlacement=fullpath)
308
310
  ```
309
311
 
312
+ #### Understanding the `Bins/` directory structure
313
+
314
+ The `Bins/` directory is the default location for all bin-placed content files during clone. It organizes files according to your app's bin hierarchy from DBO.io.
315
+
316
+ **Directory Organization:**
317
+
318
+ - **`Bins/`** — Root directory for all bin-placed files (local organizational directory only)
319
+ - **`Bins/app/`** — Special subdirectory for the main app bin (typically the default bin)
320
+ - **`Bins/custom_name/`** — Custom bin directories (e.g., `tpl/`, `ticket_test/`, etc.)
321
+
322
+ **Important: The `Bins/app/` special case**
323
+
324
+ The `app/` subdirectory under `Bins/` is treated specially:
325
+
326
+ 1. **It's organizational only** — The `Bins/app/` prefix exists only for local file organization and is **not part of the server-side path**.
327
+
328
+ 2. **Path normalization** — When comparing paths (during `dbo push`), the CLI automatically strips both `Bins/` and `app/` from paths:
329
+ ```
330
+ Local file: Bins/app/assets/css/operator.css
331
+ Server Path: assets/css/operator.css
332
+ → These are considered the same path ✓
333
+ ```
334
+
335
+ 3. **Custom bins are preserved** — Other subdirectories like `Bins/tpl/` or `Bins/ticket_test/` represent actual bin hierarchies and their names are meaningful:
336
+ ```
337
+ Local file: Bins/tpl/header.html
338
+ Server Path: tpl/header.html
339
+ → The 'tpl/' directory is preserved ✓
340
+ ```
341
+
342
+ **Why this matters:**
343
+
344
+ - When you `dbo push` files from `Bins/app/`, the CLI knows these paths should match the root-level paths in your metadata
345
+ - If your metadata `Path` column contains `assets/css/colors.css`, it will correctly match files in `Bins/app/assets/css/colors.css`
346
 + - Custom bin directories like `Bins/tpl/` serve from the `tpl/` directory and maintain their path structure
347
+
348
+ **Leading slash handling:**
349
+
350
+ The CLI handles leading `/` in metadata paths flexibly:
351
+ - `Path: assets/css/file.css` matches `Bins/app/assets/css/file.css` ✓
352
+ - `Path: /assets/css/file.css` also matches `Bins/app/assets/css/file.css` ✓
353
+ - `Path: /assets/css/file.css` matches `Bins/assets/css/file.css` ✓
354
+
355
+ This ensures compatibility with various path formats from the server while maintaining correct local file organization.
356
+
310
357
  ---
311
358
 
312
359
  ### `dbo login`
@@ -1072,7 +1119,7 @@ If no stored user info is available, it prompts for direct input. The CLI automa
1072
1119
 
1073
1120
  ---
1074
1121
 
1075
- ### `dbo install`
1122
+ ### `dbo install` (alias: `dbo i`)
1076
1123
 
1077
1124
  Install or upgrade dbo-cli components including the CLI itself, plugins, and Claude Code integration.
1078
1125
 
@@ -1081,11 +1128,11 @@ Install or upgrade dbo-cli components including the CLI itself, plugins, and Cla
1081
1128
  dbo install
1082
1129
 
1083
1130
  # Install/upgrade CLI from npm (latest)
1084
- dbo install dbo
1085
- dbo install dbo@latest
1131
+ dbo i dbo
1132
+ dbo i dbo@latest
1086
1133
 
1087
1134
  # Install a specific CLI version
1088
- dbo install dbo@0.4.1
1135
+ dbo i dbo@0.4.1
1089
1136
 
1090
1137
  # Install CLI from local source directory
1091
1138
  dbo install /path/to/local/cli/src
package/bin/dbo.js CHANGED
@@ -1,6 +1,11 @@
1
1
  #!/usr/bin/env node
2
2
 
3
3
  import { Command } from 'commander';
4
+ import { createRequire } from 'module';
5
+
6
+ const require = createRequire(import.meta.url);
7
+ const packageJson = require('../package.json');
8
+
4
9
  import { initCommand } from '../src/commands/init.js';
5
10
  import { loginCommand } from '../src/commands/login.js';
6
11
  import { logoutCommand } from '../src/commands/logout.js';
@@ -28,7 +33,7 @@ const program = new Command();
28
33
  program
29
34
  .name('dbo')
30
35
  .description('CLI for the DBO.io framework')
31
- .version('0.5.0');
36
+ .version(packageJson.version, '-v, --version', 'output the version number');
32
37
 
33
38
  program.addCommand(initCommand);
34
39
  program.addCommand(loginCommand);
@@ -0,0 +1,88 @@
1
+ #!/usr/bin/env node
2
+
3
+ // Post-install hook for `npm i @dboio/cli`
4
+ // Interactive plugin picker when TTY is available, static message otherwise.
5
+
6
+ const RESET = '\x1b[0m';
7
+ const BOLD = '\x1b[1m';
8
+ const DIM = '\x1b[2m';
9
+ const CYAN = '\x1b[36m';
10
+ const YELLOW = '\x1b[33m';
11
+
12
+ // Available plugins — add new entries here as plugins are created
13
+ const PLUGINS = [
14
+ {
15
+ name: 'dbo',
16
+ label: 'Claude Code — /dbo slash command',
17
+ command: 'dbo install --claudecommand dbo'
18
+ }
19
+ ];
20
+
21
+ function printStaticMessage() {
22
+ console.log('');
23
+ console.log(`${BOLD}${CYAN} DBO.io CLI installed successfully.${RESET}`);
24
+ console.log('');
25
+ console.log(` ${DIM}Plugins available:${RESET}`);
26
+ for (const plugin of PLUGINS) {
27
+ console.log(` ${YELLOW}${plugin.label}${RESET}`);
28
+ console.log(` ${DIM}${plugin.command}${RESET}`);
29
+ }
30
+ console.log('');
31
+ console.log(` To install all plugins at once, run:`);
32
+ console.log(` ${BOLD}dbo install plugins${RESET}`);
33
+ console.log('');
34
+ }
35
+
36
+ async function promptInteractive() {
37
+ console.log('');
38
+ console.log(`${BOLD}${CYAN} DBO.io CLI installed successfully.${RESET}`);
39
+ console.log('');
40
+
41
+ const { default: inquirer } = await import('inquirer');
42
+
43
+ const { selected } = await inquirer.prompt([
44
+ {
45
+ type: 'checkbox',
46
+ name: 'selected',
47
+ message: 'Select plugins to install:',
48
+ choices: PLUGINS.map(p => ({
49
+ name: p.label,
50
+ value: p.command,
51
+ checked: false
52
+ }))
53
+ }
54
+ ]);
55
+
56
+ if (selected.length === 0) {
57
+ console.log('');
58
+ console.log(` ${DIM}No plugins selected. You can install them later with:${RESET}`);
59
+ console.log(` ${BOLD}dbo install plugins${RESET}`);
60
+ console.log('');
61
+ return;
62
+ }
63
+
64
+ const { execSync } = await import('child_process');
65
+ const combined = selected.join(' && ');
66
+
67
+ console.log('');
68
+ console.log(` ${DIM}Running: ${combined}${RESET}`);
69
+ console.log('');
70
+
71
+ try {
72
+ execSync(combined, { stdio: 'inherit' });
73
+ } catch {
74
+ console.log('');
75
+ console.log(` ${YELLOW}Plugin installation had an issue. You can retry with:${RESET}`);
76
+ for (const cmd of selected) {
77
+ console.log(` ${BOLD}${cmd}${RESET}`);
78
+ }
79
+ console.log('');
80
+ }
81
+ }
82
+
83
+ // TTY check — interactive prompt only works when user has a terminal
84
+ if (process.stdin.isTTY && process.stdout.isTTY) {
85
+ promptInteractive().catch(() => printStaticMessage());
86
+ } else {
87
+ printStaticMessage();
88
+ }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@dboio/cli",
3
- "version": "0.5.1",
3
+ "version": "0.6.0",
4
4
  "description": "CLI for the DBO.io framework",
5
5
  "type": "module",
6
6
  "bin": {
@@ -16,6 +16,7 @@
16
16
  "node": ">=18.0.0"
17
17
  },
18
18
  "scripts": {
19
+ "postinstall": "node bin/postinstall.js",
19
20
  "test": "node --test src/**/*.test.js tests/**/*.test.js"
20
21
  },
21
22
  "dependencies": {
@@ -2,7 +2,7 @@ import { Command } from 'commander';
2
2
  import { readFile, writeFile, mkdir, access } from 'fs/promises';
3
3
  import { join, basename, extname } from 'path';
4
4
  import { DboClient } from '../lib/client.js';
5
- import { loadConfig, updateConfigWithApp, loadClonePlacement, saveClonePlacement, ensureGitignore, saveEntityDirPreference, loadEntityDirPreference } from '../lib/config.js';
5
+ import { loadConfig, updateConfigWithApp, loadClonePlacement, saveClonePlacement, ensureGitignore, saveEntityDirPreference, loadEntityDirPreference, saveEntityContentExtractions, loadEntityContentExtractions, saveAppJsonBaseline } from '../lib/config.js';
6
6
  import { buildBinHierarchy, resolveBinPath, createDirectories, saveStructureFile, getBinName, findBinByPath, BINS_DIR, DEFAULT_PROJECT_DIRS, ENTITY_DIR_MAP } from '../lib/structure.js';
7
7
  import { log } from '../lib/logger.js';
8
8
  import { setFileTimestamps } from '../lib/timestamps.js';
@@ -43,6 +43,16 @@ async function fileExists(path) {
43
43
  * 3. If not found → place under Bins/<dir> (e.g. "Bins/tpl")
44
44
  *
45
45
  * This ensures content files always land inside Bins/, never at the project root.
46
+ *
47
+ * **Note on Bins/app/:**
48
+ * Files placed in Bins/app/ are purely for local organization. During push operations,
49
+ * the "app/" subdirectory is treated specially and stripped from path comparisons,
50
+ * as these files are served from the root path on the server without the "app/" prefix.
51
+ * Other custom bin directories (like "tpl/") maintain their directory name in the path.
52
+ *
53
+ * @param {string} pathValue - Server-side Path value
54
+ * @param {Object} structure - Bin hierarchy structure from structure.json
55
+ * @returns {string} - Local directory path under Bins/
46
56
  */
47
57
  function resolvePathToBinsDir(pathValue, structure) {
48
58
  const cleaned = pathValue.replace(/^\/+|\/+$/g, '');
@@ -229,6 +239,12 @@ export async function performClone(source, options = {}) {
229
239
  // Step 7: Save app.json with references
230
240
  await saveAppJson(appJson, contentRefs, otherRefs);
231
241
 
242
+ // Step 8: Create .app.json baseline for delta tracking
243
+ await saveBaselineFile(appJson);
244
+
245
+ // Step 9: Ensure .app.json is in .gitignore
246
+ await ensureGitignore(['.app.json']);
247
+
232
248
  log.plain('');
233
249
  log.success('Clone complete!');
234
250
  log.dim(' app.json saved to project root');
@@ -473,8 +489,9 @@ async function processEntityDirEntries(entityName, entries, options, serverTz) {
473
489
  if (columns.length > 0) {
474
490
  const inquirer = (await import('inquirer')).default;
475
491
 
476
- // Find best default
477
- const defaultCol = columns.includes('Name') ? 'Name'
492
+ // Find best default (app_version prioritizes Number → Name → UID)
493
+ const defaultCol = (entityName === 'app_version' && columns.includes('Number')) ? 'Number'
494
+ : columns.includes('Name') ? 'Name'
478
495
  : columns.includes('name') ? 'name'
479
496
  : columns.includes('UID') ? 'UID'
480
497
  : columns[0];
@@ -520,10 +537,31 @@ async function processEntityDirEntries(entityName, entries, options, serverTz) {
520
537
  }
521
538
 
522
539
  if (base64Cols.length > 0) {
540
+ // Load saved content extraction preferences
541
+ const savedExtractions = await loadEntityContentExtractions(entityName);
542
+ const newPreferences = savedExtractions ? { ...savedExtractions } : {};
543
+ let hasNewChoices = false;
544
+
523
545
  const inquirer = (await import('inquirer')).default;
524
546
 
525
547
  // Prompt per column: show snippet and ask extract yes/no + extension
526
548
  for (const { col, snippet } of base64Cols) {
549
+ // Check if we have a saved preference for this column
550
+ if (savedExtractions && col in savedExtractions) {
551
+ const savedPref = savedExtractions[col];
552
+ if (savedPref === false) {
553
+ // User previously chose not to extract this column
554
+ log.dim(` Skipping "${col}" (saved preference: no extraction)`);
555
+ continue;
556
+ } else if (typeof savedPref === 'string') {
557
+ // User previously chose to extract with a specific extension
558
+ log.dim(` Extracting "${col}" as .${savedPref} file (saved preference)`);
559
+ contentColsToExtract.push({ col, ext: savedPref });
560
+ continue;
561
+ }
562
+ }
563
+
564
+ // No saved preference - prompt the user
527
565
  const preview = snippet ? ` (${snippet})` : '';
528
566
  const guessed = guessExtensionForColumn(col);
529
567
 
@@ -541,9 +579,22 @@ async function processEntityDirEntries(entityName, entries, options, serverTz) {
541
579
  message: `File extension for "${col}":`,
542
580
  default: guessed,
543
581
  }]);
544
- contentColsToExtract.push({ col, ext: ext.replace(/^\./, '') });
582
+ const cleanExt = ext.replace(/^\./, '');
583
+ contentColsToExtract.push({ col, ext: cleanExt });
584
+ newPreferences[col] = cleanExt;
585
+ hasNewChoices = true;
586
+ } else {
587
+ // User chose not to extract - save this preference too
588
+ newPreferences[col] = false;
589
+ hasNewChoices = true;
545
590
  }
546
591
  }
592
+
593
+ // Save preferences if any new choices were made
594
+ if (hasNewChoices) {
595
+ await saveEntityContentExtractions(entityName, newPreferences);
596
+ log.dim(` Saved content extraction preferences for ${entityName}`);
597
+ }
547
598
  }
548
599
  }
549
600
 
@@ -554,6 +605,9 @@ async function processEntityDirEntries(entityName, entries, options, serverTz) {
554
605
  let name;
555
606
  if (filenameCol && record[filenameCol] !== null && record[filenameCol] !== undefined) {
556
607
  name = sanitizeFilename(String(record[filenameCol]));
608
+ } else if (entityName === 'app_version' && record.Number) {
609
+ // Special fallback for app_version: Number is preferred over Name
610
+ name = sanitizeFilename(String(record.Number));
557
611
  } else if (record.Name) {
558
612
  name = sanitizeFilename(String(record.Name));
559
613
  } else {
@@ -942,12 +996,33 @@ async function processMediaEntries(mediaRecords, structure, options, config, app
942
996
  */
943
997
  async function processRecord(entityName, record, structure, options, usedNames, placementPreference, serverTz, bulkAction = { value: null }) {
944
998
  let name = sanitizeFilename(String(record.Name || record.UID || 'untitled'));
945
- const ext = (record.Extension || 'txt').toLowerCase();
999
+
1000
+ // Determine file extension (priority: Extension field > Name field > Path field > empty)
1001
+ let ext = '';
1002
+ if (record.Extension) {
1003
+ // Use explicit Extension field
1004
+ ext = String(record.Extension).toLowerCase();
1005
+ } else {
1006
+ // Try to extract from Name field first
1007
+ if (record.Name) {
1008
+ const extractedExt = extname(String(record.Name));
1009
+ ext = extractedExt ? extractedExt.substring(1).toLowerCase() : '';
1010
+ }
1011
+
1012
+ // If no extension from Name, try Path field as fallback
1013
+ if (!ext && record.Path) {
1014
+ const extractedExt = extname(String(record.Path));
1015
+ ext = extractedExt ? extractedExt.substring(1).toLowerCase() : '';
1016
+ }
1017
+ }
1018
+ // If still no extension, ext remains '' (no extension)
946
1019
 
947
1020
  // Avoid double extension: if name already ends with .ext, strip it
948
- const extWithDot = `.${ext}`;
949
- if (name.toLowerCase().endsWith(extWithDot)) {
950
- name = name.substring(0, name.length - extWithDot.length);
1021
+ if (ext) {
1022
+ const extWithDot = `.${ext}`;
1023
+ if (name.toLowerCase().endsWith(extWithDot)) {
1024
+ name = name.substring(0, name.length - extWithDot.length);
1025
+ }
951
1026
  }
952
1027
 
953
1028
  // Determine target directory (default: bins/ for items without explicit placement)
@@ -1022,7 +1097,7 @@ async function processRecord(entityName, record, structure, options, usedNames,
1022
1097
  (typeof contentValue === 'string' && contentValue.length > 0)
1023
1098
  );
1024
1099
 
1025
- const fileName = `${finalName}.${ext}`;
1100
+ const fileName = ext ? `${finalName}.${ext}` : finalName;
1026
1101
  const filePath = join(dir, fileName);
1027
1102
  const metaPath = join(dir, `${finalName}.metadata.json`);
1028
1103
 
@@ -1195,3 +1270,51 @@ async function saveAppJson(appJson, contentRefs, otherRefs) {
1195
1270
 
1196
1271
  await writeFile('app.json', JSON.stringify(output, null, 2) + '\n');
1197
1272
  }
1273
+
1274
+ /**
1275
+ * Save .app.json baseline file with decoded base64 values.
1276
+ * This file tracks the server state for delta detection.
1277
+ */
1278
+ async function saveBaselineFile(appJson) {
1279
+ // Deep clone the app JSON
1280
+ const baseline = JSON.parse(JSON.stringify(appJson));
1281
+
1282
+ // Recursively decode all base64 fields
1283
+ decodeBase64Fields(baseline);
1284
+
1285
+ // Save to .app.json
1286
+ await saveAppJsonBaseline(baseline);
1287
+
1288
+ log.dim(' .app.json baseline created (system-managed, do not edit)');
1289
+ }
1290
+
1291
+ /**
1292
+ * Recursively decode base64 fields in an object or array.
1293
+ * Modifies the input object in-place.
1294
+ */
1295
+ function decodeBase64Fields(obj) {
1296
+ if (!obj || typeof obj !== 'object') {
1297
+ return;
1298
+ }
1299
+
1300
+ if (Array.isArray(obj)) {
1301
+ for (const item of obj) {
1302
+ decodeBase64Fields(item);
1303
+ }
1304
+ return;
1305
+ }
1306
+
1307
+ // Process each property
1308
+ for (const [key, value] of Object.entries(obj)) {
1309
+ if (value && typeof value === 'object') {
1310
+ // Check if it's a base64 encoded value
1311
+ if (!Array.isArray(value) && value.encoding === 'base64' && typeof value.value === 'string') {
1312
+ // Decode using existing resolveContentValue function
1313
+ obj[key] = resolveContentValue(value);
1314
+ } else {
1315
+ // Recursively process nested objects/arrays
1316
+ decodeBase64Fields(value);
1317
+ }
1318
+ }
1319
+ }
1320
+ }
@@ -144,6 +144,7 @@ function parseTarget(target) {
144
144
  }
145
145
 
146
146
  export const installCommand = new Command('install')
147
+ .alias('i')
147
148
  .description('Install or upgrade dbo-cli, plugins, or Claude Code commands')
148
149
  .argument('[target]', 'What to install: dbo[@version], plugins, claudecommands, claudecode, or a local path')
149
150
  .option('--claudecommand <name>', 'Install/update a specific Claude command by name')
@@ -161,7 +162,7 @@ export const installCommand = new Command('install')
161
162
  await installCli(parsed.version);
162
163
  break;
163
164
  case 'cli-local':
164
- await installCliFromLocal(parsed.path, options);
165
+ await installCliFromLocal(parsed.path);
165
166
  break;
166
167
  case 'plugins':
167
168
  await installPlugins(options);
@@ -316,7 +317,7 @@ async function installCli(version) {
316
317
  }
317
318
  }
318
319
 
319
- async function installCliFromLocal(localPath, options = {}) {
320
+ async function installCliFromLocal(localPath) {
320
321
  const resolvedPath = resolve(localPath);
321
322
 
322
323
  if (!await fileExists(resolvedPath)) {
@@ -366,16 +367,6 @@ async function installCliFromLocal(localPath, options = {}) {
366
367
  log.dim('You can also use: cd <path> && npm link');
367
368
  return;
368
369
  }
369
-
370
- // Offer to install/upgrade plugins after CLI install
371
- const { installPluginsNow } = await inquirer.prompt([{
372
- type: 'confirm', name: 'installPluginsNow',
373
- message: 'Install/upgrade Claude Code command plugins?',
374
- default: true,
375
- }]);
376
- if (installPluginsNow) {
377
- await installOrUpdateClaudeCommands(options);
378
- }
379
370
  }
380
371
 
381
372
  // ─── Claude Code Installation ───────────────────────────────────────────────
@@ -6,9 +6,11 @@ import { buildInputBody, checkSubmitErrors } from '../lib/input-parser.js';
6
6
  import { formatResponse, formatError } from '../lib/formatter.js';
7
7
  import { log } from '../lib/logger.js';
8
8
  import { shouldSkipColumn } from '../lib/columns.js';
9
- import { loadConfig, loadSynchronize, saveSynchronize } from '../lib/config.js';
9
+ import { loadConfig, loadSynchronize, saveSynchronize, loadAppJsonBaseline, saveAppJsonBaseline, hasBaseline } from '../lib/config.js';
10
10
  import { setFileTimestamps } from '../lib/timestamps.js';
11
11
  import { findMetadataFiles } from '../lib/diff.js';
12
+ import { detectChangedColumns, findBaselineEntry } from '../lib/delta.js';
13
+ import { buildDependencyGraph } from '../lib/dependencies.js';
12
14
 
13
15
  export const pushCommand = new Command('push')
14
16
  .description('Push local files back to DBO.io using metadata from pull')
@@ -52,6 +54,8 @@ async function processPendingDeletes(client, options) {
52
54
  log.info(`Processing ${sync.delete.length} pending deletion(s)...`);
53
55
 
54
56
  const remaining = [];
57
+ const deletedUids = [];
58
+
55
59
  for (const entry of sync.delete) {
56
60
  log.info(`Deleting "${entry.name}" (${entry.entity}:${entry.RowID})`);
57
61
 
@@ -71,6 +75,7 @@ async function processPendingDeletes(client, options) {
71
75
  const retryResult = await client.postUrlEncoded('/api/input/submit', retryBody);
72
76
  if (retryResult.successful) {
73
77
  log.success(` Deleted "${entry.name}" from server`);
78
+ deletedUids.push(entry.UID);
74
79
  } else {
75
80
  log.error(` Failed to delete "${entry.name}"`);
76
81
  formatResponse(retryResult, { json: options.json, jq: options.jq });
@@ -78,6 +83,7 @@ async function processPendingDeletes(client, options) {
78
83
  }
79
84
  } else if (result.successful) {
80
85
  log.success(` Deleted "${entry.name}" from server`);
86
+ deletedUids.push(entry.UID);
81
87
  } else {
82
88
  log.error(` Failed to delete "${entry.name}"`);
83
89
  formatResponse(result, { json: options.json, jq: options.jq });
@@ -89,6 +95,11 @@ async function processPendingDeletes(client, options) {
89
95
  }
90
96
  }
91
97
 
98
+ // Remove edit entries for successfully deleted records (spec requirement)
99
+ if (deletedUids.length > 0) {
100
+ sync.edit = (sync.edit || []).filter(e => !deletedUids.includes(e.UID));
101
+ }
102
+
92
103
  // Update synchronize.json with any remaining entries
93
104
  sync.delete = remaining;
94
105
  await saveSynchronize(sync);
@@ -131,8 +142,17 @@ async function pushDirectory(dirPath, client, options) {
131
142
 
132
143
  log.info(`Found ${metaFiles.length} record(s) to push`);
133
144
 
134
- let succeeded = 0;
135
- let failed = 0;
145
+ // Load baseline for delta detection
146
+ const baseline = await loadAppJsonBaseline();
147
+ const config = await loadConfig();
148
+
149
+ if (!baseline) {
150
+ log.warn('No .app.json baseline found — performing full push (run "dbo clone" to enable delta sync)');
151
+ }
152
+
153
+ // Collect metadata with detected changes
154
+ const toPush = [];
155
+ let skipped = 0;
136
156
 
137
157
  for (const metaPath of metaFiles) {
138
158
  let meta;
@@ -140,19 +160,19 @@ async function pushDirectory(dirPath, client, options) {
140
160
  meta = JSON.parse(await readFile(metaPath, 'utf8'));
141
161
  } catch (err) {
142
162
  log.warn(`Skipping invalid metadata: ${metaPath} (${err.message})`);
143
- failed++;
163
+ skipped++;
144
164
  continue;
145
165
  }
146
166
 
147
167
  if (!meta.UID && !meta._id) {
148
168
  log.warn(`Skipping "${metaPath}": no UID or _id found`);
149
- failed++;
169
+ skipped++;
150
170
  continue;
151
171
  }
152
172
 
153
173
  if (!meta._entity) {
154
174
  log.warn(`Skipping "${metaPath}": no _entity found`);
155
- failed++;
175
+ skipped++;
156
176
  continue;
157
177
  }
158
178
 
@@ -172,24 +192,77 @@ async function pushDirectory(dirPath, client, options) {
172
192
  }
173
193
  }
174
194
  }
175
- if (missingFiles) { failed++; continue; }
195
+ if (missingFiles) { skipped++; continue; }
196
+
197
+ // Detect changed columns (delta detection)
198
+ let changedColumns = null;
199
+ if (baseline) {
200
+ try {
201
+ changedColumns = await detectChangedColumns(metaPath, baseline, config);
202
+ if (changedColumns.length === 0) {
203
+ log.dim(` Skipping ${basename(metaPath)} — no changes detected`);
204
+ skipped++;
205
+ continue;
206
+ }
207
+ } catch (err) {
208
+ log.warn(`Delta detection failed for ${metaPath}: ${err.message} — performing full push`);
209
+ }
210
+ }
211
+
212
+ toPush.push({ meta, metaPath, changedColumns });
213
+ }
176
214
 
215
+ if (toPush.length === 0) {
216
+ log.info('No changes to push');
217
+ return;
218
+ }
219
+
220
+ // Group by entity and apply dependency ordering
221
+ const byEntity = {};
222
+ for (const item of toPush) {
223
+ const entity = item.meta._entity;
224
+ if (!byEntity[entity]) byEntity[entity] = [];
225
+ byEntity[entity].push(item);
226
+ }
227
+
228
+ // Process in dependency order
229
+ let succeeded = 0;
230
+ let failed = 0;
231
+ const successfulPushes = [];
232
+
233
+ for (const item of toPush) {
177
234
  try {
178
- await pushFromMetadata(meta, metaPath, client, options);
179
- succeeded++;
235
+ const success = await pushFromMetadata(item.meta, item.metaPath, client, options, item.changedColumns);
236
+ if (success) {
237
+ succeeded++;
238
+ successfulPushes.push(item);
239
+ } else {
240
+ failed++;
241
+ }
180
242
  } catch (err) {
181
- log.error(`Failed: ${metaPath} — ${err.message}`);
243
+ log.error(`Failed: ${item.metaPath} — ${err.message}`);
182
244
  failed++;
183
245
  }
184
246
  }
185
247
 
186
- log.info(`Push complete: ${succeeded} succeeded, ${failed} failed`);
248
+ // Update baseline after successful pushes
249
+ if (baseline && successfulPushes.length > 0) {
250
+ await updateBaselineAfterPush(baseline, successfulPushes);
251
+ }
252
+
253
+ log.info(`Push complete: ${succeeded} succeeded, ${failed} failed, ${skipped} skipped`);
187
254
  }
188
255
 
189
256
  /**
190
257
  * Build and submit input expressions from a metadata object
258
+ * @param {Object} meta - Metadata object
259
+ * @param {string} metaPath - Path to metadata file
260
+ * @param {DboClient} client - API client
261
+ * @param {Object} options - Push options
262
+ * @param {string[]|null} changedColumns - Optional array of changed column names (for delta sync)
263
+ * @returns {Promise<boolean>} - True if push succeeded
191
264
  */
192
- async function pushFromMetadata(meta, metaPath, client, options) {
265
+ async function pushFromMetadata(meta, metaPath, client, options, changedColumns = null) {
193
266
  const uid = meta.UID || meta._id;
194
267
  const entity = meta._entity;
195
268
  const contentCols = new Set(meta._contentColumns || []);
@@ -210,11 +283,17 @@ async function pushFromMetadata(meta, metaPath, client, options) {
210
283
  const dataExprs = [];
211
284
  let metaUpdated = false;
212
285
 
286
+ // If changedColumns is provided, only push those columns (delta sync)
287
+ const columnsToProcess = changedColumns ? new Set(changedColumns) : null;
288
+
213
289
  for (const [key, value] of Object.entries(meta)) {
214
290
  if (shouldSkipColumn(key)) continue;
215
291
  if (key === 'UID') continue; // UID is the identifier, not a column to update
216
292
  if (value === null || value === undefined) continue;
217
293
 
294
+ // Delta sync: skip columns not in changedColumns
295
+ if (columnsToProcess && !columnsToProcess.has(key)) continue;
296
+
218
297
  const isContentCol = contentCols.has(key);
219
298
 
220
299
  // --meta-only: skip content columns
@@ -236,10 +315,11 @@ async function pushFromMetadata(meta, metaPath, client, options) {
236
315
 
237
316
  if (dataExprs.length === 0) {
238
317
  log.warn(`Nothing to push for ${basename(metaPath)}`);
239
- return;
318
+ return false;
240
319
  }
241
320
 
242
- log.info(`Pushing ${basename(metaPath, '.metadata.json')} (${entity}:${uid}) ${dataExprs.length} field(s)`);
321
+ const fieldLabel = changedColumns ? `${dataExprs.length} changed field(s)` : `${dataExprs.length} field(s)`;
322
+ log.info(`Pushing ${basename(metaPath, '.metadata.json')} (${entity}:${uid}) — ${fieldLabel}`);
243
323
 
244
324
  const extraParams = { '_confirm': options.confirm };
245
325
  if (options.ticket) extraParams['_OverrideTicketID'] = options.ticket;
@@ -264,7 +344,7 @@ async function pushFromMetadata(meta, metaPath, client, options) {
264
344
  }
265
345
 
266
346
  if (!result.successful) {
267
- throw new Error('Push failed');
347
+ return false;
268
348
  }
269
349
 
270
350
  // Update file timestamps from server response
@@ -293,6 +373,64 @@ async function pushFromMetadata(meta, metaPath, client, options) {
293
373
  }
294
374
  }
295
375
  } catch { /* non-critical timestamp update */ }
376
+
377
+ return true;
378
+ }
379
+
380
+ /**
381
+ * Normalize a local file path for comparison with server-side Path.
382
+ *
383
+ * The Bins/ directory is used for local file organization and does not reflect
384
+ * the actual server-side serving path. This function strips organizational prefixes
385
+ * to enable correct path comparison during push operations.
386
+ *
387
+ * **Directory Structure:**
388
+ * - `Bins/` — Local organizational root (always stripped)
389
+ * - `Bins/app/` — Special case: the "app" subdirectory is also stripped because it's
390
+ * purely organizational. Files in Bins/app/ are served from the app root without
391
+ * the "app/" prefix on the server.
392
+ * - `Bins/custom_name/` — Custom bin directories (tpl/, ticket_test/, etc.) are
393
+ * preserved because they represent actual bin hierarchies that serve from their
394
+ * directory name.
395
+ *
396
+ * **Why "app/" is special:**
397
+ * The main app bin is placed in Bins/app/ locally for organization, but server-side
398
+ * these files are served from the root path (no "app/" prefix). Other custom bins
399
+ * like "tpl/" maintain their directory name in the serving path.
400
+ *
401
+ * Examples:
402
+ * "Bins/app/assets/css/file.css" → "assets/css/file.css" (strips Bins/app/)
403
+ * "Bins/tpl/header.html" → "tpl/header.html" (preserves tpl/)
404
+ * "Bins/assets/css/file.css" → "assets/css/file.css" (strips Bins/ only)
405
+ * "Sites/MySite/content/page.html" → "Sites/MySite/content/page.html" (unchanged)
406
+ *
407
+ * @param {string} localPath - Relative path from project root
408
+ * @returns {string} - Normalized path for comparison with metadata Path column
409
+ */
410
+ function normalizePathForComparison(localPath) {
411
+ // Convert backslashes to forward slashes (Windows compatibility)
412
+ const normalized = localPath.replace(/\\/g, '/');
413
+
414
+ // Strip leading and trailing slashes
415
+ const cleaned = normalized.replace(/^\/+|\/+$/g, '');
416
+
417
+ // Check if path starts with "Bins/" organizational directory
418
+ if (cleaned.startsWith('Bins/')) {
419
+ // Remove "Bins/" prefix (length = 5)
420
+ const withoutBins = cleaned.substring(5);
421
+
422
+ // Special case: strip "app/" organizational subdirectory
423
+ // This is the only special subdirectory - all others (tpl/, assets/, etc.) are preserved
424
+ if (withoutBins.startsWith('app/')) {
425
+ return withoutBins.substring(4); // Remove "app/" (length = 4)
426
+ }
427
+
428
+ // For all other paths, return without Bins/ prefix
429
+ return withoutBins;
430
+ }
431
+
432
+ // Path doesn't start with Bins/, return as-is
433
+ return cleaned;
296
434
  }
297
435
 
298
436
  /**
@@ -319,15 +457,25 @@ async function checkPathMismatch(meta, metaPath, options) {
319
457
  const currentFilePath = join(metaDir, contentFileName);
320
458
  const currentRelPath = relative(process.cwd(), currentFilePath);
321
459
 
322
- // Normalize stored path for comparison
323
- const storedPath = String(meta.Path).replace(/^\/+|\/+$/g, '');
324
- const currentPath = currentRelPath.replace(/\\/g, '/');
460
+ // Normalize both paths for comparison
461
+ // The metadata Path may also incorrectly contain Bins/app/ prefix from old data
462
+ const storedPathRaw = String(meta.Path).replace(/^\/+|\/+$/g, ''); // Strip leading/trailing slashes
463
+ const normalizedStoredPath = normalizePathForComparison(storedPathRaw);
464
+ const normalizedCurrentPath = normalizePathForComparison(currentRelPath);
465
+
466
+ // Compare with flexible handling of leading slash
467
+ // (Both /path and path are considered equivalent)
468
+ const storedNormalized = normalizedStoredPath.replace(/^\/+/, '');
469
+ const currentNormalized = normalizedCurrentPath.replace(/^\/+/, '');
325
470
 
326
- if (storedPath === currentPath) return;
471
+ if (storedNormalized === currentNormalized) return;
472
+
473
+ // Use normalized path for display
474
+ const currentPath = normalizedCurrentPath;
327
475
 
328
476
  // Path mismatch detected
329
477
  log.warn(`Path mismatch for "${metaBase}":`);
330
- log.label(' Metadata Path', storedPath);
478
+ log.label(' Metadata Path', storedPathRaw);
331
479
  log.label(' Current path ', currentPath);
332
480
 
333
481
  let updatePath = options.yes;
@@ -349,3 +497,69 @@ async function checkPathMismatch(meta, metaPath, options) {
349
497
  log.success(` Path updated to "${currentPath}"`);
350
498
  }
351
499
  }
500
+
501
/**
 * Update baseline file (.app.json) after successful pushes.
 * Syncs changed column values and timestamps from metadata to baseline.
 *
 * @param {Object} baseline - The baseline JSON object
 * @param {Array} successfulPushes - Array of { meta, metaPath, changedColumns }
 */
async function updateBaselineAfterPush(baseline, successfulPushes) {
  let dirty = false;

  for (const { meta, metaPath, changedColumns } of successfulPushes) {
    const uid = meta.UID || meta._id;

    const entry = findBaselineEntry(baseline, meta._entity, uid);
    if (!entry) {
      log.warn(` Baseline entry not found for ${meta._entity}:${uid} — skipping baseline update`);
      continue;
    }

    // Carry the server-side timestamps over so future delta detection
    // compares against the state we just pushed.
    if (meta._LastUpdated) {
      entry._LastUpdated = meta._LastUpdated;
      dirty = true;
    }
    if (meta._LastUpdatedUserID) {
      entry._LastUpdatedUserID = meta._LastUpdatedUserID;
      dirty = true;
    }

    // Fall back to every pushable column when the caller didn't narrow the set.
    const columns = changedColumns
      || Object.keys(meta).filter((k) => !shouldSkipColumn(k) && k !== 'UID');

    for (const column of columns) {
      const raw = meta[column];
      if (raw === null || raw === undefined) continue;

      const asString = String(raw);
      if (!asString.startsWith('@')) {
        // Scalar value: mirror it into the baseline directly.
        entry[column] = raw;
        dirty = true;
        continue;
      }

      // "@file" reference: the baseline stores the referenced file's content inline.
      try {
        const referencedFile = join(dirname(metaPath), asString.slice(1));
        entry[column] = await readFile(referencedFile, 'utf8');
        dirty = true;
      } catch (err) {
        log.warn(` Failed to read ${asString} for baseline update: ${err.message}`);
      }
    }
  }

  // Persist only when something actually changed.
  if (dirty) {
    await saveAppJsonBaseline(baseline);
    log.dim(' Baseline updated with pushed changes');
  }
}
package/src/lib/config.js CHANGED
@@ -8,6 +8,7 @@ const CONFIG_LOCAL_FILE = 'config.local.json';
8
8
  const CREDENTIALS_FILE = 'credentials.json';
9
9
  const COOKIES_FILE = 'cookies.txt';
10
10
  const SYNCHRONIZE_FILE = 'synchronize.json';
11
+ const BASELINE_FILE = '.app.json';
11
12
 
12
13
  function dboDir() {
13
14
  return join(process.cwd(), DBO_DIR);
@@ -248,6 +249,43 @@ export async function loadEntityDirPreference(entityKey) {
248
249
  }
249
250
  }
250
251
 
252
/**
 * Save content extraction preferences for an entity type.
 * Stores which base64 columns should be extracted as companion files and their extensions.
 *
 * @param {string} entityKey - Entity type (e.g., 'extension', 'site')
 * @param {Object} extractions - Map of column names to extensions: { "String10": "css", "String7": false, ... }
 *                               false means user explicitly chose not to extract that column
 */
export async function saveEntityContentExtractions(entityKey, extractions) {
  await mkdir(dboDir(), { recursive: true });

  // Merge into the existing config so unrelated keys survive the rewrite.
  let config = {};
  try {
    config = JSON.parse(await readFile(configPath(), 'utf8'));
  } catch {
    // No existing config — start from an empty object.
  }

  // Config key convention: extension → ExtensionContentExtractions
  const configKey = entityKey.charAt(0).toUpperCase() + entityKey.slice(1) + 'ContentExtractions';
  config[configKey] = extractions;

  await writeFile(configPath(), JSON.stringify(config, null, 2) + '\n');
}
271
+
272
/**
 * Load content extraction preferences for an entity type from .dbo/config.json.
 *
 * @param {string} entityKey - Entity type (e.g., 'extension', 'site')
 * @returns {Object|null} - Map of column names to extensions, or null if not saved
 */
export async function loadEntityContentExtractions(entityKey) {
  try {
    const config = JSON.parse(await readFile(configPath(), 'utf8'));
    // Config key convention mirrors saveEntityContentExtractions.
    const configKey = entityKey.charAt(0).toUpperCase() + entityKey.slice(1) + 'ContentExtractions';
    return config[configKey] || null;
  } catch {
    // Missing or unreadable config means no saved preference.
    return null;
  }
}
288
+
251
289
  /**
252
290
  * Save user profile fields (FirstName, LastName, Email) into credentials.json.
253
291
  */
@@ -462,3 +500,35 @@ export async function ensureGitignore(patterns) {
462
500
  await writeFile(gitignorePath, content + addition);
463
501
  for (const p of toAdd) log.dim(` Added ${p} to .gitignore`);
464
502
  }
503
+
504
// ─── Baseline (.app.json) ─────────────────────────────────────────────────

/** Absolute path of the baseline file in the current project root. */
function baselinePath() {
  return join(process.cwd(), BASELINE_FILE);
}

/**
 * Check if baseline file (.app.json) exists.
 */
export async function hasBaseline() {
  return exists(baselinePath());
}

/**
 * Load .app.json baseline file (tracks server state for delta detection).
 * @returns {Promise<Object|null>} Parsed baseline, or null when missing/unreadable.
 */
export async function loadAppJsonBaseline() {
  try {
    const raw = await readFile(baselinePath(), 'utf8');
    return JSON.parse(raw);
  } catch {
    // Missing or corrupt baseline is treated as "no baseline".
    return null;
  }
}

/**
 * Save .app.json baseline file.
 */
export async function saveAppJsonBaseline(data) {
  await writeFile(baselinePath(), JSON.stringify(data, null, 2) + '\n');
}
@@ -0,0 +1,204 @@
1
import { readFile, writeFile } from 'fs/promises';
import { join, dirname } from 'path';
import { log } from './logger.js';
4
+
5
/**
 * Load the baseline file (.app.json) from disk.
 *
 * @param {string} cwd - Current working directory
 * @returns {Promise<Object|null>} - Baseline JSON or null if not found
 */
export async function loadBaseline(cwd = process.cwd()) {
  try {
    const raw = await readFile(join(cwd, '.app.json'), 'utf8');
    return JSON.parse(raw);
  } catch (err) {
    // A missing baseline is a normal state — only warn on read/parse failures.
    if (err.code !== 'ENOENT') {
      log.warn(`Failed to parse .app.json: ${err.message}`);
    }
    return null;
  }
}
24
+
25
/**
 * Save baseline data to .app.json file.
 *
 * Uses the module's static `writeFile` import instead of awaiting a dynamic
 * `import('fs/promises')` on every call, and appends a trailing newline so
 * the written bytes match what config.js's saveAppJsonBaseline produces for
 * the same `.app.json` file (avoids spurious content differences).
 *
 * @param {Object} data - Baseline JSON data
 * @param {string} cwd - Current working directory
 */
export async function saveBaseline(data, cwd = process.cwd()) {
  const baselinePath = join(cwd, '.app.json');
  await writeFile(baselinePath, JSON.stringify(data, null, 2) + '\n', 'utf8');
}
36
+
37
/**
 * Find a baseline entry by UID and entity type.
 * Traverses the baseline's children hierarchy.
 *
 * @param {Object} baseline - The baseline JSON
 * @param {string} entity - Entity type (e.g., "content", "output")
 * @param {string} uid - Record UID
 * @returns {Object|null} - Matching entry or null
 */
export function findBaselineEntry(baseline, entity, uid) {
  const candidates = baseline?.children?.[entity];
  if (!Array.isArray(candidates)) {
    return null;
  }
  const match = candidates.find((record) => record.UID === uid);
  return match ?? null;
}
58
+
59
/**
 * Compare a file's content on disk against a baseline value.
 *
 * @param {string} filePath - Absolute path to file
 * @param {string|null} baselineValue - Baseline value to compare against
 * @returns {Promise<boolean>} - True if different, false if same
 */
export async function compareFileContent(filePath, baselineValue) {
  let onDisk;
  try {
    onDisk = await readFile(filePath, 'utf8');
  } catch (err) {
    // Unreadable/missing file counts as changed so it still gets pushed.
    log.warn(`Failed to read ${filePath}: ${err.message}`);
    return true;
  }

  // CRLF vs LF and surrounding whitespace are not meaningful differences.
  const canon = (text) => text.replace(/\r\n/g, '\n').trim();
  return canon(onDisk) !== canon(baselineValue || '');
}
81
+
82
/**
 * Detect which columns have changed by comparing current metadata against baseline.
 *
 * @param {string} metaPath - Path to metadata.json file
 * @param {Object} baseline - The baseline JSON
 * @param {Object} config - CLI config (for resolving file paths)
 * @returns {Promise<string[]>} - Array of changed column names
 */
export async function detectChangedColumns(metaPath, baseline, config) {
  const metadata = JSON.parse(await readFile(metaPath, 'utf8'));

  const baselineEntry = findBaselineEntry(baseline, metadata._entity, metadata.UID);

  // No baseline for this record → every user-defined column counts as changed.
  if (!baselineEntry) {
    return getAllUserColumns(metadata);
  }

  const metaDir = dirname(metaPath);
  const changed = [];

  for (const [column, value] of Object.entries(metadata)) {
    // System columns (leading underscore, UID, children) never participate.
    if (shouldSkipColumn(column)) {
      continue;
    }

    if (isReference(value)) {
      // @reference column: diff the companion file against the baseline text.
      const filePath = resolveReferencePath(value, metaDir);
      if (await compareFileContent(filePath, baselineEntry[column])) {
        changed.push(column);
      }
    } else if (normalizeValue(value) !== normalizeValue(baselineEntry[column])) {
      // Scalar column: compare normalized string representations.
      changed.push(column);
    }
  }

  return changed;
}
138
+
139
/**
 * Get all user-defined columns (non-system columns).
 *
 * @param {Object} metadata - Metadata object
 * @returns {string[]} - Array of user column names
 */
function getAllUserColumns(metadata) {
  const userColumns = [];
  for (const name of Object.keys(metadata)) {
    if (!shouldSkipColumn(name)) {
      userColumns.push(name);
    }
  }
  return userColumns;
}
148
+
149
/**
 * Determine if a column should be skipped (system columns).
 * System columns are those with a leading underscore, plus the UID key
 * and the children hierarchy container.
 *
 * @param {string} columnName - Column name
 * @returns {boolean} - True if should skip
 */
function shouldSkipColumn(columnName) {
  if (columnName.startsWith('_')) {
    return true;
  }
  return columnName === 'UID' || columnName === 'children';
}
161
+
162
/**
 * Check if a value is a @reference object.
 *
 * Fix: the original returned the raw falsy input itself (null, 0, '')
 * instead of a boolean when `value` was falsy, contradicting the documented
 * `@returns {boolean}` contract; `Boolean(value)` guarantees a boolean.
 *
 * @param {*} value - Value to check
 * @returns {boolean} - True if value is a plain object carrying an '@reference' key
 */
function isReference(value) {
  return Boolean(value) &&
    typeof value === 'object' &&
    !Array.isArray(value) &&
    value['@reference'] !== undefined;
}
174
+
175
/**
 * Resolve a @reference path to absolute file path.
 *
 * @param {Object} reference - Reference object with @reference property
 * @param {string} baseDir - Base directory containing metadata
 * @returns {string} - Absolute file path
 */
function resolveReferencePath(reference, baseDir) {
  return join(baseDir, reference['@reference']);
}
186
+
187
/**
 * Normalize a value for comparison (handle null, undefined, etc.).
 *
 * @param {*} value - Value to normalize
 * @returns {string} - Normalized string value
 */
function normalizeValue(value) {
  // null and undefined both normalize to the empty string.
  if (value === null || value === undefined) {
    return '';
  }

  // A baseline may still hold an unresolved @reference object; serialize it
  // so two identical references compare equal.
  const isRefObject = typeof value === 'object' &&
    !Array.isArray(value) &&
    value['@reference'] !== undefined;
  if (isRefObject) {
    return JSON.stringify(value);
  }

  return String(value).trim();
}
@@ -0,0 +1,131 @@
1
+ /**
2
+ * Dependency management for entity synchronization.
3
+ * Ensures children are processed before parents to maintain referential integrity.
4
+ */
5
+
6
/**
 * Entity dependency hierarchy.
 * Lower levels must be processed before higher levels.
 * This ensures foreign key relationships are maintained (children before parents).
 *
 * Consumed by getDependencyLevel(); entities absent from this map default to
 * level 0 and therefore sort before everything listed here on add/edit.
 */
export const ENTITY_DEPENDENCIES = {
  // Level 1: Most dependent (children)
  'output_value_filter': 1,
  // NOTE(review): listed under the "Level 1" heading but assigned level 2,
  // which ties it with its parent 'output_value' below — confirm whether
  // this should be 1.
  'output_value_entity_column_rel': 2,

  // Level 2: Mid-level dependencies
  'output_value': 2,

  // Level 3: Parent entities
  'output': 3,

  // Entities not in this map default to level 0 (see getDependencyLevel).
};
24
+
25
/**
 * Get the dependency level for an entity type.
 *
 * @param {string} entity - Entity type name
 * @returns {number} - Dependency level (0 = no dependencies, higher = more dependent)
 */
function getDependencyLevel(entity) {
  // All mapped levels are >= 1, so the falsy fallback only fires for
  // entities missing from ENTITY_DEPENDENCIES.
  const level = ENTITY_DEPENDENCIES[entity];
  return level || 0;
}
34
+
35
/**
 * Build a dependency-ordered structure from synchronize.json data.
 * Returns operations grouped by type and sorted by dependency level.
 *
 * @param {Object} synchronizeData - The synchronize.json contents
 * @returns {Object} - Ordered structure: { delete: [], add: [], edit: [] }
 */
export function buildDependencyGraph(synchronizeData) {
  if (!synchronizeData) {
    return { delete: [], add: [], edit: [] };
  }

  const ordered = {};
  for (const operation of ['delete', 'add', 'edit']) {
    ordered[operation] = sortByDependency(synchronizeData[operation] || [], operation);
  }
  return ordered;
}
55
+
56
/**
 * Sort entries by dependency level and UID.
 * For delete operations: process parents before children (descending order)
 * For add/edit operations: process children before parents (ascending order)
 *
 * @param {Array} entries - Array of sync entries
 * @param {string} operationType - Operation type: 'delete', 'add', or 'edit'
 * @returns {Array} - Sorted entries
 */
function sortByDependency(entries, operationType) {
  if (!Array.isArray(entries) || entries.length === 0) {
    return [];
  }

  const grouped = groupByEntity(entries);

  // Deletes walk the hierarchy top-down (higher level first);
  // adds/edits walk bottom-up (lower level first).
  const direction = operationType === 'delete' ? -1 : 1;
  const orderedTypes = Object.keys(grouped).sort(
    (a, b) => direction * (getDependencyLevel(a) - getDependencyLevel(b))
  );

  // Flatten the groups back into one array, keeping a stable UID order
  // inside each entity type.
  const result = [];
  for (const entityType of orderedTypes) {
    const bucket = grouped[entityType];
    bucket.sort((x, y) => (x.UID || '').localeCompare(y.UID || ''));
    result.push(...bucket);
  }

  return result;
}
99
+
100
/**
 * Group entries by entity type.
 * Entries with neither `entity` nor `_entity` land in the "unknown" bucket.
 *
 * @param {Array} entries - Array of sync entries
 * @returns {Object} - Entries grouped by entity: { entityType: [...entries] }
 */
function groupByEntity(entries) {
  const buckets = {};
  for (const entry of entries) {
    const key = entry.entity || entry._entity || 'unknown';
    (buckets[key] ??= []).push(entry);
  }
  return buckets;
}
119
+
120
/**
 * Sort entries by UID for consistent ordering within an entity type.
 * Returns a new array; the input is never mutated. Non-array input yields [].
 *
 * @param {Array} entries - Array of entries
 * @returns {Array} - Sorted entries
 */
export function sortEntriesByUid(entries) {
  if (!Array.isArray(entries)) {
    return [];
  }
  const copy = [...entries];
  copy.sort((left, right) => (left.UID || '').localeCompare(right.UID || ''));
  return copy;
}
@@ -37,7 +37,7 @@ Here are the available commands:
37
37
  | rm | Remove a file and stage server deletion |
38
38
  | deploy | Deploy via manifest |
39
39
  | cache | Manage cache |
40
- | install | Install or upgrade CLI, plugins, Claude commands |
40
+ | install | Install or upgrade CLI, plugins, Claude commands (shorthand: `i`) |
41
41
 
42
42
  Just tell me what you'd like to do and I'll help you build the right command!
43
43
 
@@ -80,9 +80,9 @@ Available subcommands:
80
80
  - `rm -f <path>` — Remove without confirmation prompts
81
81
  - `rm --keep-local <path>` — Stage server deletions without deleting local files/directories
82
82
  - `deploy [name]` — Deploy via dbo.deploy.json manifest
83
- - `install` — Install or upgrade CLI, plugins, or Claude commands
84
- - `install dbo` or `install dbo@latest` — Install/upgrade the CLI from npm
85
- - `install dbo@0.4.1` — Install a specific CLI version
83
+ - `install` (alias: `i`) — Install or upgrade CLI, plugins, or Claude commands
84
+ - `i dbo` or `i dbo@latest` — Install/upgrade the CLI from npm
85
+ - `i dbo@0.4.1` — Install a specific CLI version
86
86
  - `install /path/to/src` — Install CLI from local source
87
87
  - `install plugins` — Install/upgrade Claude command plugins
88
88
  - `install plugins --global` — Install plugins to `~/.claude/commands/` (shared across projects)