@dboio/cli 0.9.8 → 0.11.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. package/README.md +172 -70
  2. package/bin/dbo.js +2 -0
  3. package/bin/postinstall.js +9 -1
  4. package/package.json +3 -3
  5. package/plugins/claude/dbo/commands/dbo.md +3 -3
  6. package/plugins/claude/dbo/skills/cli/SKILL.md +3 -3
  7. package/src/commands/add.js +50 -0
  8. package/src/commands/clone.js +720 -552
  9. package/src/commands/content.js +7 -3
  10. package/src/commands/deploy.js +22 -7
  11. package/src/commands/diff.js +41 -3
  12. package/src/commands/init.js +42 -79
  13. package/src/commands/input.js +5 -0
  14. package/src/commands/login.js +2 -2
  15. package/src/commands/mv.js +3 -0
  16. package/src/commands/output.js +8 -10
  17. package/src/commands/pull.js +268 -87
  18. package/src/commands/push.js +814 -94
  19. package/src/commands/rm.js +4 -1
  20. package/src/commands/status.js +12 -1
  21. package/src/commands/sync.js +71 -0
  22. package/src/lib/client.js +10 -0
  23. package/src/lib/config.js +80 -8
  24. package/src/lib/delta.js +178 -25
  25. package/src/lib/diff.js +150 -20
  26. package/src/lib/folder-icon.js +120 -0
  27. package/src/lib/ignore.js +2 -3
  28. package/src/lib/input-parser.js +37 -10
  29. package/src/lib/metadata-templates.js +21 -4
  30. package/src/lib/migrations.js +75 -0
  31. package/src/lib/save-to-disk.js +1 -1
  32. package/src/lib/scaffold.js +58 -3
  33. package/src/lib/structure.js +158 -21
  34. package/src/lib/toe-stepping.js +381 -0
  35. package/src/migrations/001-transaction-key-preset-scope.js +35 -0
  36. package/src/migrations/002-move-entity-dirs-to-lib.js +190 -0
  37. package/src/migrations/003-move-deploy-config.js +50 -0
  38. package/src/migrations/004-rename-output-files.js +101 -0
@@ -6,6 +6,7 @@ import { formatError } from '../lib/formatter.js';
6
6
  import { addDeleteEntry, removeAppJsonReference } from '../lib/config.js';
7
7
  import { findMetadataFiles } from '../lib/diff.js';
8
8
  import { loadStructureFile, findBinByPath, findChildBins, BINS_DIR } from '../lib/structure.js';
9
+ import { runPendingMigrations } from '../lib/migrations.js';
9
10
 
10
11
  export const rmCommand = new Command('rm')
11
12
  .description('Remove a file or directory locally and stage server deletions for the next dbo push')
@@ -13,8 +14,10 @@ export const rmCommand = new Command('rm')
13
14
  .option('-f, --force', 'Skip confirmation prompts')
14
15
  .option('--keep-local', 'Only stage server deletion, do not delete local files')
15
16
  .option('--hard', 'Immediately delete local files (no Trash; legacy behavior)')
17
+ .option('--no-migrate', 'Skip pending migrations for this invocation')
16
18
  .action(async (targetPath, options) => {
17
19
  try {
20
+ await runPendingMigrations(options);
18
21
  const pathStat = await stat(targetPath).catch(() => null);
19
22
  if (!pathStat) {
20
23
  log.error(`Path not found: "${targetPath}"`);
@@ -172,7 +175,7 @@ async function rmFile(filePath, options) {
172
175
  process.exit(1);
173
176
  }
174
177
  if (!rowId) {
175
- log.error(`No row ID found in "${metaPath}". Cannot build delete expression.`);
178
+ log.error(`No row ID found in "${metaPath}". Cannot build delete expression.\n Metadata needs _id or ${entity.charAt(0).toUpperCase() + entity.slice(1)}ID. Run "dbo pull" to populate.`);
176
179
  process.exit(1);
177
180
  }
178
181
 
@@ -5,10 +5,12 @@ import { access } from 'fs/promises';
5
5
  import { join } from 'path';
6
6
  import { homedir } from 'os';
7
7
  import { log } from '../lib/logger.js';
8
+ import { runPendingMigrations, countPendingMigrations } from '../lib/migrations.js';
8
9
 
9
10
  export const statusCommand = new Command('status')
10
11
  .description('Show current DBO CLI configuration and session status')
11
- .action(async () => {
12
+ .option('--no-migrate', 'Skip pending migrations for this invocation')
13
+ .action(async (options) => {
12
14
  try {
13
15
  const initialized = await isInitialized();
14
16
  const config = await loadConfig();
@@ -40,6 +42,15 @@ export const statusCommand = new Command('status')
40
42
  const transactionKeyPreset = await loadTransactionKeyPreset();
41
43
  log.label('Transaction Key', transactionKeyPreset || '(not set — defaults to RowUID)');
42
44
 
45
+ // Run pending migrations (no-op if --no-migrate)
46
+ await runPendingMigrations(options);
47
+
48
+ // Report pending count (after running, so completed ones are excluded)
49
+ const pending = await countPendingMigrations();
50
+ if (pending > 0) {
51
+ log.label('Pending migrations', `${pending} (will run on next command; use --no-migrate to skip)`);
52
+ }
53
+
43
54
  // Display plugin status
44
55
  const scopes = await getAllPluginScopes();
45
56
  const pluginNames = Object.keys(scopes);
@@ -0,0 +1,71 @@
import { Command } from 'commander';
import { log } from '../lib/logger.js';
import { loadConfig, loadAppConfig, saveAppJsonBaseline } from '../lib/config.js';
import { DboClient } from '../lib/client.js';
import { decodeBase64Fields } from './clone.js';
import { runPendingMigrations } from '../lib/migrations.js';

/**
 * `dbo sync` — synchronise local state with the server.
 * Currently supports one mode: `--baseline`, which re-fetches the app JSON
 * from the server and rewrites .dbo/.app_baseline.json without touching any
 * local working files.
 */
export const syncCommand = new Command('sync')
  .description('Synchronise local state with the server')
  .option('--baseline', 'Re-fetch server state and update .dbo/.app_baseline.json (does not modify local files)')
  .option('--no-migrate', 'Skip pending migrations for this invocation')
  .action(async (options) => {
    await runPendingMigrations(options);

    // Guard clauses: a sync mode, a configured domain, and a cloned app are all required.
    if (!options.baseline) {
      log.warn('No sync mode specified. Use --baseline to reset the baseline file.');
      process.exit(1);
    }

    const config = await loadConfig();
    if (!config.domain) {
      log.error('No domain configured. Run "dbo init" first.');
      process.exit(1);
    }

    const appConfig = await loadAppConfig();
    if (!appConfig.AppShortName) {
      log.error('No AppShortName found. Run "dbo clone" first.');
      process.exit(1);
    }

    const { default: ora } = await import('ora');
    const spinner = ora('Syncing baseline from server...').start();
    const client = new DboClient({ domain: config.domain, verbose: options.verbose });

    let result;
    try {
      result = await client.get(`/api/app/object/${appConfig.AppShortName}`);
    } catch (err) {
      spinner.fail(`Failed to fetch app JSON: ${err.message}`);
      process.exit(1);
    }

    // The API response envelope varies; normalise it to a single app record.
    const appRecord = extractAppRecord(result.payload || result.data);

    if (!appRecord) {
      spinner.fail(`No app found with ShortName "${appConfig.AppShortName}"`);
      process.exit(1);
    }

    // Deep clone and decode base64 fields (same logic as clone's saveBaselineFile).
    const baseline = JSON.parse(JSON.stringify(appRecord));
    decodeBase64Fields(baseline);

    await saveAppJsonBaseline(baseline);
    spinner.succeed('.dbo/.app_baseline.json updated from server');
    log.dim(' Run "dbo push" to sync local changes against the new baseline');
  });

/**
 * Pick the single app record out of the response payload, which may be a bare
 * array, a {Rows}/{rows} envelope, or the record object itself.
 * Returns null when no record can be identified.
 */
function extractAppRecord(data) {
  if (Array.isArray(data)) {
    return data.length > 0 ? data[0] : null;
  }
  if (data?.Rows?.length > 0) {
    return data.Rows[0];
  }
  if (data?.rows?.length > 0) {
    return data.rows[0];
  }
  if (data && typeof data === 'object' && (data.UID || data.ShortName)) {
    return data;
  }
  return null;
}
package/src/lib/client.js CHANGED
@@ -123,6 +123,16 @@ export class DboClient {
123
123
  return this._parseResponse(response);
124
124
  }
125
125
 
126
  /**
   * Clear the server-side cache. Must be called after POST transactions so that
   * subsequent GET requests (diff, pull, toe-stepping) return fresh data.
   *
   * Best-effort: any failure (network error, non-2xx) is swallowed so a cache
   * flush can never break the command that triggered it.
   *
   * @returns {Promise<void>}
   */
  async voidCache() {
    try {
      await this.request('/?voidcache=true');
    } catch { /* best-effort — don't block on failure */ }
  }
135
+
126
136
  /**
127
137
  * Fetch a URL and return the raw response as a Buffer (for binary downloads).
128
138
  */
package/src/lib/config.js CHANGED
@@ -1,4 +1,4 @@
1
- import { readFile, writeFile, mkdir, access } from 'fs/promises';
1
+ import { readFile, writeFile, mkdir, access, chmod, unlink } from 'fs/promises';
2
2
  import { join } from 'path';
3
3
  import { log } from './logger.js';
4
4
 
@@ -8,7 +8,7 @@ const CONFIG_LOCAL_FILE = 'config.local.json';
8
8
  const CREDENTIALS_FILE = 'credentials.json';
9
9
  const COOKIES_FILE = 'cookies.txt';
10
10
  const SYNCHRONIZE_FILE = 'synchronize.json';
11
- const BASELINE_FILE = '.app.json';
11
+ const BASELINE_FILE = '.app_baseline.json';
12
12
 
13
13
  function dboDir() {
14
14
  return join(process.cwd(), DBO_DIR);
@@ -525,6 +525,37 @@ export async function getAllPluginScopes() {
525
525
  return result;
526
526
  }
527
527
 
528
// ─── Migration tracking (config.local.json._completedMigrations) ──────────

/**
 * Load the list of completed migration IDs from .dbo/config.local.json.
 * Returns an empty array if the file does not exist or the key is absent.
 * @returns {Promise<string[]>}
 */
export async function loadCompletedMigrations() {
  try {
    const { _completedMigrations } = await loadLocalConfig();
    if (Array.isArray(_completedMigrations)) {
      return _completedMigrations;
    }
    return [];
  } catch {
    return [];
  }
}
544
+
545
/**
 * Append a migration ID to .dbo/config.local.json._completedMigrations.
 * Deduplicates: if the ID is already present, no-op.
 * @param {string} id - Three-digit migration ID, e.g. '001'
 */
export async function saveCompletedMigration(id) {
  const local = await loadLocalConfig();
  const recorded = Array.isArray(local._completedMigrations)
    ? local._completedMigrations
    : [];
  if (recorded.includes(id)) {
    return; // already recorded — idempotent
  }
  local._completedMigrations = [...recorded, id].sort();
  await saveLocalConfig(local);
}
558
+
528
559
  // ─── Output Hierarchy Filename Preferences ────────────────────────────────
529
560
 
530
561
  /**
@@ -693,10 +724,10 @@ export async function ensureGitignore(patterns) {
693
724
  for (const p of toAdd) log.dim(` Added ${p} to .gitignore`);
694
725
  }
695
726
 
696
- // ─── Baseline (.app.json) ─────────────────────────────────────────────────
727
+ // ─── Baseline (.dbo/.app_baseline.json) ───────────────────────────────────
697
728
 
698
- function baselinePath() {
699
- return join(process.cwd(), BASELINE_FILE);
729
+ export function baselinePath() {
730
+ return join(dboDir(), BASELINE_FILE);
700
731
  }
701
732
 
702
733
  /**
@@ -707,11 +738,38 @@ export async function hasBaseline() {
707
738
  }
708
739
 
709
740
/**
 * Load .dbo/.app_baseline.json baseline file.
 * Auto-migrates from legacy root .app.json if the new path does not exist.
 *
 * @returns {Promise<Object|null>} Parsed baseline JSON, or null when no
 *   baseline exists (or the stored/legacy file is not valid JSON).
 */
export async function loadAppJsonBaseline() {
  const newPath = baselinePath();

  // Legacy migration: root .app.json → .dbo/.app_baseline.json
  if (!(await exists(newPath))) {
    const legacyPath = join(process.cwd(), '.app.json');
    if (await exists(legacyPath)) {
      let parsed;
      try {
        parsed = JSON.parse(await readFile(legacyPath, 'utf8'));
      } catch {
        // Unparseable legacy file: warn and bail rather than writing garbage.
        log.warn('Could not migrate .app.json — file is not valid JSON. Delete it manually and run "dbo clone" to recreate the baseline.');
        return null;
      }
      await mkdir(dboDir(), { recursive: true });
      await writeFile(newPath, JSON.stringify(parsed, null, 2) + '\n');
      // Baseline is system-managed: mark read-only so it isn't hand-edited.
      try { await chmod(newPath, 0o444); } catch { /* ignore */ }
      try {
        await unlink(legacyPath);
      } catch {
        log.warn('Migrated baseline but could not delete root .app.json — please remove it manually.');
      }
      log.dim('Migrated .app.json → .dbo/.app_baseline.json (system-managed baseline)');
      return parsed;
    }
  }

  try {
    const raw = await readFile(newPath, 'utf8');
    return JSON.parse(raw);
  } catch {
    return null;
  }
}
720
778
 
721
779
/**
 * Save .dbo/.app_baseline.json baseline file.
 * Temporarily widens permissions before writing (chmod 0o644),
 * then restores read-only (chmod 0o444) after writing.
 *
 * The restore runs in a finally block so that even a failed write never
 * leaves the system-managed baseline writable.
 *
 * @param {Object} data - Baseline JSON to persist
 */
export async function saveAppJsonBaseline(data) {
  await mkdir(dboDir(), { recursive: true });
  const path = baselinePath();

  try {
    await chmod(path, 0o644);
  } catch { /* file doesn't exist yet — first write */ }

  try {
    await writeFile(path, JSON.stringify(data, null, 2) + '\n');
  } finally {
    try {
      await chmod(path, 0o444);
    } catch {
      log.warn('⚠ Could not set baseline file permissions — ensure .dbo/.app_baseline.json is not manually edited');
    }
  }
}
727
799
 
728
800
  /**
package/src/lib/delta.js CHANGED
@@ -1,37 +1,26 @@
1
1
  import { readFile, stat } from 'fs/promises';
2
2
  import { join, dirname } from 'path';
3
3
  import { log } from './logger.js';
4
+ import { loadAppJsonBaseline, saveAppJsonBaseline } from './config.js';
4
5
 
5
6
/**
 * Load the baseline file from disk.
 * Delegates to loadAppJsonBaseline() in config.js for the canonical path
 * (.dbo/.app_baseline.json, with auto-migration from legacy root .app.json).
 *
 * @returns {Promise<Object|null>} - Baseline JSON or null if not found
 */
export async function loadBaseline() {
  return loadAppJsonBaseline();
}
24
15
 
25
16
/**
 * Save baseline data to disk.
 * Delegates to saveAppJsonBaseline() in config.js for the canonical path.
 *
 * @param {Object} data - Baseline JSON data
 * @returns {Promise<void>}
 */
export async function saveBaseline(data) {
  return saveAppJsonBaseline(data);
}
36
25
 
37
26
  /**
@@ -158,6 +147,17 @@ export async function detectChangedColumns(metaPath, baseline) {
158
147
  }
159
148
  }
160
149
 
150
+ // Check for columns present in baseline but removed from local metadata
151
+ // (user deleted the key or set it to null → should clear on server)
152
+ for (const [columnName, baselineVal] of Object.entries(baselineEntry)) {
153
+ if (shouldSkipColumn(columnName)) continue;
154
+ if (columnName in metadata) continue; // already compared above
155
+ // Baseline has a non-null value but local metadata doesn't have this key at all
156
+ if (baselineVal !== null && baselineVal !== undefined && normalizeValue(baselineVal) !== '') {
157
+ changedColumns.push(columnName);
158
+ }
159
+ }
160
+
161
161
  // Check _mediaFile for binary file changes (media entities)
162
162
  if (metadata._mediaFile && isReference(metadata._mediaFile)) {
163
163
  const mediaPath = resolveReferencePath(metadata._mediaFile, metaDir);
@@ -180,7 +180,7 @@ export async function detectChangedColumns(metaPath, baseline) {
180
180
/**
 * Get all user column names from a metadata object — every key that
 * shouldSkipColumn() does not filter out.
 *
 * @param {Object} metadata - Metadata object
 * @returns {string[]} - Array of user column names
 */
export function getAllUserColumns(metadata) {
  const userColumns = [];
  for (const name of Object.keys(metadata)) {
    if (!shouldSkipColumn(name)) {
      userColumns.push(name);
    }
  }
  return userColumns;
}
186
186
 
@@ -190,7 +190,7 @@ function getAllUserColumns(metadata) {
190
190
  * @param {string} columnName - Column name
191
191
  * @returns {boolean} - True if should skip
192
192
  */
193
- function shouldSkipColumn(columnName) {
193
+ export function shouldSkipColumn(columnName) {
194
194
  // Skip system columns starting with underscore, UID, and children
195
195
  return columnName.startsWith('_') ||
196
196
  columnName === 'UID' ||
@@ -203,7 +203,7 @@ function shouldSkipColumn(columnName) {
203
203
  * @param {*} value - Value to check
204
204
  * @returns {boolean} - True if reference
205
205
  */
206
/**
 * Check whether a metadata value is a file reference (an "@"-prefixed string).
 *
 * @param {*} value - Value to check
 * @returns {boolean} - True if reference
 */
export function isReference(value) {
  if (typeof value !== 'string') {
    return false;
  }
  return value.startsWith('@');
}
209
209
 
@@ -215,7 +215,7 @@ function isReference(value) {
215
215
  * @param {string} baseDir - Base directory containing metadata
216
216
  * @returns {string} - Absolute file path
217
217
  */
218
- function resolveReferencePath(reference, baseDir) {
218
+ export function resolveReferencePath(reference, baseDir) {
219
219
  const refPath = reference.substring(1); // Strip leading @
220
220
  if (refPath.startsWith('/')) {
221
221
  return join(process.cwd(), refPath);
@@ -229,7 +229,7 @@ function resolveReferencePath(reference, baseDir) {
229
229
  * @param {*} value - Value to normalize
230
230
  * @returns {string} - Normalized string value
231
231
  */
232
- function normalizeValue(value) {
232
+ export function normalizeValue(value) {
233
233
  if (value === null || value === undefined) {
234
234
  return '';
235
235
  }
@@ -241,3 +241,156 @@ function normalizeValue(value) {
241
241
 
242
242
  return String(value).trim();
243
243
  }
244
+
245
// ─── Compound Output Delta Detection ────────────────────────────────────────

/** Child-document keys that can appear under an output entity's `children`. */
const _OUTPUT_DOC_KEYS = ['column', 'join', 'filter'];

/**
 * Detect changed columns across a compound output file (root + inline children).
 *
 * @param {string} metaPath - Path to the root output JSON file
 * @param {Object} baseline - The baseline JSON
 * @returns {Promise<{ root: string[], children: Object<string, string[]> }>}
 *   root: changed column names on the root output entity
 *   children: { uid: [changedCols] } for each inline child entity
 */
export async function detectOutputChanges(metaPath, baseline) {
  const meta = JSON.parse(await readFile(metaPath, 'utf8'));
  const metaDir = dirname(metaPath);

  // Root entity first, then walk inline children recursively.
  const root = await _detectEntityChanges(meta, baseline, metaDir);

  const children = {};
  if (meta.children) {
    await _walkChildrenForChanges(meta.children, baseline, metaDir, children);
  }

  return { root, children };
}
274
+
275
/**
 * Compare a single entity (root output or inline child) against its baseline
 * entry and return the changed column names. "@"-reference values are compared
 * by file content; plain values via normalizeValue(). Columns that exist in
 * the baseline with a value but were removed locally also count as changed.
 */
async function _detectEntityChanges(entity, baseline, metaDir) {
  const baselineEntry = findBaselineEntry(baseline, entity._entity, entity.UID);
  // Entity not present in baseline → treat every user column as changed.
  if (!baselineEntry) {
    return getAllUserColumns(entity);
  }

  const changed = [];

  for (const [name, value] of Object.entries(entity)) {
    if (shouldSkipColumn(name) || name === 'children') continue;

    if (isReference(value)) {
      // File reference: compare referenced file content against baseline value.
      const referencedFile = resolveReferencePath(value, metaDir);
      const differs = await compareFileContent(referencedFile, baselineEntry[name]);
      if (differs) changed.push(name);
      continue;
    }

    if (normalizeValue(value) === normalizeValue(baselineEntry[name])) continue;

    // Skip Extension column when baseline is null/undefined (same as detectChangedColumns).
    const baselineUnset = baselineEntry[name] === null || baselineEntry[name] === undefined;
    if (name === 'Extension' && baselineUnset && !normalizeValue(baselineEntry[name])) {
      continue;
    }

    changed.push(name);
  }

  // Check for columns removed from entity but present in baseline.
  for (const [name, baseValue] of Object.entries(baselineEntry)) {
    if (shouldSkipColumn(name) || name === 'children') continue;
    if (name in entity) continue;
    const baselineHasValue =
      baseValue !== null && baseValue !== undefined && normalizeValue(baseValue) !== '';
    if (baselineHasValue) changed.push(name);
  }

  return changed;
}
309
+
310
// ─── Bin Delta Detection ─────────────────────────────────────────────────────

/** Columns tracked for bin change detection */
const BIN_TRACKED_COLUMNS = ['Name', 'Path', 'ParentBinID', 'Active', 'Public'];

/**
 * Detect changes between a current bin entry (from structure.json) and the
 * baseline (from app.json children.bin array).
 *
 * @param {Object} binEntry - Current bin entry from structure.json (with binId, name, path, etc.)
 * @param {Object} baseline - The baseline JSON (app.json)
 * @returns {string[]} - Array of changed column names (a fresh array; safe for callers to mutate)
 */
export function detectBinChanges(binEntry, baseline) {
  if (!baseline || !baseline.children || !Array.isArray(baseline.children.bin)) {
    return [...BIN_TRACKED_COLUMNS]; // no baseline → treat all as changed
  }

  // Match by UID or BinID, but only when the identifier is actually present on
  // the entry — otherwise `undefined === undefined` would spuriously match the
  // first baseline bin that also lacks the field.
  const baselineBin = baseline.children.bin.find(
    b => (binEntry.uid != null && b.UID === binEntry.uid)
      || (binEntry.binId != null && b.BinID === binEntry.binId)
  );

  if (!baselineBin) {
    return [...BIN_TRACKED_COLUMNS]; // new bin, not in baseline
  }

  const changed = [];

  // Map structure.json field names → server column names for comparison
  const fieldMap = {
    Name: { current: binEntry.name, baseline: baselineBin.Name },
    Path: { current: binEntry.path, baseline: baselineBin.Path },
    ParentBinID: { current: binEntry.parentBinID, baseline: baselineBin.ParentBinID },
  };

  for (const [col, { current, baseline: base }] of Object.entries(fieldMap)) {
    // Compare as strings with null/undefined normalised to '' (server columns are stringly typed).
    const curStr = current != null ? String(current) : '';
    const baseStr = base != null ? String(base) : '';
    if (curStr !== baseStr) changed.push(col);
  }

  // Direct column comparisons for Active/Public (may exist on baseline)
  for (const col of ['Active', 'Public']) {
    if (baselineBin[col] !== undefined && binEntry[col] !== undefined) {
      if (String(binEntry[col]) !== String(baselineBin[col])) {
        changed.push(col);
      }
    }
  }

  return changed;
}
362
+
363
/**
 * Synthesize a virtual metadata object for a bin entry (for use with pushFromMetadata).
 *
 * @param {Object} binEntry - Bin entry from structure.json
 * @param {number} [appId] - AppID from app config
 * @returns {Object} - Virtual metadata object
 */
export function synthesizeBinMetadata(binEntry, appId) {
  const meta = {
    _entity: 'bin',
    _id: binEntry.binId,
    UID: binEntry.uid,
    Name: binEntry.name,
    Path: binEntry.path,
    ParentBinID: binEntry.parentBinID,
  };
  // `!= null` rather than truthiness so a legitimate AppID of 0 is still attached.
  if (appId != null) meta.AppID = appId;
  return meta;
}
382
+
383
/**
 * Recursively walk an output entity's `children` object and record the changed
 * columns of every inline child entity into `result`, keyed by the child UID.
 */
async function _walkChildrenForChanges(childrenObj, baseline, metaDir, result) {
  for (const docKey of _OUTPUT_DOC_KEYS) {
    const entries = childrenObj[docKey];
    if (!Array.isArray(entries)) continue;
    for (const child of entries) {
      result[child.UID] = await _detectEntityChanges(child, baseline, metaDir);
      // Descend into the child's own children (e.g. column[0].children.filter).
      if (child.children) {
        await _walkChildrenForChanges(child.children, baseline, metaDir, result);
      }
    }
  }
}