@dboio/cli 0.9.6 → 0.10.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,21 +1,22 @@
1
1
  import { Command } from 'commander';
2
- import { readFile, stat, writeFile, rename as fsRename, mkdir } from 'fs/promises';
2
+ import { readFile, readdir, stat, writeFile, rename as fsRename, mkdir, access } from 'fs/promises';
3
3
  import { join, dirname, basename, extname, relative } from 'path';
4
4
  import { DboClient } from '../lib/client.js';
5
5
  import { buildInputBody, checkSubmitErrors, getSessionUserOverride } from '../lib/input-parser.js';
6
6
  import { formatResponse, formatError } from '../lib/formatter.js';
7
7
  import { log } from '../lib/logger.js';
8
8
  import { shouldSkipColumn } from '../lib/columns.js';
9
- import { loadConfig, loadSynchronize, saveSynchronize, loadAppJsonBaseline, saveAppJsonBaseline, hasBaseline } from '../lib/config.js';
9
+ import { loadConfig, loadAppConfig, loadSynchronize, saveSynchronize, loadAppJsonBaseline, saveAppJsonBaseline, hasBaseline } from '../lib/config.js';
10
10
  import { checkStoredTicket, applyStoredTicketToSubmission, clearRecordTicket, clearGlobalTicket } from '../lib/ticketing.js';
11
11
  import { checkModifyKey, isModifyKeyError, handleModifyKeyError } from '../lib/modify-key.js';
12
12
  import { resolveTransactionKey } from '../lib/transaction-key.js';
13
13
  import { setFileTimestamps } from '../lib/timestamps.js';
14
- import { stripUidFromFilename, renameToUidConvention, hasUidInFilename } from '../lib/filenames.js';
14
+ import { stripUidFromFilename, renameToUidConvention, hasUidInFilename, buildUidFilename } from '../lib/filenames.js';
15
15
  import { findMetadataFiles } from '../lib/diff.js';
16
16
  import { loadIgnore } from '../lib/ignore.js';
17
- import { detectChangedColumns, findBaselineEntry } from '../lib/delta.js';
18
- import { BINS_DIR, ENTITY_DIR_NAMES } from '../lib/structure.js';
17
+ import { detectChangedColumns, findBaselineEntry, detectOutputChanges, getAllUserColumns, isReference, resolveReferencePath } from '../lib/delta.js';
18
+ import { BINS_DIR, ENTITY_DIR_NAMES, loadStructureFile, findBinByPath } from '../lib/structure.js';
19
+ import { ensureTrashIcon } from '../lib/folder-icon.js';
19
20
 
20
21
  /**
21
22
  * Resolve an @reference file path to an absolute filesystem path.
@@ -101,37 +102,16 @@ async function processPendingDeletes(client, options, modifyKey = null, transact
101
102
  try {
102
103
  const result = await client.postUrlEncoded('/api/input/submit', body);
103
104
 
104
- // Retry with prompted params if needed
105
- const errorResult = await checkSubmitErrors(result);
106
- if (errorResult) {
107
- if (errorResult.skipRecord) {
108
- log.warn(` Skipping deletion of "${entry.name}"`);
109
- remaining.push(entry);
110
- continue;
111
- }
112
- if (errorResult.skipAll) {
113
- log.warn(` Skipping deletion of "${entry.name}" and all remaining`);
114
- remaining.push(entry);
115
- // Push all remaining entries too
116
- const currentIdx = sync.delete.indexOf(entry);
117
- for (let i = currentIdx + 1; i < sync.delete.length; i++) {
118
- remaining.push(sync.delete[i]);
119
- }
120
- break;
121
- }
122
- const params = errorResult.retryParams || errorResult;
123
- Object.assign(extraParams, params);
124
- const retryBody = await buildInputBody([entry.expression], extraParams);
125
- const retryResponse = await client.postUrlEncoded('/api/input/submit', retryBody);
126
- if (retryResponse.successful) {
127
- log.success(` Deleted "${entry.name}" from server`);
128
- deletedUids.push(entry.UID);
129
- } else {
130
- log.error(` Failed to delete "${entry.name}"`);
131
- formatResponse(retryResponse, { json: options.json, jq: options.jq, verbose: options.verbose });
132
- remaining.push(entry);
133
- }
134
- } else if (result.successful) {
105
+ // Deletes never require ticketing — treat ticket-only errors as success
106
+ const deleteMessages = (result.messages || result.data?.Messages || [])
107
+ .filter(m => typeof m === 'string');
108
+ const isTicketOnlyError = !result.successful
109
+ && deleteMessages.length > 0
110
+ && deleteMessages.every(m => m.includes('ticket_error') || m.includes('ticket_lookup'));
111
+ const deleteOk = result.successful || isTicketOnlyError;
112
+
113
+ if (deleteOk) {
114
+ if (isTicketOnlyError) log.dim(' (Ticket error ignored for delete)');
135
115
  log.success(` Deleted "${entry.name}" from server`);
136
116
  deletedUids.push(entry.UID);
137
117
  } else {
@@ -222,6 +202,11 @@ async function moveWillDeleteToTrash(entry) {
222
202
  log.warn(` Could not move to trash: ${from} — ${err.message}`);
223
203
  }
224
204
  }
205
+
206
+ // Re-apply trash icon if files were moved (self-heals after user clears trash)
207
+ if (filesToMove.length > 0) {
208
+ await ensureTrashIcon(trashDir);
209
+ }
225
210
  }
226
211
 
227
212
  /**
@@ -244,10 +229,72 @@ async function pushSingleFile(filePath, client, options, modifyKey = null, trans
244
229
  await pushFromMetadata(meta, metaPath, client, options, null, modifyKey, transactionKey);
245
230
  }
246
231
 
232
+ /**
233
+ * Ensure manifest.json at project root has companion metadata in bins/app/.
234
+ * If manifest.json exists but no manifest*.metadata.json is in bins/app/,
235
+ * auto-create the metadata so the push flow picks it up.
236
+ */
237
+ async function ensureManifestMetadata() {
238
+ // Check if manifest.json exists at project root
239
+ try {
240
+ await access(join(process.cwd(), 'manifest.json'));
241
+ } catch {
242
+ return; // No manifest.json — nothing to do
243
+ }
244
+
245
+ // Check if bins/app/ already has metadata that references @/manifest.json.
246
+ // A filename-only check (startsWith('manifest')) is insufficient because
247
+ // the metadata may have been renamed with a ~UID suffix or prefixed with
248
+ // __WILL_DELETE__. Instead, scan actual metadata content for the reference.
249
+ const binsAppDir = join(process.cwd(), 'bins', 'app');
250
+ try {
251
+ const entries = await readdir(binsAppDir);
252
+ const metaEntries = entries.filter(e => e.endsWith('.metadata.json'));
253
+ for (const entry of metaEntries) {
254
+ try {
255
+ const raw = await readFile(join(binsAppDir, entry), 'utf8');
256
+ const parsed = JSON.parse(raw);
257
+ if (parsed.Content === '@/manifest.json') return; // Already tracked
258
+ } catch { /* skip unreadable files */ }
259
+ }
260
+ } catch {
261
+ // bins/app/ doesn't exist — will create it
262
+ }
263
+
264
+ // Auto-create manifest.metadata.json
265
+ const appConfig = await loadAppConfig();
266
+ const structure = await loadStructureFile();
267
+ const appBin = findBinByPath('app', structure);
268
+
269
+ await mkdir(binsAppDir, { recursive: true });
270
+
271
+ const meta = {
272
+ _entity: 'content',
273
+ _contentColumns: ['Content'],
274
+ Content: '@/manifest.json',
275
+ Path: 'manifest.json',
276
+ Name: 'manifest.json',
277
+ Extension: 'JSON',
278
+ Public: 1,
279
+ Active: 1,
280
+ Title: 'PWA Manifest',
281
+ };
282
+
283
+ if (appBin) meta.BinID = appBin.binId;
284
+ if (appConfig.AppID) meta.AppID = appConfig.AppID;
285
+
286
+ const metaPath = join(binsAppDir, 'manifest.metadata.json');
287
+ await writeFile(metaPath, JSON.stringify(meta, null, 2) + '\n');
288
+ log.info('Auto-created manifest.metadata.json for manifest.json');
289
+ }
290
+
247
291
  /**
248
292
  * Push all records found in a directory (recursive)
249
293
  */
250
294
  async function pushDirectory(dirPath, client, options, modifyKey = null, transactionKey = 'RowUID') {
295
+ // Auto-create manifest.metadata.json if manifest.json exists at root without companion metadata
296
+ await ensureManifestMetadata();
297
+
251
298
  const ig = await loadIgnore();
252
299
  const metaFiles = await findMetadataFiles(dirPath, ig);
253
300
 
@@ -267,6 +314,7 @@ async function pushDirectory(dirPath, client, options, modifyKey = null, transac
267
314
 
268
315
  // Collect metadata with detected changes
269
316
  const toPush = [];
317
+ const outputCompoundFiles = [];
270
318
  let skipped = 0;
271
319
 
272
320
  for (const metaPath of metaFiles) {
@@ -279,18 +327,21 @@ async function pushDirectory(dirPath, client, options, modifyKey = null, transac
279
327
  continue;
280
328
  }
281
329
 
282
- if (!meta.UID && !meta._id) {
283
- log.warn(`Skipping "${metaPath}": no UID or _id found`);
330
+ if (!meta._entity) {
331
+ log.warn(`Skipping "${metaPath}": no _entity found`);
284
332
  skipped++;
285
333
  continue;
286
334
  }
287
335
 
288
- if (!meta._entity) {
289
- log.warn(`Skipping "${metaPath}": no _entity found`);
290
- skipped++;
336
+ // Compound output files: handle root + all inline children together
337
+ // These have _entity='output' and inline children under .children
338
+ if (meta._entity === 'output' && meta.children) {
339
+ outputCompoundFiles.push({ meta, metaPath });
291
340
  continue;
292
341
  }
293
342
 
343
+ const isNewRecord = !meta.UID && !meta._id;
344
+
294
345
  // Verify @file references exist
295
346
  const contentCols = meta._contentColumns || [];
296
347
  let missingFiles = false;
@@ -329,9 +380,9 @@ async function pushDirectory(dirPath, client, options, modifyKey = null, transac
329
380
  if (contentIgnored) { skipped++; continue; }
330
381
  }
331
382
 
332
- // Detect changed columns (delta detection)
383
+ // Detect changed columns (delta detection) — skip for new records
333
384
  let changedColumns = null;
334
- if (baseline) {
385
+ if (!isNewRecord && baseline) {
335
386
  try {
336
387
  changedColumns = await detectChangedColumns(metaPath, baseline);
337
388
  if (changedColumns.length === 0) {
@@ -344,18 +395,22 @@ async function pushDirectory(dirPath, client, options, modifyKey = null, transac
344
395
  }
345
396
  }
346
397
 
347
- toPush.push({ meta, metaPath, changedColumns });
398
+ toPush.push({ meta, metaPath, changedColumns, isNew: isNewRecord });
348
399
  }
349
400
 
350
- if (toPush.length === 0) {
401
+ if (toPush.length === 0 && outputCompoundFiles.length === 0) {
351
402
  log.info('No changes to push');
352
403
  return;
353
404
  }
354
405
 
355
406
  // Pre-flight ticket validation (only if no --ticket flag)
356
- if (!options.ticket && toPush.length > 0) {
357
- const recordSummary = toPush.map(r => basename(r.metaPath, '.metadata.json')).join(', ');
358
- const ticketCheck = await checkStoredTicket(options, `${toPush.length} record(s): ${recordSummary}`);
407
+ const totalRecords = toPush.length + outputCompoundFiles.length;
408
+ if (!options.ticket && totalRecords > 0) {
409
+ const recordSummary = [
410
+ ...toPush.map(r => basename(r.metaPath, '.metadata.json')),
411
+ ...outputCompoundFiles.map(r => basename(r.metaPath, '.json')),
412
+ ].join(', ');
413
+ const ticketCheck = await checkStoredTicket(options, `${totalRecords} record(s): ${recordSummary}`);
359
414
  if (ticketCheck.cancel) {
360
415
  log.info('Submission cancelled');
361
416
  return;
@@ -366,19 +421,48 @@ async function pushDirectory(dirPath, client, options, modifyKey = null, transac
366
421
  }
367
422
  }
368
423
 
369
- // Sort by dependency level: children first (ascending level) for add/edit operations
370
- toPush.sort((a, b) => {
424
+ // Separate new records (adds) from existing records (edits)
425
+ const toAdd = toPush.filter(item => item.isNew);
426
+ const toEdit = toPush.filter(item => !item.isNew);
427
+
428
+ // Sort each group by dependency level
429
+ const sortByDependency = (a, b) => {
371
430
  const levelA = ENTITY_DEPENDENCIES[a.meta._entity] || 0;
372
431
  const levelB = ENTITY_DEPENDENCIES[b.meta._entity] || 0;
373
432
  return levelA - levelB;
374
- });
433
+ };
434
+ toAdd.sort(sortByDependency);
435
+ toEdit.sort(sortByDependency);
375
436
 
376
- // Process in dependency order
377
437
  let succeeded = 0;
378
438
  let failed = 0;
379
439
  const successfulPushes = [];
380
440
 
381
- for (const item of toPush) {
441
+ // Process adds first
442
+ if (toAdd.length > 0) {
443
+ log.info(`Adding ${toAdd.length} new record(s)...`);
444
+ }
445
+ for (const item of toAdd) {
446
+ try {
447
+ const success = await addFromMetadata(item.meta, item.metaPath, client, options, modifyKey);
448
+ if (success) {
449
+ succeeded++;
450
+ successfulPushes.push(item);
451
+ } else {
452
+ failed++;
453
+ }
454
+ } catch (err) {
455
+ if (err.message === 'SKIP_ALL') {
456
+ log.info('Skipping remaining records');
457
+ break;
458
+ }
459
+ log.error(`Failed to add: ${item.metaPath} — ${err.message}`);
460
+ failed++;
461
+ }
462
+ }
463
+
464
+ // Then process edits
465
+ for (const item of toEdit) {
382
466
  try {
383
467
  const success = await pushFromMetadata(item.meta, item.metaPath, client, options, item.changedColumns, modifyKey, transactionKey);
384
468
  if (success) {
@@ -397,6 +481,22 @@ async function pushDirectory(dirPath, client, options, modifyKey = null, transac
397
481
  }
398
482
  }
399
483
 
484
+ // Process compound output files (root + inline children)
485
+ for (const { meta, metaPath } of outputCompoundFiles) {
486
+ try {
487
+ const result = await pushOutputCompound(meta, metaPath, client, options, baseline, modifyKey, transactionKey);
488
+ if (result.pushed > 0) {
489
+ succeeded++;
490
+ successfulPushes.push({ meta, metaPath, changedColumns: null });
491
+ } else {
492
+ skipped++;
493
+ }
494
+ } catch (err) {
495
+ log.error(`Failed compound output push: ${metaPath} — ${err.message}`);
496
+ failed++;
497
+ }
498
+ }
499
+
400
500
  // Update baseline after successful pushes
401
501
  if (baseline && successfulPushes.length > 0) {
402
502
  await updateBaselineAfterPush(baseline, successfulPushes);
@@ -405,6 +505,160 @@ async function pushDirectory(dirPath, client, options, modifyKey = null, transac
405
505
  log.info(`Push complete: ${succeeded} succeeded, ${failed} failed, ${skipped} skipped`);
406
506
  }
407
507
 
508
+ /**
509
+ * Submit a new record (add) from metadata that has no UID yet.
510
+ * Builds RowID:add1 expressions, submits, then renames files with the returned ~UID.
511
+ */
512
+ async function addFromMetadata(meta, metaPath, client, options, modifyKey = null) {
513
+ const entity = meta._entity;
514
+ const contentCols = new Set(meta._contentColumns || []);
515
+ const metaDir = dirname(metaPath);
516
+
517
+ const dataExprs = [];
518
+ const addIndex = 1;
519
+
520
+ for (const [key, value] of Object.entries(meta)) {
521
+ if (shouldSkipColumn(key)) continue;
522
+ if (key === 'UID') continue;
523
+ if (value === null || value === undefined) continue;
524
+
525
+ const strValue = String(value);
526
+
527
+ if (strValue.startsWith('@')) {
528
+ const refFile = strValue.substring(1);
529
+ const refPath = resolveAtReference(refFile, metaDir);
530
+ dataExprs.push(`RowID:add${addIndex};column:${entity}.${key}@${refPath}`);
531
+ } else {
532
+ dataExprs.push(`RowID:add${addIndex};column:${entity}.${key}=${strValue}`);
533
+ }
534
+ }
535
+
536
+ if (dataExprs.length === 0) {
537
+ log.warn(`Nothing to submit for ${basename(metaPath)}`);
538
+ return false;
539
+ }
540
+
541
+ log.info(`Adding ${basename(metaPath)} (${entity}) — ${dataExprs.length} field(s)`);
542
+
543
+ // Apply stored ticket — add operations always use RowID (not RowUID)
544
+ let storedTicket = null;
545
+ if (!options.ticket) {
546
+ const globalTicket = await (await import('../lib/ticketing.js')).getGlobalTicket();
547
+ if (globalTicket) {
548
+ dataExprs.push(`RowID:add${addIndex};column:${entity}._LastUpdatedTicketID=${globalTicket}`);
549
+ log.dim(` Applying ticket: ${globalTicket}`);
550
+ storedTicket = globalTicket;
551
+ }
552
+ }
553
+
554
+ const extraParams = { '_confirm': options.confirm || 'true' };
555
+ if (options.ticket) extraParams['_OverrideTicketID'] = options.ticket;
556
+ else if (storedTicket) extraParams['_OverrideTicketID'] = storedTicket;
557
+ if (modifyKey) extraParams['_modify_key'] = modifyKey;
558
+ const cachedUser = getSessionUserOverride();
559
+ if (cachedUser) extraParams['_OverrideUserID'] = cachedUser;
560
+
561
+ let body = await buildInputBody(dataExprs, extraParams);
562
+ let result = await client.postUrlEncoded('/api/input/submit', body);
563
+
564
+ // Reactive ModifyKey retry
565
+ if (!result.successful && result.messages?.some(m => isModifyKeyError(m))) {
566
+ const retryMK = await handleModifyKeyError();
567
+ if (retryMK.cancel) { log.info('Submission cancelled'); return false; }
568
+ extraParams['_modify_key'] = retryMK.modifyKey;
569
+ body = await buildInputBody(dataExprs, extraParams);
570
+ result = await client.postUrlEncoded('/api/input/submit', body);
571
+ }
572
+
573
+ // Retry with prompted params if needed
574
+ const retryResult = await checkSubmitErrors(result);
575
+ if (retryResult) {
576
+ if (retryResult.skipRecord) { log.warn(' Skipping record'); return false; }
577
+ if (retryResult.skipAll) throw new Error('SKIP_ALL');
578
+ if (retryResult.ticketExpressions?.length > 0) dataExprs.push(...retryResult.ticketExpressions);
579
+ const params = retryResult.retryParams || retryResult;
580
+ Object.assign(extraParams, params);
581
+ body = await buildInputBody(dataExprs, extraParams);
582
+ result = await client.postUrlEncoded('/api/input/submit', body);
583
+ }
584
+
585
+ if (!result.successful) {
586
+ const msgs = result.messages || result.data?.Messages || [];
587
+ log.error(`Add failed for ${basename(metaPath)}`);
588
+ if (msgs.length > 0) {
589
+ for (const m of msgs) log.dim(` ${typeof m === 'string' ? m : JSON.stringify(m)}`);
590
+ }
591
+ return false;
592
+ }
593
+
594
+ // Extract UID from response and rename files to ~uid convention
595
+ const addResults = result.payload?.Results?.Add || result.data?.Payload?.Results?.Add || [];
596
+ if (addResults.length > 0) {
597
+ const returnedUID = addResults[0].UID;
598
+ const returnedLastUpdated = addResults[0]._LastUpdated;
599
+
600
+ if (returnedUID) {
601
+ meta.UID = returnedUID;
602
+
603
+ // Store numeric ID for delete operations (RowID:del<id>)
604
+ const entityIdKey = entity.charAt(0).toUpperCase() + entity.slice(1) + 'ID';
605
+ const returnedId = addResults[0][entityIdKey] || addResults[0]._id || addResults[0].ID;
606
+ if (returnedId) meta._id = returnedId;
607
+
608
+ const currentMetaBase = basename(metaPath, '.metadata.json');
609
+
610
+ // Guard: don't append UID if it's already in the filename
611
+ if (hasUidInFilename(currentMetaBase, returnedUID)) {
612
+ await writeFile(metaPath, JSON.stringify(meta, null, 2) + '\n');
613
+ log.success(`UID ${returnedUID} already in filename`);
614
+ return true;
615
+ }
616
+
617
+ const newBase = buildUidFilename(currentMetaBase, returnedUID);
618
+ const newMetaPath = join(metaDir, `${newBase}.metadata.json`);
619
+
620
+ // Update @references in metadata to include ~UID for non-root references
621
+ for (const col of (meta._contentColumns || [])) {
622
+ const ref = meta[col];
623
+ if (ref && String(ref).startsWith('@') && !String(ref).startsWith('@/')) {
624
+ // Local file reference — rename it too
625
+ const oldRefFile = String(ref).substring(1);
626
+ const refExt = extname(oldRefFile);
627
+ const refBase = basename(oldRefFile, refExt);
628
+ const newRefBase = buildUidFilename(refBase, returnedUID);
629
+ const newRefFile = refExt ? `${newRefBase}${refExt}` : newRefBase;
630
+
631
+ const oldRefPath = join(metaDir, oldRefFile);
632
+ const newRefPath = join(metaDir, newRefFile);
633
+ try {
634
+ await fsRename(oldRefPath, newRefPath);
635
+ meta[col] = `@${newRefFile}`;
636
+ } catch { /* content file may be root-relative */ }
637
+ }
638
+ }
639
+
640
+ // Rename old metadata file, then write updated content
641
+ if (metaPath !== newMetaPath) {
642
+ try { await fsRename(metaPath, newMetaPath); } catch { /* ignore if same */ }
643
+ }
644
+ await writeFile(newMetaPath, JSON.stringify(meta, null, 2) + '\n');
645
+
646
+ // Set timestamps from server
647
+ const config = await loadConfig();
648
+ const serverTz = config.ServerTimezone;
649
+ if (serverTz && returnedLastUpdated) {
650
+ try {
651
+ await setFileTimestamps(newMetaPath, returnedLastUpdated, returnedLastUpdated, serverTz);
652
+ } catch { /* non-critical */ }
653
+ }
654
+
655
+ log.success(`Added: ${basename(metaPath)} → UID ${returnedUID}`);
656
+ }
657
+ }
658
+
659
+ return true;
660
+ }
661
+
408
662
  /**
409
663
  * Build and submit input expressions from a metadata object
410
664
  * @param {Object} meta - Metadata object
@@ -800,6 +1054,194 @@ async function checkPathMismatch(meta, metaPath, entity, options) {
800
1054
  }
801
1055
  }
802
1056
 
1057
+ // ─── Compound Output Push ───────────────────────────────────────────────────
1058
+
1059
+ const _COMPOUND_DOC_KEYS = ['column', 'join', 'filter'];
1060
+
1061
+ /**
1062
+ * Push a compound output file (root + inline children) to the server.
1063
+ * Handles delta detection, dependency ordering, FK preservation,
1064
+ * CustomSQL @reference resolution, and root _lastUpdated stamping.
1065
+ *
1066
+ * @param {Object} meta - Parsed root output JSON
1067
+ * @param {string} metaPath - Absolute path to root output JSON file
1068
+ * @param {DboClient} client - API client
1069
+ * @param {Object} options - Push options
1070
+ * @param {Object} baseline - Loaded baseline
1071
+ * @param {string|null} modifyKey - ModifyKey value
1072
+ * @param {string} transactionKey - RowUID or RowID
1073
+ * @returns {Promise<{ pushed: number }>} - Count of entities pushed
1074
+ */
1075
+ async function pushOutputCompound(meta, metaPath, client, options, baseline, modifyKey = null, transactionKey = 'RowUID') {
1076
+ const metaDir = dirname(metaPath);
1077
+
1078
+ // Delta detection for compound output
1079
+ let rootChanges, childChanges;
1080
+ if (baseline) {
1081
+ try {
1082
+ const delta = await detectOutputChanges(metaPath, baseline);
1083
+ rootChanges = delta.root;
1084
+ childChanges = delta.children;
1085
+ } catch (err) {
1086
+ log.warn(`Compound output delta detection failed for ${metaPath}: ${err.message} — performing full push`);
1087
+ rootChanges = getAllUserColumns(meta);
1088
+ childChanges = null; // null = push all children
1089
+ }
1090
+ } else {
1091
+ rootChanges = getAllUserColumns(meta);
1092
+ childChanges = null;
1093
+ }
1094
+
1095
+ const totalChanges = rootChanges.length +
1096
+ (childChanges ? Object.values(childChanges).reduce((s, c) => s + c.length, 0) : 999);
1097
+
1098
+ if (totalChanges === 0) {
1099
+ log.dim(` Skipping ${basename(metaPath)} — no changes detected`);
1100
+ return { pushed: 0 };
1101
+ }
1102
+
1103
+ // Flatten all inline children with depth annotation
1104
+ const allChildren = [];
1105
+ _flattenOutputChildren(meta.children || {}, allChildren);
1106
+
1107
+ // Separate adds (no baseline entry) from edits
1108
+ const adds = [];
1109
+ const edits = [];
1110
+ for (const child of allChildren) {
1111
+ const entry = baseline ? findBaselineEntry(baseline, child._entity, child.UID) : null;
1112
+ const changes = childChanges ? (childChanges[child.UID] || []) : getAllUserColumns(child);
1113
+ if (!entry) {
1114
+ adds.push({ child, changes: getAllUserColumns(child) });
1115
+ } else if (changes.length > 0) {
1116
+ edits.push({ child, changes });
1117
+ }
1118
+ }
1119
+
1120
+ // Check if root itself is new
1121
+ const rootEntry = baseline ? findBaselineEntry(baseline, 'output', meta.UID) : null;
1122
+ const rootIsNew = !rootEntry;
1123
+
1124
+ let pushed = 0;
1125
+
1126
+ // EDIT ORDER: deepest children first (highest _depth)
1127
+ edits.sort((a, b) => b.child._depth - a.child._depth);
1128
+ for (const { child, changes } of edits) {
1129
+ const success = await _submitOutputEntity(child, child._entity, changes, metaDir, client, options, modifyKey, transactionKey);
1130
+ if (success) pushed++;
1131
+ }
1132
+
1133
+ // ADD ORDER: root first (if new), then children shallowest→deepest
1134
+ if (rootIsNew && rootChanges.length > 0) {
1135
+ const success = await _submitOutputEntity(meta, 'output', rootChanges, metaDir, client, options, modifyKey, transactionKey);
1136
+ if (success) pushed++;
1137
+ }
1138
+ adds.sort((a, b) => a.child._depth - b.child._depth);
1139
+ for (const { child, changes } of adds) {
1140
+ const success = await _submitOutputEntity(child, child._entity, changes, metaDir, client, options, modifyKey, transactionKey);
1141
+ if (success) pushed++;
1142
+ }
1143
+
1144
+ // Always update root (with _lastUpdated) — submit root changes or just touch it
1145
+ if (!rootIsNew && (rootChanges.length > 0 || edits.length > 0 || adds.length > 0)) {
1146
+ const success = await _submitOutputEntity(meta, 'output', rootChanges.length > 0 ? rootChanges : ['Name'], metaDir, client, options, modifyKey, transactionKey);
1147
+ if (success) pushed++;
1148
+ }
1149
+
1150
+ log.info(`Compound output push: ${basename(metaPath)} — ${pushed} entity submission(s)`);
1151
+ return { pushed };
1152
+ }
1153
+
1154
+ /**
1155
+ * Flatten children object ({ column, join, filter }) into a flat array.
1156
+ * Annotates each child with _depth (1 = direct child of root, 2 = grandchild, etc.)
1157
+ */
1158
+ function _flattenOutputChildren(childrenObj, result, depth = 1) {
1159
+ for (const docKey of _COMPOUND_DOC_KEYS) {
1160
+ const entityArray = childrenObj[docKey];
1161
+ if (!Array.isArray(entityArray) || entityArray.length === 0) continue;
1162
+ for (const child of entityArray) {
1163
+ child._depth = depth;
1164
+ result.push(child);
1165
+ if (child.children) _flattenOutputChildren(child.children, result, depth + 1);
1166
+ }
1167
+ }
1168
+ }
1169
+
1170
+ /**
1171
+ * Submit a single output hierarchy entity to the server.
1172
+ * Resolves @reference values, builds data expressions, and submits.
1173
+ */
1174
+ async function _submitOutputEntity(entity, physicalEntity, changedColumns, metaDir, client, options, modifyKey, transactionKey) {
1175
+ const uid = entity.UID;
1176
+ if (!uid) {
1177
+ log.warn(` Output entity ${physicalEntity} has no UID — skipping`);
1178
+ return false;
1179
+ }
1180
+
1181
+ const rowKeyPrefix = transactionKey === 'RowID' && entity._id ? 'RowID' : 'RowUID';
1182
+ const rowKeyValue = rowKeyPrefix === 'RowID' ? entity._id : uid;
1183
+
1184
+ const dataExprs = [];
1185
+
1186
+ for (const col of changedColumns) {
1187
+ if (shouldSkipColumn(col)) continue;
1188
+ if (col === 'UID' || col === 'children') continue;
1189
+
1190
+ const val = entity[col];
1191
+ if (val === null || val === undefined) continue;
1192
+
1193
+ const strValue = String(val);
1194
+ if (isReference(strValue)) {
1195
+ const refPath = resolveReferencePath(strValue, metaDir);
1196
+ dataExprs.push(`${rowKeyPrefix}:${rowKeyValue};column:${physicalEntity}.${col}@${refPath}`);
1197
+ } else {
1198
+ dataExprs.push(`${rowKeyPrefix}:${rowKeyValue};column:${physicalEntity}.${col}=${strValue}`);
1199
+ }
1200
+ }
1201
+
1202
+ if (dataExprs.length === 0) return false;
1203
+
1204
+ log.info(` Pushing ${physicalEntity}:${uid} — ${dataExprs.length} field(s)`);
1205
+
1206
+ const storedTicket = await applyStoredTicketToSubmission(dataExprs, physicalEntity, uid, uid, options);
1207
+
1208
+ const extraParams = { '_confirm': options.confirm || 'true' };
1209
+ if (options.ticket) extraParams['_OverrideTicketID'] = options.ticket;
1210
+ else if (storedTicket) extraParams['_OverrideTicketID'] = storedTicket;
1211
+ if (modifyKey) extraParams['_modify_key'] = modifyKey;
1212
+ const cachedUser = getSessionUserOverride();
1213
+ if (cachedUser) extraParams['_OverrideUserID'] = cachedUser;
1214
+
1215
+ const body = await buildInputBody(dataExprs, extraParams);
1216
+ let result = await client.postUrlEncoded('/api/input/submit', body);
1217
+
1218
+ // Reactive ModifyKey retry
1219
+ if (!result.successful && result.messages?.some(m => isModifyKeyError(m))) {
1220
+ const retryMK = await handleModifyKeyError();
1221
+ if (retryMK.cancel) { log.info('Submission cancelled'); return false; }
1222
+ extraParams['_modify_key'] = retryMK.modifyKey;
1223
+ const retryBody = await buildInputBody(dataExprs, extraParams);
1224
+ result = await client.postUrlEncoded('/api/input/submit', retryBody);
1225
+ }
1226
+
1227
+ formatResponse(result, { json: options.json, jq: options.jq, verbose: options.verbose });
1228
+
1229
+ if (!result.successful) return false;
1230
+
1231
+ // Update metadata _LastUpdated from server response
1232
+ try {
1233
+ const editResults = result.payload?.Results?.Edit || result.data?.Payload?.Results?.Edit || [];
1234
+ if (editResults.length > 0) {
1235
+ const updated = editResults[0]._LastUpdated || editResults[0].LastUpdated;
1236
+ if (updated) entity._LastUpdated = updated;
1237
+ }
1238
+ } catch { /* non-critical */ }
1239
+
1240
+ return true;
1241
+ }
1242
+
1243
+ // ─── Baseline Update ────────────────────────────────────────────────────────
1244
+
803
1245
  /**
804
1246
  * Update baseline file (.app.json) after successful pushes.
805
1247
  * Syncs changed column values and timestamps from metadata to baseline.
@@ -814,11 +1256,14 @@ async function updateBaselineAfterPush(baseline, successfulPushes) {
814
1256
  const uid = meta.UID || meta._id;
815
1257
  const entity = meta._entity;
816
1258
 
817
- // Find the baseline entry
818
- const baselineEntry = findBaselineEntry(baseline, entity, uid);
1259
+ // Find or create the baseline entry
1260
+ let baselineEntry = findBaselineEntry(baseline, entity, uid);
819
1261
  if (!baselineEntry) {
820
- log.warn(` Baseline entry not found for ${entity}:${uid} skipping baseline update`);
821
- continue;
1262
+ // New record (from add) — insert into baseline
1263
+ if (!baseline.children) baseline.children = {};
1264
+ if (!Array.isArray(baseline.children[entity])) baseline.children[entity] = [];
1265
+ baselineEntry = { UID: uid };
1266
+ baseline.children[entity].push(baselineEntry);
822
1267
  }
823
1268
 
824
1269
  // Update _LastUpdated and _LastUpdatedUserID from metadata