@thxgg/steward 0.1.23 → 0.1.25

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -34,6 +34,47 @@ function resolveDbPath() {
   }
   return DEFAULT_DB_PATH;
 }
+function getTableColumnNames(adapter, tableName) {
+  const rows = adapter.all(`PRAGMA table_info(${tableName})`);
+  const names = new Set();
+  for (const row of rows) {
+    if (typeof row.name === 'string' && row.name.length > 0) {
+      names.add(row.name);
+    }
+  }
+  return names;
+}
+function ensurePrdStateFieldClockColumns(adapter) {
+  const columnNames = getTableColumnNames(adapter, 'prd_states');
+  const requiredColumns = ['tasks_updated_at', 'progress_updated_at', 'notes_updated_at'];
+  for (const columnName of requiredColumns) {
+    if (columnNames.has(columnName)) {
+      continue;
+    }
+    adapter.exec(`ALTER TABLE prd_states ADD COLUMN ${columnName} TEXT;`);
+    columnNames.add(columnName);
+  }
+}
+function backfillPrdStateFieldClocks(adapter) {
+  adapter.run(`
+    UPDATE prd_states
+    SET tasks_updated_at = updated_at
+    WHERE tasks_updated_at IS NULL
+      AND tasks_json IS NOT NULL
+  `);
+  adapter.run(`
+    UPDATE prd_states
+    SET progress_updated_at = updated_at
+    WHERE progress_updated_at IS NULL
+      AND progress_json IS NOT NULL
+  `);
+  adapter.run(`
+    UPDATE prd_states
+    SET notes_updated_at = updated_at
+    WHERE notes_updated_at IS NULL
+      AND notes_md IS NOT NULL
+  `);
+}
 function formatNodeRuntimeHint() {
   const nodeOptions = process.env.NODE_OPTIONS || '';
   const hasDisableFlag = process.execArgv.includes(SQLITE_DISABLE_FLAG)
@@ -177,6 +218,9 @@ async function initializeDatabase() {
       tasks_json TEXT,
       progress_json TEXT,
       notes_md TEXT,
+      tasks_updated_at TEXT,
+      progress_updated_at TEXT,
+      notes_updated_at TEXT,
       updated_at TEXT NOT NULL,
       PRIMARY KEY (repo_id, slug),
       FOREIGN KEY (repo_id) REFERENCES repos(id) ON DELETE CASCADE
@@ -196,9 +240,29 @@ async function initializeDatabase() {
       updated_at TEXT NOT NULL
     );

+    CREATE TABLE IF NOT EXISTS repo_sync_meta (
+      repo_id TEXT PRIMARY KEY,
+      sync_key TEXT NOT NULL UNIQUE,
+      fingerprint TEXT,
+      fingerprint_kind TEXT,
+      updated_at TEXT NOT NULL,
+      FOREIGN KEY (repo_id) REFERENCES repos(id) ON DELETE CASCADE
+    );
+
+    CREATE TABLE IF NOT EXISTS sync_bundle_log (
+      bundle_id TEXT PRIMARY KEY,
+      source_device_id TEXT,
+      applied_at TEXT NOT NULL,
+      summary_json TEXT NOT NULL
+    );
+
     CREATE INDEX IF NOT EXISTS idx_prd_states_repo_id ON prd_states(repo_id);
     CREATE INDEX IF NOT EXISTS idx_prd_archives_repo_id ON prd_archives(repo_id);
+    CREATE INDEX IF NOT EXISTS idx_repo_sync_meta_sync_key ON repo_sync_meta(sync_key);
+    CREATE INDEX IF NOT EXISTS idx_sync_bundle_log_applied_at ON sync_bundle_log(applied_at);
   `);
+  ensurePrdStateFieldClockColumns(adapter);
+  backfillPrdStateFieldClocks(adapter);
   return adapter;
 }
 async function getAdapter() {
@@ -92,13 +92,29 @@ export async function upsertPrdState(repoId, slug, update) {
     ? null
     : update.notes;
   const updatedAt = new Date().toISOString();
+  const tasksUpdatedAt = updateTasks ? updatedAt : null;
+  const progressUpdatedAt = updateProgress ? updatedAt : null;
+  const notesUpdatedAt = updateNotes ? updatedAt : null;
   await dbRun(`
-    INSERT INTO prd_states (repo_id, slug, tasks_json, progress_json, notes_md, updated_at)
-    VALUES (?, ?, ?, ?, ?, ?)
+    INSERT INTO prd_states (
+      repo_id,
+      slug,
+      tasks_json,
+      progress_json,
+      notes_md,
+      tasks_updated_at,
+      progress_updated_at,
+      notes_updated_at,
+      updated_at
+    )
+    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
     ON CONFLICT(repo_id, slug) DO UPDATE SET
       tasks_json = CASE WHEN ? THEN excluded.tasks_json ELSE prd_states.tasks_json END,
       progress_json = CASE WHEN ? THEN excluded.progress_json ELSE prd_states.progress_json END,
       notes_md = CASE WHEN ? THEN excluded.notes_md ELSE prd_states.notes_md END,
+      tasks_updated_at = CASE WHEN ? THEN excluded.tasks_updated_at ELSE prd_states.tasks_updated_at END,
+      progress_updated_at = CASE WHEN ? THEN excluded.progress_updated_at ELSE prd_states.progress_updated_at END,
+      notes_updated_at = CASE WHEN ? THEN excluded.notes_updated_at ELSE prd_states.notes_updated_at END,
       updated_at = excluded.updated_at
   `, [
     repoId,
@@ -106,9 +122,15 @@ export async function upsertPrdState(repoId, slug, update) {
     tasksJson,
     progressJson,
     notesMd,
+    tasksUpdatedAt,
+    progressUpdatedAt,
+    notesUpdatedAt,
     updatedAt,
     updateTasks ? 1 : 0,
     updateProgress ? 1 : 0,
+    updateNotes ? 1 : 0,
+    updateTasks ? 1 : 0,
+    updateProgress ? 1 : 0,
     updateNotes ? 1 : 0
   ]);
   if (validatedTasks !== undefined) {
@@ -3,6 +3,7 @@ import { join, basename, dirname, resolve, relative, isAbsolute } from 'node:pat
 import { randomUUID } from 'node:crypto';
 import { fileURLToPath } from 'node:url';
 import { dbAll, dbGet, dbRun } from './db.js';
+import { ensureRepoSyncMetaForRepo, ensureRepoSyncMetaForRepos } from './sync-identity.js';
 function findPackageRoot(startDir) {
   let currentDir = startDir;
   while (true) {
@@ -136,7 +137,9 @@ async function importLegacyReposIfNeeded() {
 export async function getRepos() {
   await importLegacyReposIfNeeded();
   const rows = await dbAll('SELECT id, name, path, added_at, git_repos_json FROM repos ORDER BY added_at ASC');
-  return rows.map(rowToRepo);
+  const repos = rows.map(rowToRepo);
+  await ensureRepoSyncMetaForRepos(repos);
+  return repos;
 }
 export async function saveRepos(repos) {
   await importLegacyReposIfNeeded();
@@ -158,6 +161,7 @@ export async function saveRepos(repos) {
   const repoIds = repos.map(repo => repo.id);
   const placeholders = repoIds.map(() => '?').join(', ');
   await dbRun(`DELETE FROM repos WHERE id NOT IN (${placeholders})`, repoIds);
+  await ensureRepoSyncMetaForRepos(repos);
 }
 export async function addRepo(path, name) {
   await importLegacyReposIfNeeded();
@@ -175,6 +179,7 @@ export async function addRepo(path, name) {
     ...(gitRepos.length > 0 && { gitRepos })
   };
   await dbRun('INSERT INTO repos (id, name, path, added_at, git_repos_json) VALUES (?, ?, ?, ?, ?)', [repo.id, repo.name, repo.path, repo.addedAt, serializeGitRepos(repo.gitRepos)]);
+  await ensureRepoSyncMetaForRepo(repo);
   return repo;
 }
 /**
@@ -183,7 +188,12 @@ export async function addRepo(path, name) {
 export async function getRepoById(id) {
   await importLegacyReposIfNeeded();
   const row = await dbGet('SELECT id, name, path, added_at, git_repos_json FROM repos WHERE id = ?', [id]);
-  return row ? rowToRepo(row) : undefined;
+  if (!row) {
+    return undefined;
+  }
+  const repo = rowToRepo(row);
+  await ensureRepoSyncMetaForRepo(repo);
+  return repo;
 }
 export async function updateRepoGitRepos(id, gitRepos) {
   await importLegacyReposIfNeeded();
@@ -209,9 +209,9 @@ async function migrateProgressRows() {
     const updatedAt = nowIso();
     await dbRun(`
       UPDATE prd_states
-      SET progress_json = ?, updated_at = ?
+      SET progress_json = ?, progress_updated_at = ?, updated_at = ?
       WHERE repo_id = ? AND slug = ?
-    `, [JSON.stringify(normalized), updatedAt, row.repo_id, row.slug]);
+    `, [JSON.stringify(normalized), updatedAt, updatedAt, row.repo_id, row.slug]);
     status.migratedRows += 1;
     emitChange({
       type: 'change',
@@ -362,7 +362,7 @@ async function migrateCommitRepoRefs() {
     const updatedAt = nowIso();
     await dbRun(`
       UPDATE prd_states
-      SET progress_json = ?, updated_at = ?
+      SET progress_json = ?, progress_updated_at = ?, updated_at = ?
       WHERE repo_id = ? AND slug = ?
     `, [
       JSON.stringify({
@@ -370,6 +370,7 @@ async function migrateCommitRepoRefs() {
         taskLogs: normalized.taskLogs
       }),
       updatedAt,
+      updatedAt,
       row.repo_id,
       row.slug
     ]);
@@ -0,0 +1,380 @@
+import { randomUUID } from 'node:crypto';
+import { promises as fs } from 'node:fs';
+import { basename, dirname, join } from 'node:path';
+import { dbExec, dbGet, dbRun, getDbPath } from './db.js';
+import { planSyncMerge } from './sync-merge.js';
+import { parseSyncBundle } from './sync-schema.js';
+const DEFAULT_BACKUP_RETENTION_DAYS = 30;
+const DEFAULT_MAX_BACKUPS = 20;
+const DEFAULT_LOG_RETENTION_DAYS = 180;
+const DEFAULT_MAX_LOG_ENTRIES = 10_000;
+function sanitizeRetentionNumber(value, fallback, min) {
+  if (typeof value !== 'number' || !Number.isFinite(value)) {
+    return fallback;
+  }
+  return Math.max(min, Math.floor(value));
+}
+function resolveRetentionPolicy(options) {
+  return {
+    backupRetentionDays: sanitizeRetentionNumber(options.backupRetentionDays, DEFAULT_BACKUP_RETENTION_DAYS, 0),
+    maxBackups: sanitizeRetentionNumber(options.maxBackups, DEFAULT_MAX_BACKUPS, 1),
+    logRetentionDays: sanitizeRetentionNumber(options.logRetentionDays, DEFAULT_LOG_RETENTION_DAYS, 0),
+    maxLogEntries: sanitizeRetentionNumber(options.maxLogEntries, DEFAULT_MAX_LOG_ENTRIES, 1)
+  };
+}
+function toStateKey(repoSyncKey, slug) {
+  return `${repoSyncKey}:${slug}`;
+}
+function serializeJson(value) {
+  if (value === null || value === undefined) {
+    return null;
+  }
+  return JSON.stringify(value);
+}
+function parseIsoOrNow(value) {
+  if (typeof value === 'string' && value.trim().length > 0 && Number.isFinite(Date.parse(value))) {
+    return new Date(value).toISOString();
+  }
+  return new Date().toISOString();
+}
+function escapeSqliteString(value) {
+  return value.replaceAll("'", "''");
+}
+function toBackupFileTimestamp(nowIso) {
+  return nowIso
+    .replaceAll('-', '')
+    .replaceAll(':', '')
+    .replaceAll('.', '');
+}
+function toSafeFileSegment(value) {
+  const sanitized = value.replace(/[^A-Za-z0-9._-]/g, '_');
+  return sanitized.length > 0 ? sanitized : 'bundle';
+}
+function getBackupFilePrefix(dbPath) {
+  return `${basename(dbPath)}.sync-backup.`;
+}
+async function createDatabaseBackup(bundleId, nowIso) {
+  const dbPath = getDbPath();
+  const backupDir = dirname(dbPath);
+  const filePrefix = getBackupFilePrefix(dbPath);
+  const fileName = `${filePrefix}${toBackupFileTimestamp(nowIso)}-${toSafeFileSegment(bundleId)}-${randomUUID().slice(0, 8)}.db`;
+  const backupPath = join(backupDir, fileName);
+  await fs.mkdir(backupDir, { recursive: true });
+  await dbExec(`VACUUM INTO '${escapeSqliteString(backupPath)}';`);
+  return backupPath;
+}
+async function pruneDatabaseBackups(nowIso, policy) {
+  const dbPath = getDbPath();
+  const backupDir = dirname(dbPath);
+  const filePrefix = getBackupFilePrefix(dbPath);
+  const cutoffMs = Date.parse(nowIso) - (policy.backupRetentionDays * 24 * 60 * 60 * 1000);
+  let entries = [];
+  try {
+    const dirEntries = await fs.readdir(backupDir, { withFileTypes: true });
+    const candidateFiles = dirEntries
+      .filter((entry) => entry.isFile())
+      .map((entry) => entry.name)
+      .filter((name) => name.startsWith(filePrefix) && name.endsWith('.db'));
+    entries = await Promise.all(candidateFiles.map(async (name) => {
+      const filePath = join(backupDir, name);
+      const stat = await fs.stat(filePath);
+      return {
+        path: filePath,
+        mtimeMs: stat.mtimeMs
+      };
+    }));
+  }
+  catch {
+    return 0;
+  }
+  entries.sort((a, b) => b.mtimeMs - a.mtimeMs);
+  let deleted = 0;
+  for (let index = 0; index < entries.length; index += 1) {
+    const entry = entries[index];
+    const tooOld = Number.isFinite(cutoffMs) ? entry.mtimeMs < cutoffMs : false;
+    const overLimit = index >= policy.maxBackups;
+    if (!tooOld && !overLimit) {
+      continue;
+    }
+    try {
+      await fs.unlink(entry.path);
+      deleted += 1;
+    }
+    catch {
+      // Ignore retention cleanup errors; merge apply already succeeded.
+    }
+  }
+  return deleted;
+}
+async function pruneSyncLog(nowIso, policy) {
+  const cutoffIso = new Date(Date.parse(nowIso) - (policy.logRetentionDays * 24 * 60 * 60 * 1000)).toISOString();
+  let deleted = 0;
+  const deleteOlder = await dbRun('DELETE FROM sync_bundle_log WHERE applied_at < ?', [cutoffIso]);
+  deleted += deleteOlder.changes;
+  const countRow = await dbGet('SELECT COUNT(*) as count FROM sync_bundle_log');
+  const currentCount = countRow?.count ?? 0;
+  if (currentCount <= policy.maxLogEntries) {
+    return deleted;
+  }
+  const toDeleteCount = currentCount - policy.maxLogEntries;
+  if (toDeleteCount <= 0) {
+    return deleted;
+  }
+  const deleteOverflow = await dbRun(`
+    DELETE FROM sync_bundle_log
+    WHERE bundle_id IN (
+      SELECT bundle_id
+      FROM sync_bundle_log
+      ORDER BY applied_at ASC, bundle_id ASC
+      LIMIT ?
+    )
+  `, [toDeleteCount]);
+  deleted += deleteOverflow.changes;
+  return deleted;
+}
+function assertNoUnresolvedMappings(plan) {
+  const unresolvedMappings = plan.mappings.filter((mapping) => mapping.source === 'unresolved');
+  if (unresolvedMappings.length === 0) {
+    return;
+  }
+  const unresolvedKeys = unresolvedMappings.map((mapping) => mapping.incomingRepoSyncKey).join(', ');
+  throw new Error(`Cannot apply bundle with unresolved repositories: ${unresolvedKeys}`);
+}
+async function applyStateInsert(planRow, incomingRow, appliedAt) {
+  if (!planRow.localRepoId) {
+    throw new Error(`Missing local repository mapping for ${planRow.repoSyncKey}`);
+  }
+  await dbRun(`
+    INSERT INTO prd_states (
+      repo_id,
+      slug,
+      tasks_json,
+      progress_json,
+      notes_md,
+      tasks_updated_at,
+      progress_updated_at,
+      notes_updated_at,
+      updated_at
+    )
+    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
+    ON CONFLICT(repo_id, slug) DO UPDATE SET
+      tasks_json = excluded.tasks_json,
+      progress_json = excluded.progress_json,
+      notes_md = excluded.notes_md,
+      tasks_updated_at = excluded.tasks_updated_at,
+      progress_updated_at = excluded.progress_updated_at,
+      notes_updated_at = excluded.notes_updated_at,
+      updated_at = excluded.updated_at
+  `, [
+    planRow.localRepoId,
+    incomingRow.slug,
+    serializeJson(incomingRow.tasks),
+    serializeJson(incomingRow.progress),
+    incomingRow.notes,
+    incomingRow.clocks.tasksUpdatedAt,
+    incomingRow.clocks.progressUpdatedAt,
+    incomingRow.clocks.notesUpdatedAt,
+    appliedAt
+  ]);
+}
+async function applyStateUpdate(planRow, incomingRow, appliedAt) {
+  if (!planRow.localRepoId) {
+    throw new Error(`Missing local repository mapping for ${planRow.repoSyncKey}`);
+  }
+  const updateTasks = planRow.updateFields.includes('tasks');
+  const updateProgress = planRow.updateFields.includes('progress');
+  const updateNotes = planRow.updateFields.includes('notes');
+  const anyUpdate = updateTasks || updateProgress || updateNotes;
+  if (!anyUpdate) {
+    return;
+  }
+  const result = await dbRun(`
+    UPDATE prd_states
+    SET
+      tasks_json = CASE WHEN ? THEN ? ELSE tasks_json END,
+      progress_json = CASE WHEN ? THEN ? ELSE progress_json END,
+      notes_md = CASE WHEN ? THEN ? ELSE notes_md END,
+      tasks_updated_at = CASE WHEN ? THEN ? ELSE tasks_updated_at END,
+      progress_updated_at = CASE WHEN ? THEN ? ELSE progress_updated_at END,
+      notes_updated_at = CASE WHEN ? THEN ? ELSE notes_updated_at END,
+      updated_at = CASE WHEN ? THEN ? ELSE updated_at END
+    WHERE repo_id = ? AND slug = ?
+  `, [
+    updateTasks ? 1 : 0,
+    serializeJson(incomingRow.tasks),
+    updateProgress ? 1 : 0,
+    serializeJson(incomingRow.progress),
+    updateNotes ? 1 : 0,
+    incomingRow.notes,
+    updateTasks ? 1 : 0,
+    incomingRow.clocks.tasksUpdatedAt,
+    updateProgress ? 1 : 0,
+    incomingRow.clocks.progressUpdatedAt,
+    updateNotes ? 1 : 0,
+    incomingRow.clocks.notesUpdatedAt,
+    anyUpdate ? 1 : 0,
+    appliedAt,
+    planRow.localRepoId,
+    incomingRow.slug
+  ]);
+  if (result.changes > 0) {
+    return;
+  }
+  await applyStateInsert(planRow, incomingRow, appliedAt);
+}
+async function applyArchiveAction(planRow, incomingRow) {
+  if (!planRow.localRepoId) {
+    throw new Error(`Missing local repository mapping for ${planRow.repoSyncKey}`);
+  }
+  if (planRow.action === 'insert') {
+    await dbRun(`
+      INSERT INTO prd_archives (repo_id, slug, archived_at)
+      VALUES (?, ?, ?)
+      ON CONFLICT(repo_id, slug) DO UPDATE SET
+        archived_at = CASE
+          WHEN excluded.archived_at > prd_archives.archived_at THEN excluded.archived_at
+          ELSE prd_archives.archived_at
+        END
+    `, [planRow.localRepoId, incomingRow.slug, incomingRow.archivedAt]);
+    return;
+  }
+  if (planRow.action === 'update') {
+    await dbRun(`
+      UPDATE prd_archives
+      SET archived_at = ?
+      WHERE repo_id = ? AND slug = ? AND archived_at < ?
+    `, [incomingRow.archivedAt, planRow.localRepoId, incomingRow.slug, incomingRow.archivedAt]);
+  }
+}
+async function assertIntegrityCheckPasses() {
+  const row = await dbGet('PRAGMA integrity_check');
+  if (!row) {
+    throw new Error('SQLite integrity check returned no results');
+  }
+  const firstValue = Object.values(row)[0];
+  if (firstValue !== 'ok') {
+    throw new Error(`SQLite integrity check failed: ${String(firstValue)}`);
+  }
+}
+async function hasBundleBeenApplied(bundleId) {
+  return await dbGet('SELECT bundle_id, applied_at FROM sync_bundle_log WHERE bundle_id = ?', [bundleId]);
+}
+export async function executeSyncMerge(bundleInput, options = {}) {
+  const bundle = parseSyncBundle(bundleInput);
+  const nowIso = parseIsoOrNow(options.now);
+  const retentionPolicy = resolveRetentionPolicy(options);
+  const plan = await planSyncMerge(bundle, {
+    repoMap: options.repoMap
+  });
+  if (options.apply !== true) {
+    return {
+      mode: 'dry_run',
+      applied: false,
+      alreadyApplied: false,
+      bundleId: bundle.bundleId,
+      plan,
+      retention: {
+        backupsDeleted: 0,
+        logsDeleted: 0
+      }
+    };
+  }
+  const existingLog = await hasBundleBeenApplied(bundle.bundleId);
+  if (existingLog) {
+    return {
+      mode: 'apply',
+      applied: false,
+      alreadyApplied: true,
+      bundleId: bundle.bundleId,
+      plan,
+      retention: {
+        backupsDeleted: 0,
+        logsDeleted: 0
+      }
+    };
+  }
+  assertNoUnresolvedMappings(plan);
+  const backupPath = await createDatabaseBackup(bundle.bundleId, nowIso);
+  const incomingStateByKey = new Map();
+  const incomingArchiveByKey = new Map();
+  for (const row of bundle.states) {
+    incomingStateByKey.set(toStateKey(row.repoSyncKey, row.slug), row);
+  }
+  for (const row of bundle.archives) {
+    incomingArchiveByKey.set(toStateKey(row.repoSyncKey, row.slug), row);
+  }
+  let logsDeleted = 0;
+  let inTransaction = false;
+  try {
+    await dbExec('BEGIN IMMEDIATE');
+    inTransaction = true;
+    for (const planRow of plan.states) {
+      if (planRow.action === 'skip' || planRow.action === 'unresolved') {
+        continue;
+      }
+      const incoming = incomingStateByKey.get(toStateKey(planRow.repoSyncKey, planRow.slug));
+      if (!incoming) {
+        throw new Error(`Missing incoming state row for ${planRow.repoSyncKey}:${planRow.slug}`);
+      }
+      if (planRow.action === 'insert') {
+        await applyStateInsert(planRow, incoming, nowIso);
+      }
+      else {
+        await applyStateUpdate(planRow, incoming, nowIso);
+      }
+    }
+    for (const planRow of plan.archives) {
+      if (planRow.action === 'skip' || planRow.action === 'unresolved') {
+        continue;
+      }
+      const incoming = incomingArchiveByKey.get(toStateKey(planRow.repoSyncKey, planRow.slug));
+      if (!incoming) {
+        throw new Error(`Missing incoming archive row for ${planRow.repoSyncKey}:${planRow.slug}`);
+      }
+      await applyArchiveAction(planRow, incoming);
+    }
+    await assertIntegrityCheckPasses();
+    await dbRun(`
+      INSERT INTO sync_bundle_log (bundle_id, source_device_id, applied_at, summary_json)
+      VALUES (?, ?, ?, ?)
+    `, [bundle.bundleId, bundle.sourceDeviceId, nowIso, JSON.stringify(plan.summary)]);
+    logsDeleted = await pruneSyncLog(nowIso, retentionPolicy);
+    await dbExec('COMMIT');
+    inTransaction = false;
+  }
+  catch (error) {
+    if (inTransaction) {
+      try {
+        await dbExec('ROLLBACK');
+      }
+      catch {
+        // Ignore rollback failure; original error is surfaced.
+      }
+    }
+    throw error;
+  }
+  const backupsDeleted = await pruneDatabaseBackups(nowIso, retentionPolicy);
+  return {
+    mode: 'apply',
+    applied: true,
+    alreadyApplied: false,
+    bundleId: bundle.bundleId,
+    plan,
+    backupPath,
+    retention: {
+      backupsDeleted,
+      logsDeleted
+    }
+  };
+}
+export async function executeSyncMergeJson(jsonPayload, options = {}) {
+  let parsed;
+  try {
+    parsed = JSON.parse(jsonPayload);
+  }
+  catch (error) {
+    const message = error instanceof Error ? error.message : String(error);
+    throw new Error(`Invalid bundle JSON: ${message}`);
+  }
+  return await executeSyncMerge(parsed, options);
+}