@thxgg/steward 0.1.24 → 0.1.25
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.output/nitro.json +1 -1
- package/.output/public/_nuxt/builds/latest.json +1 -1
- package/.output/public/_nuxt/builds/meta/9ce7f1bc-d5e2-47bf-8026-f4910c257b2e.json +1 -0
- package/.output/server/chunks/_/prd-service.mjs.map +1 -1
- package/.output/server/chunks/nitro/nitro.mjs +818 -516
- package/.output/server/chunks/nitro/nitro.mjs.map +1 -1
- package/.output/server/package.json +1 -1
- package/README.md +41 -0
- package/bin/prd +1 -1
- package/dist/host/src/index.js +10 -0
- package/dist/host/src/sync.js +201 -0
- package/dist/server/utils/db.js +64 -0
- package/dist/server/utils/prd-state.js +24 -2
- package/dist/server/utils/repos.js +12 -2
- package/dist/server/utils/state-migration.js +4 -3
- package/dist/server/utils/sync-apply.js +380 -0
- package/dist/server/utils/sync-export.js +183 -0
- package/dist/server/utils/sync-identity.js +231 -0
- package/dist/server/utils/sync-inspect.js +103 -0
- package/dist/server/utils/sync-merge.js +579 -0
- package/dist/server/utils/sync-schema.js +100 -0
- package/package.json +1 -1
- package/.output/public/_nuxt/builds/meta/8c342d49-fe70-4f67-a987-821c16f86125.json +0 -1
|
@@ -0,0 +1,380 @@
|
|
|
1
|
+
import { randomUUID } from 'node:crypto';
|
|
2
|
+
import { promises as fs } from 'node:fs';
|
|
3
|
+
import { basename, dirname, join } from 'node:path';
|
|
4
|
+
import { dbExec, dbGet, dbRun, getDbPath } from './db.js';
|
|
5
|
+
import { planSyncMerge } from './sync-merge.js';
|
|
6
|
+
import { parseSyncBundle } from './sync-schema.js';
|
|
7
|
+
// Default retention policy used when executeSyncMerge options omit a value.
const DEFAULT_BACKUP_RETENTION_DAYS = 30; // delete database backups older than this many days
const DEFAULT_MAX_BACKUPS = 20; // keep at most this many backup files
const DEFAULT_LOG_RETENTION_DAYS = 180; // delete sync_bundle_log rows older than this many days
const DEFAULT_MAX_LOG_ENTRIES = 10_000; // hard cap on sync_bundle_log row count
|
|
11
|
+
/**
 * Coerce a retention option to a safe integer.
 * Non-numeric or non-finite input yields `fallback`; otherwise the value is
 * floored and clamped so it is never below `min`.
 */
function sanitizeRetentionNumber(value, fallback, min) {
  const isUsableNumber = typeof value === 'number' && Number.isFinite(value);
  if (!isUsableNumber) {
    return fallback;
  }
  const floored = Math.floor(value);
  return floored < min ? min : floored;
}
|
|
17
|
+
/**
 * Build the effective retention policy from caller options, falling back to
 * the module defaults and enforcing sane minimums per field.
 */
function resolveRetentionPolicy(options) {
  const policy = {
    backupRetentionDays: sanitizeRetentionNumber(options.backupRetentionDays, DEFAULT_BACKUP_RETENTION_DAYS, 0),
    maxBackups: sanitizeRetentionNumber(options.maxBackups, DEFAULT_MAX_BACKUPS, 1),
    logRetentionDays: sanitizeRetentionNumber(options.logRetentionDays, DEFAULT_LOG_RETENTION_DAYS, 0),
    maxLogEntries: sanitizeRetentionNumber(options.maxLogEntries, DEFAULT_MAX_LOG_ENTRIES, 1)
  };
  return policy;
}
|
|
25
|
+
// Composite lookup key identifying a PRD state: "<repoSyncKey>:<slug>".
function toStateKey(repoSyncKey, slug) {
  return [repoSyncKey, slug].join(':');
}
|
|
28
|
+
/** JSON-encode a value for storage; null/undefined map to SQL NULL (null). */
function serializeJson(value) {
  // `== null` intentionally matches both null and undefined.
  return value == null ? null : JSON.stringify(value);
}
|
|
34
|
+
/**
 * Normalize `value` to an ISO-8601 string when it is a non-empty, parseable
 * date string; otherwise return the current time as ISO-8601.
 */
function parseIsoOrNow(value) {
  const isParseable = typeof value === 'string'
    && value.trim().length > 0
    && Number.isFinite(Date.parse(value));
  const date = isParseable ? new Date(value) : new Date();
  return date.toISOString();
}
|
|
40
|
+
// Double embedded single quotes so `value` is safe inside a SQLite '…' literal.
function escapeSqliteString(value) {
  return value.split("'").join("''");
}
|
|
43
|
+
// Strip '-', ':' and '.' from an ISO timestamp so it can be used in a file name.
function toBackupFileTimestamp(nowIso) {
  return nowIso.replace(/[-:.]/g, '');
}
|
|
49
|
+
// Replace characters unsafe in file names with '_'; never return an empty segment.
function toSafeFileSegment(value) {
  const cleaned = value.replace(/[^A-Za-z0-9._-]/g, '_');
  if (cleaned.length === 0) {
    return 'bundle';
  }
  return cleaned;
}
|
|
53
|
+
// File-name prefix shared by every backup copy of the given database file.
function getBackupFilePrefix(dbPath) {
  const dbFileName = basename(dbPath);
  return `${dbFileName}.sync-backup.`;
}
|
|
56
|
+
/**
 * Copy the live SQLite database to a timestamped backup file in the same
 * directory using `VACUUM INTO`, and return the backup's path.
 *
 * @param {string} bundleId - bundle being applied; embedded in the file name.
 * @param {string} nowIso - ISO timestamp embedded in the file name.
 * @returns {Promise<string>} path of the backup file that was written.
 */
async function createDatabaseBackup(bundleId, nowIso) {
  const dbPath = getDbPath();
  const backupDir = dirname(dbPath);
  const filePrefix = getBackupFilePrefix(dbPath);
  // Short random suffix avoids collisions when two backups share a timestamp.
  const fileName = `${filePrefix}${toBackupFileTimestamp(nowIso)}-${toSafeFileSegment(bundleId)}-${randomUUID().slice(0, 8)}.db`;
  const backupPath = join(backupDir, fileName);
  await fs.mkdir(backupDir, { recursive: true });
  // VACUUM INTO cannot take bound parameters, so the path is escaped inline.
  await dbExec(`VACUUM INTO '${escapeSqliteString(backupPath)}';`);
  return backupPath;
}
|
|
66
|
+
/**
 * Delete stale backup files created by createDatabaseBackup.
 * A backup is removed when it is older than `policy.backupRetentionDays`
 * or falls outside the newest `policy.maxBackups` files (by mtime).
 * Best-effort: any filesystem error is swallowed and pruning continues
 * (or aborts with 0) because the merge apply has already succeeded.
 *
 * @returns {Promise<number>} number of backup files deleted.
 */
async function pruneDatabaseBackups(nowIso, policy) {
  const dbPath = getDbPath();
  const backupDir = dirname(dbPath);
  const filePrefix = getBackupFilePrefix(dbPath);
  // Age cutoff in epoch ms; NaN when nowIso is unparseable (handled below).
  const cutoffMs = Date.parse(nowIso) - (policy.backupRetentionDays * 24 * 60 * 60 * 1000);
  let entries = [];
  try {
    const dirEntries = await fs.readdir(backupDir, { withFileTypes: true });
    const candidateFiles = dirEntries
      .filter((entry) => entry.isFile())
      .map((entry) => entry.name)
      .filter((name) => name.startsWith(filePrefix) && name.endsWith('.db'));
    entries = await Promise.all(candidateFiles.map(async (name) => {
      const filePath = join(backupDir, name);
      const stat = await fs.stat(filePath);
      return {
        path: filePath,
        mtimeMs: stat.mtimeMs
      };
    }));
  }
  catch {
    // Directory unreadable or a file vanished mid-scan: skip pruning entirely.
    return 0;
  }
  // Newest first, so index >= maxBackups marks the overflow tail.
  entries.sort((a, b) => b.mtimeMs - a.mtimeMs);
  let deleted = 0;
  for (let index = 0; index < entries.length; index += 1) {
    const entry = entries[index];
    // With an unparseable cutoff, age never disqualifies a backup.
    const tooOld = Number.isFinite(cutoffMs) ? entry.mtimeMs < cutoffMs : false;
    const overLimit = index >= policy.maxBackups;
    if (!tooOld && !overLimit) {
      continue;
    }
    try {
      await fs.unlink(entry.path);
      deleted += 1;
    }
    catch {
      // Ignore retention cleanup errors; merge apply already succeeded.
    }
  }
  return deleted;
}
|
|
109
|
+
/**
 * Trim sync_bundle_log in two passes: first delete rows older than
 * `policy.logRetentionDays`, then — if the table still exceeds
 * `policy.maxLogEntries` — delete the oldest overflow rows.
 * Runs inside the caller's transaction (see executeSyncMerge).
 *
 * @returns {Promise<number>} total number of log rows deleted.
 */
async function pruneSyncLog(nowIso, policy) {
  const cutoffIso = new Date(Date.parse(nowIso) - (policy.logRetentionDays * 24 * 60 * 60 * 1000)).toISOString();
  let deleted = 0;
  const deleteOlder = await dbRun('DELETE FROM sync_bundle_log WHERE applied_at < ?', [cutoffIso]);
  deleted += deleteOlder.changes;
  const countRow = await dbGet('SELECT COUNT(*) as count FROM sync_bundle_log');
  const currentCount = countRow?.count ?? 0;
  if (currentCount <= policy.maxLogEntries) {
    return deleted;
  }
  const toDeleteCount = currentCount - policy.maxLogEntries;
  if (toDeleteCount <= 0) {
    return deleted;
  }
  // Overflow pass removes the oldest rows; bundle_id is a deterministic tie-breaker.
  const deleteOverflow = await dbRun(`
    DELETE FROM sync_bundle_log
    WHERE bundle_id IN (
      SELECT bundle_id
      FROM sync_bundle_log
      ORDER BY applied_at ASC, bundle_id ASC
      LIMIT ?
    )
  `, [toDeleteCount]);
  deleted += deleteOverflow.changes;
  return deleted;
}
|
|
135
|
+
/**
 * Throw when the merge plan still contains repository mappings that could not
 * be resolved to a local repository; applying such a plan is unsafe.
 * @throws {Error} listing every unresolved incoming repo sync key.
 */
function assertNoUnresolvedMappings(plan) {
  const unresolvedKeys = [];
  for (const mapping of plan.mappings) {
    if (mapping.source === 'unresolved') {
      unresolvedKeys.push(mapping.incomingRepoSyncKey);
    }
  }
  if (unresolvedKeys.length > 0) {
    throw new Error(`Cannot apply bundle with unresolved repositories: ${unresolvedKeys.join(', ')}`);
  }
}
|
|
143
|
+
/**
 * Upsert one incoming PRD state row into prd_states for the mapped local repo.
 * On (repo_id, slug) conflict every payload column and clock is overwritten
 * with the incoming values, and updated_at is set to `appliedAt`.
 *
 * @throws {Error} when the plan row has no local repository mapping.
 */
async function applyStateInsert(planRow, incomingRow, appliedAt) {
  if (!planRow.localRepoId) {
    throw new Error(`Missing local repository mapping for ${planRow.repoSyncKey}`);
  }
  await dbRun(`
    INSERT INTO prd_states (
      repo_id,
      slug,
      tasks_json,
      progress_json,
      notes_md,
      tasks_updated_at,
      progress_updated_at,
      notes_updated_at,
      updated_at
    )
    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
    ON CONFLICT(repo_id, slug) DO UPDATE SET
      tasks_json = excluded.tasks_json,
      progress_json = excluded.progress_json,
      notes_md = excluded.notes_md,
      tasks_updated_at = excluded.tasks_updated_at,
      progress_updated_at = excluded.progress_updated_at,
      notes_updated_at = excluded.notes_updated_at,
      updated_at = excluded.updated_at
  `, [
    planRow.localRepoId,
    incomingRow.slug,
    serializeJson(incomingRow.tasks),
    serializeJson(incomingRow.progress),
    incomingRow.notes,
    incomingRow.clocks.tasksUpdatedAt,
    incomingRow.clocks.progressUpdatedAt,
    incomingRow.clocks.notesUpdatedAt,
    appliedAt
  ]);
}
|
|
180
|
+
/**
 * Apply a field-level merge to an existing prd_states row. Only the fields
 * named in planRow.updateFields ('tasks' | 'progress' | 'notes') are written;
 * CASE WHEN guards keep unselected columns untouched within one UPDATE.
 * Each field's *_updated_at clock is written alongside it, and updated_at is
 * bumped whenever anything changed. If no row matched (row vanished between
 * planning and apply), falls back to applyStateInsert. No-op when
 * updateFields selects nothing.
 *
 * NOTE: the positional parameter order below must mirror the CASE guards in
 * the SQL exactly — do not reorder.
 *
 * @throws {Error} when the plan row has no local repository mapping.
 */
async function applyStateUpdate(planRow, incomingRow, appliedAt) {
  if (!planRow.localRepoId) {
    throw new Error(`Missing local repository mapping for ${planRow.repoSyncKey}`);
  }
  const updateTasks = planRow.updateFields.includes('tasks');
  const updateProgress = planRow.updateFields.includes('progress');
  const updateNotes = planRow.updateFields.includes('notes');
  const anyUpdate = updateTasks || updateProgress || updateNotes;
  if (!anyUpdate) {
    return;
  }
  const result = await dbRun(`
    UPDATE prd_states
    SET
      tasks_json = CASE WHEN ? THEN ? ELSE tasks_json END,
      progress_json = CASE WHEN ? THEN ? ELSE progress_json END,
      notes_md = CASE WHEN ? THEN ? ELSE notes_md END,
      tasks_updated_at = CASE WHEN ? THEN ? ELSE tasks_updated_at END,
      progress_updated_at = CASE WHEN ? THEN ? ELSE progress_updated_at END,
      notes_updated_at = CASE WHEN ? THEN ? ELSE notes_updated_at END,
      updated_at = CASE WHEN ? THEN ? ELSE updated_at END
    WHERE repo_id = ? AND slug = ?
  `, [
    updateTasks ? 1 : 0,
    serializeJson(incomingRow.tasks),
    updateProgress ? 1 : 0,
    serializeJson(incomingRow.progress),
    updateNotes ? 1 : 0,
    incomingRow.notes,
    updateTasks ? 1 : 0,
    incomingRow.clocks.tasksUpdatedAt,
    updateProgress ? 1 : 0,
    incomingRow.clocks.progressUpdatedAt,
    updateNotes ? 1 : 0,
    incomingRow.clocks.notesUpdatedAt,
    anyUpdate ? 1 : 0,
    appliedAt,
    planRow.localRepoId,
    incomingRow.slug
  ]);
  if (result.changes > 0) {
    return;
  }
  // No row matched: recreate the state wholesale via the upsert path.
  await applyStateInsert(planRow, incomingRow, appliedAt);
}
|
|
225
|
+
/**
 * Apply an archive plan row to prd_archives.
 * 'insert' upserts the archive marker, keeping whichever archived_at is later
 * on conflict; 'update' only moves an existing archived_at forward in time.
 * Any other action is ignored.
 *
 * @throws {Error} when the plan row has no local repository mapping.
 */
async function applyArchiveAction(planRow, incomingRow) {
  if (!planRow.localRepoId) {
    throw new Error(`Missing local repository mapping for ${planRow.repoSyncKey}`);
  }
  if (planRow.action === 'insert') {
    await dbRun(`
      INSERT INTO prd_archives (repo_id, slug, archived_at)
      VALUES (?, ?, ?)
      ON CONFLICT(repo_id, slug) DO UPDATE SET
        archived_at = CASE
          WHEN excluded.archived_at > prd_archives.archived_at THEN excluded.archived_at
          ELSE prd_archives.archived_at
        END
    `, [planRow.localRepoId, incomingRow.slug, incomingRow.archivedAt]);
    return;
  }
  if (planRow.action === 'update') {
    // Monotonic update: only advance archived_at, never regress it.
    await dbRun(`
      UPDATE prd_archives
      SET archived_at = ?
      WHERE repo_id = ? AND slug = ? AND archived_at < ?
    `, [incomingRow.archivedAt, planRow.localRepoId, incomingRow.slug, incomingRow.archivedAt]);
  }
}
|
|
249
|
+
/**
 * Run SQLite's PRAGMA integrity_check and throw unless it reports 'ok'.
 * Used as a gate before committing an applied bundle.
 * @throws {Error} when the check returns nothing or anything other than 'ok'.
 */
async function assertIntegrityCheckPasses() {
  const row = await dbGet('PRAGMA integrity_check');
  if (!row) {
    throw new Error('SQLite integrity check returned no results');
  }
  // The result column name varies by driver; take the first (only) value.
  const firstValue = Object.values(row)[0];
  if (firstValue !== 'ok') {
    throw new Error(`SQLite integrity check failed: ${String(firstValue)}`);
  }
}
|
|
259
|
+
/**
 * Look up a bundle in the apply log. Resolves to the log row (truthy) when
 * the bundle was already applied, otherwise to undefined.
 */
async function hasBundleBeenApplied(bundleId) {
  return await dbGet('SELECT bundle_id, applied_at FROM sync_bundle_log WHERE bundle_id = ?', [bundleId]);
}
|
|
262
|
+
/**
 * Plan and optionally apply a sync bundle to the local database.
 *
 * Flow: validate the bundle, compute a merge plan, then
 *  - dry run (default, options.apply !== true): return the plan untouched;
 *  - already-applied bundle (found in sync_bundle_log): return without changes;
 *  - apply: back up the database, run all state/archive writes plus log
 *    insert and log pruning inside one IMMEDIATE transaction (with an
 *    integrity check before COMMIT), then prune old backup files.
 *
 * @param {unknown} bundleInput - raw bundle object; validated via parseSyncBundle.
 * @param {object} [options] - apply flag, now override, repoMap, retention knobs.
 * @returns {Promise<object>} result with mode, applied/alreadyApplied flags,
 *   bundleId, the plan, backupPath (apply only) and retention counters.
 * @throws {Error} on invalid bundles, unresolved repo mappings, or any
 *   failure during apply (after rolling the transaction back).
 */
export async function executeSyncMerge(bundleInput, options = {}) {
  const bundle = parseSyncBundle(bundleInput);
  const nowIso = parseIsoOrNow(options.now);
  const retentionPolicy = resolveRetentionPolicy(options);
  const plan = await planSyncMerge(bundle, {
    repoMap: options.repoMap
  });
  // Dry run: report the plan without touching the database.
  if (options.apply !== true) {
    return {
      mode: 'dry_run',
      applied: false,
      alreadyApplied: false,
      bundleId: bundle.bundleId,
      plan,
      retention: {
        backupsDeleted: 0,
        logsDeleted: 0
      }
    };
  }
  // Idempotence: a bundle already recorded in the log is never re-applied.
  const existingLog = await hasBundleBeenApplied(bundle.bundleId);
  if (existingLog) {
    return {
      mode: 'apply',
      applied: false,
      alreadyApplied: true,
      bundleId: bundle.bundleId,
      plan,
      retention: {
        backupsDeleted: 0,
        logsDeleted: 0
      }
    };
  }
  assertNoUnresolvedMappings(plan);
  // Snapshot the database before mutating anything.
  const backupPath = await createDatabaseBackup(bundle.bundleId, nowIso);
  // Index incoming rows by "<repoSyncKey>:<slug>" for O(1) lookup per plan row.
  const incomingStateByKey = new Map();
  const incomingArchiveByKey = new Map();
  for (const row of bundle.states) {
    incomingStateByKey.set(toStateKey(row.repoSyncKey, row.slug), row);
  }
  for (const row of bundle.archives) {
    incomingArchiveByKey.set(toStateKey(row.repoSyncKey, row.slug), row);
  }
  let logsDeleted = 0;
  let inTransaction = false;
  try {
    await dbExec('BEGIN IMMEDIATE');
    inTransaction = true;
    for (const planRow of plan.states) {
      if (planRow.action === 'skip' || planRow.action === 'unresolved') {
        continue;
      }
      const incoming = incomingStateByKey.get(toStateKey(planRow.repoSyncKey, planRow.slug));
      if (!incoming) {
        throw new Error(`Missing incoming state row for ${planRow.repoSyncKey}:${planRow.slug}`);
      }
      if (planRow.action === 'insert') {
        await applyStateInsert(planRow, incoming, nowIso);
      }
      else {
        await applyStateUpdate(planRow, incoming, nowIso);
      }
    }
    for (const planRow of plan.archives) {
      if (planRow.action === 'skip' || planRow.action === 'unresolved') {
        continue;
      }
      const incoming = incomingArchiveByKey.get(toStateKey(planRow.repoSyncKey, planRow.slug));
      if (!incoming) {
        throw new Error(`Missing incoming archive row for ${planRow.repoSyncKey}:${planRow.slug}`);
      }
      await applyArchiveAction(planRow, incoming);
    }
    // Verify database health before recording the bundle and committing.
    await assertIntegrityCheckPasses();
    await dbRun(`
      INSERT INTO sync_bundle_log (bundle_id, source_device_id, applied_at, summary_json)
      VALUES (?, ?, ?, ?)
    `, [bundle.bundleId, bundle.sourceDeviceId, nowIso, JSON.stringify(plan.summary)]);
    // Log pruning runs inside the transaction so it rolls back on failure.
    logsDeleted = await pruneSyncLog(nowIso, retentionPolicy);
    await dbExec('COMMIT');
    inTransaction = false;
  }
  catch (error) {
    if (inTransaction) {
      try {
        await dbExec('ROLLBACK');
      }
      catch {
        // Ignore rollback failure; original error is surfaced.
      }
    }
    throw error;
  }
  // Backup pruning is filesystem-only and runs after a successful commit.
  const backupsDeleted = await pruneDatabaseBackups(nowIso, retentionPolicy);
  return {
    mode: 'apply',
    applied: true,
    alreadyApplied: false,
    bundleId: bundle.bundleId,
    plan,
    backupPath,
    retention: {
      backupsDeleted,
      logsDeleted
    }
  };
}
|
|
370
|
+
/**
 * Parse a raw JSON payload and feed the result to executeSyncMerge.
 * @throws {Error} "Invalid bundle JSON: …" when the payload is not valid JSON.
 */
export async function executeSyncMergeJson(jsonPayload, options = {}) {
  let bundleObject;
  try {
    bundleObject = JSON.parse(jsonPayload);
  } catch (parseError) {
    const detail = parseError instanceof Error ? parseError.message : String(parseError);
    throw new Error(`Invalid bundle JSON: ${detail}`);
  }
  return await executeSyncMerge(bundleObject, options);
}
|
|
@@ -0,0 +1,183 @@
|
|
|
1
|
+
import { randomUUID } from 'node:crypto';
|
|
2
|
+
import { existsSync } from 'node:fs';
|
|
3
|
+
import { promises as fs } from 'node:fs';
|
|
4
|
+
import { basename, dirname, join, resolve } from 'node:path';
|
|
5
|
+
import { fileURLToPath } from 'node:url';
|
|
6
|
+
import { dbAll } from './db.js';
|
|
7
|
+
import { getRepos } from './repos.js';
|
|
8
|
+
import { createSyncFieldHashes, parseSyncBundle } from './sync-schema.js';
|
|
9
|
+
import { ensureRepoSyncMetaForRepos, getOrCreateSyncDeviceId } from './sync-identity.js';
|
|
10
|
+
import { parseStoredProgressFile, parseTasksFile } from './state-schema.js';
|
|
11
|
+
/**
 * Decode a JSON database column and validate it with `parseValue`.
 * Falsy raw input (SQL NULL / empty string) yields null; malformed JSON
 * raises an error naming the offending column.
 */
function parseStoredJson(rawValue, fieldName, parseValue) {
  if (!rawValue) {
    return null;
  }
  let decoded;
  try {
    decoded = JSON.parse(rawValue);
  } catch (error) {
    const detail = error instanceof Error ? error.message : String(error);
    throw new Error(`Invalid JSON stored in ${fieldName}: ${detail}`);
  }
  return parseValue(decoded);
}
|
|
25
|
+
/**
 * Derive the repository path hint included in a bundle.
 * mode 'none' → undefined (omit hint), 'absolute' → fully resolved path,
 * anything else → basename of the resolved path.
 */
function resolvePathHint(path, mode) {
  switch (mode) {
    case 'none':
      return undefined;
    case 'absolute':
      return resolve(path);
    default:
      return basename(resolve(path));
  }
}
|
|
34
|
+
/**
 * Pick the clock timestamp for a synced field: the field-specific clock when
 * set, else the row-level fallback when the field actually has a value,
 * else null (no value, no clock).
 */
function toClockValue(primary, fallback, hasValue) {
  if (primary) {
    return primary;
  }
  return hasValue ? fallback : null;
}
|
|
43
|
+
/**
 * Walk upward from `startDir` to the nearest directory containing a
 * package.json. Falls back to `startDir` itself when the filesystem root is
 * reached without finding one.
 */
function findPackageRoot(startDir) {
  for (let dir = startDir; ;) {
    if (existsSync(join(dir, 'package.json'))) {
      return dir;
    }
    const parent = dirname(dir);
    if (parent === dir) {
      // Hit the filesystem root without a match.
      return startDir;
    }
    dir = parent;
  }
}
|
|
57
|
+
/**
 * Read the steward package version from the nearest package.json above this
 * module. Returns 'unknown' when the file is missing, unreadable, invalid
 * JSON, or lacks a non-empty string `version` field.
 */
async function readStewardVersion() {
  const packageRoot = findPackageRoot(dirname(fileURLToPath(import.meta.url)));
  const packageJsonPath = join(packageRoot, 'package.json');
  try {
    const contents = await fs.readFile(packageJsonPath, 'utf-8');
    const parsed = JSON.parse(contents);
    if (typeof parsed.version === 'string' && parsed.version.trim().length > 0) {
      return parsed.version;
    }
  }
  catch {
    // Fall back to unknown when package metadata cannot be loaded.
  }
  return 'unknown';
}
|
|
72
|
+
/**
 * Export local PRD state into a portable sync bundle.
 * Gathers repositories (optionally filtered by options.repoIds), their
 * prd_states and prd_archives rows, attaches per-repo sync identity metadata,
 * per-field clocks and content hashes, then validates the finished bundle via
 * parseSyncBundle before returning it.
 *
 * @param {object} [options]
 * @param {string[]} [options.repoIds] - restrict export to these repo ids.
 * @param {string} [options.pathHints] - 'none' | 'absolute' | default: basename.
 * @param {string} [options.createdAt] - ISO timestamp override.
 * @param {string} [options.bundleId] - bundle id override (default: random UUID).
 * @param {string} [options.stewardVersion] - version override (default: read
 *   from the nearest package.json).
 * @returns {Promise<object>} a schema-validated steward-sync-bundle.
 */
export async function buildSyncBundle(options = {}) {
  const pathHintsMode = options.pathHints || 'basename';
  const createdAt = options.createdAt || new Date().toISOString();
  const [allRepos, sourceDeviceId, stewardVersion] = await Promise.all([
    getRepos(),
    getOrCreateSyncDeviceId(),
    options.stewardVersion ? Promise.resolve(options.stewardVersion) : readStewardVersion()
  ]);
  // Keep only non-empty string ids from the caller's filter; empty set = export all.
  const filteredRepoIds = new Set(Array.isArray(options.repoIds)
    ? options.repoIds.filter((repoId) => typeof repoId === 'string' && repoId.trim().length > 0)
    : []);
  const repos = filteredRepoIds.size > 0
    ? allRepos.filter((repo) => filteredRepoIds.has(repo.id))
    : allRepos;
  const repoMetaById = await ensureRepoSyncMetaForRepos(repos);
  const repoIds = repos.map((repo) => repo.id);
  let stateRows = [];
  let archiveRows = [];
  if (repoIds.length > 0) {
    const placeholders = repoIds.map(() => '?').join(', ');
    stateRows = await dbAll(`
      SELECT
        repo_id,
        slug,
        tasks_json,
        progress_json,
        notes_md,
        updated_at,
        tasks_updated_at,
        progress_updated_at,
        notes_updated_at
      FROM prd_states
      WHERE repo_id IN (${placeholders})
      ORDER BY repo_id ASC, slug ASC
    `, repoIds);
    archiveRows = await dbAll(`
      SELECT repo_id, slug, archived_at
      FROM prd_archives
      WHERE repo_id IN (${placeholders})
      ORDER BY repo_id ASC, slug ASC
    `, repoIds);
  }
  const syncRepos = repos.map((repo) => {
    const repoMeta = repoMetaById.get(repo.id);
    if (!repoMeta) {
      throw new Error(`Missing sync metadata for repository ${repo.id}`);
    }
    const pathHint = resolvePathHint(repo.path, pathHintsMode);
    return {
      repoSyncKey: repoMeta.syncKey,
      name: repo.name,
      // pathHint is omitted entirely (not set to null) when mode is 'none'.
      ...(pathHint && { pathHint }),
      fingerprint: repoMeta.fingerprint,
      fingerprintKind: repoMeta.fingerprintKind
    };
  });
  const states = stateRows.map((row) => {
    const repoMeta = repoMetaById.get(row.repo_id);
    if (!repoMeta) {
      throw new Error(`Missing sync metadata for repository ${row.repo_id}`);
    }
    const tasks = parseStoredJson(row.tasks_json, 'prd_states.tasks_json', parseTasksFile);
    // Progress parsing reuses the tasks payload for count and name fallbacks.
    const progress = parseStoredJson(row.progress_json, 'prd_states.progress_json', (value) => parseStoredProgressFile(value, {
      totalTasksHint: Array.isArray(tasks?.tasks) ? tasks.tasks.length : undefined,
      prdNameFallback: tasks?.prd?.name || row.slug
    }));
    // Per-field clocks fall back to the row-level updated_at when the field has a value.
    const clocks = {
      tasksUpdatedAt: toClockValue(row.tasks_updated_at, row.updated_at, tasks !== null),
      progressUpdatedAt: toClockValue(row.progress_updated_at, row.updated_at, progress !== null),
      notesUpdatedAt: toClockValue(row.notes_updated_at, row.updated_at, row.notes_md !== null)
    };
    const hashes = createSyncFieldHashes({
      tasks,
      progress,
      notes: row.notes_md
    });
    return {
      repoSyncKey: repoMeta.syncKey,
      slug: row.slug,
      tasks,
      progress,
      notes: row.notes_md,
      clocks,
      hashes
    };
  });
  const archives = archiveRows.map((row) => {
    const repoMeta = repoMetaById.get(row.repo_id);
    if (!repoMeta) {
      throw new Error(`Missing sync metadata for repository ${row.repo_id}`);
    }
    return {
      repoSyncKey: repoMeta.syncKey,
      slug: row.slug,
      archivedAt: row.archived_at
    };
  });
  // Round-trip through the schema parser so exported bundles are guaranteed valid.
  return parseSyncBundle({
    type: 'steward-sync-bundle',
    formatVersion: 1,
    bundleId: options.bundleId || randomUUID(),
    createdAt,
    sourceDeviceId,
    stewardVersion,
    repos: syncRepos,
    states,
    archives
  });
}
|
|
181
|
+
/** Render a sync bundle as human-readable, 2-space-indented JSON. */
export function serializeSyncBundle(bundle) {
  const indentWidth = 2;
  return JSON.stringify(bundle, null, indentWidth);
}
|