@drewpayment/mink 0.8.0 → 0.9.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. package/dashboard/out/404.html +1 -1
  2. package/dashboard/out/action-log.html +1 -1
  3. package/dashboard/out/action-log.txt +1 -1
  4. package/dashboard/out/activity.html +1 -1
  5. package/dashboard/out/activity.txt +1 -1
  6. package/dashboard/out/bugs.html +1 -1
  7. package/dashboard/out/bugs.txt +1 -1
  8. package/dashboard/out/capture.html +1 -1
  9. package/dashboard/out/capture.txt +1 -1
  10. package/dashboard/out/config.html +1 -1
  11. package/dashboard/out/config.txt +1 -1
  12. package/dashboard/out/daemon.html +1 -1
  13. package/dashboard/out/daemon.txt +1 -1
  14. package/dashboard/out/design.html +1 -1
  15. package/dashboard/out/design.txt +1 -1
  16. package/dashboard/out/discord.html +1 -1
  17. package/dashboard/out/discord.txt +1 -1
  18. package/dashboard/out/file-index.html +1 -1
  19. package/dashboard/out/file-index.txt +1 -1
  20. package/dashboard/out/index.html +1 -1
  21. package/dashboard/out/index.txt +1 -1
  22. package/dashboard/out/insights.html +1 -1
  23. package/dashboard/out/insights.txt +1 -1
  24. package/dashboard/out/learning.html +1 -1
  25. package/dashboard/out/learning.txt +1 -1
  26. package/dashboard/out/overview.html +1 -1
  27. package/dashboard/out/overview.txt +1 -1
  28. package/dashboard/out/scheduler.html +1 -1
  29. package/dashboard/out/scheduler.txt +1 -1
  30. package/dashboard/out/sync.html +1 -1
  31. package/dashboard/out/sync.txt +1 -1
  32. package/dashboard/out/tokens.html +1 -1
  33. package/dashboard/out/tokens.txt +1 -1
  34. package/dashboard/out/waste.html +1 -1
  35. package/dashboard/out/waste.txt +1 -1
  36. package/dashboard/out/wiki.html +1 -1
  37. package/dashboard/out/wiki.txt +1 -1
  38. package/dist/cli.js +2105 -1068
  39. package/package.json +1 -1
  40. package/src/commands/bug-search.ts +3 -3
  41. package/src/commands/detect-waste.ts +34 -25
  42. package/src/commands/init.ts +21 -21
  43. package/src/commands/post-read.ts +6 -3
  44. package/src/commands/post-write.ts +6 -3
  45. package/src/commands/pre-read.ts +14 -10
  46. package/src/commands/pre-write.ts +8 -5
  47. package/src/commands/reflect.ts +12 -7
  48. package/src/commands/session-start.ts +34 -3
  49. package/src/commands/session-stop.ts +10 -6
  50. package/src/commands/status.ts +29 -17
  51. package/src/commands/sync-migrate.ts +330 -0
  52. package/src/commands/sync.ts +75 -1
  53. package/src/commands/update.ts +4 -9
  54. package/src/core/conflict-park.ts +84 -0
  55. package/src/core/dashboard-api.ts +12 -31
  56. package/src/core/note-writer.ts +52 -6
  57. package/src/core/paths.ts +66 -10
  58. package/src/core/state-aggregator.ts +304 -0
  59. package/src/core/state-counters.ts +46 -0
  60. package/src/core/sync-merge-drivers.ts +247 -0
  61. package/src/core/sync.ts +150 -68
  62. package/src/core/token-ledger.ts +19 -3
  63. /package/dashboard/out/_next/static/{EC-_8nIOf1GnPrIqZ7Mk3 → r7Xr9mrUpunsz4QtD3jh1}/_buildManifest.js +0 -0
  64. /package/dashboard/out/_next/static/{EC-_8nIOf1GnPrIqZ7Mk3 → r7Xr9mrUpunsz4QtD3jh1}/_ssgManifest.js +0 -0
@@ -0,0 +1,247 @@
1
+ import { readFileSync, writeFileSync, appendFileSync } from "fs";
2
+ import { join } from "path";
3
+ import { minkRoot } from "./paths";
4
+ import { parseLearningMemory, serializeLearningMemory } from "./learning-memory";
5
+ import type { LearningMemory, SectionName } from "../types/learning-memory";
6
+ import type { FileIndex, FileIndexEntry } from "../types/file-index";
7
+ import type { DeviceInfo, DeviceRegistry } from "../types/config";
8
+
9
+ // Custom git merge drivers. All three follow the same contract: read base /
10
+ // ours / theirs from disk, compute a deterministic merged result, write it to
11
+ // the ours-path, and exit 0 — never fail, never leave conflict markers. Any
12
+ // parse error or unexpected shape falls back to "ours" (the local side) and
13
+ // logs the failure to ~/.mink/sync-warnings.log so the user can investigate
14
+ // later, but the merge itself succeeds and sync stays unblocked.
15
+
16
+ interface DriverArgs {
17
+ basePath: string;
18
+ oursPath: string;
19
+ theirsPath: string;
20
+ // The original repo path being merged — used in the warning log only.
21
+ filePath: string;
22
+ }
23
+
24
+ function logWarning(driver: string, args: DriverArgs, err: unknown): void {
25
+ try {
26
+ const line = `[${new Date().toISOString()}] ${driver} fallback for ${args.filePath}: ${err instanceof Error ? err.message : String(err)}\n`;
27
+ appendFileSync(join(minkRoot(), "sync-warnings.log"), line);
28
+ } catch {
29
+ // Even logging is best-effort — we never want to block a merge.
30
+ }
31
+ }
32
+
33
+ function readJsonOrNull(path: string): unknown {
34
+ try {
35
+ return JSON.parse(readFileSync(path, "utf-8"));
36
+ } catch {
37
+ return null;
38
+ }
39
+ }
40
+
41
+ function readTextOrEmpty(path: string): string {
42
+ try {
43
+ return readFileSync(path, "utf-8");
44
+ } catch {
45
+ return "";
46
+ }
47
+ }
48
+
49
+ // ── mink-json-union: file-index.json ───────────────────────────────────────
50
+
51
+ function isFileIndexShape(value: unknown): value is FileIndex {
52
+ if (value === null || typeof value !== "object") return false;
53
+ const obj = value as Record<string, unknown>;
54
+ return (
55
+ typeof obj.header === "object" &&
56
+ obj.header !== null &&
57
+ typeof obj.entries === "object" &&
58
+ obj.entries !== null
59
+ );
60
+ }
61
+
62
+ function mergeFileIndex(ours: FileIndex, theirs: FileIndex): FileIndex {
63
+ const entries: Record<string, FileIndexEntry> = { ...ours.entries };
64
+ for (const [path, entry] of Object.entries(theirs.entries)) {
65
+ const existing = entries[path];
66
+ if (!existing) {
67
+ entries[path] = entry;
68
+ continue;
69
+ }
70
+ // Prefer the side with the more recent lastModified — describes the most
71
+ // up-to-date snapshot of the file.
72
+ if (entry.lastModified > existing.lastModified) {
73
+ entries[path] = entry;
74
+ }
75
+ }
76
+ // Header: latest lastScanTimestamp wins; totalFiles becomes the merged count.
77
+ const lastScan =
78
+ ours.header.lastScanTimestamp > theirs.header.lastScanTimestamp
79
+ ? ours.header.lastScanTimestamp
80
+ : theirs.header.lastScanTimestamp;
81
+ return {
82
+ header: {
83
+ lastScanTimestamp: lastScan,
84
+ totalFiles: Object.keys(entries).length,
85
+ // Header counters are deprecated under sync v2 (counters live in
86
+ // .mink-state-counters.json per device). Preserve max for legacy reads.
87
+ lifetimeHits: Math.max(
88
+ ours.header.lifetimeHits,
89
+ theirs.header.lifetimeHits
90
+ ),
91
+ lifetimeMisses: Math.max(
92
+ ours.header.lifetimeMisses,
93
+ theirs.header.lifetimeMisses
94
+ ),
95
+ },
96
+ entries,
97
+ };
98
+ }
99
+
100
+ export function mergeJsonUnion(args: DriverArgs): void {
101
+ try {
102
+ const ours = readJsonOrNull(args.oursPath);
103
+ const theirs = readJsonOrNull(args.theirsPath);
104
+ if (!isFileIndexShape(ours) || !isFileIndexShape(theirs)) {
105
+ logWarning(
106
+ "mink-json-union",
107
+ args,
108
+ new Error("non-FileIndex shape — keeping ours")
109
+ );
110
+ return;
111
+ }
112
+ const merged = mergeFileIndex(ours, theirs);
113
+ writeFileSync(args.oursPath, JSON.stringify(merged, null, 2));
114
+ } catch (err) {
115
+ logWarning("mink-json-union", args, err);
116
+ }
117
+ }
118
+
119
+ // ── mink-learning-memory: learning-memory.md ───────────────────────────────
120
+
121
+ function mergeLearningMemory(
122
+ ours: LearningMemory,
123
+ theirs: LearningMemory
124
+ ): LearningMemory {
125
+ const projectName =
126
+ ours.projectName !== "unknown"
127
+ ? ours.projectName
128
+ : theirs.projectName;
129
+ const sectionNames: SectionName[] = [
130
+ "User Preferences",
131
+ "Key Learnings",
132
+ "Do-Not-Repeat",
133
+ "Decision Log",
134
+ ];
135
+ const sections = {} as LearningMemory["sections"];
136
+ for (const section of sectionNames) {
137
+ const existing = new Map<string, string>();
138
+ for (const entry of ours.sections[section] ?? []) {
139
+ existing.set(entry.trim().toLowerCase(), entry);
140
+ }
141
+ for (const entry of theirs.sections[section] ?? []) {
142
+ const norm = entry.trim().toLowerCase();
143
+ if (!existing.has(norm)) {
144
+ existing.set(norm, entry);
145
+ }
146
+ }
147
+ sections[section] = [...existing.values()];
148
+ }
149
+ return { projectName, sections };
150
+ }
151
+
152
+ export function mergeLearningMemoryDriver(args: DriverArgs): void {
153
+ try {
154
+ const ours = parseLearningMemory(readTextOrEmpty(args.oursPath));
155
+ const theirs = parseLearningMemory(readTextOrEmpty(args.theirsPath));
156
+ const merged = mergeLearningMemory(ours, theirs);
157
+ writeFileSync(args.oursPath, serializeLearningMemory(merged));
158
+ } catch (err) {
159
+ logWarning("mink-learning-memory", args, err);
160
+ }
161
+ }
162
+
163
+ // ── mink-devices: devices.json ─────────────────────────────────────────────
164
+
165
+ function isDeviceRegistry(value: unknown): value is DeviceRegistry {
166
+ if (value === null || typeof value !== "object") return false;
167
+ const obj = value as Record<string, unknown>;
168
+ return (
169
+ typeof obj.devices === "object" &&
170
+ obj.devices !== null &&
171
+ !Array.isArray(obj.devices)
172
+ );
173
+ }
174
+
175
+ function mergeDevicesRegistry(
176
+ ours: DeviceRegistry,
177
+ theirs: DeviceRegistry
178
+ ): DeviceRegistry {
179
+ const devices: Record<string, DeviceInfo> = { ...ours.devices };
180
+ for (const [id, info] of Object.entries(theirs.devices)) {
181
+ const existing = devices[id];
182
+ if (!existing) {
183
+ devices[id] = info;
184
+ continue;
185
+ }
186
+ devices[id] = {
187
+ // Prefer ours.name (user-set) when set; otherwise take theirs.
188
+ name: existing.name || info.name,
189
+ hostname: existing.hostname || info.hostname,
190
+ platform: existing.platform || info.platform,
191
+ firstSeen:
192
+ existing.firstSeen < info.firstSeen
193
+ ? existing.firstSeen
194
+ : info.firstSeen,
195
+ lastSeen:
196
+ existing.lastSeen > info.lastSeen
197
+ ? existing.lastSeen
198
+ : info.lastSeen,
199
+ };
200
+ }
201
+ return { devices };
202
+ }
203
+
204
+ export function mergeDevicesDriver(args: DriverArgs): void {
205
+ try {
206
+ const ours = readJsonOrNull(args.oursPath);
207
+ const theirs = readJsonOrNull(args.theirsPath);
208
+ if (!isDeviceRegistry(ours) || !isDeviceRegistry(theirs)) {
209
+ logWarning(
210
+ "mink-devices",
211
+ args,
212
+ new Error("non-DeviceRegistry shape — keeping ours")
213
+ );
214
+ return;
215
+ }
216
+ const merged = mergeDevicesRegistry(ours, theirs);
217
+ writeFileSync(args.oursPath, JSON.stringify(merged, null, 2));
218
+ } catch (err) {
219
+ logWarning("mink-devices", args, err);
220
+ }
221
+ }
222
+
223
+ // ── Dispatcher ─────────────────────────────────────────────────────────────
224
+
225
+ export function runMergeDriver(
226
+ name: string,
227
+ basePath: string,
228
+ oursPath: string,
229
+ theirsPath: string,
230
+ filePath: string
231
+ ): number {
232
+ const args: DriverArgs = { basePath, oursPath, theirsPath, filePath };
233
+ switch (name) {
234
+ case "mink-json-union":
235
+ mergeJsonUnion(args);
236
+ return 0;
237
+ case "mink-learning-memory":
238
+ mergeLearningMemoryDriver(args);
239
+ return 0;
240
+ case "mink-devices":
241
+ mergeDevicesDriver(args);
242
+ return 0;
243
+ default:
244
+ logWarning(name, args, new Error("unknown driver — keeping ours"));
245
+ return 0;
246
+ }
247
+ }
package/src/core/sync.ts CHANGED
@@ -1,9 +1,10 @@
1
1
  import { existsSync, writeFileSync, readFileSync } from "fs";
2
2
  import { join } from "path";
3
3
  import { execSync } from "child_process";
4
- import { minkRoot } from "./paths";
4
+ import { minkRoot, syncVersionPath } from "./paths";
5
5
  import { resolveConfigValue, setConfigValue } from "./global-config";
6
6
  import { updateDeviceHeartbeat } from "./device";
7
+ import { parkConflictingState } from "./conflict-park";
7
8
 
8
9
  // ── Constants ──────────────────────────────────────────────────────────────
9
10
 
@@ -11,16 +12,61 @@ const GIT_TIMEOUT = 5_000;
11
12
  const PUSH_TIMEOUT = 10_000;
12
13
  const FETCH_TIMEOUT = 15_000;
13
14
 
15
+ // Sync layout version. Bumped when the on-disk shape of `~/.mink/` changes in
16
+ // a way that older devices cannot read. Migration runs on first session-start
17
+ // after upgrade when readSyncVersion() < MINK_SYNC_VERSION.
18
+ export const MINK_SYNC_VERSION = 2;
19
+
20
+ export function readSyncVersion(): number {
21
+ try {
22
+ const raw = readFileSync(syncVersionPath(), "utf-8").trim();
23
+ const n = parseInt(raw, 10);
24
+ return Number.isFinite(n) && n > 0 ? n : 1;
25
+ } catch {
26
+ // Pre-versioned repos default to v1.
27
+ return 1;
28
+ }
29
+ }
30
+
31
+ export function writeSyncVersion(version: number): void {
32
+ writeFileSync(syncVersionPath(), `${version}\n`);
33
+ }
34
+
14
35
  const GITIGNORE_CONTENTS = `# Runtime state — machine-specific
15
36
  scheduler.pid
16
37
  scheduler.log
38
+ channel.pid
39
+ channel.log
17
40
 
18
41
  # Device identity and local config — machine-specific
19
42
  device-id
20
43
  config.local
21
44
 
22
- # Local backupsmachine-specific snapshots
45
+ # Migration coordination — never sync this
46
+ .sync-migrate.lock
47
+
48
+ # Local backups and per-device caches — machine-specific snapshots
23
49
  projects/*/backups/
50
+ projects/*/session.json
51
+ projects/*/scheduler-manifest.json
52
+ projects/*/design-captures/
53
+ projects/*/.mink-state-counters.json
54
+
55
+ # Wiki derived/regenerable pages — each device rebuilds locally
56
+ wiki/_index.md
57
+ wiki/.mink-index.json
58
+ wiki/projects/*/conventions.md
59
+ wiki/projects/*/architecture.md
60
+ `;
61
+
62
+ const GITATTRIBUTES_CONTENTS = `# Sync v2 — merge drivers eliminate conflicts on shared files.
63
+ # Drivers are registered in .git/config by ensureMergeDriversRegistered().
64
+ projects/*/file-index.json merge=mink-json-union
65
+ projects/*/learning-memory.*.md merge=union
66
+ projects/*/learning-memory.md merge=mink-learning-memory
67
+ wiki/areas/daily/*.md merge=union
68
+ wiki/projects/*/sessions/*.md merge=union
69
+ devices.json merge=mink-devices
24
70
  `;
25
71
 
26
72
  // ── Helpers ────────────────────────────────────────────────────────────────
@@ -54,6 +100,32 @@ export function ensureGitignore(): void {
54
100
  writeFileSync(gitignorePath, GITIGNORE_CONTENTS);
55
101
  }
56
102
 
103
+ export function ensureGitAttributes(): void {
104
+ const path = join(minkRoot(), ".gitattributes");
105
+ writeFileSync(path, GITATTRIBUTES_CONTENTS);
106
+ }
107
+
108
+ const MERGE_DRIVERS = [
109
+ "mink-json-union",
110
+ "mink-learning-memory",
111
+ "mink-devices",
112
+ ] as const;
113
+
114
+ // Register the custom merge drivers in the local repo's .git/config so git
115
+ // invokes `mink sync merge-driver <name>` whenever it encounters a conflict
116
+ // on a path matched by .gitattributes. We point at the absolute path to the
117
+ // currently-running mink CLI so a stale registration after npm relinks gets
118
+ // refreshed every time `ensureMergeDriversRegistered()` runs.
119
+ export function ensureMergeDriversRegistered(): void {
120
+ const cliPath = process.argv[1] ?? "mink";
121
+ for (const name of MERGE_DRIVERS) {
122
+ const command = `${cliPath} sync merge-driver ${name} %O %A %B %P`;
123
+ gitSafe(`config merge.${name}.name "Mink ${name}"`);
124
+ gitSafe(`config merge.${name}.driver "${command}"`);
125
+ gitSafe(`config merge.${name}.recursive binary`);
126
+ }
127
+ }
128
+
57
129
  export interface SyncStatusInfo {
58
130
  enabled: boolean;
59
131
  gitInitialized: boolean;
@@ -112,6 +184,12 @@ export function initSync(remoteUrl: string): void {
112
184
  git("init");
113
185
  git(`remote add origin ${remoteUrl}`);
114
186
 
187
+ // Install merge drivers + attributes now that .git exists. Drivers must be
188
+ // registered before the first pull so any incoming conflicts can be auto-
189
+ // resolved without surfacing to the user.
190
+ ensureGitAttributes();
191
+ ensureMergeDriversRegistered();
192
+
115
193
  // Try to fetch from remote
116
194
  const fetchResult = gitSafe("fetch origin", FETCH_TIMEOUT);
117
195
 
@@ -171,14 +249,36 @@ export function initSync(remoteUrl: string): void {
171
249
  console.log("[mink] manual sync: run 'mink sync' at any time");
172
250
  }
173
251
 
252
+ // Sync v2 helper: fetch + merge --no-edit using the registered merge drivers.
253
+ // Anything still conflicting after the drivers run gets parked to a hidden
254
+ // ref so sync can never block. Returns true on a clean merge.
255
+ function attemptMergeOrPark(
256
+ branch: string,
257
+ reason: string,
258
+ onMessage: (msg: string) => void
259
+ ): boolean {
260
+ try {
261
+ git(`merge --no-edit origin/${branch}`, FETCH_TIMEOUT);
262
+ return true;
263
+ } catch {
264
+ const parked = parkConflictingState(reason);
265
+ if (parked) {
266
+ onMessage(
267
+ `[mink] sync: parked conflicting state to ${parked} — sync continues, run 'mink sync reconcile list' to inspect`
268
+ );
269
+ }
270
+ return false;
271
+ }
272
+ }
273
+
174
274
  export function syncPull(
175
275
  onMessage: (msg: string) => void = (msg) => console.error(msg)
176
276
  ): void {
177
277
  if (!isSyncInitialized()) return;
178
278
 
179
279
  ensureGitignore();
180
-
181
- const root = minkRoot();
280
+ ensureGitAttributes();
281
+ ensureMergeDriversRegistered();
182
282
 
183
283
  try {
184
284
  // Stash any uncommitted local changes as safety net
@@ -192,25 +292,17 @@ export function syncPull(
192
292
  // Determine branch
193
293
  const branch = gitSafe("rev-parse --abbrev-ref HEAD") ?? "main";
194
294
 
195
- // Pull with rebase
196
- try {
197
- git(`pull --rebase origin ${branch}`, FETCH_TIMEOUT);
198
- } catch (err) {
199
- // Check if rebase is in progress and abort
200
- if (existsSync(join(root, ".git", "rebase-merge")) ||
201
- existsSync(join(root, ".git", "rebase-apply"))) {
202
- gitSafe("rebase --abort");
203
- onMessage(
204
- "[mink] sync pull: rebase conflict detected aborted rebase, local state preserved"
205
- );
206
- onMessage(
207
- "[mink] resolve manually with 'mink sync pull' or 'cd ~/.mink && git pull --rebase origin main'"
208
- );
209
- } else {
210
- onMessage(
211
- `[mink] sync pull failed: ${err instanceof Error ? err.message : String(err)}`
212
- );
213
- }
295
+ // Fetch + merge --no-edit. Custom merge drivers (file-index union,
296
+ // learning-memory section merge, devices registry union) resolve every
297
+ // anticipated conflict; anything left over gets parked to a hidden ref
298
+ // and the working tree advances to upstream HEAD so sync never gets stuck.
299
+ const fetched = gitSafe(`fetch origin ${branch}`, FETCH_TIMEOUT);
300
+ if (fetched !== null) {
301
+ attemptMergeOrPark(branch, "pull", onMessage);
302
+ } else {
303
+ onMessage(
304
"[mink] sync pull: fetch failed (network or auth) — local state preserved"
305
+ );
214
306
  }
215
307
 
216
308
  // Pop stash if we stashed earlier
@@ -240,62 +332,52 @@ export function syncPush(
240
332
  if (!isSyncInitialized()) return;
241
333
 
242
334
  ensureGitignore();
243
- try { updateDeviceHeartbeat(); } catch { /* never crash hooks */ }
244
-
245
- const root = minkRoot();
335
+ ensureGitAttributes();
336
+ ensureMergeDriversRegistered();
337
+ try {
338
+ updateDeviceHeartbeat();
339
+ } catch {
340
+ /* never crash hooks */
341
+ }
246
342
 
247
343
  try {
248
- // Check for changes
249
344
  const status = gitSafe("status --porcelain");
250
- if (!status || !status.trim()) {
251
- // No local changes — still try to push any unpushed commits
252
- const branch = gitSafe("rev-parse --abbrev-ref HEAD") ?? "main";
253
- try {
254
- git(`push origin ${branch}`, PUSH_TIMEOUT);
255
- setConfigValue("sync.last-push", new Date().toISOString());
256
- } catch {
257
- // No unpushed commits or network error — silent
258
- }
259
- return;
260
- }
261
-
262
- // Stage all changes (respects .gitignore)
263
- git("add -A");
264
-
265
- // Commit
266
- const now = new Date();
267
- const timestamp = now.toISOString().replace("T", " ").slice(0, 16);
268
- git(`commit -m "mink: sync ${timestamp}"`);
269
-
270
- // Determine branch
345
+ const hasChanges = status !== null && status.trim().length > 0;
271
346
  const branch = gitSafe("rev-parse --abbrev-ref HEAD") ?? "main";
272
347
 
273
- // Pull with rebase to reconcile any remote changes
274
- try {
275
- git(`pull --rebase origin ${branch}`, FETCH_TIMEOUT);
276
- } catch {
277
- // Check for rebase conflict
278
- if (existsSync(join(root, ".git", "rebase-merge")) ||
279
- existsSync(join(root, ".git", "rebase-apply"))) {
280
- gitSafe("rebase --abort");
281
- onMessage(
282
- "[mink] sync: rebase conflict during push — local commit preserved, skipping push"
283
- );
284
- onMessage(
285
- "[mink] resolve manually with 'mink sync pull' then 'mink sync push'"
286
- );
287
- return;
288
- }
348
+ if (hasChanges) {
349
+ git("add -A");
350
+ const now = new Date();
351
+ const timestamp = now.toISOString().replace("T", " ").slice(0, 16);
352
+ gitSafe(`commit -m "mink: sync ${timestamp}"`);
289
353
  }
290
354
 
291
- // Push (best-effort)
355
+ // Reconcile with remote before pushing. Custom merge drivers handle
356
+ // anticipated conflicts; anything they can't is parked to a hidden ref.
357
+ const fetched = gitSafe(`fetch origin ${branch}`, FETCH_TIMEOUT);
358
+ if (fetched !== null) {
359
+ attemptMergeOrPark(branch, "push", onMessage);
360
+ }
361
+
362
+ // Push. Single retry on rejection (race with a simultaneous push from
363
+ // another device). After that we leave the commit local for next session
364
+ // — matches spec 15's push-failure handling.
292
365
  try {
293
366
  git(`push origin ${branch}`, PUSH_TIMEOUT);
294
367
  setConfigValue("sync.last-push", new Date().toISOString());
295
368
  } catch {
296
- onMessage(
297
- "[mink] sync push failed — local commit preserved, will retry next session"
298
- );
369
+ const refetched = gitSafe(`fetch origin ${branch}`, FETCH_TIMEOUT);
370
+ if (refetched !== null) {
371
+ attemptMergeOrPark(branch, "push-retry", onMessage);
372
+ }
373
+ try {
374
+ git(`push origin ${branch}`, PUSH_TIMEOUT);
375
+ setConfigValue("sync.last-push", new Date().toISOString());
376
+ } catch {
377
+ onMessage(
378
+ "[mink] sync push failed — local commit preserved, will retry next session"
379
+ );
380
+ }
299
381
  }
300
382
  } catch (err) {
301
383
  onMessage(
@@ -160,16 +160,32 @@ export function saveArchive(archivePath: string, newlyArchived: LedgerSession[])
160
160
 
161
161
  export function createLedgerFinalizer(
162
162
  projectDir: string,
163
+ deviceIdOrThreshold?: string | number,
163
164
  archiveThreshold: number = 1000
164
165
  ): SessionFinalizer {
165
- const ledgerPath = join(projectDir, "token-ledger.json");
166
- const archivePath = join(projectDir, "token-ledger-archive.json");
166
+ // Backward compat: callers that pass `(projectDir)` or
167
+ // `(projectDir, threshold)` still work and write to the legacy path. New
168
+ // callers pass `(projectDir, deviceId, threshold?)` to write into the
169
+ // per-device shard at projectDir/state/<deviceId>/...
170
+ let ledgerPath: string;
171
+ let archivePath: string;
172
+ let threshold: number;
173
+ if (typeof deviceIdOrThreshold === "string") {
174
+ const shardDir = join(projectDir, "state", deviceIdOrThreshold);
175
+ ledgerPath = join(shardDir, "token-ledger.json");
176
+ archivePath = join(shardDir, "token-ledger-archive.json");
177
+ threshold = archiveThreshold;
178
+ } else {
179
+ ledgerPath = join(projectDir, "token-ledger.json");
180
+ archivePath = join(projectDir, "token-ledger-archive.json");
181
+ threshold = deviceIdOrThreshold ?? archiveThreshold;
182
+ }
167
183
 
168
184
  return {
169
185
  appendSession(summary: SessionSummary): void {
170
186
  const ledger = loadLedger(ledgerPath);
171
187
  appendSession(ledger, summary);
172
- const { archived } = archiveIfNeeded(ledger, archiveThreshold);
188
+ const { archived } = archiveIfNeeded(ledger, threshold);
173
189
  if (archived.length > 0) {
174
190
  saveArchive(archivePath, archived);
175
191
  }