pushwork 2.0.0-a.sub.1 → 2.0.0-preview

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (251) hide show
  1. package/dist/branches.d.ts +19 -0
  2. package/dist/branches.d.ts.map +1 -0
  3. package/dist/branches.js +111 -0
  4. package/dist/branches.js.map +1 -0
  5. package/dist/cli.d.ts +1 -1
  6. package/dist/cli.d.ts.map +1 -1
  7. package/dist/cli.js +238 -272
  8. package/dist/cli.js.map +1 -1
  9. package/dist/config.d.ts +17 -0
  10. package/dist/config.d.ts.map +1 -0
  11. package/dist/config.js +84 -0
  12. package/dist/config.js.map +1 -0
  13. package/dist/fs-tree.d.ts +6 -0
  14. package/dist/fs-tree.d.ts.map +1 -0
  15. package/dist/fs-tree.js +99 -0
  16. package/dist/fs-tree.js.map +1 -0
  17. package/dist/ignore.d.ts +6 -0
  18. package/dist/ignore.d.ts.map +1 -0
  19. package/dist/ignore.js +74 -0
  20. package/dist/ignore.js.map +1 -0
  21. package/dist/index.d.ts +8 -4
  22. package/dist/index.d.ts.map +1 -1
  23. package/dist/index.js +34 -4
  24. package/dist/index.js.map +1 -1
  25. package/dist/log.d.ts +3 -0
  26. package/dist/log.d.ts.map +1 -0
  27. package/dist/log.js +14 -0
  28. package/dist/log.js.map +1 -0
  29. package/dist/pushwork.d.ts +115 -0
  30. package/dist/pushwork.d.ts.map +1 -0
  31. package/dist/pushwork.js +918 -0
  32. package/dist/pushwork.js.map +1 -0
  33. package/dist/repo.d.ts +14 -0
  34. package/dist/repo.d.ts.map +1 -0
  35. package/dist/repo.js +60 -0
  36. package/dist/repo.js.map +1 -0
  37. package/dist/shapes/custom.d.ts +3 -0
  38. package/dist/shapes/custom.d.ts.map +1 -0
  39. package/dist/shapes/custom.js +57 -0
  40. package/dist/shapes/custom.js.map +1 -0
  41. package/dist/shapes/file.d.ts +20 -0
  42. package/dist/shapes/file.d.ts.map +1 -0
  43. package/dist/shapes/file.js +140 -0
  44. package/dist/shapes/file.js.map +1 -0
  45. package/dist/shapes/index.d.ts +10 -0
  46. package/dist/shapes/index.d.ts.map +1 -0
  47. package/dist/shapes/index.js +35 -0
  48. package/dist/shapes/index.js.map +1 -0
  49. package/dist/shapes/patchwork-folder.d.ts +3 -0
  50. package/dist/shapes/patchwork-folder.d.ts.map +1 -0
  51. package/dist/shapes/patchwork-folder.js +160 -0
  52. package/dist/shapes/patchwork-folder.js.map +1 -0
  53. package/dist/shapes/types.d.ts +37 -0
  54. package/dist/shapes/types.d.ts.map +1 -0
  55. package/dist/shapes/types.js +52 -0
  56. package/dist/shapes/types.js.map +1 -0
  57. package/dist/shapes/vfs.d.ts +3 -0
  58. package/dist/shapes/vfs.d.ts.map +1 -0
  59. package/dist/shapes/vfs.js +88 -0
  60. package/dist/shapes/vfs.js.map +1 -0
  61. package/dist/stash.d.ts +23 -0
  62. package/dist/stash.d.ts.map +1 -0
  63. package/dist/stash.js +118 -0
  64. package/dist/stash.js.map +1 -0
  65. package/flake.lock +128 -0
  66. package/flake.nix +66 -0
  67. package/package.json +15 -48
  68. package/patches/@automerge__automerge-repo@2.6.0-subduction.15.patch +26 -0
  69. package/pnpm-workspace.yaml +5 -0
  70. package/src/branches.ts +93 -0
  71. package/src/cli.ts +258 -408
  72. package/src/config.ts +64 -0
  73. package/src/fs-tree.ts +70 -0
  74. package/src/ignore.ts +33 -0
  75. package/src/index.ts +38 -4
  76. package/src/log.ts +8 -0
  77. package/src/pushwork.ts +1055 -0
  78. package/src/repo.ts +76 -0
  79. package/src/shapes/custom.ts +29 -0
  80. package/src/shapes/file.ts +115 -0
  81. package/src/shapes/index.ts +19 -0
  82. package/src/shapes/patchwork-folder.ts +156 -0
  83. package/src/shapes/types.ts +79 -0
  84. package/src/shapes/vfs.ts +93 -0
  85. package/src/stash.ts +106 -0
  86. package/test/integration/branches.test.ts +389 -0
  87. package/test/integration/pushwork.test.ts +547 -0
  88. package/test/setup.ts +29 -0
  89. package/test/unit/doc-shape.test.ts +612 -0
  90. package/tsconfig.json +2 -3
  91. package/vitest.config.ts +14 -0
  92. package/ARCHITECTURE-ACCORDING-TO-CLAUDE.md +0 -248
  93. package/CLAUDE.md +0 -141
  94. package/README.md +0 -221
  95. package/babel.config.js +0 -5
  96. package/dist/cli/commands.d.ts +0 -71
  97. package/dist/cli/commands.d.ts.map +0 -1
  98. package/dist/cli/commands.js +0 -794
  99. package/dist/cli/commands.js.map +0 -1
  100. package/dist/cli/index.d.ts +0 -2
  101. package/dist/cli/index.d.ts.map +0 -1
  102. package/dist/cli/index.js +0 -19
  103. package/dist/cli/index.js.map +0 -1
  104. package/dist/commands.d.ts +0 -61
  105. package/dist/commands.d.ts.map +0 -1
  106. package/dist/commands.js +0 -861
  107. package/dist/commands.js.map +0 -1
  108. package/dist/config/index.d.ts +0 -71
  109. package/dist/config/index.d.ts.map +0 -1
  110. package/dist/config/index.js +0 -314
  111. package/dist/config/index.js.map +0 -1
  112. package/dist/core/change-detection.d.ts +0 -80
  113. package/dist/core/change-detection.d.ts.map +0 -1
  114. package/dist/core/change-detection.js +0 -523
  115. package/dist/core/change-detection.js.map +0 -1
  116. package/dist/core/config.d.ts +0 -81
  117. package/dist/core/config.d.ts.map +0 -1
  118. package/dist/core/config.js +0 -258
  119. package/dist/core/config.js.map +0 -1
  120. package/dist/core/index.d.ts +0 -6
  121. package/dist/core/index.d.ts.map +0 -1
  122. package/dist/core/index.js +0 -6
  123. package/dist/core/index.js.map +0 -1
  124. package/dist/core/move-detection.d.ts +0 -34
  125. package/dist/core/move-detection.d.ts.map +0 -1
  126. package/dist/core/move-detection.js +0 -121
  127. package/dist/core/move-detection.js.map +0 -1
  128. package/dist/core/snapshot.d.ts +0 -105
  129. package/dist/core/snapshot.d.ts.map +0 -1
  130. package/dist/core/snapshot.js +0 -217
  131. package/dist/core/snapshot.js.map +0 -1
  132. package/dist/core/sync-engine.d.ts +0 -157
  133. package/dist/core/sync-engine.d.ts.map +0 -1
  134. package/dist/core/sync-engine.js +0 -1379
  135. package/dist/core/sync-engine.js.map +0 -1
  136. package/dist/types/config.d.ts +0 -99
  137. package/dist/types/config.d.ts.map +0 -1
  138. package/dist/types/config.js +0 -5
  139. package/dist/types/config.js.map +0 -1
  140. package/dist/types/documents.d.ts +0 -88
  141. package/dist/types/documents.d.ts.map +0 -1
  142. package/dist/types/documents.js +0 -20
  143. package/dist/types/documents.js.map +0 -1
  144. package/dist/types/index.d.ts +0 -4
  145. package/dist/types/index.d.ts.map +0 -1
  146. package/dist/types/index.js +0 -4
  147. package/dist/types/index.js.map +0 -1
  148. package/dist/types/snapshot.d.ts +0 -64
  149. package/dist/types/snapshot.d.ts.map +0 -1
  150. package/dist/types/snapshot.js +0 -2
  151. package/dist/types/snapshot.js.map +0 -1
  152. package/dist/utils/content-similarity.d.ts +0 -53
  153. package/dist/utils/content-similarity.d.ts.map +0 -1
  154. package/dist/utils/content-similarity.js +0 -155
  155. package/dist/utils/content-similarity.js.map +0 -1
  156. package/dist/utils/content.d.ts +0 -10
  157. package/dist/utils/content.d.ts.map +0 -1
  158. package/dist/utils/content.js +0 -31
  159. package/dist/utils/content.js.map +0 -1
  160. package/dist/utils/directory.d.ts +0 -24
  161. package/dist/utils/directory.d.ts.map +0 -1
  162. package/dist/utils/directory.js +0 -52
  163. package/dist/utils/directory.js.map +0 -1
  164. package/dist/utils/fs.d.ts +0 -74
  165. package/dist/utils/fs.d.ts.map +0 -1
  166. package/dist/utils/fs.js +0 -248
  167. package/dist/utils/fs.js.map +0 -1
  168. package/dist/utils/index.d.ts +0 -5
  169. package/dist/utils/index.d.ts.map +0 -1
  170. package/dist/utils/index.js +0 -5
  171. package/dist/utils/index.js.map +0 -1
  172. package/dist/utils/mime-types.d.ts +0 -13
  173. package/dist/utils/mime-types.d.ts.map +0 -1
  174. package/dist/utils/mime-types.js +0 -209
  175. package/dist/utils/mime-types.js.map +0 -1
  176. package/dist/utils/network-sync.d.ts +0 -36
  177. package/dist/utils/network-sync.d.ts.map +0 -1
  178. package/dist/utils/network-sync.js +0 -250
  179. package/dist/utils/network-sync.js.map +0 -1
  180. package/dist/utils/node-polyfills.d.ts +0 -9
  181. package/dist/utils/node-polyfills.d.ts.map +0 -1
  182. package/dist/utils/node-polyfills.js +0 -9
  183. package/dist/utils/node-polyfills.js.map +0 -1
  184. package/dist/utils/output.d.ts +0 -129
  185. package/dist/utils/output.d.ts.map +0 -1
  186. package/dist/utils/output.js +0 -368
  187. package/dist/utils/output.js.map +0 -1
  188. package/dist/utils/repo-factory.d.ts +0 -13
  189. package/dist/utils/repo-factory.d.ts.map +0 -1
  190. package/dist/utils/repo-factory.js +0 -46
  191. package/dist/utils/repo-factory.js.map +0 -1
  192. package/dist/utils/string-similarity.d.ts +0 -14
  193. package/dist/utils/string-similarity.d.ts.map +0 -1
  194. package/dist/utils/string-similarity.js +0 -39
  195. package/dist/utils/string-similarity.js.map +0 -1
  196. package/dist/utils/text-diff.d.ts +0 -37
  197. package/dist/utils/text-diff.d.ts.map +0 -1
  198. package/dist/utils/text-diff.js +0 -93
  199. package/dist/utils/text-diff.js.map +0 -1
  200. package/dist/utils/trace.d.ts +0 -19
  201. package/dist/utils/trace.d.ts.map +0 -1
  202. package/dist/utils/trace.js +0 -63
  203. package/dist/utils/trace.js.map +0 -1
  204. package/src/commands.ts +0 -1134
  205. package/src/core/change-detection.ts +0 -712
  206. package/src/core/config.ts +0 -313
  207. package/src/core/index.ts +0 -5
  208. package/src/core/move-detection.ts +0 -169
  209. package/src/core/snapshot.ts +0 -275
  210. package/src/core/sync-engine.ts +0 -1795
  211. package/src/types/config.ts +0 -111
  212. package/src/types/documents.ts +0 -91
  213. package/src/types/index.ts +0 -3
  214. package/src/types/snapshot.ts +0 -67
  215. package/src/utils/content.ts +0 -34
  216. package/src/utils/directory.ts +0 -73
  217. package/src/utils/fs.ts +0 -297
  218. package/src/utils/index.ts +0 -4
  219. package/src/utils/mime-types.ts +0 -244
  220. package/src/utils/network-sync.ts +0 -319
  221. package/src/utils/node-polyfills.ts +0 -8
  222. package/src/utils/output.ts +0 -450
  223. package/src/utils/repo-factory.ts +0 -73
  224. package/src/utils/string-similarity.ts +0 -54
  225. package/src/utils/text-diff.ts +0 -101
  226. package/src/utils/trace.ts +0 -70
  227. package/test/integration/README.md +0 -328
  228. package/test/integration/clone-test.sh +0 -310
  229. package/test/integration/conflict-resolution-test.sh +0 -309
  230. package/test/integration/debug-both-nested.sh +0 -74
  231. package/test/integration/debug-concurrent-nested.sh +0 -87
  232. package/test/integration/debug-nested.sh +0 -73
  233. package/test/integration/deletion-behavior-test.sh +0 -487
  234. package/test/integration/deletion-sync-test-simple.sh +0 -193
  235. package/test/integration/deletion-sync-test.sh +0 -297
  236. package/test/integration/exclude-patterns.test.ts +0 -144
  237. package/test/integration/full-integration-test.sh +0 -363
  238. package/test/integration/fuzzer.test.ts +0 -818
  239. package/test/integration/in-memory-sync.test.ts +0 -830
  240. package/test/integration/init-sync.test.ts +0 -89
  241. package/test/integration/manual-sync-test.sh +0 -84
  242. package/test/integration/sync-deletion.test.ts +0 -280
  243. package/test/integration/sync-flow.test.ts +0 -291
  244. package/test/jest.setup.ts +0 -34
  245. package/test/run-tests.sh +0 -225
  246. package/test/unit/deletion-behavior.test.ts +0 -249
  247. package/test/unit/enhanced-mime-detection.test.ts +0 -244
  248. package/test/unit/snapshot.test.ts +0 -404
  249. package/test/unit/sync-convergence.test.ts +0 -298
  250. package/test/unit/sync-timing.test.ts +0 -134
  251. package/test/unit/utils.test.ts +0 -366
@@ -0,0 +1,918 @@
1
+ "use strict";
2
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
3
+ if (k2 === undefined) k2 = k;
4
+ var desc = Object.getOwnPropertyDescriptor(m, k);
5
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
6
+ desc = { enumerable: true, get: function() { return m[k]; } };
7
+ }
8
+ Object.defineProperty(o, k2, desc);
9
+ }) : (function(o, m, k, k2) {
10
+ if (k2 === undefined) k2 = k;
11
+ o[k2] = m[k];
12
+ }));
13
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
14
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
15
+ }) : function(o, v) {
16
+ o["default"] = v;
17
+ });
18
+ var __importStar = (this && this.__importStar) || (function () {
19
+ var ownKeys = function(o) {
20
+ ownKeys = Object.getOwnPropertyNames || function (o) {
21
+ var ar = [];
22
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
23
+ return ar;
24
+ };
25
+ return ownKeys(o);
26
+ };
27
+ return function (mod) {
28
+ if (mod && mod.__esModule) return mod;
29
+ var result = {};
30
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
31
+ __setModuleDefault(result, mod);
32
+ return result;
33
+ };
34
+ })();
35
+ Object.defineProperty(exports, "__esModule", { value: true });
36
+ exports.deleteBranchFile = void 0;
37
+ exports.init = init;
38
+ exports.clone = clone;
39
+ exports.url = url;
40
+ exports.sync = sync;
41
+ exports.save = save;
42
+ exports.status = status;
43
+ exports.diff = diff;
44
+ exports.listBranches = listBranches;
45
+ exports.currentBranch = currentBranch;
46
+ exports.createBranch = createBranch;
47
+ exports.previewMerge = previewMerge;
48
+ exports.mergeBranch = mergeBranch;
49
+ exports.cutWorkdir = cutWorkdir;
50
+ exports.pasteStash = pasteStash;
51
+ exports.showStashes = showStashes;
52
+ exports.switchBranch = switchBranch;
53
+ const fs = __importStar(require("fs/promises"));
54
+ const path = __importStar(require("path"));
55
+ const Automerge = __importStar(require("@automerge/automerge"));
56
+ const automerge_repo_1 = require("@automerge/automerge-repo");
57
+ const config_js_1 = require("./config.js");
58
+ const branches_js_1 = require("./branches.js");
59
+ Object.defineProperty(exports, "deleteBranchFile", { enumerable: true, get: function () { return branches_js_1.deleteBranchFile; } });
60
+ const ignore_js_1 = require("./ignore.js");
61
+ const fs_tree_js_1 = require("./fs-tree.js");
62
+ const log_js_1 = require("./log.js");
63
+ const repo_js_1 = require("./repo.js");
64
+ const stash_js_1 = require("./stash.js");
65
+ const index_js_1 = require("./shapes/index.js");
66
+ const dlog = (0, log_js_1.log)("pushwork");
67
+ const DEFAULT_ARTIFACT_DIRECTORIES = ["dist"];
68
+ async function init(opts) {
69
+ const root = path.resolve(opts.dir);
70
+ const useBranches = opts.branches ?? true;
71
+ const online = opts.online ?? true;
72
+ dlog("init root=%s backend=%s shape=%s branches=%s online=%s", root, opts.backend, opts.shape, useBranches, online);
73
+ if (await (0, config_js_1.configExists)(root)) {
74
+ throw new Error(`pushwork already initialized at ${root}`);
75
+ }
76
+ const artifactDirs = normalizeDirs(opts.artifactDirectories ?? DEFAULT_ARTIFACT_DIRECTORIES);
77
+ dlog("init artifactDirs=%o", artifactDirs);
78
+ await fs.mkdir((0, config_js_1.pushworkDir)(root), { recursive: true });
79
+ const repo = await (0, repo_js_1.openRepo)(opts.backend, (0, config_js_1.storageDir)(root), { offline: !online });
80
+ try {
81
+ const shape = await (0, index_js_1.resolveShape)(opts.shape);
82
+ const ig = await (0, ignore_js_1.loadIgnore)(root);
83
+ const fsFiles = await (0, fs_tree_js_1.walkDir)(root, ig);
84
+ dlog("init walked %d files", fsFiles.size);
85
+ const tree = await pushFiles(repo, fsFiles, undefined, artifactDirs);
86
+ const folderUrl = await shape.encode({ repo, tree });
87
+ dlog("init encoded folder=%s", folderUrl);
88
+ const folderHandle = await repo.find(folderUrl);
89
+ if (online) {
90
+ await (0, repo_js_1.waitForSync)(folderHandle, { minMs: 3000, idleMs: 1500, maxMs: 15000 });
91
+ stampLastSyncAt(folderHandle);
92
+ await (0, repo_js_1.waitForSync)(folderHandle, { idleMs: 1500, maxMs: 10000 });
93
+ }
94
+ let rootUrl = folderUrl;
95
+ if (useBranches) {
96
+ const branchesHandle = repo.create({
97
+ "@patchwork": { type: "branches" },
98
+ branches: { [branches_js_1.DEFAULT_BRANCH]: folderUrl },
99
+ });
100
+ if (online) {
101
+ await (0, repo_js_1.waitForSync)(branchesHandle, { minMs: 1500, idleMs: 1500, maxMs: 10000 });
102
+ }
103
+ rootUrl = branchesHandle.url;
104
+ dlog("init wrapped in BranchesDoc=%s", rootUrl);
105
+ await (0, branches_js_1.writeBranchFile)(root, branches_js_1.DEFAULT_BRANCH);
106
+ }
107
+ await (0, config_js_1.writeConfig)(root, {
108
+ version: config_js_1.CONFIG_VERSION,
109
+ rootUrl,
110
+ backend: opts.backend,
111
+ shape: opts.shape,
112
+ artifactDirectories: artifactDirs,
113
+ branches: useBranches,
114
+ });
115
+ dlog("init complete: rootUrl=%s", rootUrl);
116
+ return rootUrl;
117
+ }
118
+ finally {
119
+ await repo.shutdown();
120
+ }
121
+ }
122
+ async function clone(opts) {
123
+ if (!(0, automerge_repo_1.isValidAutomergeUrl)(opts.url)) {
124
+ throw new Error(`invalid automerge URL: ${opts.url}`);
125
+ }
126
+ const root = path.resolve(opts.dir);
127
+ dlog("clone url=%s root=%s backend=%s shape=%s", opts.url, root, opts.backend, opts.shape);
128
+ await fs.mkdir(root, { recursive: true });
129
+ if (await (0, config_js_1.configExists)(root)) {
130
+ throw new Error(`pushwork already initialized at ${root}`);
131
+ }
132
+ const artifactDirs = normalizeDirs(opts.artifactDirectories ?? DEFAULT_ARTIFACT_DIRECTORIES);
133
+ await fs.mkdir((0, config_js_1.pushworkDir)(root), { recursive: true });
134
+ const online = opts.online ?? true;
135
+ const repo = await (0, repo_js_1.openRepo)(opts.backend, (0, config_js_1.storageDir)(root), { offline: !online });
136
+ try {
137
+ const shape = await (0, index_js_1.resolveShape)(opts.shape);
138
+ const rootHandle = await repo.find(opts.url);
139
+ if (online) {
140
+ await (0, repo_js_1.waitForSync)(rootHandle, { idleMs: 1500, maxMs: 15000 });
141
+ }
142
+ const docType = (0, branches_js_1.detectDocType)(rootHandle.doc());
143
+ dlog("clone detected docType=%s", docType);
144
+ let useBranches = false;
145
+ let folderHandle = rootHandle;
146
+ if (docType === "branches") {
147
+ useBranches = true;
148
+ const branchName = opts.branch ?? branches_js_1.DEFAULT_BRANCH;
149
+ folderHandle = await (0, branches_js_1.resolveEffectiveRoot)(repo, rootHandle, branchName);
150
+ if (online) {
151
+ await (0, repo_js_1.waitForSync)(folderHandle, { idleMs: 1500, maxMs: 15000 });
152
+ }
153
+ await (0, branches_js_1.writeBranchFile)(root, branchName);
154
+ dlog("clone branch=%s folder=%s", branchName, folderHandle.url);
155
+ }
156
+ else if (opts.branch) {
157
+ throw new Error(`--branch passed but root doc is not a branches doc (type=${docType})`);
158
+ }
159
+ const tree = await shape.decode({ repo, root: folderHandle });
160
+ await materializeTree(repo, root, tree);
161
+ await (0, config_js_1.writeConfig)(root, {
162
+ version: config_js_1.CONFIG_VERSION,
163
+ rootUrl: opts.url,
164
+ backend: opts.backend,
165
+ shape: opts.shape,
166
+ artifactDirectories: artifactDirs,
167
+ branches: useBranches,
168
+ });
169
+ dlog("clone complete");
170
+ }
171
+ finally {
172
+ await repo.shutdown();
173
+ }
174
+ }
175
+ async function url(cwd) {
176
+ const config = await (0, config_js_1.readConfig)(path.resolve(cwd));
177
+ return config.rootUrl;
178
+ }
179
+ async function sync(cwd) {
180
+ await commitWorkdir(cwd, { online: true });
181
+ }
182
+ async function save(cwd) {
183
+ await commitWorkdir(cwd, { online: false });
184
+ }
185
+ async function commitWorkdir(cwd, { online }) {
186
+ const root = path.resolve(cwd);
187
+ const config = await (0, config_js_1.readConfig)(root);
188
+ const branchName = config.branches ? await (0, branches_js_1.readBranchFile)(root) : null;
189
+ dlog("commit online=%s root=%s branch=%s", online, root, branchName);
190
+ const repo = await (0, repo_js_1.openRepo)(config.backend, (0, config_js_1.storageDir)(root), {
191
+ offline: !online,
192
+ });
193
+ try {
194
+ const shape = await (0, index_js_1.resolveShape)(config.shape);
195
+ const rootHandle = await repo.find(config.rootUrl);
196
+ // In branches mode + online, touch every branch's folder doc so the
197
+ // network adapter announces them. Without this, a branch created
198
+ // offline (`pushwork branch X`) is never pushed to the server, even
199
+ // though its entry is in the BranchesDoc.
200
+ const otherBranchHandles = [];
201
+ if (online && config.branches && (0, branches_js_1.isBranchesDoc)(rootHandle.doc())) {
202
+ const doc = rootHandle.doc();
203
+ for (const [name, url] of Object.entries(doc.branches)) {
204
+ if (name === branchName)
205
+ continue;
206
+ otherBranchHandles.push(await repo.find(url));
207
+ }
208
+ }
209
+ const folderHandle = await (0, branches_js_1.resolveEffectiveRoot)(repo, rootHandle, branchName);
210
+ const previousTree = await shape.decode({ repo, root: folderHandle });
211
+ const previousFiles = await readFileBytes(repo, previousTree);
212
+ const ig = await (0, ignore_js_1.loadIgnore)(root);
213
+ const fsFiles = await (0, fs_tree_js_1.walkDir)(root, ig);
214
+ const newTree = await pushFiles(repo, fsFiles, previousFiles, config.artifactDirectories);
215
+ const changed = !sameTree(previousTree, newTree);
216
+ dlog("commit tree changed: %s", changed);
217
+ if (changed) {
218
+ await shape.encode({ repo, tree: newTree, previousRoot: folderHandle });
219
+ }
220
+ if (online) {
221
+ // Always stamp lastSyncAt on a sync, regardless of whether the
222
+ // working tree changed — a sync is also a checkpoint that "we
223
+ // reconciled with the server at this time."
224
+ stampLastSyncAt(folderHandle);
225
+ // Wait for the current branch's folder, the BranchesDoc itself
226
+ // (when in branches mode), and any other branch folder docs to
227
+ // flush. The maxMs is generous so a brand-new offline-created
228
+ // branch reliably propagates.
229
+ await (0, repo_js_1.waitForSync)(folderHandle, {
230
+ minMs: 3000,
231
+ idleMs: 1500,
232
+ maxMs: 15000,
233
+ });
234
+ if (config.branches) {
235
+ await (0, repo_js_1.waitForSync)(rootHandle, { idleMs: 1500, maxMs: 10000 });
236
+ }
237
+ for (const h of otherBranchHandles) {
238
+ await (0, repo_js_1.waitForSync)(h, { idleMs: 1500, maxMs: 10000 });
239
+ }
240
+ }
241
+ const finalTree = await shape.decode({ repo, root: folderHandle });
242
+ await materializeTree(repo, root, finalTree);
243
+ dlog("commit complete");
244
+ }
245
+ finally {
246
+ await repo.shutdown();
247
+ }
248
+ }
249
+ async function status(cwd) {
250
+ const root = path.resolve(cwd);
251
+ const config = await (0, config_js_1.readConfig)(root);
252
+ const branchName = config.branches ? await (0, branches_js_1.readBranchFile)(root) : null;
253
+ const repo = await (0, repo_js_1.openRepo)(config.backend, (0, config_js_1.storageDir)(root), { offline: true });
254
+ try {
255
+ const shape = await (0, index_js_1.resolveShape)(config.shape);
256
+ const rootHandle = await repo.find(config.rootUrl);
257
+ const folderHandle = await (0, branches_js_1.resolveEffectiveRoot)(repo, rootHandle, branchName);
258
+ const previousTree = await shape.decode({ repo, root: folderHandle });
259
+ const previousFiles = await readFileBytes(repo, previousTree);
260
+ const ig = await (0, ignore_js_1.loadIgnore)(root);
261
+ const fsFiles = await (0, fs_tree_js_1.walkDir)(root, ig);
262
+ const diff = computeDiff(previousFiles, fsFiles);
263
+ return { branch: branchName, diff };
264
+ }
265
+ finally {
266
+ await repo.shutdown();
267
+ }
268
+ }
269
+ async function diff(cwd, limitToPath) {
270
+ const root = path.resolve(cwd);
271
+ const config = await (0, config_js_1.readConfig)(root);
272
+ const branchName = config.branches ? await (0, branches_js_1.readBranchFile)(root) : null;
273
+ const repo = await (0, repo_js_1.openRepo)(config.backend, (0, config_js_1.storageDir)(root), { offline: true });
274
+ try {
275
+ const shape = await (0, index_js_1.resolveShape)(config.shape);
276
+ const rootHandle = await repo.find(config.rootUrl);
277
+ const folderHandle = await (0, branches_js_1.resolveEffectiveRoot)(repo, rootHandle, branchName);
278
+ const previousTree = await shape.decode({ repo, root: folderHandle });
279
+ const previousFiles = await readFileBytes(repo, previousTree);
280
+ const ig = await (0, ignore_js_1.loadIgnore)(root);
281
+ const fsFiles = await (0, fs_tree_js_1.walkDir)(root, ig);
282
+ const out = [];
283
+ for (const [p, bytes] of fsFiles) {
284
+ if (limitToPath && p !== limitToPath)
285
+ continue;
286
+ const prev = previousFiles.get(p);
287
+ if (!prev) {
288
+ out.push({ path: p, kind: "added", after: bytes });
289
+ }
290
+ else if (!(0, fs_tree_js_1.byteEq)(prev.bytes, bytes)) {
291
+ out.push({ path: p, kind: "modified", before: prev.bytes, after: bytes });
292
+ }
293
+ }
294
+ for (const [p, prev] of previousFiles) {
295
+ if (limitToPath && p !== limitToPath)
296
+ continue;
297
+ if (!fsFiles.has(p))
298
+ out.push({ path: p, kind: "deleted", before: prev.bytes });
299
+ }
300
+ return out;
301
+ }
302
+ finally {
303
+ await repo.shutdown();
304
+ }
305
+ }
306
+ async function listBranches(cwd) {
307
+ const root = path.resolve(cwd);
308
+ const config = await (0, config_js_1.readConfig)(root);
309
+ if (!config.branches) {
310
+ throw new Error("pushwork repo has no branches");
311
+ }
312
+ const current = await (0, branches_js_1.readBranchFile)(root);
313
+ const repo = await (0, repo_js_1.openRepo)(config.backend, (0, config_js_1.storageDir)(root), { offline: true });
314
+ try {
315
+ const rootHandle = await repo.find(config.rootUrl);
316
+ const doc = rootHandle.doc();
317
+ if (!(0, branches_js_1.isBranchesDoc)(doc)) {
318
+ throw new Error(`root doc at ${config.rootUrl} is not a branches doc`);
319
+ }
320
+ return { current, names: (0, branches_js_1.listBranchNames)(doc) };
321
+ }
322
+ finally {
323
+ await repo.shutdown();
324
+ }
325
+ }
326
+ async function currentBranch(cwd) {
327
+ const root = path.resolve(cwd);
328
+ const config = await (0, config_js_1.readConfig)(root);
329
+ if (!config.branches)
330
+ return null;
331
+ return (0, branches_js_1.readBranchFile)(root);
332
+ }
333
+ async function createBranch(cwd, name) {
334
+ if (!name)
335
+ throw new Error("branch name is required");
336
+ if (name.includes("/") || name.includes("\\")) {
337
+ throw new Error("branch name may not contain slashes");
338
+ }
339
+ const root = path.resolve(cwd);
340
+ const config = await (0, config_js_1.readConfig)(root);
341
+ if (!config.branches)
342
+ throw new Error("pushwork repo has no branches");
343
+ const currentName = await (0, branches_js_1.readBranchFile)(root);
344
+ if (!currentName)
345
+ throw new Error("no current branch is set");
346
+ const repo = await (0, repo_js_1.openRepo)(config.backend, (0, config_js_1.storageDir)(root), { offline: true });
347
+ try {
348
+ const shape = await (0, index_js_1.resolveShape)(config.shape);
349
+ const rootHandle = await repo.find(config.rootUrl);
350
+ const doc = rootHandle.doc();
351
+ if (!(0, branches_js_1.isBranchesDoc)(doc)) {
352
+ throw new Error(`root doc at ${config.rootUrl} is not a branches doc`);
353
+ }
354
+ if (doc.branches[name]) {
355
+ throw new Error(`branch "${name}" already exists`);
356
+ }
357
+ const sourceUrl = doc.branches[currentName];
358
+ if (!sourceUrl) {
359
+ throw new Error(`current branch "${currentName}" not found in branches doc`);
360
+ }
361
+ const sourceHandle = await repo.find(sourceUrl);
362
+ // Clone the folder doc.
363
+ const clonedFolder = repo.clone(sourceHandle);
364
+ dlog("createBranch %s cloned folder %s → %s", name, sourceUrl, clonedFolder.url);
365
+ // Deep-clone every file doc the source folder references, then rewrite
366
+ // the cloned folder's leaves to point at the new file URLs. Without
367
+ // this step both branches would alias the same UnixFileEntry docs and
368
+ // editing one branch would silently mutate the other.
369
+ const sourceTree = await shape.decode({ repo, root: sourceHandle });
370
+ const fileUrlRemap = new Map();
371
+ for (const [, fileUrl] of (0, index_js_1.flattenLeaves)(sourceTree)) {
372
+ const bare = (0, index_js_1.stripHeads)(fileUrl);
373
+ if (fileUrlRemap.has(bare))
374
+ continue;
375
+ const orig = await repo.find(bare);
376
+ const cloned = repo.clone(orig);
377
+ fileUrlRemap.set(bare, cloned.url);
378
+ dlog("createBranch cloned file %s → %s", bare, cloned.url);
379
+ }
380
+ const newTree = (0, index_js_1.newDir)();
381
+ for (const [posixPath, fileUrl] of (0, index_js_1.flattenLeaves)(sourceTree)) {
382
+ const bare = (0, index_js_1.stripHeads)(fileUrl);
383
+ const remappedBare = fileUrlRemap.get(bare);
384
+ if (!remappedBare)
385
+ continue;
386
+ const parsed = (0, automerge_repo_1.parseAutomergeUrl)(fileUrl);
387
+ // Preserve heads-pinning if the source URL was pinned.
388
+ let finalUrl = remappedBare;
389
+ if (parsed.heads) {
390
+ const newHandle = await repo.find(remappedBare);
391
+ finalUrl = (0, index_js_1.pinUrl)(newHandle);
392
+ }
393
+ const segments = posixPath.split("/").filter(Boolean);
394
+ (0, index_js_1.setFileAt)(newTree, segments, finalUrl);
395
+ }
396
+ await shape.encode({ repo, tree: newTree, previousRoot: clonedFolder });
397
+ rootHandle.change((d) => {
398
+ d.branches[name] = clonedFolder.url;
399
+ });
400
+ return clonedFolder.url;
401
+ }
402
+ finally {
403
+ await repo.shutdown();
404
+ }
405
+ }
406
+ /**
407
+ * Apply changes from `source` branch onto the current branch.
408
+ *
409
+ * For each path:
410
+ * - In both branches: their UnixFileEntry docs share Automerge history (deep
411
+ * cloned at branch creation), so we Automerge-merge source's content into
412
+ * target's. Concurrent edits are CRDT-merged inside each file doc.
413
+ * - Only in source: deep-clone the source's file doc into a new doc and add
414
+ * it to target's folder. Editing on either branch afterward stays isolated.
415
+ * - Only in target: untouched. We don't propagate deletions from source — the
416
+ * user can do that explicitly.
417
+ *
418
+ * Refuses if the working tree has uncommitted changes against the current
419
+ * branch (run `pushwork save` first). Offline only — propagation happens on
420
+ * the next `pushwork sync`.
421
+ */
422
/**
 * Compute what `merge <source>` would do without mutating any docs or the
 * working tree. For paths in both branches we apply the merge to a *clone*
 * of the target's file doc to learn the merged bytes; for paths only in
 * source we just read source's bytes.
 *
 * @param {string} cwd - Directory inside (or at the root of) the pushwork repo.
 * @param {string} source - Name of the branch to preview merging from.
 * @returns {{source: string, target: string, entries: Array}} Preview entries,
 *   sorted by path; each entry is `{path, kind: "added", after}` or
 *   `{path, kind: "merged", before, after}`. Unchanged paths are omitted.
 * @throws If `source` is missing/unknown, equals the current branch, the repo
 *   has no branches, no current branch is set, or the root doc is malformed.
 */
async function previewMerge(cwd, source) {
    if (!source)
        throw new Error("source branch name is required");
    const root = path.resolve(cwd);
    const config = await (0, config_js_1.readConfig)(root);
    if (!config.branches)
        throw new Error("pushwork repo has no branches");
    const targetName = await (0, branches_js_1.readBranchFile)(root);
    if (!targetName)
        throw new Error("no current branch is set");
    if (source === targetName) {
        throw new Error(`cannot merge "${source}" into itself`);
    }
    // Preview is a read-only local operation, so the repo is opened offline.
    const repo = await (0, repo_js_1.openRepo)(config.backend, (0, config_js_1.storageDir)(root), { offline: true });
    try {
        const shape = await (0, index_js_1.resolveShape)(config.shape);
        const rootHandle = await repo.find(config.rootUrl);
        const branchesDoc = rootHandle.doc();
        if (!(0, branches_js_1.isBranchesDoc)(branchesDoc)) {
            throw new Error(`root doc at ${config.rootUrl} is not a branches doc`);
        }
        if (!branchesDoc.branches[source]) {
            throw new Error(`source branch "${source}" does not exist`);
        }
        const targetFolder = await repo.find(branchesDoc.branches[targetName]);
        const sourceFolder = await repo.find(branchesDoc.branches[source]);
        const tTree = await shape.decode({ repo, root: targetFolder });
        const sTree = await shape.decode({ repo, root: sourceFolder });
        const tLeaves = (0, index_js_1.flattenLeaves)(tTree);
        const sLeaves = (0, index_js_1.flattenLeaves)(sTree);
        const entries = [];
        // Iterate the source branch's leaves; paths only in target are untouched
        // by a merge and therefore never appear in the preview.
        for (const [posixPath, sUrl] of sLeaves) {
            const tUrl = tLeaves.get(posixPath);
            const sBare = (0, index_js_1.stripHeads)(sUrl);
            const sHandle = await repo.find(sBare);
            if (!tUrl) {
                // Path exists only in source: merge would add it verbatim.
                entries.push({
                    path: posixPath,
                    kind: "added",
                    after: (0, index_js_1.contentToBytes)(sHandle.doc().content),
                });
                continue;
            }
            const tBare = (0, index_js_1.stripHeads)(tUrl);
            // Same underlying file doc identity — already in sync, nothing to show.
            if (tBare === sBare)
                continue;
            const tHandle = await repo.find(tBare);
            const before = (0, index_js_1.contentToBytes)(tHandle.doc().content);
            // Compute merge result without touching the target doc.
            // Both sides are cloned so Automerge.merge mutates only throwaway copies.
            const merged = Automerge.merge(Automerge.clone(tHandle.doc()), Automerge.clone(sHandle.doc()));
            const after = (0, index_js_1.contentToBytes)(merged.content);
            // Skip no-op merges where the bytes come out identical.
            if ((0, fs_tree_js_1.byteEq)(before, after))
                continue;
            entries.push({ path: posixPath, kind: "merged", before, after });
        }
        entries.sort((a, b) => a.path.localeCompare(b.path));
        return { source, target: targetName, entries };
    }
    finally {
        await repo.shutdown();
    }
}
490
/**
 * Merge branch `source` into the current branch, mutating docs and the
 * working tree:
 *  - Paths in both branches: the source file doc is Automerge-merged into the
 *    target's file doc in place (CRDT merge of concurrent edits).
 *  - Paths only in source: the source file doc is cloned and added to the
 *    target's folder, so later edits on either branch stay isolated.
 *  - Paths only in target: left untouched (deletions are not propagated).
 *
 * Refuses if the working tree has uncommitted changes against the current
 * branch. Runs offline; propagation happens on the next sync.
 *
 * @param {string} cwd - Directory inside (or at the root of) the pushwork repo.
 * @param {string} source - Name of the branch to merge from.
 * @returns {{source: string, target: string, merged: string[], added: string[]}}
 *   Sorted lists of paths that were merged in place and newly added.
 * @throws On missing/unknown source, self-merge, missing branches/current
 *   branch, malformed root doc, or a dirty working tree.
 */
async function mergeBranch(cwd, source) {
    if (!source)
        throw new Error("source branch name is required");
    const root = path.resolve(cwd);
    const config = await (0, config_js_1.readConfig)(root);
    if (!config.branches)
        throw new Error("pushwork repo has no branches");
    const targetName = await (0, branches_js_1.readBranchFile)(root);
    if (!targetName)
        throw new Error("no current branch is set");
    if (source === targetName) {
        throw new Error(`cannot merge "${source}" into itself`);
    }
    dlog("merge source=%s target=%s", source, targetName);
    const repo = await (0, repo_js_1.openRepo)(config.backend, (0, config_js_1.storageDir)(root), { offline: true });
    try {
        const shape = await (0, index_js_1.resolveShape)(config.shape);
        const rootHandle = await repo.find(config.rootUrl);
        const branchesDoc = rootHandle.doc();
        if (!(0, branches_js_1.isBranchesDoc)(branchesDoc)) {
            throw new Error(`root doc at ${config.rootUrl} is not a branches doc`);
        }
        if (!branchesDoc.branches[source]) {
            throw new Error(`source branch "${source}" does not exist`);
        }
        const targetUrl = branchesDoc.branches[targetName];
        const sourceUrl = branchesDoc.branches[source];
        const targetFolder = await repo.find(targetUrl);
        const sourceFolder = await repo.find(sourceUrl);
        // Refuse on dirty working tree (mirror switchBranch policy).
        const tFiles = await readFileBytes(repo, await shape.decode({ repo, root: targetFolder }));
        const ig = await (0, ignore_js_1.loadIgnore)(root);
        const fsFiles = await (0, fs_tree_js_1.walkDir)(root, ig);
        const dirty = computeDiff(tFiles, fsFiles);
        if (dirty.added.length || dirty.modified.length || dirty.deleted.length) {
            throw new Error(`refusing to merge: working tree has uncommitted changes on branch "${targetName}". run \`pushwork save\` first.`);
        }
        const tTree = await shape.decode({ repo, root: targetFolder });
        const sTree = await shape.decode({ repo, root: sourceFolder });
        const tLeaves = (0, index_js_1.flattenLeaves)(tTree);
        const sLeaves = (0, index_js_1.flattenLeaves)(sTree);
        const merged = [];
        const added = [];
        // For paths in both: merge file docs in place.
        for (const [posixPath, sUrl] of sLeaves) {
            const tUrl = tLeaves.get(posixPath);
            if (!tUrl)
                continue;
            const tBare = (0, index_js_1.stripHeads)(tUrl);
            const sBare = (0, index_js_1.stripHeads)(sUrl);
            if (tBare === sBare) {
                // Same file doc identity (shared) — already in sync, nothing to do.
                continue;
            }
            const tHandle = await repo.find(tBare);
            const sHandle = await repo.find(sBare);
            // Clone the source doc so Automerge.merge never mutates the source side.
            tHandle.update((d) => Automerge.merge(d, Automerge.clone(sHandle.doc())));
            merged.push(posixPath);
            dlog("merge merged file at %s (%s ← %s)", posixPath, tBare, sBare);
        }
        // For paths only in source: deep-clone source's file doc, add to target's folder.
        const newLeaves = new Map();
        for (const [posixPath, sUrl] of sLeaves) {
            if (tLeaves.has(posixPath))
                continue;
            const sBare = (0, index_js_1.stripHeads)(sUrl);
            const sHandle = await repo.find(sBare);
            const cloned = repo.clone(sHandle);
            let finalUrl = cloned.url;
            const parsed = (0, automerge_repo_1.parseAutomergeUrl)(sUrl);
            if (parsed.heads) {
                // The source URL was heads-pinned; re-pin the clone at its current heads.
                finalUrl = (0, index_js_1.pinUrl)(cloned);
            }
            newLeaves.set(posixPath, finalUrl);
            added.push(posixPath);
            dlog("merge added %s url=%s", posixPath, finalUrl);
        }
        if (newLeaves.size > 0) {
            // Build a tree for the encode call: existing target leaves + new ones.
            const nextTree = (0, index_js_1.newDir)();
            for (const [p, url] of tLeaves) {
                (0, index_js_1.setFileAt)(nextTree, p.split("/").filter(Boolean), url);
            }
            for (const [p, url] of newLeaves) {
                (0, index_js_1.setFileAt)(nextTree, p.split("/").filter(Boolean), url);
            }
            await shape.encode({ repo, tree: nextTree, previousRoot: targetFolder });
        }
        // Materialize current branch (target) onto disk to reflect the merge.
        const finalTree = await shape.decode({ repo, root: targetFolder });
        await materializeTree(repo, root, finalTree);
        merged.sort();
        added.sort();
        return { source, target: targetName, merged, added };
    }
    finally {
        await repo.shutdown();
    }
}
589
/**
 * Capture the working tree's changes against the current branch's saved
 * state into a local stash, then reset the working tree to the saved state.
 * Stashes live in `.pushwork/stash.json` and are never synced.
 *
 * @param {string} cwd - Directory inside (or at the root of) the pushwork repo.
 * @param {{name?: string}} [opts] - Optional human-readable stash name.
 * @returns {{id: number, entries: number}} The new stash id and how many
 *   file entries (added/modified/deleted) it captured.
 * @throws If the working tree is clean (nothing to cut).
 */
async function cutWorkdir(cwd, opts = {}) {
    const root = path.resolve(cwd);
    const config = await (0, config_js_1.readConfig)(root);
    // Branch-less repos stash against the root folder (branch is recorded as null).
    const branchName = config.branches ? await (0, branches_js_1.readBranchFile)(root) : null;
    dlog("cut root=%s branch=%s name=%s", root, branchName, opts.name ?? "(unnamed)");
    const repo = await (0, repo_js_1.openRepo)(config.backend, (0, config_js_1.storageDir)(root), { offline: true });
    try {
        const shape = await (0, index_js_1.resolveShape)(config.shape);
        const rootHandle = await repo.find(config.rootUrl);
        const folderHandle = await (0, branches_js_1.resolveEffectiveRoot)(repo, rootHandle, branchName);
        const previousTree = await shape.decode({ repo, root: folderHandle });
        const previousFiles = await readFileBytes(repo, previousTree);
        const ig = await (0, ignore_js_1.loadIgnore)(root);
        const fsFiles = await (0, fs_tree_js_1.walkDir)(root, ig);
        const entries = [];
        // Classify each on-disk file against the saved branch state. Only
        // added/modified entries carry content (base64-encoded bytes).
        for (const [p, bytes] of fsFiles) {
            const prev = previousFiles.get(p);
            if (!prev) {
                entries.push({ path: p, kind: "added", contentBase64: (0, stash_js_1.encodeBytes)(bytes) });
            }
            else if (!(0, fs_tree_js_1.byteEq)(prev.bytes, bytes)) {
                entries.push({
                    path: p,
                    kind: "modified",
                    contentBase64: (0, stash_js_1.encodeBytes)(bytes),
                });
            }
        }
        // Files in the saved state but missing on disk are recorded as deletions.
        for (const [p] of previousFiles) {
            if (!fsFiles.has(p))
                entries.push({ path: p, kind: "deleted" });
        }
        if (entries.length === 0) {
            throw new Error("nothing to cut: working tree clean");
        }
        entries.sort((a, b) => a.path.localeCompare(b.path));
        const stash = await (0, stash_js_1.appendStash)(root, {
            name: opts.name,
            branch: branchName,
            entries,
        });
        // Reset working tree to the branch's saved state.
        await materializeTree(repo, root, previousTree);
        dlog("cut complete id=%d entries=%d", stash.id, entries.length);
        return { id: stash.id, entries: entries.length };
    }
    finally {
        await repo.shutdown();
    }
}
644
/**
 * Apply a stash on top of the current working tree, then remove the stash
 * entry. Refuses if the working tree has uncommitted changes (caller can
 * `pushwork save` or `pushwork cut` first).
 *
 * @param {string} cwd - Directory inside (or at the root of) the pushwork repo.
 * @param {string} [selector] - Optional stash selector; when omitted, the
 *   most recent stash is taken (delegated to `takeStash`).
 * @returns {{id: number, name: string|undefined, entries: number}} Applied
 *   stash id, its name, and the number of entries replayed.
 * @throws If the working tree is dirty, or no stash matches the selector.
 */
async function pasteStash(cwd, selector) {
    const root = path.resolve(cwd);
    const config = await (0, config_js_1.readConfig)(root);
    const branchName = config.branches ? await (0, branches_js_1.readBranchFile)(root) : null;
    // Check the working tree is clean against the current branch state.
    const repo = await (0, repo_js_1.openRepo)(config.backend, (0, config_js_1.storageDir)(root), { offline: true });
    try {
        const shape = await (0, index_js_1.resolveShape)(config.shape);
        const rootHandle = await repo.find(config.rootUrl);
        const folderHandle = await (0, branches_js_1.resolveEffectiveRoot)(repo, rootHandle, branchName);
        const previousTree = await shape.decode({ repo, root: folderHandle });
        const previousFiles = await readFileBytes(repo, previousTree);
        const ig = await (0, ignore_js_1.loadIgnore)(root);
        const fsFiles = await (0, fs_tree_js_1.walkDir)(root, ig);
        const dirty = computeDiff(previousFiles, fsFiles);
        if (dirty.added.length || dirty.modified.length || dirty.deleted.length) {
            throw new Error("refusing to paste: working tree has uncommitted changes. run `pushwork save` or `pushwork cut` first.");
        }
    }
    finally {
        await repo.shutdown();
    }
    // NOTE: takeStash removes the stash from storage before we replay it, so a
    // failure while replaying below would lose the stash — TODO confirm intended.
    const stash = await (0, stash_js_1.takeStash)(root, selector);
    if (!stash) {
        throw new Error(selector
            ? `no stash matches "${selector}"`
            : "nothing to paste: no stashes");
    }
    // Replay each captured entry onto the working tree.
    for (const entry of stash.entries) {
        const target = path.join(root, fromPosix(entry.path));
        if (entry.kind === "deleted") {
            try {
                await fs.unlink(target);
            }
            catch {
                // already gone
            }
            await pruneEmptyDirs(root, path.dirname(fromPosix(entry.path)));
        }
        else if (entry.contentBase64 != null) {
            const bytes = (0, stash_js_1.decodeBytes)(entry.contentBase64);
            await (0, fs_tree_js_1.writeFileAtomic)(target, bytes);
        }
    }
    dlog("paste complete id=%d entries=%d", stash.id, stash.entries.length);
    return { id: stash.id, name: stash.name, entries: stash.entries.length };
}
696
/**
 * List the local stashes recorded for the repo rooted at `cwd`.
 * Thin wrapper around `listStashes` with path resolution.
 */
async function showStashes(cwd) {
    const root = path.resolve(cwd);
    return (0, stash_js_1.listStashes)(root);
}
699
/**
 * Switch the working tree to branch `name`: verify the branch exists, refuse
 * if the working tree has uncommitted changes against the current branch,
 * then materialize the new branch's files onto disk and record the new
 * current-branch name. Runs offline.
 *
 * @param {string} cwd - Directory inside (or at the root of) the pushwork repo.
 * @param {string} name - Branch to switch to.
 * @throws On missing/unknown branch name, a repo without branches, a
 *   malformed root doc, or a dirty working tree.
 */
async function switchBranch(cwd, name) {
    if (!name)
        throw new Error("branch name is required");
    const root = path.resolve(cwd);
    const config = await (0, config_js_1.readConfig)(root);
    if (!config.branches)
        throw new Error("pushwork repo has no branches");
    const currentName = await (0, branches_js_1.readBranchFile)(root);
    const repo = await (0, repo_js_1.openRepo)(config.backend, (0, config_js_1.storageDir)(root), { offline: true });
    try {
        const shape = await (0, index_js_1.resolveShape)(config.shape);
        const rootHandle = await repo.find(config.rootUrl);
        const doc = rootHandle.doc();
        if (!(0, branches_js_1.isBranchesDoc)(doc)) {
            throw new Error(`root doc at ${config.rootUrl} is not a branches doc`);
        }
        if (!doc.branches[name]) {
            throw new Error(`branch "${name}" does not exist`);
        }
        // Refuse if the working dir has uncommitted changes against the current branch.
        // (Skipped when no current branch is recorded.)
        if (currentName) {
            const folderHandle = await (0, branches_js_1.resolveEffectiveRoot)(repo, rootHandle, currentName);
            const previousTree = await shape.decode({ repo, root: folderHandle });
            const previousFiles = await readFileBytes(repo, previousTree);
            const ig = await (0, ignore_js_1.loadIgnore)(root);
            const fsFiles = await (0, fs_tree_js_1.walkDir)(root, ig);
            const d = computeDiff(previousFiles, fsFiles);
            if (d.added.length || d.modified.length || d.deleted.length) {
                throw new Error(`refusing to switch: working tree has uncommitted changes on branch "${currentName}". run \`pushwork save\` first.`);
            }
        }
        // Materialize from the new branch.
        const newFolder = await repo.find(doc.branches[name]);
        const tree = await shape.decode({ repo, root: newFolder });
        await materializeTree(repo, root, tree);
        // Persist the branch switch only after disk state is in place.
        await (0, branches_js_1.writeBranchFile)(root, name);
        dlog("switch → %s", name);
    }
    finally {
        await repo.shutdown();
    }
}
741
/**
 * Record the current wall-clock time (ms since epoch) as `lastSyncAt`
 * on the given doc handle.
 */
function stampLastSyncAt(handle) {
    handle.change((doc) => {
        doc.lastSyncAt = Date.now();
    });
}
746
/**
 * Normalize a list of artifact-directory paths: run each through
 * `normalizeArtifactDir`, drop empty results, and de-duplicate while
 * preserving first-seen order.
 */
function normalizeDirs(dirs) {
    const ordered = new Set();
    for (const candidate of dirs) {
        const normalized = (0, index_js_1.normalizeArtifactDir)(candidate);
        if (normalized) {
            ordered.add(normalized);
        }
    }
    return [...ordered];
}
758
/**
 * Compare a saved file map (`previous`: path → {url, bytes}) with on-disk
 * bytes (`current`: path → bytes). Returns sorted path lists:
 * `added` (only on disk), `modified` (bytes differ), `deleted` (only saved).
 */
function computeDiff(previous, current) {
    const added = [];
    const modified = [];
    for (const [filePath, bytes] of current) {
        const saved = previous.get(filePath);
        if (saved === undefined) {
            added.push(filePath);
        }
        else if (!(0, fs_tree_js_1.byteEq)(saved.bytes, bytes)) {
            modified.push(filePath);
        }
    }
    const deleted = [...previous.keys()].filter((filePath) => !current.has(filePath));
    added.sort();
    modified.sort();
    deleted.sort();
    return { added, modified, deleted };
}
778
/**
 * Build a directory tree of file-doc URLs from on-disk files, creating a new
 * file doc for each new path, mutating the existing doc in place for each
 * changed path, and reusing the existing URL for unchanged paths. Paths
 * under an artifact dir get their URL pinned to the doc's current heads.
 *
 * @param repo - Automerge repo used to find/create/update file docs.
 * @param {Map<string, Uint8Array>} fsFiles - posix path → file bytes.
 * @param {Map<string, {url: string, bytes: Uint8Array}>} [previous] - Saved
 *   state from the last push; absent entries are treated as new files.
 * @param artifactDirs - Normalized artifact directories (see `isInArtifactDir`).
 * @returns The new tree with a leaf URL set for every entry in `fsFiles`.
 */
async function pushFiles(repo, fsFiles, previous, artifactDirs) {
    const root = (0, index_js_1.newDir)();
    let created = 0;
    let updated = 0;
    let unchanged = 0;
    for (const [posixPath, bytes] of fsFiles) {
        const segments = posixPath.split("/").filter(Boolean);
        const isArtifact = (0, index_js_1.isInArtifactDir)(posixPath, artifactDirs);
        const fresh = (0, index_js_1.makeFileEntry)(posixPath, bytes, isArtifact);
        const prev = previous?.get(posixPath);
        let baseUrl;
        if (prev && (0, fs_tree_js_1.byteEq)(prev.bytes, bytes)) {
            // Unchanged path: keep the existing file-doc URL. For artifacts
            // we'll re-pin from the current heads below.
            baseUrl = (0, index_js_1.stripHeads)(prev.url);
            unchanged++;
        }
        else if (prev) {
            // Changed path: mutate the existing file doc in place. This keeps
            // the file URL stable within a branch and avoids the propagation
            // race where a brand-new file doc URL is referenced by the folder
            // before its bytes have reached the sync server.
            //
            // For string content (text files) we use Automerge.updateText so
            // concurrent character-level edits merge correctly. Bytes and
            // ImmutableString are atomic — last writer wins on the field.
            //
            // Branch isolation is enforced separately: `createBranch` deep
            // clones every file doc the source branch references, so two
            // branches never share a UnixFileEntry doc identity.
            const refreshUrl = (0, index_js_1.stripHeads)(prev.url);
            const handle = await repo.find(refreshUrl);
            handle.change((d) => {
                // Each field is written only when it actually differs, keeping
                // the Automerge change set minimal.
                if (!(0, index_js_1.contentEquals)(d.content, fresh.content)) {
                    if (typeof d.content === "string" &&
                        typeof fresh.content === "string") {
                        Automerge.updateText(d, ["content"], fresh.content);
                    }
                    else {
                        d.content = fresh.content;
                    }
                }
                if (d.extension !== fresh.extension)
                    d.extension = fresh.extension;
                if (d.mimeType !== fresh.mimeType)
                    d.mimeType = fresh.mimeType;
                if (d.name !== fresh.name)
                    d.name = fresh.name;
                if (!d["@patchwork"])
                    d["@patchwork"] = { type: "file" };
            });
            baseUrl = refreshUrl;
            updated++;
            dlog("pushFiles updated %s url=%s artifact=%s bytes=%d", posixPath, baseUrl, isArtifact, bytes.length);
        }
        else {
            // New path: create a fresh file doc.
            const handle = repo.create(fresh);
            baseUrl = handle.url;
            created++;
            dlog("pushFiles created %s url=%s artifact=%s bytes=%d", posixPath, baseUrl, isArtifact, bytes.length);
        }
        // Artifact files are referenced at pinned heads; others by bare URL.
        const finalUrl = isArtifact
            ? (0, index_js_1.pinUrl)(await repo.find(baseUrl))
            : baseUrl;
        (0, index_js_1.setFileAt)(root, segments, finalUrl);
    }
    dlog("pushFiles done: %d created, %d updated, %d unchanged", created, updated, unchanged);
    return root;
}
848
/**
 * Resolve every leaf in `tree` to its file doc and decode the content.
 * Returns a map of posix path → { url, bytes }.
 */
async function readFileBytes(repo, tree) {
    const result = new Map();
    const leaves = (0, index_js_1.flattenLeaves)(tree);
    for (const [leafPath, leafUrl] of leaves) {
        const fileHandle = await repo.find(leafUrl);
        const bytes = (0, index_js_1.contentToBytes)(fileHandle.doc().content);
        result.set(leafPath, { url: leafUrl, bytes });
    }
    return result;
}
859
/**
 * Make the working directory under `root` match `tree`: write any file whose
 * on-disk bytes differ from the tree's decoded content, delete files not in
 * the tree, and prune directories emptied by deletions. Files matched by the
 * ignore rules are never touched (they are excluded from the walk).
 *
 * @param repo - Automerge repo used to resolve each leaf file doc.
 * @param {string} root - Absolute repo root on disk.
 * @param tree - Decoded shape tree whose leaves are file-doc URLs.
 */
async function materializeTree(repo, root, tree) {
    // Resolve every leaf to the bytes that should end up on disk.
    const desired = new Map();
    for (const [posixPath, fileUrl] of (0, index_js_1.flattenLeaves)(tree)) {
        const handle = await repo.find(fileUrl);
        desired.set(posixPath, (0, index_js_1.contentToBytes)(handle.doc().content));
    }
    dlog("materialize desired: %d files", desired.size);
    const ig = await (0, ignore_js_1.loadIgnore)(root);
    const present = await (0, fs_tree_js_1.walkDir)(root, ig);
    let written = 0;
    let removed = 0;
    // Write only files whose bytes actually differ (atomic replace).
    for (const [posixPath, bytes] of desired) {
        if ((0, fs_tree_js_1.byteEq)(present.get(posixPath), bytes))
            continue;
        await (0, fs_tree_js_1.writeFileAtomic)(path.join(root, fromPosix(posixPath)), bytes);
        written++;
    }
    // Remove on-disk files the tree no longer references.
    for (const posixPath of present.keys()) {
        if (desired.has(posixPath))
            continue;
        try {
            await fs.unlink(path.join(root, fromPosix(posixPath)));
            removed++;
        }
        catch {
            // already gone
        }
        await pruneEmptyDirs(root, path.dirname(fromPosix(posixPath)));
    }
    dlog("materialize done: %d written, %d removed", written, removed);
}
890
// Convert a repo-internal posix path to a platform path ("/" → path.sep).
const fromPosix = (p) => p.replaceAll("/", path.sep);
891
/**
 * Walk upward from `relDir` (relative to `root`), removing each directory
 * that has become empty. Stops at the first non-empty directory, at "." or
 * the filesystem separator, or on any fs error (e.g. dir already gone).
 */
async function pruneEmptyDirs(root, relDir) {
    for (let current = relDir; current && current !== "." && current !== path.sep; current = path.dirname(current)) {
        const absolute = path.join(root, current);
        try {
            const children = await fs.readdir(absolute);
            if (children.length !== 0)
                return;
            await fs.rmdir(absolute);
        }
        catch {
            // Unreadable or already removed — nothing more to prune.
            return;
        }
    }
}
907
/**
 * True when two decoded trees reference exactly the same leaf paths and each
 * path maps to an identical file URL.
 */
function sameTree(a, b) {
    const left = (0, index_js_1.flattenLeaves)(a);
    const right = (0, index_js_1.flattenLeaves)(b);
    if (left.size !== right.size)
        return false;
    return [...left].every(([leafPath, url]) => right.get(leafPath) === url);
}
918
+ //# sourceMappingURL=pushwork.js.map