pushwork 2.0.0-a.sub.1 → 2.0.0-preview.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (234) hide show
  1. package/dist/branches.d.ts +20 -0
  2. package/dist/branches.d.ts.map +1 -0
  3. package/dist/branches.js +111 -0
  4. package/dist/branches.js.map +1 -0
  5. package/dist/cli.d.ts +1 -1
  6. package/dist/cli.d.ts.map +1 -1
  7. package/dist/cli.js +245 -270
  8. package/dist/cli.js.map +1 -1
  9. package/dist/config.d.ts +17 -0
  10. package/dist/config.d.ts.map +1 -0
  11. package/dist/config.js +84 -0
  12. package/dist/config.js.map +1 -0
  13. package/dist/fs-tree.d.ts +6 -0
  14. package/dist/fs-tree.d.ts.map +1 -0
  15. package/dist/fs-tree.js +99 -0
  16. package/dist/fs-tree.js.map +1 -0
  17. package/dist/ignore.d.ts +6 -0
  18. package/dist/ignore.d.ts.map +1 -0
  19. package/dist/ignore.js +74 -0
  20. package/dist/ignore.js.map +1 -0
  21. package/dist/index.d.ts +8 -4
  22. package/dist/index.d.ts.map +1 -1
  23. package/dist/index.js +35 -4
  24. package/dist/index.js.map +1 -1
  25. package/dist/log.d.ts +3 -0
  26. package/dist/log.d.ts.map +1 -0
  27. package/dist/log.js +14 -0
  28. package/dist/log.js.map +1 -0
  29. package/dist/pushwork.d.ts +129 -0
  30. package/dist/pushwork.d.ts.map +1 -0
  31. package/dist/pushwork.js +1062 -0
  32. package/dist/pushwork.js.map +1 -0
  33. package/dist/repo.d.ts +14 -0
  34. package/dist/repo.d.ts.map +1 -0
  35. package/dist/repo.js +60 -0
  36. package/dist/repo.js.map +1 -0
  37. package/dist/shapes/custom.d.ts +3 -0
  38. package/dist/shapes/custom.d.ts.map +1 -0
  39. package/dist/shapes/custom.js +57 -0
  40. package/dist/shapes/custom.js.map +1 -0
  41. package/dist/shapes/file.d.ts +20 -0
  42. package/dist/shapes/file.d.ts.map +1 -0
  43. package/dist/shapes/file.js +140 -0
  44. package/dist/shapes/file.js.map +1 -0
  45. package/dist/shapes/index.d.ts +10 -0
  46. package/dist/shapes/index.d.ts.map +1 -0
  47. package/dist/shapes/index.js +35 -0
  48. package/dist/shapes/index.js.map +1 -0
  49. package/dist/shapes/patchwork-folder.d.ts +3 -0
  50. package/dist/shapes/patchwork-folder.d.ts.map +1 -0
  51. package/dist/shapes/patchwork-folder.js +160 -0
  52. package/dist/shapes/patchwork-folder.js.map +1 -0
  53. package/dist/shapes/types.d.ts +38 -0
  54. package/dist/shapes/types.d.ts.map +1 -0
  55. package/dist/shapes/types.js +52 -0
  56. package/dist/shapes/types.js.map +1 -0
  57. package/dist/shapes/vfs.d.ts +3 -0
  58. package/dist/shapes/vfs.d.ts.map +1 -0
  59. package/dist/shapes/vfs.js +92 -0
  60. package/dist/shapes/vfs.js.map +1 -0
  61. package/dist/stash.d.ts +23 -0
  62. package/dist/stash.d.ts.map +1 -0
  63. package/dist/stash.js +118 -0
  64. package/dist/stash.js.map +1 -0
  65. package/dist/version.d.ts +11 -0
  66. package/dist/version.d.ts.map +1 -0
  67. package/dist/version.js +93 -0
  68. package/dist/version.js.map +1 -0
  69. package/package.json +19 -48
  70. package/patches/@automerge__automerge-repo@2.6.0-subduction.15.patch +26 -0
  71. package/.prettierrc +0 -9
  72. package/ARCHITECTURE-ACCORDING-TO-CLAUDE.md +0 -248
  73. package/CLAUDE.md +0 -141
  74. package/README.md +0 -221
  75. package/babel.config.js +0 -5
  76. package/dist/cli/commands.d.ts +0 -71
  77. package/dist/cli/commands.d.ts.map +0 -1
  78. package/dist/cli/commands.js +0 -794
  79. package/dist/cli/commands.js.map +0 -1
  80. package/dist/cli/index.d.ts +0 -2
  81. package/dist/cli/index.d.ts.map +0 -1
  82. package/dist/cli/index.js +0 -19
  83. package/dist/cli/index.js.map +0 -1
  84. package/dist/commands.d.ts +0 -61
  85. package/dist/commands.d.ts.map +0 -1
  86. package/dist/commands.js +0 -861
  87. package/dist/commands.js.map +0 -1
  88. package/dist/config/index.d.ts +0 -71
  89. package/dist/config/index.d.ts.map +0 -1
  90. package/dist/config/index.js +0 -314
  91. package/dist/config/index.js.map +0 -1
  92. package/dist/core/change-detection.d.ts +0 -80
  93. package/dist/core/change-detection.d.ts.map +0 -1
  94. package/dist/core/change-detection.js +0 -523
  95. package/dist/core/change-detection.js.map +0 -1
  96. package/dist/core/config.d.ts +0 -81
  97. package/dist/core/config.d.ts.map +0 -1
  98. package/dist/core/config.js +0 -258
  99. package/dist/core/config.js.map +0 -1
  100. package/dist/core/index.d.ts +0 -6
  101. package/dist/core/index.d.ts.map +0 -1
  102. package/dist/core/index.js +0 -6
  103. package/dist/core/index.js.map +0 -1
  104. package/dist/core/move-detection.d.ts +0 -34
  105. package/dist/core/move-detection.d.ts.map +0 -1
  106. package/dist/core/move-detection.js +0 -121
  107. package/dist/core/move-detection.js.map +0 -1
  108. package/dist/core/snapshot.d.ts +0 -105
  109. package/dist/core/snapshot.d.ts.map +0 -1
  110. package/dist/core/snapshot.js +0 -217
  111. package/dist/core/snapshot.js.map +0 -1
  112. package/dist/core/sync-engine.d.ts +0 -157
  113. package/dist/core/sync-engine.d.ts.map +0 -1
  114. package/dist/core/sync-engine.js +0 -1379
  115. package/dist/core/sync-engine.js.map +0 -1
  116. package/dist/types/config.d.ts +0 -99
  117. package/dist/types/config.d.ts.map +0 -1
  118. package/dist/types/config.js +0 -5
  119. package/dist/types/config.js.map +0 -1
  120. package/dist/types/documents.d.ts +0 -88
  121. package/dist/types/documents.d.ts.map +0 -1
  122. package/dist/types/documents.js +0 -20
  123. package/dist/types/documents.js.map +0 -1
  124. package/dist/types/index.d.ts +0 -4
  125. package/dist/types/index.d.ts.map +0 -1
  126. package/dist/types/index.js +0 -4
  127. package/dist/types/index.js.map +0 -1
  128. package/dist/types/snapshot.d.ts +0 -64
  129. package/dist/types/snapshot.d.ts.map +0 -1
  130. package/dist/types/snapshot.js +0 -2
  131. package/dist/types/snapshot.js.map +0 -1
  132. package/dist/utils/content-similarity.d.ts +0 -53
  133. package/dist/utils/content-similarity.d.ts.map +0 -1
  134. package/dist/utils/content-similarity.js +0 -155
  135. package/dist/utils/content-similarity.js.map +0 -1
  136. package/dist/utils/content.d.ts +0 -10
  137. package/dist/utils/content.d.ts.map +0 -1
  138. package/dist/utils/content.js +0 -31
  139. package/dist/utils/content.js.map +0 -1
  140. package/dist/utils/directory.d.ts +0 -24
  141. package/dist/utils/directory.d.ts.map +0 -1
  142. package/dist/utils/directory.js +0 -52
  143. package/dist/utils/directory.js.map +0 -1
  144. package/dist/utils/fs.d.ts +0 -74
  145. package/dist/utils/fs.d.ts.map +0 -1
  146. package/dist/utils/fs.js +0 -248
  147. package/dist/utils/fs.js.map +0 -1
  148. package/dist/utils/index.d.ts +0 -5
  149. package/dist/utils/index.d.ts.map +0 -1
  150. package/dist/utils/index.js +0 -5
  151. package/dist/utils/index.js.map +0 -1
  152. package/dist/utils/mime-types.d.ts +0 -13
  153. package/dist/utils/mime-types.d.ts.map +0 -1
  154. package/dist/utils/mime-types.js +0 -209
  155. package/dist/utils/mime-types.js.map +0 -1
  156. package/dist/utils/network-sync.d.ts +0 -36
  157. package/dist/utils/network-sync.d.ts.map +0 -1
  158. package/dist/utils/network-sync.js +0 -250
  159. package/dist/utils/network-sync.js.map +0 -1
  160. package/dist/utils/node-polyfills.d.ts +0 -9
  161. package/dist/utils/node-polyfills.d.ts.map +0 -1
  162. package/dist/utils/node-polyfills.js +0 -9
  163. package/dist/utils/node-polyfills.js.map +0 -1
  164. package/dist/utils/output.d.ts +0 -129
  165. package/dist/utils/output.d.ts.map +0 -1
  166. package/dist/utils/output.js +0 -368
  167. package/dist/utils/output.js.map +0 -1
  168. package/dist/utils/repo-factory.d.ts +0 -13
  169. package/dist/utils/repo-factory.d.ts.map +0 -1
  170. package/dist/utils/repo-factory.js +0 -46
  171. package/dist/utils/repo-factory.js.map +0 -1
  172. package/dist/utils/string-similarity.d.ts +0 -14
  173. package/dist/utils/string-similarity.d.ts.map +0 -1
  174. package/dist/utils/string-similarity.js +0 -39
  175. package/dist/utils/string-similarity.js.map +0 -1
  176. package/dist/utils/text-diff.d.ts +0 -37
  177. package/dist/utils/text-diff.d.ts.map +0 -1
  178. package/dist/utils/text-diff.js +0 -93
  179. package/dist/utils/text-diff.js.map +0 -1
  180. package/dist/utils/trace.d.ts +0 -19
  181. package/dist/utils/trace.d.ts.map +0 -1
  182. package/dist/utils/trace.js +0 -63
  183. package/dist/utils/trace.js.map +0 -1
  184. package/src/cli.ts +0 -442
  185. package/src/commands.ts +0 -1134
  186. package/src/core/change-detection.ts +0 -712
  187. package/src/core/config.ts +0 -313
  188. package/src/core/index.ts +0 -5
  189. package/src/core/move-detection.ts +0 -169
  190. package/src/core/snapshot.ts +0 -275
  191. package/src/core/sync-engine.ts +0 -1795
  192. package/src/index.ts +0 -4
  193. package/src/types/config.ts +0 -111
  194. package/src/types/documents.ts +0 -91
  195. package/src/types/index.ts +0 -3
  196. package/src/types/snapshot.ts +0 -67
  197. package/src/utils/content.ts +0 -34
  198. package/src/utils/directory.ts +0 -73
  199. package/src/utils/fs.ts +0 -297
  200. package/src/utils/index.ts +0 -4
  201. package/src/utils/mime-types.ts +0 -244
  202. package/src/utils/network-sync.ts +0 -319
  203. package/src/utils/node-polyfills.ts +0 -8
  204. package/src/utils/output.ts +0 -450
  205. package/src/utils/repo-factory.ts +0 -73
  206. package/src/utils/string-similarity.ts +0 -54
  207. package/src/utils/text-diff.ts +0 -101
  208. package/src/utils/trace.ts +0 -70
  209. package/test/integration/README.md +0 -328
  210. package/test/integration/clone-test.sh +0 -310
  211. package/test/integration/conflict-resolution-test.sh +0 -309
  212. package/test/integration/debug-both-nested.sh +0 -74
  213. package/test/integration/debug-concurrent-nested.sh +0 -87
  214. package/test/integration/debug-nested.sh +0 -73
  215. package/test/integration/deletion-behavior-test.sh +0 -487
  216. package/test/integration/deletion-sync-test-simple.sh +0 -193
  217. package/test/integration/deletion-sync-test.sh +0 -297
  218. package/test/integration/exclude-patterns.test.ts +0 -144
  219. package/test/integration/full-integration-test.sh +0 -363
  220. package/test/integration/fuzzer.test.ts +0 -818
  221. package/test/integration/in-memory-sync.test.ts +0 -830
  222. package/test/integration/init-sync.test.ts +0 -89
  223. package/test/integration/manual-sync-test.sh +0 -84
  224. package/test/integration/sync-deletion.test.ts +0 -280
  225. package/test/integration/sync-flow.test.ts +0 -291
  226. package/test/jest.setup.ts +0 -34
  227. package/test/run-tests.sh +0 -225
  228. package/test/unit/deletion-behavior.test.ts +0 -249
  229. package/test/unit/enhanced-mime-detection.test.ts +0 -244
  230. package/test/unit/snapshot.test.ts +0 -404
  231. package/test/unit/sync-convergence.test.ts +0 -298
  232. package/test/unit/sync-timing.test.ts +0 -134
  233. package/test/unit/utils.test.ts +0 -366
  234. package/tsconfig.json +0 -23
@@ -0,0 +1,1062 @@
1
+ "use strict";
2
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
3
+ if (k2 === undefined) k2 = k;
4
+ var desc = Object.getOwnPropertyDescriptor(m, k);
5
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
6
+ desc = { enumerable: true, get: function() { return m[k]; } };
7
+ }
8
+ Object.defineProperty(o, k2, desc);
9
+ }) : (function(o, m, k, k2) {
10
+ if (k2 === undefined) k2 = k;
11
+ o[k2] = m[k];
12
+ }));
13
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
14
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
15
+ }) : function(o, v) {
16
+ o["default"] = v;
17
+ });
18
+ var __importStar = (this && this.__importStar) || (function () {
19
+ var ownKeys = function(o) {
20
+ ownKeys = Object.getOwnPropertyNames || function (o) {
21
+ var ar = [];
22
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
23
+ return ar;
24
+ };
25
+ return ownKeys(o);
26
+ };
27
+ return function (mod) {
28
+ if (mod && mod.__esModule) return mod;
29
+ var result = {};
30
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
31
+ __setModuleDefault(result, mod);
32
+ return result;
33
+ };
34
+ })();
35
+ Object.defineProperty(exports, "__esModule", { value: true });
36
+ exports.deleteBranchFile = void 0;
37
+ exports.init = init;
38
+ exports.clone = clone;
39
+ exports.url = url;
40
+ exports.sync = sync;
41
+ exports.nuclearizeRepo = nuclearizeRepo;
42
+ exports.save = save;
43
+ exports.status = status;
44
+ exports.diff = diff;
45
+ exports.listBranches = listBranches;
46
+ exports.currentBranch = currentBranch;
47
+ exports.createBranch = createBranch;
48
+ exports.previewMerge = previewMerge;
49
+ exports.mergeBranch = mergeBranch;
50
+ exports.cutWorkdir = cutWorkdir;
51
+ exports.pasteStash = pasteStash;
52
+ exports.showStashes = showStashes;
53
+ exports.switchBranch = switchBranch;
54
+ const fs = __importStar(require("fs/promises"));
55
+ const path = __importStar(require("path"));
56
+ const Automerge = __importStar(require("@automerge/automerge"));
57
+ const automerge_repo_1 = require("@automerge/automerge-repo");
58
+ const config_js_1 = require("./config.js");
59
+ const branches_js_1 = require("./branches.js");
60
+ Object.defineProperty(exports, "deleteBranchFile", { enumerable: true, get: function () { return branches_js_1.deleteBranchFile; } });
61
+ const ignore_js_1 = require("./ignore.js");
62
+ const fs_tree_js_1 = require("./fs-tree.js");
63
+ const log_js_1 = require("./log.js");
64
+ const repo_js_1 = require("./repo.js");
65
+ const stash_js_1 = require("./stash.js");
66
+ const index_js_1 = require("./shapes/index.js");
67
+ const dlog = (0, log_js_1.log)("pushwork");
68
+ const DEFAULT_ARTIFACT_DIRECTORIES = ["dist"];
69
+ async function init(opts) {
70
+ const root = path.resolve(opts.dir);
71
+ const useBranches = opts.branches ?? true;
72
+ const online = opts.online ?? true;
73
+ dlog("init root=%s backend=%s shape=%s branches=%s online=%s", root, opts.backend, opts.shape, useBranches, online);
74
+ if (await (0, config_js_1.configExists)(root)) {
75
+ throw new Error(`pushwork already initialized at ${root}`);
76
+ }
77
+ const artifactDirs = normalizeDirs(opts.artifactDirectories ?? DEFAULT_ARTIFACT_DIRECTORIES);
78
+ dlog("init artifactDirs=%o", artifactDirs);
79
+ await fs.mkdir((0, config_js_1.pushworkDir)(root), { recursive: true });
80
+ const repo = await (0, repo_js_1.openRepo)(opts.backend, (0, config_js_1.storageDir)(root), { offline: !online });
81
+ try {
82
+ const shape = await (0, index_js_1.resolveShape)(opts.shape);
83
+ const ig = await (0, ignore_js_1.loadIgnore)(root);
84
+ const fsFiles = await (0, fs_tree_js_1.walkDir)(root, ig);
85
+ dlog("init walked %d files", fsFiles.size);
86
+ const title = path.basename(root) || undefined;
87
+ const tree = await pushFiles(repo, fsFiles, undefined, artifactDirs);
88
+ const folderUrl = await shape.encode({ repo, tree, title });
89
+ dlog("init encoded folder=%s title=%s", folderUrl, title);
90
+ const folderHandle = await repo.find(folderUrl);
91
+ if (online) {
92
+ await (0, repo_js_1.waitForSync)(folderHandle, { minMs: 3000, idleMs: 1500, maxMs: 15000 });
93
+ stampLastSyncAt(folderHandle);
94
+ await (0, repo_js_1.waitForSync)(folderHandle, { idleMs: 1500, maxMs: 10000 });
95
+ }
96
+ let rootUrl = folderUrl;
97
+ if (useBranches) {
98
+ const branchesHandle = repo.create({
99
+ "@patchwork": { type: "branches", ...(title ? { title } : {}) },
100
+ branches: { [branches_js_1.DEFAULT_BRANCH]: folderUrl },
101
+ });
102
+ if (online) {
103
+ await (0, repo_js_1.waitForSync)(branchesHandle, { minMs: 1500, idleMs: 1500, maxMs: 10000 });
104
+ }
105
+ rootUrl = branchesHandle.url;
106
+ dlog("init wrapped in BranchesDoc=%s", rootUrl);
107
+ await (0, branches_js_1.writeBranchFile)(root, branches_js_1.DEFAULT_BRANCH);
108
+ }
109
+ await (0, config_js_1.writeConfig)(root, {
110
+ version: config_js_1.CONFIG_VERSION,
111
+ rootUrl,
112
+ backend: opts.backend,
113
+ shape: opts.shape,
114
+ artifactDirectories: artifactDirs,
115
+ branches: useBranches,
116
+ });
117
+ dlog("init complete: rootUrl=%s", rootUrl);
118
+ return rootUrl;
119
+ }
120
+ finally {
121
+ await repo.shutdown();
122
+ }
123
+ }
124
+ async function clone(opts) {
125
+ if (!(0, automerge_repo_1.isValidAutomergeUrl)(opts.url)) {
126
+ throw new Error(`invalid automerge URL: ${opts.url}`);
127
+ }
128
+ const root = path.resolve(opts.dir);
129
+ dlog("clone url=%s root=%s backend=%s shape=%s", opts.url, root, opts.backend, opts.shape);
130
+ await fs.mkdir(root, { recursive: true });
131
+ if (await (0, config_js_1.configExists)(root)) {
132
+ throw new Error(`pushwork already initialized at ${root}`);
133
+ }
134
+ const artifactDirs = normalizeDirs(opts.artifactDirectories ?? DEFAULT_ARTIFACT_DIRECTORIES);
135
+ await fs.mkdir((0, config_js_1.pushworkDir)(root), { recursive: true });
136
+ const online = opts.online ?? true;
137
+ const repo = await (0, repo_js_1.openRepo)(opts.backend, (0, config_js_1.storageDir)(root), { offline: !online });
138
+ try {
139
+ const shape = await (0, index_js_1.resolveShape)(opts.shape);
140
+ const rootHandle = await repo.find(opts.url);
141
+ if (online) {
142
+ await (0, repo_js_1.waitForSync)(rootHandle, { idleMs: 1500, maxMs: 15000 });
143
+ }
144
+ const docType = (0, branches_js_1.detectDocType)(rootHandle.doc());
145
+ dlog("clone detected docType=%s", docType);
146
+ let useBranches = false;
147
+ let folderHandle = rootHandle;
148
+ if (docType === "branches") {
149
+ useBranches = true;
150
+ const branchName = opts.branch ?? branches_js_1.DEFAULT_BRANCH;
151
+ folderHandle = await (0, branches_js_1.resolveEffectiveRoot)(repo, rootHandle, branchName);
152
+ if (online) {
153
+ await (0, repo_js_1.waitForSync)(folderHandle, { idleMs: 1500, maxMs: 15000 });
154
+ }
155
+ await (0, branches_js_1.writeBranchFile)(root, branchName);
156
+ dlog("clone branch=%s folder=%s", branchName, folderHandle.url);
157
+ }
158
+ else if (opts.branch) {
159
+ throw new Error(`--branch passed but root doc is not a branches doc (type=${docType})`);
160
+ }
161
+ const tree = await shape.decode({ repo, root: folderHandle });
162
+ await materializeTree(repo, root, tree);
163
+ await (0, config_js_1.writeConfig)(root, {
164
+ version: config_js_1.CONFIG_VERSION,
165
+ rootUrl: opts.url,
166
+ backend: opts.backend,
167
+ shape: opts.shape,
168
+ artifactDirectories: artifactDirs,
169
+ branches: useBranches,
170
+ });
171
+ dlog("clone complete");
172
+ }
173
+ finally {
174
+ await repo.shutdown();
175
+ }
176
+ }
177
+ async function url(cwd) {
178
+ const config = await (0, config_js_1.readConfig)(path.resolve(cwd));
179
+ return config.rootUrl;
180
+ }
181
+ async function sync(cwd, opts = {}) {
182
+ if (opts.nuclear) {
183
+ await nuclearizeRepo(cwd);
184
+ }
185
+ await commitWorkdir(cwd, { online: true });
186
+ }
187
+ /**
188
+ * Re-create every Automerge doc this repo references — every UnixFileEntry,
189
+ * every folder/directory doc, and (when in branches mode) the BranchesDoc —
190
+ * with brand-new URLs and no shared history with the originals. Updates
191
+ * `config.json` to point at the new root. Offline; the next sync publishes
192
+ * the new docs to the server.
193
+ *
194
+ * Use sparingly: this orphans the previous URLs from this repo's perspective.
195
+ * Anyone who had cloned the old URL keeps working from those docs; this
196
+ * client just stops referencing them.
197
+ */
198
+ async function nuclearizeRepo(cwd) {
199
+ const root = path.resolve(cwd);
200
+ const config = await (0, config_js_1.readConfig)(root);
201
+ const branchName = config.branches ? await (0, branches_js_1.readBranchFile)(root) : null;
202
+ dlog("nuclear root=%s branches=%s current=%s", root, config.branches, branchName);
203
+ const repo = await (0, repo_js_1.openRepo)(config.backend, (0, config_js_1.storageDir)(root), { offline: true });
204
+ try {
205
+ const shape = await (0, index_js_1.resolveShape)(config.shape);
206
+ const oldRootHandle = await repo.find(config.rootUrl);
207
+ const title = path.basename(root) || undefined;
208
+ const rebuildFolder = async (oldFolderUrl, folderTitle) => {
209
+ const oldFolder = await repo.find(oldFolderUrl);
210
+ const oldTree = await shape.decode({ repo, root: oldFolder });
211
+ // For each leaf: read content, create a fresh UnixFileEntry doc.
212
+ const newTree = (0, index_js_1.newDir)();
213
+ for (const [posixPath, fileUrl] of (0, index_js_1.flattenLeaves)(oldTree)) {
214
+ const bare = (0, index_js_1.stripHeads)(fileUrl);
215
+ const oldFileHandle = await repo.find(bare);
216
+ const oldDoc = oldFileHandle.doc();
217
+ const newFileHandle = repo.create({
218
+ "@patchwork": { type: "file" },
219
+ name: oldDoc.name,
220
+ extension: oldDoc.extension,
221
+ mimeType: oldDoc.mimeType,
222
+ content: oldDoc.content,
223
+ });
224
+ let finalUrl = newFileHandle.url;
225
+ if ((0, automerge_repo_1.parseAutomergeUrl)(fileUrl).heads) {
226
+ finalUrl = (0, index_js_1.pinUrl)(newFileHandle);
227
+ }
228
+ (0, index_js_1.setFileAt)(newTree, posixPath.split("/").filter(Boolean), finalUrl);
229
+ }
230
+ // Encode without previousRoot → fresh folder/directory doc URL.
231
+ return shape.encode({ repo, tree: newTree, title: folderTitle });
232
+ };
233
+ let newRootUrl;
234
+ if (config.branches && (0, branches_js_1.isBranchesDoc)(oldRootHandle.doc())) {
235
+ const oldDoc = oldRootHandle.doc();
236
+ const newBranches = {};
237
+ for (const [name, oldFolderUrl] of Object.entries(oldDoc.branches)) {
238
+ newBranches[name] = await rebuildFolder(oldFolderUrl, title);
239
+ dlog("nuclear rebuilt branch %s → %s", name, newBranches[name]);
240
+ }
241
+ const newRoot = repo.create({
242
+ "@patchwork": {
243
+ type: "branches",
244
+ ...(title ? { title } : {}),
245
+ },
246
+ branches: newBranches,
247
+ });
248
+ newRootUrl = newRoot.url;
249
+ }
250
+ else {
251
+ newRootUrl = await rebuildFolder(config.rootUrl, title);
252
+ }
253
+ dlog("nuclear new rootUrl=%s", newRootUrl);
254
+ await (0, config_js_1.writeConfig)(root, { ...config, rootUrl: newRootUrl });
255
+ }
256
+ finally {
257
+ await repo.shutdown();
258
+ }
259
+ }
260
+ async function save(cwd) {
261
+ await commitWorkdir(cwd, { online: false });
262
+ }
263
+ async function commitWorkdir(cwd, { online }) {
264
+ const root = path.resolve(cwd);
265
+ const config = await (0, config_js_1.readConfig)(root);
266
+ const branchName = config.branches ? await (0, branches_js_1.readBranchFile)(root) : null;
267
+ dlog("commit online=%s root=%s branch=%s", online, root, branchName);
268
+ const repo = await (0, repo_js_1.openRepo)(config.backend, (0, config_js_1.storageDir)(root), {
269
+ offline: !online,
270
+ });
271
+ try {
272
+ const shape = await (0, index_js_1.resolveShape)(config.shape);
273
+ const rootHandle = await repo.find(config.rootUrl);
274
+ // In branches mode + online, touch every branch's folder doc so the
275
+ // network adapter announces them. Without this, a branch created
276
+ // offline (`pushwork branch X`) is never pushed to the server, even
277
+ // though its entry is in the BranchesDoc.
278
+ const otherBranchHandles = [];
279
+ if (online && config.branches && (0, branches_js_1.isBranchesDoc)(rootHandle.doc())) {
280
+ const doc = rootHandle.doc();
281
+ for (const [name, url] of Object.entries(doc.branches)) {
282
+ if (name === branchName)
283
+ continue;
284
+ otherBranchHandles.push(await repo.find(url));
285
+ }
286
+ }
287
+ const folderHandle = await (0, branches_js_1.resolveEffectiveRoot)(repo, rootHandle, branchName);
288
+ const previousTree = await shape.decode({ repo, root: folderHandle });
289
+ const previousFiles = await readFileBytes(repo, previousTree);
290
+ const ig = await (0, ignore_js_1.loadIgnore)(root);
291
+ const fsFiles = await (0, fs_tree_js_1.walkDir)(root, ig);
292
+ const newTree = await pushFiles(repo, fsFiles, previousFiles, config.artifactDirectories);
293
+ const changed = !sameTree(previousTree, newTree);
294
+ dlog("commit tree changed: %s", changed);
295
+ if (changed) {
296
+ await shape.encode({ repo, tree: newTree, previousRoot: folderHandle });
297
+ }
298
+ if (online) {
299
+ // Always stamp lastSyncAt on a sync, regardless of whether the
300
+ // working tree changed — a sync is also a checkpoint that "we
301
+ // reconciled with the server at this time."
302
+ stampLastSyncAt(folderHandle);
303
+ // Wait for the current branch's folder, the BranchesDoc itself
304
+ // (when in branches mode), and any other branch folder docs to
305
+ // flush. The maxMs is generous so a brand-new offline-created
306
+ // branch reliably propagates.
307
+ await (0, repo_js_1.waitForSync)(folderHandle, {
308
+ minMs: 3000,
309
+ idleMs: 1500,
310
+ maxMs: 15000,
311
+ });
312
+ if (config.branches) {
313
+ await (0, repo_js_1.waitForSync)(rootHandle, { idleMs: 1500, maxMs: 10000 });
314
+ }
315
+ for (const h of otherBranchHandles) {
316
+ await (0, repo_js_1.waitForSync)(h, { idleMs: 1500, maxMs: 10000 });
317
+ }
318
+ }
319
+ const finalTree = await shape.decode({ repo, root: folderHandle });
320
+ await materializeTree(repo, root, finalTree);
321
+ dlog("commit complete");
322
+ }
323
+ finally {
324
+ await repo.shutdown();
325
+ }
326
+ }
327
+ async function status(cwd) {
328
+ const root = path.resolve(cwd);
329
+ const config = await (0, config_js_1.readConfig)(root);
330
+ const branchName = config.branches ? await (0, branches_js_1.readBranchFile)(root) : null;
331
+ const repo = await (0, repo_js_1.openRepo)(config.backend, (0, config_js_1.storageDir)(root), { offline: true });
332
+ try {
333
+ const shape = await (0, index_js_1.resolveShape)(config.shape);
334
+ const rootHandle = await repo.find(config.rootUrl);
335
+ const folderHandle = await (0, branches_js_1.resolveEffectiveRoot)(repo, rootHandle, branchName);
336
+ const previousTree = await shape.decode({ repo, root: folderHandle });
337
+ const previousFiles = await readFileBytes(repo, previousTree);
338
+ const ig = await (0, ignore_js_1.loadIgnore)(root);
339
+ const fsFiles = await (0, fs_tree_js_1.walkDir)(root, ig);
340
+ const diff = computeDiff(previousFiles, fsFiles);
341
+ return { branch: branchName, diff };
342
+ }
343
+ finally {
344
+ await repo.shutdown();
345
+ }
346
+ }
347
+ async function diff(cwd, limitToPath) {
348
+ const root = path.resolve(cwd);
349
+ const config = await (0, config_js_1.readConfig)(root);
350
+ const branchName = config.branches ? await (0, branches_js_1.readBranchFile)(root) : null;
351
+ const repo = await (0, repo_js_1.openRepo)(config.backend, (0, config_js_1.storageDir)(root), { offline: true });
352
+ try {
353
+ const shape = await (0, index_js_1.resolveShape)(config.shape);
354
+ const rootHandle = await repo.find(config.rootUrl);
355
+ const folderHandle = await (0, branches_js_1.resolveEffectiveRoot)(repo, rootHandle, branchName);
356
+ const previousTree = await shape.decode({ repo, root: folderHandle });
357
+ const previousFiles = await readFileBytes(repo, previousTree);
358
+ const ig = await (0, ignore_js_1.loadIgnore)(root);
359
+ const fsFiles = await (0, fs_tree_js_1.walkDir)(root, ig);
360
+ const out = [];
361
+ for (const [p, bytes] of fsFiles) {
362
+ if (limitToPath && p !== limitToPath)
363
+ continue;
364
+ const prev = previousFiles.get(p);
365
+ if (!prev) {
366
+ out.push({ path: p, kind: "added", after: bytes });
367
+ }
368
+ else if (!(0, fs_tree_js_1.byteEq)(prev.bytes, bytes)) {
369
+ out.push({ path: p, kind: "modified", before: prev.bytes, after: bytes });
370
+ }
371
+ }
372
+ for (const [p, prev] of previousFiles) {
373
+ if (limitToPath && p !== limitToPath)
374
+ continue;
375
+ if (!fsFiles.has(p))
376
+ out.push({ path: p, kind: "deleted", before: prev.bytes });
377
+ }
378
+ return out;
379
+ }
380
+ finally {
381
+ await repo.shutdown();
382
+ }
383
+ }
384
+ async function listBranches(cwd) {
385
+ const root = path.resolve(cwd);
386
+ const config = await (0, config_js_1.readConfig)(root);
387
+ if (!config.branches) {
388
+ throw new Error("pushwork repo has no branches");
389
+ }
390
+ const current = await (0, branches_js_1.readBranchFile)(root);
391
+ const repo = await (0, repo_js_1.openRepo)(config.backend, (0, config_js_1.storageDir)(root), { offline: true });
392
+ try {
393
+ const rootHandle = await repo.find(config.rootUrl);
394
+ const doc = rootHandle.doc();
395
+ if (!(0, branches_js_1.isBranchesDoc)(doc)) {
396
+ throw new Error(`root doc at ${config.rootUrl} is not a branches doc`);
397
+ }
398
+ return { current, names: (0, branches_js_1.listBranchNames)(doc) };
399
+ }
400
+ finally {
401
+ await repo.shutdown();
402
+ }
403
+ }
404
+ async function currentBranch(cwd) {
405
+ const root = path.resolve(cwd);
406
+ const config = await (0, config_js_1.readConfig)(root);
407
+ if (!config.branches)
408
+ return null;
409
+ return (0, branches_js_1.readBranchFile)(root);
410
+ }
411
+ async function createBranch(cwd, name) {
412
+ if (!name)
413
+ throw new Error("branch name is required");
414
+ if (name.includes("/") || name.includes("\\")) {
415
+ throw new Error("branch name may not contain slashes");
416
+ }
417
+ const root = path.resolve(cwd);
418
+ const config = await (0, config_js_1.readConfig)(root);
419
+ if (!config.branches)
420
+ throw new Error("pushwork repo has no branches");
421
+ const currentName = await (0, branches_js_1.readBranchFile)(root);
422
+ if (!currentName)
423
+ throw new Error("no current branch is set");
424
+ const repo = await (0, repo_js_1.openRepo)(config.backend, (0, config_js_1.storageDir)(root), { offline: true });
425
+ try {
426
+ const shape = await (0, index_js_1.resolveShape)(config.shape);
427
+ const rootHandle = await repo.find(config.rootUrl);
428
+ const doc = rootHandle.doc();
429
+ if (!(0, branches_js_1.isBranchesDoc)(doc)) {
430
+ throw new Error(`root doc at ${config.rootUrl} is not a branches doc`);
431
+ }
432
+ if (doc.branches[name]) {
433
+ throw new Error(`branch "${name}" already exists`);
434
+ }
435
+ const sourceUrl = doc.branches[currentName];
436
+ if (!sourceUrl) {
437
+ throw new Error(`current branch "${currentName}" not found in branches doc`);
438
+ }
439
+ const sourceHandle = await repo.find(sourceUrl);
440
+ // Clone the folder doc.
441
+ const clonedFolder = repo.clone(sourceHandle);
442
+ dlog("createBranch %s cloned folder %s → %s", name, sourceUrl, clonedFolder.url);
443
+ // Deep-clone every file doc the source folder references, then rewrite
444
+ // the cloned folder's leaves to point at the new file URLs. Without
445
+ // this step both branches would alias the same UnixFileEntry docs and
446
+ // editing one branch would silently mutate the other.
447
+ const sourceTree = await shape.decode({ repo, root: sourceHandle });
448
+ const fileUrlRemap = new Map();
449
+ for (const [, fileUrl] of (0, index_js_1.flattenLeaves)(sourceTree)) {
450
+ const bare = (0, index_js_1.stripHeads)(fileUrl);
451
+ if (fileUrlRemap.has(bare))
452
+ continue;
453
+ const orig = await repo.find(bare);
454
+ const cloned = repo.clone(orig);
455
+ fileUrlRemap.set(bare, cloned.url);
456
+ dlog("createBranch cloned file %s → %s", bare, cloned.url);
457
+ }
458
+ const newTree = (0, index_js_1.newDir)();
459
+ for (const [posixPath, fileUrl] of (0, index_js_1.flattenLeaves)(sourceTree)) {
460
+ const bare = (0, index_js_1.stripHeads)(fileUrl);
461
+ const remappedBare = fileUrlRemap.get(bare);
462
+ if (!remappedBare)
463
+ continue;
464
+ const parsed = (0, automerge_repo_1.parseAutomergeUrl)(fileUrl);
465
+ // Preserve heads-pinning if the source URL was pinned.
466
+ let finalUrl = remappedBare;
467
+ if (parsed.heads) {
468
+ const newHandle = await repo.find(remappedBare);
469
+ finalUrl = (0, index_js_1.pinUrl)(newHandle);
470
+ }
471
+ const segments = posixPath.split("/").filter(Boolean);
472
+ (0, index_js_1.setFileAt)(newTree, segments, finalUrl);
473
+ }
474
+ await shape.encode({ repo, tree: newTree, previousRoot: clonedFolder });
475
+ rootHandle.change((d) => {
476
+ d.branches[name] = clonedFolder.url;
477
+ });
478
+ // Switch to the new branch. The deep clone has identical content to the
479
+ // source, so the working tree on disk is already correct — we just
480
+ // update .pushwork/branch.
481
+ await (0, branches_js_1.writeBranchFile)(root, name);
482
+ dlog("createBranch switched to %s", name);
483
+ return clonedFolder.url;
484
+ }
485
+ finally {
486
+ await repo.shutdown();
487
+ }
488
+ }
489
+ /**
490
+ * (Documents `mergeBranch`, defined further below.) Apply changes from `source` branch onto the current branch.
491
+ *
492
+ * For each path:
493
+ * - In both branches: their UnixFileEntry docs share Automerge history (deep
494
+ * cloned at branch creation), so we Automerge-merge source's content into
495
+ * target's. Concurrent edits are CRDT-merged inside each file doc.
496
+ * - Only in source: deep-clone the source's file doc into a new doc and add
497
+ * it to target's folder. Editing on either branch afterward stays isolated.
498
+ * - Only in target: untouched. We don't propagate deletions from source — the
499
+ * user can do that explicitly.
500
+ *
501
+ * Refuses if the working tree has uncommitted changes against the current
502
+ * branch (run `pushwork save` first). Offline only — propagation happens on
503
+ * the next `pushwork sync`.
504
+ */
505
+ /**
506
+ * Compute what `merge <source>` would do without mutating any docs or the
507
+ * working tree. For paths in both branches we apply the merge to a *clone*
508
+ * of the target's file doc to learn the merged bytes; for paths only in
509
+ * source we just read source's bytes.
510
+ */
511
/**
 * Compute what `merge <source>` would do without mutating any docs or the
 * working tree.
 *
 * For each path present in the source branch:
 * - missing from target: reported as "added" with the source's bytes;
 * - present in both with the same bare doc URL: skipped (shared identity);
 * - present in both with different doc URLs: the merge is simulated on
 *   clones of both file docs (Automerge.merge on copies), and reported as
 *   "merged" only if the merged bytes differ from the target's bytes.
 *
 * @param {string} cwd - directory inside (or at the root of) a pushwork repo.
 * @param {string} source - name of the branch to merge from.
 * @returns {Promise<{source: string, target: string, entries: Array}>}
 *   entries sorted by path; each entry is {path, kind: "added"|"merged",
 *   before?, after?} with byte payloads.
 * @throws if `source` is missing, the repo has no branches, no current
 *   branch is set, source equals the current branch, the root doc is not a
 *   branches doc, or the source branch does not exist.
 */
async function previewMerge(cwd, source) {
    if (!source)
        throw new Error("source branch name is required");
    const root = path.resolve(cwd);
    const config = await (0, config_js_1.readConfig)(root);
    if (!config.branches)
        throw new Error("pushwork repo has no branches");
    const targetName = await (0, branches_js_1.readBranchFile)(root);
    if (!targetName)
        throw new Error("no current branch is set");
    if (source === targetName) {
        throw new Error(`cannot merge "${source}" into itself`);
    }
    // Offline: preview never talks to a sync server.
    const repo = await (0, repo_js_1.openRepo)(config.backend, (0, config_js_1.storageDir)(root), { offline: true });
    try {
        const shape = await (0, index_js_1.resolveShape)(config.shape);
        const rootHandle = await repo.find(config.rootUrl);
        const branchesDoc = rootHandle.doc();
        if (!(0, branches_js_1.isBranchesDoc)(branchesDoc)) {
            throw new Error(`root doc at ${config.rootUrl} is not a branches doc`);
        }
        if (!branchesDoc.branches[source]) {
            throw new Error(`source branch "${source}" does not exist`);
        }
        const targetFolder = await repo.find(branchesDoc.branches[targetName]);
        const sourceFolder = await repo.find(branchesDoc.branches[source]);
        const tTree = await shape.decode({ repo, root: targetFolder });
        const sTree = await shape.decode({ repo, root: sourceFolder });
        const tLeaves = (0, index_js_1.flattenLeaves)(tTree);
        const sLeaves = (0, index_js_1.flattenLeaves)(sTree);
        const entries = [];
        for (const [posixPath, sUrl] of sLeaves) {
            const tUrl = tLeaves.get(posixPath);
            // Heads-pinning is stripped so we compare doc identities, not pins.
            const sBare = (0, index_js_1.stripHeads)(sUrl);
            const sHandle = await repo.find(sBare);
            if (!tUrl) {
                // Path only exists in source: the merge would add it verbatim.
                entries.push({
                    path: posixPath,
                    kind: "added",
                    after: (0, index_js_1.contentToBytes)(sHandle.doc().content),
                });
                continue;
            }
            const tBare = (0, index_js_1.stripHeads)(tUrl);
            if (tBare === sBare)
                continue;
            const tHandle = await repo.find(tBare);
            const before = (0, index_js_1.contentToBytes)(tHandle.doc().content);
            // Compute merge result without touching the target doc.
            const merged = Automerge.merge(Automerge.clone(tHandle.doc()), Automerge.clone(sHandle.doc()));
            const after = (0, index_js_1.contentToBytes)(merged.content);
            // No byte-level difference means the merge would be a no-op here.
            if ((0, fs_tree_js_1.byteEq)(before, after))
                continue;
            entries.push({ path: posixPath, kind: "merged", before, after });
        }
        entries.sort((a, b) => a.path.localeCompare(b.path));
        return { source, target: targetName, entries };
    }
    finally {
        await repo.shutdown();
    }
}
573
/**
 * Apply changes from `source` branch onto the current (target) branch.
 *
 * Refuses if the working tree has uncommitted changes against the current
 * branch. For paths in both branches with distinct file-doc identities, the
 * source doc is Automerge-merged into the target doc in place. Paths only in
 * source are deep-cloned into new file docs and added to the target folder;
 * paths only in target are left untouched (deletions are not propagated).
 * Finally the target branch is re-materialized onto disk.
 *
 * @param {string} cwd - directory inside a pushwork repo with branches.
 * @param {string} source - name of the branch to merge from.
 * @returns {Promise<{source, target, merged: string[], added: string[]}>}
 *   sorted lists of paths that were merged in place / newly added.
 * @throws on missing source name, non-branches repo/root doc, unset current
 *   branch, self-merge, unknown source branch, or a dirty working tree.
 */
async function mergeBranch(cwd, source) {
    if (!source)
        throw new Error("source branch name is required");
    const root = path.resolve(cwd);
    const config = await (0, config_js_1.readConfig)(root);
    if (!config.branches)
        throw new Error("pushwork repo has no branches");
    const targetName = await (0, branches_js_1.readBranchFile)(root);
    if (!targetName)
        throw new Error("no current branch is set");
    if (source === targetName) {
        throw new Error(`cannot merge "${source}" into itself`);
    }
    dlog("merge source=%s target=%s", source, targetName);
    // Offline only — propagation happens on the next sync.
    const repo = await (0, repo_js_1.openRepo)(config.backend, (0, config_js_1.storageDir)(root), { offline: true });
    try {
        const shape = await (0, index_js_1.resolveShape)(config.shape);
        const rootHandle = await repo.find(config.rootUrl);
        const branchesDoc = rootHandle.doc();
        if (!(0, branches_js_1.isBranchesDoc)(branchesDoc)) {
            throw new Error(`root doc at ${config.rootUrl} is not a branches doc`);
        }
        if (!branchesDoc.branches[source]) {
            throw new Error(`source branch "${source}" does not exist`);
        }
        const targetUrl = branchesDoc.branches[targetName];
        const sourceUrl = branchesDoc.branches[source];
        const targetFolder = await repo.find(targetUrl);
        const sourceFolder = await repo.find(sourceUrl);
        // Refuse on dirty working tree (mirror switchBranch policy).
        const tFiles = await readFileBytes(repo, await shape.decode({ repo, root: targetFolder }));
        const ig = await (0, ignore_js_1.loadIgnore)(root);
        const fsFiles = await (0, fs_tree_js_1.walkDir)(root, ig);
        const dirty = computeDiff(tFiles, fsFiles);
        if (dirty.added.length || dirty.modified.length || dirty.deleted.length) {
            throw new Error(`refusing to merge: working tree has uncommitted changes on branch "${targetName}". run \`pushwork save\` first.`);
        }
        const tTree = await shape.decode({ repo, root: targetFolder });
        const sTree = await shape.decode({ repo, root: sourceFolder });
        const tLeaves = (0, index_js_1.flattenLeaves)(tTree);
        const sLeaves = (0, index_js_1.flattenLeaves)(sTree);
        const merged = [];
        const added = [];
        // For paths in both: merge file docs in place.
        for (const [posixPath, sUrl] of sLeaves) {
            const tUrl = tLeaves.get(posixPath);
            if (!tUrl)
                continue;
            const tBare = (0, index_js_1.stripHeads)(tUrl);
            const sBare = (0, index_js_1.stripHeads)(sUrl);
            if (tBare === sBare) {
                // Same file doc identity (shared) — already in sync, nothing to do.
                continue;
            }
            const tHandle = await repo.find(tBare);
            const sHandle = await repo.find(sBare);
            // Merge a clone of the source doc so the source doc itself is untouched.
            tHandle.update((d) => Automerge.merge(d, Automerge.clone(sHandle.doc())));
            merged.push(posixPath);
            dlog("merge merged file at %s (%s ← %s)", posixPath, tBare, sBare);
        }
        // For paths only in source: deep-clone source's file doc, add to target's folder.
        const newLeaves = new Map();
        for (const [posixPath, sUrl] of sLeaves) {
            if (tLeaves.has(posixPath))
                continue;
            const sBare = (0, index_js_1.stripHeads)(sUrl);
            const sHandle = await repo.find(sBare);
            const cloned = repo.clone(sHandle);
            let finalUrl = cloned.url;
            const parsed = (0, automerge_repo_1.parseAutomergeUrl)(sUrl);
            // Preserve heads-pinning if the source URL was pinned.
            if (parsed.heads) {
                finalUrl = (0, index_js_1.pinUrl)(cloned);
            }
            newLeaves.set(posixPath, finalUrl);
            added.push(posixPath);
            dlog("merge added %s url=%s", posixPath, finalUrl);
        }
        if (newLeaves.size > 0) {
            // Build a tree for the encode call: existing target leaves + new ones.
            const nextTree = (0, index_js_1.newDir)();
            for (const [p, url] of tLeaves) {
                (0, index_js_1.setFileAt)(nextTree, p.split("/").filter(Boolean), url);
            }
            for (const [p, url] of newLeaves) {
                (0, index_js_1.setFileAt)(nextTree, p.split("/").filter(Boolean), url);
            }
            await shape.encode({ repo, tree: nextTree, previousRoot: targetFolder });
        }
        // Materialize current branch (target) onto disk to reflect the merge.
        const finalTree = await shape.decode({ repo, root: targetFolder });
        await materializeTree(repo, root, finalTree);
        merged.sort();
        added.sort();
        return { source, target: targetName, merged, added };
    }
    finally {
        await repo.shutdown();
    }
}
672
+ /**
673
+ * Capture the working tree's changes against the current branch's saved
674
+ * state into a local stash, then reset the working tree to the saved state.
675
+ * Stashes live in `.pushwork/stash.json` and are never synced.
676
+ */
677
/**
 * Capture the working tree's changes against the current branch's saved
 * state into a local stash, then reset the working tree to the saved state.
 *
 * Diffs the on-disk files (ignore rules applied) against the branch's saved
 * tree; added/modified files are stored base64-encoded, deletions are
 * recorded by path. The stash entry is appended via `appendStash` and the
 * working tree is then re-materialized from the saved tree.
 *
 * @param {string} cwd - directory inside a pushwork repo.
 * @param {{name?: string}} [opts] - optional human-readable stash name.
 * @returns {Promise<{id: number, entries: number}>} new stash id and entry count.
 * @throws if the working tree is clean (nothing to cut).
 */
async function cutWorkdir(cwd, opts = {}) {
    const root = path.resolve(cwd);
    const config = await (0, config_js_1.readConfig)(root);
    // Branch-less repos stash against the root folder (branch = null).
    const branchName = config.branches ? await (0, branches_js_1.readBranchFile)(root) : null;
    dlog("cut root=%s branch=%s name=%s", root, branchName, opts.name ?? "(unnamed)");
    const repo = await (0, repo_js_1.openRepo)(config.backend, (0, config_js_1.storageDir)(root), { offline: true });
    try {
        const shape = await (0, index_js_1.resolveShape)(config.shape);
        const rootHandle = await repo.find(config.rootUrl);
        const folderHandle = await (0, branches_js_1.resolveEffectiveRoot)(repo, rootHandle, branchName);
        const previousTree = await shape.decode({ repo, root: folderHandle });
        const previousFiles = await readFileBytes(repo, previousTree);
        const ig = await (0, ignore_js_1.loadIgnore)(root);
        const fsFiles = await (0, fs_tree_js_1.walkDir)(root, ig);
        const entries = [];
        // Added/modified: disk differs from (or is absent in) the saved tree.
        for (const [p, bytes] of fsFiles) {
            const prev = previousFiles.get(p);
            if (!prev) {
                entries.push({ path: p, kind: "added", contentBase64: (0, stash_js_1.encodeBytes)(bytes) });
            }
            else if (!(0, fs_tree_js_1.byteEq)(prev.bytes, bytes)) {
                entries.push({
                    path: p,
                    kind: "modified",
                    contentBase64: (0, stash_js_1.encodeBytes)(bytes),
                });
            }
        }
        // Deleted: in the saved tree but missing on disk.
        for (const [p] of previousFiles) {
            if (!fsFiles.has(p))
                entries.push({ path: p, kind: "deleted" });
        }
        if (entries.length === 0) {
            throw new Error("nothing to cut: working tree clean");
        }
        entries.sort((a, b) => a.path.localeCompare(b.path));
        const stash = await (0, stash_js_1.appendStash)(root, {
            name: opts.name,
            branch: branchName,
            entries,
        });
        // Reset working tree to the branch's saved state.
        await materializeTree(repo, root, previousTree);
        dlog("cut complete id=%d entries=%d", stash.id, entries.length);
        return { id: stash.id, entries: entries.length };
    }
    finally {
        await repo.shutdown();
    }
}
727
+ /**
728
+ * Apply a stash on top of the current working tree, then remove the stash
729
+ * entry. Refuses if the working tree has uncommitted changes (caller can
730
+ * `pushwork save` or `pushwork cut` first).
731
+ */
732
/**
 * Apply a stash on top of the current working tree, then remove the stash
 * entry (via `takeStash`). Refuses if the working tree has uncommitted
 * changes against the current branch's saved state.
 *
 * @param {string} cwd - directory inside a pushwork repo.
 * @param {string} [selector] - stash selector passed to `takeStash`;
 *   when omitted, `takeStash` picks its default — TODO confirm which.
 * @returns {Promise<{id: number, name: string|undefined, entries: number}>}
 * @throws if the working tree is dirty, or no stash matches the selector.
 */
async function pasteStash(cwd, selector) {
    const root = path.resolve(cwd);
    const config = await (0, config_js_1.readConfig)(root);
    const branchName = config.branches ? await (0, branches_js_1.readBranchFile)(root) : null;
    // Check the working tree is clean against the current branch state.
    const repo = await (0, repo_js_1.openRepo)(config.backend, (0, config_js_1.storageDir)(root), { offline: true });
    try {
        const shape = await (0, index_js_1.resolveShape)(config.shape);
        const rootHandle = await repo.find(config.rootUrl);
        const folderHandle = await (0, branches_js_1.resolveEffectiveRoot)(repo, rootHandle, branchName);
        const previousTree = await shape.decode({ repo, root: folderHandle });
        const previousFiles = await readFileBytes(repo, previousTree);
        const ig = await (0, ignore_js_1.loadIgnore)(root);
        const fsFiles = await (0, fs_tree_js_1.walkDir)(root, ig);
        const dirty = computeDiff(previousFiles, fsFiles);
        if (dirty.added.length || dirty.modified.length || dirty.deleted.length) {
            throw new Error("refusing to paste: working tree has uncommitted changes. run `pushwork save` or `pushwork cut` first.");
        }
    }
    finally {
        // The repo is only needed for the cleanliness check; release it
        // before touching the filesystem below.
        await repo.shutdown();
    }
    const stash = await (0, stash_js_1.takeStash)(root, selector);
    if (!stash) {
        throw new Error(selector
            ? `no stash matches "${selector}"`
            : "nothing to paste: no stashes");
    }
    // Replay the stash onto disk: deletions unlink (and prune now-empty
    // parent dirs); adds/modifications rewrite the file atomically.
    for (const entry of stash.entries) {
        const target = path.join(root, fromPosix(entry.path));
        if (entry.kind === "deleted") {
            try {
                await fs.unlink(target);
            }
            catch {
                // already gone
            }
            await pruneEmptyDirs(root, path.dirname(fromPosix(entry.path)));
        }
        else if (entry.contentBase64 != null) {
            const bytes = (0, stash_js_1.decodeBytes)(entry.contentBase64);
            await (0, fs_tree_js_1.writeFileAtomic)(target, bytes);
        }
    }
    dlog("paste complete id=%d entries=%d", stash.id, stash.entries.length);
    return { id: stash.id, name: stash.name, entries: stash.entries.length };
}
779
/**
 * List the repo's stashes.
 *
 * @param {string} cwd - directory inside a pushwork repo.
 * @returns whatever `listStashes` returns for the resolved repo root.
 */
async function showStashes(cwd) {
    const root = path.resolve(cwd);
    return (0, stash_js_1.listStashes)(root);
}
782
/**
 * Switch the working tree to branch `name`.
 *
 * Normally refuses when the working tree has uncommitted changes against the
 * current branch. Special case: if the current branch no longer exists in
 * the branches doc ("stranded"), there is no reference tree for a dirty
 * check, so working changes are auto-cut into a stash against the
 * destination branch, the destination is materialized, and the stash is
 * auto-pasted back afterward so the user's work survives the switch.
 *
 * @param {string} cwd - directory inside a pushwork repo with branches.
 * @param {string} name - branch to switch to.
 * @throws on missing name, non-branches repo/root doc, unknown branch, or a
 *   dirty working tree on an existing current branch.
 */
async function switchBranch(cwd, name) {
    if (!name)
        throw new Error("branch name is required");
    const root = path.resolve(cwd);
    const config = await (0, config_js_1.readConfig)(root);
    if (!config.branches)
        throw new Error("pushwork repo has no branches");
    const currentName = await (0, branches_js_1.readBranchFile)(root);
    const repo = await (0, repo_js_1.openRepo)(config.backend, (0, config_js_1.storageDir)(root), { offline: true });
    try {
        const shape = await (0, index_js_1.resolveShape)(config.shape);
        const rootHandle = await repo.find(config.rootUrl);
        const doc = rootHandle.doc();
        if (!(0, branches_js_1.isBranchesDoc)(doc)) {
            throw new Error(`root doc at ${config.rootUrl} is not a branches doc`);
        }
        if (!doc.branches[name]) {
            throw new Error(`branch "${name}" does not exist`);
        }
        // Stranded: a branch file exists but its branch entry is gone.
        const stranded = !!currentName && !doc.branches[currentName];
        // Refuse if the working dir has uncommitted changes against the current
        // branch. The user can `pushwork save` to commit, or `pushwork cut` +
        // `pushwork paste` to carry the changes across the switch.
        if (currentName && !stranded) {
            const folderHandle = await (0, branches_js_1.resolveEffectiveRoot)(repo, rootHandle, currentName);
            const previousTree = await shape.decode({ repo, root: folderHandle });
            const previousFiles = await readFileBytes(repo, previousTree);
            const ig = await (0, ignore_js_1.loadIgnore)(root);
            const fsFiles = await (0, fs_tree_js_1.walkDir)(root, ig);
            const d = computeDiff(previousFiles, fsFiles);
            if (d.added.length || d.modified.length || d.deleted.length) {
                throw new Error(`refusing to switch: working tree has uncommitted changes on branch "${currentName}". run \`pushwork save\` to commit, or \`pushwork cut\` + \`pushwork switch ${name}\` + \`pushwork paste\` to carry them across.`);
            }
        }
        // Materialize from the new branch.
        const newFolder = await repo.find(doc.branches[name]);
        const tree = await shape.decode({ repo, root: newFolder });
        // Stranded: the current branch is gone, so we have no reference for a
        // dirty check. Auto-cut working changes against the destination branch,
        // materialize, then auto-paste so the user's work survives the switch.
        let strandedStashId = null;
        if (stranded) {
            const newFiles = await readFileBytes(repo, tree);
            const ig = await (0, ignore_js_1.loadIgnore)(root);
            const fsFiles = await (0, fs_tree_js_1.walkDir)(root, ig);
            const entries = [];
            // Diff disk against the *destination* branch (same shape of diff
            // as cutWorkdir, but with the new branch's tree as the baseline).
            for (const [p, bytes] of fsFiles) {
                const prev = newFiles.get(p);
                if (!prev) {
                    entries.push({ path: p, kind: "added", contentBase64: (0, stash_js_1.encodeBytes)(bytes) });
                }
                else if (!(0, fs_tree_js_1.byteEq)(prev.bytes, bytes)) {
                    entries.push({ path: p, kind: "modified", contentBase64: (0, stash_js_1.encodeBytes)(bytes) });
                }
            }
            for (const [p] of newFiles) {
                if (!fsFiles.has(p))
                    entries.push({ path: p, kind: "deleted" });
            }
            if (entries.length > 0) {
                entries.sort((a, b) => a.path.localeCompare(b.path));
                const stash = await (0, stash_js_1.appendStash)(root, {
                    name: `stranded-from-${currentName}`,
                    branch: currentName,
                    entries,
                });
                strandedStashId = stash.id;
                process.stderr.write(`warning: branch "${currentName}" no longer exists; auto-cut ${entries.length} entr${entries.length === 1 ? "y" : "ies"} as stash #${stash.id} and will auto-paste after switch\n`);
            }
            else {
                process.stderr.write(`warning: branch "${currentName}" no longer exists; switching (working tree already matches "${name}")\n`);
            }
        }
        await materializeTree(repo, root, tree);
        await (0, branches_js_1.writeBranchFile)(root, name);
        dlog("switch → %s", name);
        // Auto-paste the stranded stash (consumes it via takeStash).
        if (strandedStashId != null) {
            const stash = await (0, stash_js_1.takeStash)(root, String(strandedStashId));
            if (stash) {
                for (const entry of stash.entries) {
                    const target = path.join(root, fromPosix(entry.path));
                    if (entry.kind === "deleted") {
                        try {
                            await fs.unlink(target);
                        }
                        catch {
                            // already gone
                        }
                        await pruneEmptyDirs(root, path.dirname(fromPosix(entry.path)));
                    }
                    else if (entry.contentBase64 != null) {
                        const bytes = (0, stash_js_1.decodeBytes)(entry.contentBase64);
                        await (0, fs_tree_js_1.writeFileAtomic)(target, bytes);
                    }
                }
                dlog("stranded auto-paste applied stash #%d (%d entries)", stash.id, stash.entries.length);
            }
        }
    }
    finally {
        await repo.shutdown();
    }
}
885
/**
 * Record the time of the last sync on the given doc handle by writing the
 * current epoch-millis into its `lastSyncAt` field.
 *
 * @param handle - a doc handle exposing `change(mutator)`.
 */
function stampLastSyncAt(handle) {
    const touch = (doc) => {
        doc.lastSyncAt = Date.now();
    };
    handle.change(touch);
}
890
/**
 * Normalize a list of artifact directories via `normalizeArtifactDir`,
 * dropping entries that normalize to a falsy value and de-duplicating while
 * preserving first-seen order.
 *
 * @param {Iterable<string>} dirs - raw directory strings.
 * @returns {string[]} normalized, unique directories in input order.
 */
function normalizeDirs(dirs) {
    const known = new Set();
    const result = [];
    for (const raw of dirs) {
        const normalized = (0, index_js_1.normalizeArtifactDir)(raw);
        if (normalized && !known.has(normalized)) {
            known.add(normalized);
            result.push(normalized);
        }
    }
    return result;
}
902
/**
 * Diff two file snapshots.
 *
 * @param {Map<string, {url: string, bytes: Uint8Array}>} previous - saved
 *   state: path → {url, bytes}.
 * @param {Map<string, Uint8Array>} current - on-disk state: path → bytes.
 * @returns {{added: string[], modified: string[], deleted: string[]}}
 *   sorted path lists: in current only / in both with differing bytes
 *   (per `byteEq`) / in previous only.
 */
function computeDiff(previous, current) {
    const added = [];
    const modified = [];
    for (const [p, bytes] of current) {
        const before = previous.get(p);
        if (!before) {
            added.push(p);
        }
        else if (!(0, fs_tree_js_1.byteEq)(before.bytes, bytes)) {
            modified.push(p);
        }
    }
    const deleted = [...previous.keys()].filter((p) => !current.has(p));
    added.sort();
    modified.sort();
    deleted.sort();
    return { added, modified, deleted };
}
922
/**
 * Build a folder tree for the given on-disk files, creating or updating one
 * file doc per path.
 *
 * - Unchanged paths reuse the previous file-doc URL (heads stripped).
 * - Changed paths mutate the existing file doc in place (keeps URLs stable).
 * - New paths get a freshly created file doc.
 * Paths inside an artifact dir get their URL pinned to current heads.
 *
 * @param repo - the doc repo (find/create handles).
 * @param {Map<string, Uint8Array>} fsFiles - posix path → file bytes.
 * @param {Map<string, {url, bytes}>|undefined} previous - prior snapshot,
 *   or undefined on first push.
 * @param artifactDirs - normalized artifact dirs checked via isInArtifactDir.
 * @returns the new in-memory folder tree (leaves are file-doc URLs).
 */
async function pushFiles(repo, fsFiles, previous, artifactDirs) {
    const root = (0, index_js_1.newDir)();
    let created = 0;
    let updated = 0;
    let unchanged = 0;
    for (const [posixPath, bytes] of fsFiles) {
        const segments = posixPath.split("/").filter(Boolean);
        const isArtifact = (0, index_js_1.isInArtifactDir)(posixPath, artifactDirs);
        const fresh = (0, index_js_1.makeFileEntry)(posixPath, bytes, isArtifact);
        const prev = previous?.get(posixPath);
        let baseUrl;
        if (prev && (0, fs_tree_js_1.byteEq)(prev.bytes, bytes)) {
            // Unchanged path: keep the existing file-doc URL. For artifacts
            // we'll re-pin from the current heads below.
            baseUrl = (0, index_js_1.stripHeads)(prev.url);
            unchanged++;
        }
        else if (prev) {
            // Changed path: mutate the existing file doc in place. This keeps
            // the file URL stable within a branch and avoids the propagation
            // race where a brand-new file doc URL is referenced by the folder
            // before its bytes have reached the sync server.
            //
            // For string content (text files) we use Automerge.updateText so
            // concurrent character-level edits merge correctly. Bytes and
            // ImmutableString are atomic — last writer wins on the field.
            //
            // Branch isolation is enforced separately: `createBranch` deep
            // clones every file doc the source branch references, so two
            // branches never share a UnixFileEntry doc identity.
            const refreshUrl = (0, index_js_1.stripHeads)(prev.url);
            const handle = await repo.find(refreshUrl);
            handle.change((d) => {
                if (!(0, index_js_1.contentEquals)(d.content, fresh.content)) {
                    if (typeof d.content === "string" &&
                        typeof fresh.content === "string") {
                        Automerge.updateText(d, ["content"], fresh.content);
                    }
                    else {
                        d.content = fresh.content;
                    }
                }
                // Sync metadata fields only when they actually differ, so the
                // change is a no-op patch for untouched fields.
                if (d.extension !== fresh.extension)
                    d.extension = fresh.extension;
                if (d.mimeType !== fresh.mimeType)
                    d.mimeType = fresh.mimeType;
                if (d.name !== fresh.name)
                    d.name = fresh.name;
                if (!d["@patchwork"])
                    d["@patchwork"] = { type: "file" };
            });
            baseUrl = refreshUrl;
            updated++;
            dlog("pushFiles updated %s url=%s artifact=%s bytes=%d", posixPath, baseUrl, isArtifact, bytes.length);
        }
        else {
            // New path: create a fresh file doc.
            const handle = repo.create(fresh);
            baseUrl = handle.url;
            created++;
            dlog("pushFiles created %s url=%s artifact=%s bytes=%d", posixPath, baseUrl, isArtifact, bytes.length);
        }
        // Artifacts are referenced by a heads-pinned URL; regular files by
        // the bare doc URL.
        const finalUrl = isArtifact
            ? (0, index_js_1.pinUrl)(await repo.find(baseUrl))
            : baseUrl;
        (0, index_js_1.setFileAt)(root, segments, finalUrl);
    }
    dlog("pushFiles done: %d created, %d updated, %d unchanged", created, updated, unchanged);
    return root;
}
992
/**
 * Resolve every leaf of a folder tree to its file doc and collect the
 * decoded bytes.
 *
 * @param repo - the doc repo used to look up each leaf URL.
 * @param tree - an in-memory folder tree whose leaves are file-doc URLs.
 * @returns {Promise<Map<string, {url: string, bytes: Uint8Array}>>}
 *   posix path → {url (as stored in the tree), bytes}.
 */
async function readFileBytes(repo, tree) {
    const result = new Map();
    const leaves = (0, index_js_1.flattenLeaves)(tree);
    for (const [relPath, url] of leaves) {
        const fileHandle = await repo.find(url);
        const bytes = (0, index_js_1.contentToBytes)(fileHandle.doc().content);
        result.set(relPath, { url, bytes });
    }
    return result;
}
1003
/**
 * Make the on-disk working tree under `root` match `tree`.
 *
 * Resolves each leaf's file doc to bytes, writes files whose bytes differ
 * from what is on disk (atomic write), removes files present on disk but
 * absent from the tree, and prunes directories emptied by those removals.
 * Files matched by the ignore rules are invisible to the walk and therefore
 * never written or removed.
 *
 * @param repo - the doc repo used to resolve leaf URLs.
 * @param {string} root - absolute repo root on disk.
 * @param tree - in-memory folder tree whose leaves are file-doc URLs.
 */
async function materializeTree(repo, root, tree) {
    // Desired state: posix path → bytes, resolved from the tree's file docs.
    const desired = new Map();
    for (const [posixPath, fileUrl] of (0, index_js_1.flattenLeaves)(tree)) {
        const handle = await repo.find(fileUrl);
        desired.set(posixPath, (0, index_js_1.contentToBytes)(handle.doc().content));
    }
    dlog("materialize desired: %d files", desired.size);
    const ig = await (0, ignore_js_1.loadIgnore)(root);
    const present = await (0, fs_tree_js_1.walkDir)(root, ig);
    let written = 0;
    let removed = 0;
    // Write only files whose on-disk bytes differ.
    for (const [posixPath, bytes] of desired) {
        if ((0, fs_tree_js_1.byteEq)(present.get(posixPath), bytes))
            continue;
        await (0, fs_tree_js_1.writeFileAtomic)(path.join(root, fromPosix(posixPath)), bytes);
        written++;
    }
    // Remove files on disk that the tree no longer contains.
    for (const posixPath of present.keys()) {
        if (desired.has(posixPath))
            continue;
        try {
            await fs.unlink(path.join(root, fromPosix(posixPath)));
            removed++;
        }
        catch {
            // already gone
        }
        await pruneEmptyDirs(root, path.dirname(fromPosix(posixPath)));
    }
    dlog("materialize done: %d written, %d removed", written, removed);
}
1034
// Convert a repo-internal posix path to the host platform's separator.
const fromPosix = (p) => p.replaceAll("/", path.sep);
1035
/**
 * Walk upward from `relDir` toward the repo root, removing each directory
 * that is empty. Stops at the first non-empty directory, at ".", at the
 * path separator, or on any filesystem error (e.g. the directory is
 * already gone).
 *
 * @param {string} root - absolute repo root on disk.
 * @param {string} relDir - directory path relative to `root`.
 */
async function pruneEmptyDirs(root, relDir) {
    for (let dir = relDir; dir && dir !== "." && dir !== path.sep; dir = path.dirname(dir)) {
        const abs = path.join(root, dir);
        try {
            const contents = await fs.readdir(abs);
            if (contents.length > 0)
                return;
            await fs.rmdir(abs);
        }
        catch {
            return;
        }
    }
}
1051
/**
 * Whether two folder trees have identical flattened leaves: same set of
 * paths, each mapping to a strictly-equal URL.
 *
 * @param a - first in-memory folder tree.
 * @param b - second in-memory folder tree.
 * @returns {boolean} true when the flattened leaf maps are equal.
 */
function sameTree(a, b) {
    const left = (0, index_js_1.flattenLeaves)(a);
    const right = (0, index_js_1.flattenLeaves)(b);
    if (left.size !== right.size)
        return false;
    for (const [key, url] of left) {
        if (right.get(key) !== url)
            return false;
    }
    return true;
}
1062
+ //# sourceMappingURL=pushwork.js.map