pushwork 2.0.0-preview.2 → 2.0.0-preview.4
This diff shows the contents of publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
- package/dist/cli/commands.d.ts +71 -0
- package/dist/cli/commands.d.ts.map +1 -0
- package/dist/cli/commands.js +794 -0
- package/dist/cli/commands.js.map +1 -0
- package/dist/cli/index.d.ts +2 -0
- package/dist/cli/index.d.ts.map +1 -0
- package/dist/cli/index.js +19 -0
- package/dist/cli/index.js.map +1 -0
- package/dist/cli.js +63 -103
- package/dist/cli.js.map +1 -1
- package/dist/commands.d.ts +58 -0
- package/dist/commands.d.ts.map +1 -0
- package/dist/commands.js +975 -0
- package/dist/commands.js.map +1 -0
- package/dist/config/index.d.ts +71 -0
- package/dist/config/index.d.ts.map +1 -0
- package/dist/config/index.js +314 -0
- package/dist/config/index.js.map +1 -0
- package/dist/config.d.ts +1 -2
- package/dist/config.d.ts.map +1 -1
- package/dist/config.js +1 -2
- package/dist/config.js.map +1 -1
- package/dist/core/change-detection.d.ts +80 -0
- package/dist/core/change-detection.d.ts.map +1 -0
- package/dist/core/change-detection.js +560 -0
- package/dist/core/change-detection.js.map +1 -0
- package/dist/core/config.d.ts +81 -0
- package/dist/core/config.d.ts.map +1 -0
- package/dist/core/config.js +304 -0
- package/dist/core/config.js.map +1 -0
- package/dist/core/index.d.ts +6 -0
- package/dist/core/index.d.ts.map +1 -0
- package/dist/core/index.js +22 -0
- package/dist/core/index.js.map +1 -0
- package/dist/core/move-detection.d.ts +34 -0
- package/dist/core/move-detection.d.ts.map +1 -0
- package/dist/core/move-detection.js +128 -0
- package/dist/core/move-detection.js.map +1 -0
- package/dist/core/snapshot.d.ts +105 -0
- package/dist/core/snapshot.d.ts.map +1 -0
- package/dist/core/snapshot.js +254 -0
- package/dist/core/snapshot.js.map +1 -0
- package/dist/core/sync-engine.d.ts +177 -0
- package/dist/core/sync-engine.d.ts.map +1 -0
- package/dist/core/sync-engine.js +1471 -0
- package/dist/core/sync-engine.js.map +1 -0
- package/dist/index.d.ts +3 -4
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +4 -14
- package/dist/index.js.map +1 -1
- package/dist/pushwork.d.ts +36 -68
- package/dist/pushwork.d.ts.map +1 -1
- package/dist/pushwork.js +233 -569
- package/dist/pushwork.js.map +1 -1
- package/dist/snarf.d.ts +21 -0
- package/dist/snarf.d.ts.map +1 -0
- package/dist/snarf.js +117 -0
- package/dist/snarf.js.map +1 -0
- package/dist/stash.d.ts +0 -2
- package/dist/stash.d.ts.map +1 -1
- package/dist/stash.js +0 -1
- package/dist/stash.js.map +1 -1
- package/dist/types/config.d.ts +102 -0
- package/dist/types/config.d.ts.map +1 -0
- package/dist/types/config.js +10 -0
- package/dist/types/config.js.map +1 -0
- package/dist/types/documents.d.ts +88 -0
- package/dist/types/documents.d.ts.map +1 -0
- package/dist/types/documents.js +23 -0
- package/dist/types/documents.js.map +1 -0
- package/dist/types/index.d.ts +4 -0
- package/dist/types/index.d.ts.map +1 -0
- package/dist/types/index.js +20 -0
- package/dist/types/index.js.map +1 -0
- package/dist/types/snapshot.d.ts +64 -0
- package/dist/types/snapshot.d.ts.map +1 -0
- package/dist/types/snapshot.js +3 -0
- package/dist/types/snapshot.js.map +1 -0
- package/dist/utils/content-similarity.d.ts +53 -0
- package/dist/utils/content-similarity.d.ts.map +1 -0
- package/dist/utils/content-similarity.js +155 -0
- package/dist/utils/content-similarity.js.map +1 -0
- package/dist/utils/content.d.ts +10 -0
- package/dist/utils/content.d.ts.map +1 -0
- package/dist/utils/content.js +35 -0
- package/dist/utils/content.js.map +1 -0
- package/dist/utils/directory.d.ts +24 -0
- package/dist/utils/directory.d.ts.map +1 -0
- package/dist/utils/directory.js +56 -0
- package/dist/utils/directory.js.map +1 -0
- package/dist/utils/fs.d.ts +74 -0
- package/dist/utils/fs.d.ts.map +1 -0
- package/dist/utils/fs.js +298 -0
- package/dist/utils/fs.js.map +1 -0
- package/dist/utils/index.d.ts +5 -0
- package/dist/utils/index.d.ts.map +1 -0
- package/dist/utils/index.js +21 -0
- package/dist/utils/index.js.map +1 -0
- package/dist/utils/mime-types.d.ts +13 -0
- package/dist/utils/mime-types.d.ts.map +1 -0
- package/dist/utils/mime-types.js +247 -0
- package/dist/utils/mime-types.js.map +1 -0
- package/dist/utils/network-sync.d.ts +30 -0
- package/dist/utils/network-sync.d.ts.map +1 -0
- package/dist/utils/network-sync.js +391 -0
- package/dist/utils/network-sync.js.map +1 -0
- package/dist/utils/node-polyfills.d.ts +9 -0
- package/dist/utils/node-polyfills.d.ts.map +1 -0
- package/dist/utils/node-polyfills.js +9 -0
- package/dist/utils/node-polyfills.js.map +1 -0
- package/dist/utils/output.d.ts +129 -0
- package/dist/utils/output.d.ts.map +1 -0
- package/dist/utils/output.js +375 -0
- package/dist/utils/output.js.map +1 -0
- package/dist/utils/repo-factory.d.ts +15 -0
- package/dist/utils/repo-factory.d.ts.map +1 -0
- package/dist/utils/repo-factory.js +156 -0
- package/dist/utils/repo-factory.js.map +1 -0
- package/dist/utils/string-similarity.d.ts +14 -0
- package/dist/utils/string-similarity.d.ts.map +1 -0
- package/dist/utils/string-similarity.js +43 -0
- package/dist/utils/string-similarity.js.map +1 -0
- package/dist/utils/text-diff.d.ts +37 -0
- package/dist/utils/text-diff.d.ts.map +1 -0
- package/dist/utils/text-diff.js +131 -0
- package/dist/utils/text-diff.js.map +1 -0
- package/dist/utils/trace.d.ts +19 -0
- package/dist/utils/trace.d.ts.map +1 -0
- package/dist/utils/trace.js +68 -0
- package/dist/utils/trace.js.map +1 -0
- package/package.json +1 -1
package/dist/pushwork.js
CHANGED
@@ -33,44 +33,35 @@ var __importStar = (this && this.__importStar) || (function () {
     };
 })();
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.deleteBranchFile = void 0;
 exports.init = init;
 exports.clone = clone;
 exports.url = url;
 exports.sync = sync;
 exports.nuclearizeRepo = nuclearizeRepo;
 exports.save = save;
+exports.heads = heads;
 exports.status = status;
 exports.diff = diff;
-exports.listBranches = listBranches;
-exports.currentBranch = currentBranch;
-exports.createBranch = createBranch;
-exports.previewMerge = previewMerge;
-exports.mergeBranch = mergeBranch;
 exports.cutWorkdir = cutWorkdir;
-exports.
-exports.
-exports.switchBranch = switchBranch;
+exports.pasteSnarf = pasteSnarf;
+exports.showSnarfs = showSnarfs;
 const fs = __importStar(require("fs/promises"));
 const path = __importStar(require("path"));
 const Automerge = __importStar(require("@automerge/automerge"));
 const automerge_repo_1 = require("@automerge/automerge-repo");
 const config_js_1 = require("./config.js");
-const branches_js_1 = require("./branches.js");
-Object.defineProperty(exports, "deleteBranchFile", { enumerable: true, get: function () { return branches_js_1.deleteBranchFile; } });
 const ignore_js_1 = require("./ignore.js");
 const fs_tree_js_1 = require("./fs-tree.js");
 const log_js_1 = require("./log.js");
 const repo_js_1 = require("./repo.js");
-const
+const snarf_js_1 = require("./snarf.js");
 const index_js_1 = require("./shapes/index.js");
 const dlog = (0, log_js_1.log)("pushwork");
 const DEFAULT_ARTIFACT_DIRECTORIES = ["dist"];
 async function init(opts) {
     const root = path.resolve(opts.dir);
-    const useBranches = opts.branches ?? true;
     const online = opts.online ?? true;
-    dlog("init root=%s backend=%s shape=%s
+    dlog("init root=%s backend=%s shape=%s online=%s", root, opts.backend, opts.shape, online);
     if (await (0, config_js_1.configExists)(root)) {
         throw new Error(`pushwork already initialized at ${root}`);
     }
@@ -93,29 +84,15 @@ async function init(opts) {
             stampLastSyncAt(folderHandle);
             await (0, repo_js_1.waitForSync)(folderHandle, { idleMs: 1500, maxMs: 10000 });
         }
-        let rootUrl = folderUrl;
-        if (useBranches) {
-            const branchesHandle = repo.create({
-                "@patchwork": { type: "branches", ...(title ? { title } : {}) },
-                branches: { [branches_js_1.DEFAULT_BRANCH]: folderUrl },
-            });
-            if (online) {
-                await (0, repo_js_1.waitForSync)(branchesHandle, { minMs: 1500, idleMs: 1500, maxMs: 10000 });
-            }
-            rootUrl = branchesHandle.url;
-            dlog("init wrapped in BranchesDoc=%s", rootUrl);
-            await (0, branches_js_1.writeBranchFile)(root, branches_js_1.DEFAULT_BRANCH);
-        }
         await (0, config_js_1.writeConfig)(root, {
             version: config_js_1.CONFIG_VERSION,
-            rootUrl,
+            rootUrl: folderUrl,
             backend: opts.backend,
             shape: opts.shape,
             artifactDirectories: artifactDirs,
-            branches: useBranches,
         });
-        dlog("init complete: rootUrl=%s",
-        return
+        dlog("init complete: rootUrl=%s", folderUrl);
+        return folderUrl;
     }
     finally {
         await repo.shutdown();
@@ -137,36 +114,36 @@ async function clone(opts) {
     const repo = await (0, repo_js_1.openRepo)(opts.backend, (0, config_js_1.storageDir)(root), { offline: !online });
     try {
         const shape = await (0, index_js_1.resolveShape)(opts.shape);
-
+        let folderHandle = await repo.find(opts.url);
         if (online) {
-            await (0, repo_js_1.waitForSync)(
+            await (0, repo_js_1.waitForSync)(folderHandle, { idleMs: 1500, maxMs: 15000 });
         }
-
-
-
-
-
-
-        const
-
+        let storedUrl = opts.url;
+        const branchesDoc = asBranchesDoc(folderHandle.doc());
+        if (branchesDoc) {
+            if (!opts.onBranchesDoc) {
+                throw new Error(`URL ${opts.url} is a legacy branches doc; pushwork no longer supports branches. Provide an onBranchesDoc callback (or use the CLI, which will prompt you to pick a branch).`);
+            }
+            const branches = Object.entries(branchesDoc.branches).map(([name, url]) => ({ name, url }));
+            const chosenUrl = await opts.onBranchesDoc({
+                title: branchesDoc.title,
+                branches,
+            });
+            dlog("clone branches doc → chose %s", chosenUrl);
+            folderHandle = await repo.find(chosenUrl);
             if (online) {
                 await (0, repo_js_1.waitForSync)(folderHandle, { idleMs: 1500, maxMs: 15000 });
             }
-
-            dlog("clone branch=%s folder=%s", branchName, folderHandle.url);
-        }
-        else if (opts.branch) {
-            throw new Error(`--branch passed but root doc is not a branches doc (type=${docType})`);
+            storedUrl = chosenUrl;
         }
         const tree = await shape.decode({ repo, root: folderHandle });
         await materializeTree(repo, root, tree);
         await (0, config_js_1.writeConfig)(root, {
             version: config_js_1.CONFIG_VERSION,
-            rootUrl:
+            rootUrl: storedUrl,
             backend: opts.backend,
             shape: opts.shape,
             artifactDirectories: artifactDirs,
-            branches: useBranches,
         });
         dlog("clone complete");
     }
@@ -174,6 +151,22 @@ async function clone(opts) {
         await repo.shutdown();
     }
 }
+function asBranchesDoc(doc) {
+    if (!doc || typeof doc !== "object")
+        return null;
+    const meta = doc["@patchwork"];
+    if (!meta || typeof meta !== "object")
+        return null;
+    if (meta.type !== "branches")
+        return null;
+    const branches = doc.branches;
+    if (!branches || typeof branches !== "object")
+        return null;
+    return {
+        title: meta.title,
+        branches: branches,
+    };
+}
 async function url(cwd) {
     const config = await (0, config_js_1.readConfig)(path.resolve(cwd));
     return config.rootUrl;
@@ -181,77 +174,89 @@ async function url(cwd) {
 async function sync(cwd, opts = {}) {
     if (opts.nuclear) {
         await nuclearizeRepo(cwd);
+        await publishCurrentTree(cwd);
+        return;
     }
     await commitWorkdir(cwd, { online: true });
 }
 /**
- *
- *
- *
- *
-
+ * Open an online repo, subscribe the root folder and every file leaf so the
+ * network adapter announces them to peers, then wait for the local heads to
+ * settle. No decode/diff/encode — used after nuclearizeRepo, where every doc
+ * is freshly created locally and the server has nothing to merge in.
+ */
+async function publishCurrentTree(cwd) {
+    const root = path.resolve(cwd);
+    const config = await (0, config_js_1.readConfig)(root);
+    dlog("publish root=%s", root);
+    const repo = await (0, repo_js_1.openRepo)(config.backend, (0, config_js_1.storageDir)(root), { offline: false });
+    try {
+        const shape = await (0, index_js_1.resolveShape)(config.shape);
+        const folderHandle = await repo.find(config.rootUrl);
+        const tree = await shape.decode({ repo, root: folderHandle });
+        // Touch every leaf so the network adapter knows to push it.
+        for (const [, fileUrl] of (0, index_js_1.flattenLeaves)(tree)) {
+            await repo.find(fileUrl);
+        }
+        stampLastSyncAt(folderHandle);
+        await (0, repo_js_1.waitForSync)(folderHandle, {
+            minMs: 3000,
+            idleMs: 1500,
+            maxMs: 15000,
+        });
+        dlog("publish complete");
+    }
+    finally {
+        await repo.shutdown();
+    }
+}
+/**
+ * Re-create every UnixFileEntry doc this repo references with a fresh URL,
+ * then rewrite the existing folder doc's leaves to point at the new file
+ * URLs. The folder doc URL itself is preserved so anyone holding it keeps
+ * tracking this repo. Offline; the next sync publishes the new file docs
+ * and the rewritten folder doc to the server.
  *
- *
- * Anyone
- * client just stops referencing them.
+ * The previous file-doc URLs are orphaned from this repo's perspective.
+ * Anyone holding one of those URLs directly continues to work from it;
+ * this client just stops referencing them.
  */
 async function nuclearizeRepo(cwd) {
     const root = path.resolve(cwd);
     const config = await (0, config_js_1.readConfig)(root);
-
-    dlog("nuclear root=%s branches=%s current=%s", root, config.branches, branchName);
+    dlog("nuclear root=%s rootUrl=%s", root, config.rootUrl);
     const repo = await (0, repo_js_1.openRepo)(config.backend, (0, config_js_1.storageDir)(root), { offline: true });
     try {
         const shape = await (0, index_js_1.resolveShape)(config.shape);
-        const
+        const folderHandle = await repo.find(config.rootUrl);
         const title = path.basename(root) || undefined;
-        const
-
-
-
-        const
-
-
-
-
-
-
-
-
-                mimeType: oldDoc.mimeType,
-                content: oldDoc.content,
-            });
-            let finalUrl = newFileHandle.url;
-            if ((0, automerge_repo_1.parseAutomergeUrl)(fileUrl).heads) {
-                finalUrl = (0, index_js_1.pinUrl)(newFileHandle);
-            }
-            (0, index_js_1.setFileAt)(newTree, posixPath.split("/").filter(Boolean), finalUrl);
-        }
-        // Encode without previousRoot → fresh folder/directory doc URL.
-        return shape.encode({ repo, tree: newTree, title: folderTitle });
-        };
-        let newRootUrl;
-        if (config.branches && (0, branches_js_1.isBranchesDoc)(oldRootHandle.doc())) {
-            const oldDoc = oldRootHandle.doc();
-            const newBranches = {};
-            for (const [name, oldFolderUrl] of Object.entries(oldDoc.branches)) {
-                newBranches[name] = await rebuildFolder(oldFolderUrl, title);
-                dlog("nuclear rebuilt branch %s → %s", name, newBranches[name]);
-            }
-            const newRoot = repo.create({
-                "@patchwork": {
-                    type: "branches",
-                    ...(title ? { title } : {}),
-                },
-                branches: newBranches,
+        const oldTree = await shape.decode({ repo, root: folderHandle });
+        // For each leaf: read content, create a fresh UnixFileEntry doc.
+        const newTree = (0, index_js_1.newDir)();
+        for (const [posixPath, fileUrl] of (0, index_js_1.flattenLeaves)(oldTree)) {
+            const bare = (0, index_js_1.stripHeads)(fileUrl);
+            const oldFileHandle = await repo.find(bare);
+            const oldDoc = oldFileHandle.doc();
+            const newFileHandle = repo.create({
+                "@patchwork": { type: "file" },
+                name: oldDoc.name,
+                extension: oldDoc.extension,
+                mimeType: oldDoc.mimeType,
+                content: oldDoc.content,
             });
-
-
-
-
-
-
-
+            let finalUrl = newFileHandle.url;
+            if ((0, index_js_1.isInArtifactDir)(posixPath, config.artifactDirectories)) {
+                finalUrl = (0, index_js_1.pinUrl)(newFileHandle);
+            }
+            (0, index_js_1.setFileAt)(newTree, posixPath.split("/").filter(Boolean), finalUrl);
+        }
+        // Mutate the existing folder doc in place — same URL, new file leaves.
+        await shape.encode({
+            repo,
+            tree: newTree,
+            previousRoot: folderHandle,
+            title,
+        });
     }
     finally {
         await repo.shutdown();
@@ -263,28 +268,13 @@ async function save(cwd) {
 async function commitWorkdir(cwd, { online }) {
     const root = path.resolve(cwd);
     const config = await (0, config_js_1.readConfig)(root);
-
-    dlog("commit online=%s root=%s branch=%s", online, root, branchName);
+    dlog("commit online=%s root=%s", online, root);
     const repo = await (0, repo_js_1.openRepo)(config.backend, (0, config_js_1.storageDir)(root), {
         offline: !online,
     });
     try {
         const shape = await (0, index_js_1.resolveShape)(config.shape);
-        const
-        // In branches mode + online, touch every branch's folder doc so the
-        // network adapter announces them. Without this, a branch created
-        // offline (`pushwork branch X`) is never pushed to the server, even
-        // though its entry is in the BranchesDoc.
-        const otherBranchHandles = [];
-        if (online && config.branches && (0, branches_js_1.isBranchesDoc)(rootHandle.doc())) {
-            const doc = rootHandle.doc();
-            for (const [name, url] of Object.entries(doc.branches)) {
-                if (name === branchName)
-                    continue;
-                otherBranchHandles.push(await repo.find(url));
-            }
-        }
-        const folderHandle = await (0, branches_js_1.resolveEffectiveRoot)(repo, rootHandle, branchName);
+        const folderHandle = await repo.find(config.rootUrl);
         const previousTree = await shape.decode({ repo, root: folderHandle });
         const previousFiles = await readFileBytes(repo, previousTree);
         const ig = await (0, ignore_js_1.loadIgnore)(root);
@@ -296,25 +286,23 @@ async function commitWorkdir(cwd, { online }) {
             await shape.encode({ repo, tree: newTree, previousRoot: folderHandle });
         }
         if (online) {
-            // Always stamp lastSyncAt on a sync, regardless of whether the
-            // working tree changed — a sync is also a checkpoint that "we
-            // reconciled with the server at this time."
-            stampLastSyncAt(folderHandle);
-            // Wait for the current branch's folder, the BranchesDoc itself
-            // (when in branches mode), and any other branch folder docs to
-            // flush. The maxMs is generous so a brand-new offline-created
-            // branch reliably propagates.
             await (0, repo_js_1.waitForSync)(folderHandle, {
                 minMs: 3000,
                 idleMs: 1500,
                 maxMs: 15000,
             });
-
-
-
-
-
-
+            // After peer changes have settled, refresh the folder doc so its
+            // pinned (artifact) leaves reference each file doc's current
+            // heads. Bare URLs already track current heads implicitly.
+            const refreshed = await refreshFolderPins(repo, folderHandle, shape, config.artifactDirectories);
+            // Always stamp lastSyncAt — a sync is also a checkpoint that
+            // "we reconciled with the server at this time" — and let any
+            // resulting changes flush.
+            stampLastSyncAt(folderHandle);
+            await (0, repo_js_1.waitForSync)(folderHandle, {
+                idleMs: 1500,
+                maxMs: refreshed ? 10000 : 5000,
+            });
         }
         const finalTree = await shape.decode({ repo, root: folderHandle });
         await materializeTree(repo, root, finalTree);
@@ -324,21 +312,71 @@ async function commitWorkdir(cwd, { online }) {
         await repo.shutdown();
     }
 }
+/**
+ * List the current Automerge heads for the root folder doc and every file
+ * leaf it references. Offline; never contacts a sync server.
+ *
+ * `pathspec` filters results: exact match, or prefix match against a folder
+ * (e.g. "src" or "src/" matches "src/index.ts"). Pass "/" to show only the
+ * root folder doc.
+ */
+async function heads(cwd, pathspec) {
+    const root = path.resolve(cwd);
+    const config = await (0, config_js_1.readConfig)(root);
+    const repo = await (0, repo_js_1.openRepo)(config.backend, (0, config_js_1.storageDir)(root), { offline: true });
+    try {
+        const shape = await (0, index_js_1.resolveShape)(config.shape);
+        const folderHandle = await repo.find(config.rootUrl);
+        const tree = await shape.decode({ repo, root: folderHandle });
+        const out = [];
+        const matches = (p) => matchesPathspec(p, pathspec);
+        if (matches("/")) {
+            out.push({
+                path: "/",
+                url: config.rootUrl,
+                heads: folderHandle.heads() ?? [],
+            });
+        }
+        for (const [posixPath, fileUrl] of (0, index_js_1.flattenLeaves)(tree)) {
+            if (!matches(posixPath))
+                continue;
+            const handle = await repo.find(fileUrl);
+            out.push({
+                path: posixPath,
+                url: fileUrl,
+                heads: handle.heads() ?? [],
+            });
+        }
+        out.sort((a, b) => a.path.localeCompare(b.path));
+        return out;
+    }
+    finally {
+        await repo.shutdown();
+    }
+}
+function matchesPathspec(path, spec) {
+    if (!spec)
+        return true;
+    if (spec === "/")
+        return path === "/";
+    const trimmed = spec.endsWith("/") ? spec.slice(0, -1) : spec;
+    if (path === trimmed)
+        return true;
+    return path.startsWith(trimmed + "/");
+}
 async function status(cwd) {
     const root = path.resolve(cwd);
     const config = await (0, config_js_1.readConfig)(root);
-    const branchName = config.branches ? await (0, branches_js_1.readBranchFile)(root) : null;
     const repo = await (0, repo_js_1.openRepo)(config.backend, (0, config_js_1.storageDir)(root), { offline: true });
     try {
         const shape = await (0, index_js_1.resolveShape)(config.shape);
-        const
-        const folderHandle = await (0, branches_js_1.resolveEffectiveRoot)(repo, rootHandle, branchName);
+        const folderHandle = await repo.find(config.rootUrl);
         const previousTree = await shape.decode({ repo, root: folderHandle });
         const previousFiles = await readFileBytes(repo, previousTree);
         const ig = await (0, ignore_js_1.loadIgnore)(root);
         const fsFiles = await (0, fs_tree_js_1.walkDir)(root, ig);
         const diff = computeDiff(previousFiles, fsFiles);
-        return {
+        return { diff };
     }
     finally {
         await repo.shutdown();
@@ -347,12 +385,10 @@ async function status(cwd) {
 async function diff(cwd, limitToPath) {
     const root = path.resolve(cwd);
     const config = await (0, config_js_1.readConfig)(root);
-    const branchName = config.branches ? await (0, branches_js_1.readBranchFile)(root) : null;
     const repo = await (0, repo_js_1.openRepo)(config.backend, (0, config_js_1.storageDir)(root), { offline: true });
     try {
         const shape = await (0, index_js_1.resolveShape)(config.shape);
-        const
-        const folderHandle = await (0, branches_js_1.resolveEffectiveRoot)(repo, rootHandle, branchName);
+        const folderHandle = await repo.find(config.rootUrl);
         const previousTree = await shape.decode({ repo, root: folderHandle });
         const previousFiles = await readFileBytes(repo, previousTree);
         const ig = await (0, ignore_js_1.loadIgnore)(root);
@@ -381,309 +417,19 @@ async function diff(cwd, limitToPath) {
         await repo.shutdown();
     }
 }
-async function listBranches(cwd) {
-    const root = path.resolve(cwd);
-    const config = await (0, config_js_1.readConfig)(root);
-    if (!config.branches) {
-        throw new Error("pushwork repo has no branches");
-    }
-    const current = await (0, branches_js_1.readBranchFile)(root);
-    const repo = await (0, repo_js_1.openRepo)(config.backend, (0, config_js_1.storageDir)(root), { offline: true });
-    try {
-        const rootHandle = await repo.find(config.rootUrl);
-        const doc = rootHandle.doc();
-        if (!(0, branches_js_1.isBranchesDoc)(doc)) {
-            throw new Error(`root doc at ${config.rootUrl} is not a branches doc`);
-        }
-        return { current, names: (0, branches_js_1.listBranchNames)(doc) };
-    }
-    finally {
-        await repo.shutdown();
-    }
-}
-async function currentBranch(cwd) {
-    const root = path.resolve(cwd);
-    const config = await (0, config_js_1.readConfig)(root);
-    if (!config.branches)
-        return null;
-    return (0, branches_js_1.readBranchFile)(root);
-}
-async function createBranch(cwd, name) {
-    if (!name)
-        throw new Error("branch name is required");
-    if (name.includes("/") || name.includes("\\")) {
-        throw new Error("branch name may not contain slashes");
-    }
-    const root = path.resolve(cwd);
-    const config = await (0, config_js_1.readConfig)(root);
-    if (!config.branches)
-        throw new Error("pushwork repo has no branches");
-    const currentName = await (0, branches_js_1.readBranchFile)(root);
-    if (!currentName)
-        throw new Error("no current branch is set");
-    const repo = await (0, repo_js_1.openRepo)(config.backend, (0, config_js_1.storageDir)(root), { offline: true });
-    try {
-        const shape = await (0, index_js_1.resolveShape)(config.shape);
-        const rootHandle = await repo.find(config.rootUrl);
-        const doc = rootHandle.doc();
-        if (!(0, branches_js_1.isBranchesDoc)(doc)) {
-            throw new Error(`root doc at ${config.rootUrl} is not a branches doc`);
-        }
-        if (doc.branches[name]) {
-            throw new Error(`branch "${name}" already exists`);
-        }
-        const sourceUrl = doc.branches[currentName];
-        if (!sourceUrl) {
-            throw new Error(`current branch "${currentName}" not found in branches doc`);
-        }
-        const sourceHandle = await repo.find(sourceUrl);
-        // Clone the folder doc.
-        const clonedFolder = repo.clone(sourceHandle);
-        dlog("createBranch %s cloned folder %s → %s", name, sourceUrl, clonedFolder.url);
-        // Deep-clone every file doc the source folder references, then rewrite
-        // the cloned folder's leaves to point at the new file URLs. Without
-        // this step both branches would alias the same UnixFileEntry docs and
-        // editing one branch would silently mutate the other.
-        const sourceTree = await shape.decode({ repo, root: sourceHandle });
-        const fileUrlRemap = new Map();
-        for (const [, fileUrl] of (0, index_js_1.flattenLeaves)(sourceTree)) {
-            const bare = (0, index_js_1.stripHeads)(fileUrl);
-            if (fileUrlRemap.has(bare))
-                continue;
-            const orig = await repo.find(bare);
-            const cloned = repo.clone(orig);
-            fileUrlRemap.set(bare, cloned.url);
-            dlog("createBranch cloned file %s → %s", bare, cloned.url);
-        }
-        const newTree = (0, index_js_1.newDir)();
-        for (const [posixPath, fileUrl] of (0, index_js_1.flattenLeaves)(sourceTree)) {
-            const bare = (0, index_js_1.stripHeads)(fileUrl);
-            const remappedBare = fileUrlRemap.get(bare);
-            if (!remappedBare)
-                continue;
-            const parsed = (0, automerge_repo_1.parseAutomergeUrl)(fileUrl);
-            // Preserve heads-pinning if the source URL was pinned.
-            let finalUrl = remappedBare;
-            if (parsed.heads) {
-                const newHandle = await repo.find(remappedBare);
-                finalUrl = (0, index_js_1.pinUrl)(newHandle);
-            }
-            const segments = posixPath.split("/").filter(Boolean);
-            (0, index_js_1.setFileAt)(newTree, segments, finalUrl);
-        }
-        await shape.encode({ repo, tree: newTree, previousRoot: clonedFolder });
-        rootHandle.change((d) => {
-            d.branches[name] = clonedFolder.url;
-        });
-        // Switch to the new branch. The deep clone has identical content to the
-        // source, so the working tree on disk is already correct — we just
-        // update .pushwork/branch.
-        await (0, branches_js_1.writeBranchFile)(root, name);
-        dlog("createBranch switched to %s", name);
-        return clonedFolder.url;
-    }
-    finally {
-        await repo.shutdown();
-    }
-}
-/**
- * Apply changes from `source` branch onto the current branch.
- *
- * For each path:
- * - In both branches: their UnixFileEntry docs share Automerge history (deep
- *   cloned at branch creation), so we Automerge-merge source's content into
- *   target's. Concurrent edits are CRDT-merged inside each file doc.
- * - Only in source: deep-clone the source's file doc into a new doc and add
- *   it to target's folder. Editing on either branch afterward stays isolated.
- * - Only in target: untouched. We don't propagate deletions from source — the
- *   user can do that explicitly.
- *
- * Refuses if the working tree has uncommitted changes against the current
- * branch (run `pushwork save` first). Offline only — propagation happens on
- * the next `pushwork sync`.
- */
-/**
- * Compute what `merge <source>` would do without mutating any docs or the
- * working tree. For paths in both branches we apply the merge to a *clone*
- * of the target's file doc to learn the merged bytes; for paths only in
- * source we just read source's bytes.
- */
-async function previewMerge(cwd, source) {
-    if (!source)
-        throw new Error("source branch name is required");
-    const root = path.resolve(cwd);
-    const config = await (0, config_js_1.readConfig)(root);
-    if (!config.branches)
-        throw new Error("pushwork repo has no branches");
-    const targetName = await (0, branches_js_1.readBranchFile)(root);
-    if (!targetName)
-        throw new Error("no current branch is set");
-    if (source === targetName) {
-        throw new Error(`cannot merge "${source}" into itself`);
-    }
-    const repo = await (0, repo_js_1.openRepo)(config.backend, (0, config_js_1.storageDir)(root), { offline: true });
-    try {
-        const shape = await (0, index_js_1.resolveShape)(config.shape);
-        const rootHandle = await repo.find(config.rootUrl);
-        const branchesDoc = rootHandle.doc();
-        if (!(0, branches_js_1.isBranchesDoc)(branchesDoc)) {
-            throw new Error(`root doc at ${config.rootUrl} is not a branches doc`);
-        }
-        if (!branchesDoc.branches[source]) {
-            throw new Error(`source branch "${source}" does not exist`);
-        }
-        const targetFolder = await repo.find(branchesDoc.branches[targetName]);
-        const sourceFolder = await repo.find(branchesDoc.branches[source]);
-        const tTree = await shape.decode({ repo, root: targetFolder });
-        const sTree = await shape.decode({ repo, root: sourceFolder });
-        const tLeaves = (0, index_js_1.flattenLeaves)(tTree);
-        const sLeaves = (0, index_js_1.flattenLeaves)(sTree);
-        const entries = [];
-        for (const [posixPath, sUrl] of sLeaves) {
-            const tUrl = tLeaves.get(posixPath);
-            const sBare = (0, index_js_1.stripHeads)(sUrl);
-            const sHandle = await repo.find(sBare);
-            if (!tUrl) {
-                entries.push({
-                    path: posixPath,
-                    kind: "added",
-                    after: (0, index_js_1.contentToBytes)(sHandle.doc().content),
-                });
-                continue;
-            }
-            const tBare = (0, index_js_1.stripHeads)(tUrl);
-            if (tBare === sBare)
-                continue;
-            const tHandle = await repo.find(tBare);
-            const before = (0, index_js_1.contentToBytes)(tHandle.doc().content);
-            // Compute merge result without touching the target doc.
-            const merged = Automerge.merge(Automerge.clone(tHandle.doc()), Automerge.clone(sHandle.doc()));
-            const after = (0, index_js_1.contentToBytes)(merged.content);
-            if ((0, fs_tree_js_1.byteEq)(before, after))
-                continue;
-            entries.push({ path: posixPath, kind: "merged", before, after });
-        }
-        entries.sort((a, b) => a.path.localeCompare(b.path));
-        return { source, target: targetName, entries };
-    }
-    finally {
-        await repo.shutdown();
-    }
-}
-async function mergeBranch(cwd, source) {
-    if (!source)
-        throw new Error("source branch name is required");
-    const root = path.resolve(cwd);
-    const config = await (0, config_js_1.readConfig)(root);
-    if (!config.branches)
-        throw new Error("pushwork repo has no branches");
-    const targetName = await (0, branches_js_1.readBranchFile)(root);
-    if (!targetName)
-        throw new Error("no current branch is set");
-    if (source === targetName) {
-        throw new Error(`cannot merge "${source}" into itself`);
-    }
-    dlog("merge source=%s target=%s", source, targetName);
-    const repo = await (0, repo_js_1.openRepo)(config.backend, (0, config_js_1.storageDir)(root), { offline: true });
-    try {
-        const shape = await (0, index_js_1.resolveShape)(config.shape);
-        const rootHandle = await repo.find(config.rootUrl);
-        const branchesDoc = rootHandle.doc();
-        if (!(0, branches_js_1.isBranchesDoc)(branchesDoc)) {
-            throw new Error(`root doc at ${config.rootUrl} is not a branches doc`);
-        }
-        if (!branchesDoc.branches[source]) {
-            throw new Error(`source branch "${source}" does not exist`);
-        }
-        const targetUrl = branchesDoc.branches[targetName];
-        const sourceUrl = branchesDoc.branches[source];
-        const targetFolder = await repo.find(targetUrl);
-        const sourceFolder = await repo.find(sourceUrl);
-        // Refuse on dirty working tree (mirror switchBranch policy).
-        const tFiles = await readFileBytes(repo, await shape.decode({ repo, root: targetFolder }));
-        const ig = await (0, ignore_js_1.loadIgnore)(root);
-        const fsFiles = await (0, fs_tree_js_1.walkDir)(root, ig);
-        const dirty = computeDiff(tFiles, fsFiles);
-        if (dirty.added.length || dirty.modified.length || dirty.deleted.length) {
-            throw new Error(`refusing to merge: working tree has uncommitted changes on branch "${targetName}". run \`pushwork save\` first.`);
-        }
-        const tTree = await shape.decode({ repo, root: targetFolder });
-        const sTree = await shape.decode({ repo, root: sourceFolder });
-        const tLeaves = (0, index_js_1.flattenLeaves)(tTree);
-        const sLeaves = (0, index_js_1.flattenLeaves)(sTree);
-        const merged = [];
-        const added = [];
-        // For paths in both: merge file docs in place.
-        for (const [posixPath, sUrl] of sLeaves) {
-            const tUrl = tLeaves.get(posixPath);
-            if (!tUrl)
-                continue;
-            const tBare = (0, index_js_1.stripHeads)(tUrl);
-            const sBare = (0, index_js_1.stripHeads)(sUrl);
-            if (tBare === sBare) {
-                // Same file doc identity (shared) — already in sync, nothing to do.
-                continue;
-            }
-            const tHandle = await repo.find(tBare);
-            const sHandle = await repo.find(sBare);
-            tHandle.update((d) => Automerge.merge(d, Automerge.clone(sHandle.doc())));
-            merged.push(posixPath);
-            dlog("merge merged file at %s (%s ← %s)", posixPath, tBare, sBare);
-        }
-        // For paths only in source: deep-clone source's file doc, add to target's folder.
-        const newLeaves = new Map();
-        for (const [posixPath, sUrl] of sLeaves) {
-            if (tLeaves.has(posixPath))
-                continue;
-            const sBare = (0, index_js_1.stripHeads)(sUrl);
-            const sHandle = await repo.find(sBare);
-            const cloned = repo.clone(sHandle);
-            let finalUrl = cloned.url;
-            const parsed = (0, automerge_repo_1.parseAutomergeUrl)(sUrl);
-            if (parsed.heads) {
-                finalUrl = (0, index_js_1.pinUrl)(cloned);
-            }
-            newLeaves.set(posixPath, finalUrl);
-            added.push(posixPath);
-            dlog("merge added %s url=%s", posixPath, finalUrl);
-        }
-        if (newLeaves.size > 0) {
-            // Build a tree for the encode call: existing target leaves + new ones.
-            const nextTree = (0, index_js_1.newDir)();
-            for (const [p, url] of tLeaves) {
-                (0, index_js_1.setFileAt)(nextTree, p.split("/").filter(Boolean), url);
-            }
-            for (const [p, url] of newLeaves) {
-                (0, index_js_1.setFileAt)(nextTree, p.split("/").filter(Boolean), url);
-            }
-            await shape.encode({ repo, tree: nextTree, previousRoot: targetFolder });
-        }
-        // Materialize current branch (target) onto disk to reflect the merge.
-        const finalTree = await shape.decode({ repo, root: targetFolder });
-        await materializeTree(repo, root, finalTree);
-        merged.sort();
-        added.sort();
-        return { source, target: targetName, merged, added };
-    }
-    finally {
-        await repo.shutdown();
-    }
-}
 /**
- * Capture the working tree's changes against the
- *
- *
+ * Capture the working tree's changes against the saved state into a local
+ * snarf, then reset the working tree to the saved state. Snarfs live in
+ * `.pushwork/snarf/` and are never synced.
  */
 async function cutWorkdir(cwd, opts = {}) {
     const root = path.resolve(cwd);
     const config = await (0, config_js_1.readConfig)(root);
-
-    dlog("cut root=%s branch=%s name=%s", root, branchName, opts.name ?? "(unnamed)");
+    dlog("cut root=%s name=%s", root, opts.name ?? "(unnamed)");
     const repo = await (0, repo_js_1.openRepo)(config.backend, (0, config_js_1.storageDir)(root), { offline: true });
     try {
         const shape = await (0, index_js_1.resolveShape)(config.shape);
-        const
-        const folderHandle = await (0, branches_js_1.resolveEffectiveRoot)(repo, rootHandle, branchName);
+        const folderHandle = await repo.find(config.rootUrl);
         const previousTree = await shape.decode({ repo, root: folderHandle });
         const previousFiles = await readFileBytes(repo, previousTree);
         const ig = await (0, ignore_js_1.loadIgnore)(root);
@@ -692,13 +438,13 @@ async function cutWorkdir(cwd, opts = {}) {
         for (const [p, bytes] of fsFiles) {
             const prev = previousFiles.get(p);
             if (!prev) {
-                entries.push({ path: p, kind: "added", contentBase64: (0,
+                entries.push({ path: p, kind: "added", contentBase64: (0, snarf_js_1.encodeBytes)(bytes) });
             }
             else if (!(0, fs_tree_js_1.byteEq)(prev.bytes, bytes)) {
                 entries.push({
                     path: p,
                     kind: "modified",
-                    contentBase64: (0,
+                    contentBase64: (0, snarf_js_1.encodeBytes)(bytes),
                 });
             }
         }
@@ -710,35 +456,32 @@ async function cutWorkdir(cwd, opts = {}) {
             throw new Error("nothing to cut: working tree clean");
         }
         entries.sort((a, b) => a.path.localeCompare(b.path));
-        const
+        const snarf = await (0, snarf_js_1.appendSnarf)(root, {
             name: opts.name,
-            branch: branchName,
             entries,
         });
-        // Reset working tree to the
+        // Reset working tree to the saved state.
         await materializeTree(repo, root, previousTree);
-        dlog("cut complete id=%d entries=%d",
-        return { id:
+        dlog("cut complete id=%d entries=%d", snarf.id, entries.length);
+        return { id: snarf.id, entries: entries.length };
     }
     finally {
         await repo.shutdown();
     }
 }
 /**
- * Apply a
+ * Apply a snarf on top of the current working tree, then remove the snarf
  * entry. Refuses if the working tree has uncommitted changes (caller can
  * `pushwork save` or `pushwork cut` first).
  */
-async function
+async function pasteSnarf(cwd, selector) {
     const root = path.resolve(cwd);
     const config = await (0, config_js_1.readConfig)(root);
-
-    // Check the working tree is clean against the current branch state.
+    // Check the working tree is clean against the saved state.
     const repo = await (0, repo_js_1.openRepo)(config.backend, (0, config_js_1.storageDir)(root), { offline: true });
     try {
         const shape = await (0, index_js_1.resolveShape)(config.shape);
-        const
-        const folderHandle = await (0, branches_js_1.resolveEffectiveRoot)(repo, rootHandle, branchName);
+        const folderHandle = await repo.find(config.rootUrl);
         const previousTree = await shape.decode({ repo, root: folderHandle });
         const previousFiles = await readFileBytes(repo, previousTree);
         const ig = await (0, ignore_js_1.loadIgnore)(root);
@@ -751,13 +494,13 @@ async function pasteStash(cwd, selector) {
     finally {
         await repo.shutdown();
     }
-    const
-    if (!
+    const snarf = await (0, snarf_js_1.takeSnarf)(root, selector);
+    if (!snarf) {
         throw new Error(selector
-            ? `no
-            : "nothing to paste: no
+            ? `no snarf matches "${selector}"`
+            : "nothing to paste: no snarfs");
     }
-    for (const entry of
+    for (const entry of snarf.entries) {
         const target = path.join(root, fromPosix(entry.path));
         if (entry.kind === "deleted") {
             try {
@@ -769,118 +512,15 @@ async function pasteStash(cwd, selector) {
             await pruneEmptyDirs(root, path.dirname(fromPosix(entry.path)));
         }
         else if (entry.contentBase64 != null) {
-            const bytes = (0,
+            const bytes = (0, snarf_js_1.decodeBytes)(entry.contentBase64);
             await (0, fs_tree_js_1.writeFileAtomic)(target, bytes);
         }
     }
-    dlog("paste complete id=%d entries=%d",
-    return { id:
+    dlog("paste complete id=%d entries=%d", snarf.id, snarf.entries.length);
+    return { id: snarf.id, name: snarf.name, entries: snarf.entries.length };
 }
-async function
-    return (0,
-}
-async function switchBranch(cwd, name) {
-    if (!name)
-        throw new Error("branch name is required");
-    const root = path.resolve(cwd);
-    const config = await (0, config_js_1.readConfig)(root);
-    if (!config.branches)
-        throw new Error("pushwork repo has no branches");
-    const currentName = await (0, branches_js_1.readBranchFile)(root);
-    const repo = await (0, repo_js_1.openRepo)(config.backend, (0, config_js_1.storageDir)(root), { offline: true });
-    try {
-        const shape = await (0, index_js_1.resolveShape)(config.shape);
-        const rootHandle = await repo.find(config.rootUrl);
-        const doc = rootHandle.doc();
-        if (!(0, branches_js_1.isBranchesDoc)(doc)) {
-            throw new Error(`root doc at ${config.rootUrl} is not a branches doc`);
-        }
-        if (!doc.branches[name]) {
-            throw new Error(`branch "${name}" does not exist`);
-        }
-        const stranded = !!currentName && !doc.branches[currentName];
-        // Refuse if the working dir has uncommitted changes against the current
-        // branch. The user can `pushwork save` to commit, or `pushwork cut` +
-        // `pushwork paste` to carry the changes across the switch.
-        if (currentName && !stranded) {
-            const folderHandle = await (0, branches_js_1.resolveEffectiveRoot)(repo, rootHandle, currentName);
-            const previousTree = await shape.decode({ repo, root: folderHandle });
-            const previousFiles = await readFileBytes(repo, previousTree);
-            const ig = await (0, ignore_js_1.loadIgnore)(root);
-            const fsFiles = await (0, fs_tree_js_1.walkDir)(root, ig);
-            const d = computeDiff(previousFiles, fsFiles);
-            if (d.added.length || d.modified.length || d.deleted.length) {
-                throw new Error(`refusing to switch: working tree has uncommitted changes on branch "${currentName}". run \`pushwork save\` to commit, or \`pushwork cut\` + \`pushwork switch ${name}\` + \`pushwork paste\` to carry them across.`);
-            }
-        }
-        // Materialize from the new branch.
-        const newFolder = await repo.find(doc.branches[name]);
-        const tree = await shape.decode({ repo, root: newFolder });
-        // Stranded: the current branch is gone, so we have no reference for a
-        // dirty check. Auto-cut working changes against the destination branch,
-        // materialize, then auto-paste so the user's work survives the switch.
-        let strandedStashId = null;
-        if (stranded) {
-            const newFiles = await readFileBytes(repo, tree);
-            const ig = await (0, ignore_js_1.loadIgnore)(root);
-            const fsFiles = await (0, fs_tree_js_1.walkDir)(root, ig);
-            const entries = [];
-            for (const [p, bytes] of fsFiles) {
-                const prev = newFiles.get(p);
-                if (!prev) {
-                    entries.push({ path: p, kind: "added", contentBase64: (0, stash_js_1.encodeBytes)(bytes) });
-                }
-                else if (!(0, fs_tree_js_1.byteEq)(prev.bytes, bytes)) {
-                    entries.push({ path: p, kind: "modified", contentBase64: (0, stash_js_1.encodeBytes)(bytes) });
-                }
-            }
-            for (const [p] of newFiles) {
-                if (!fsFiles.has(p))
-                    entries.push({ path: p, kind: "deleted" });
-            }
-            if (entries.length > 0) {
-                entries.sort((a, b) => a.path.localeCompare(b.path));
-                const stash = await (0, stash_js_1.appendStash)(root, {
-                    name: `stranded-from-${currentName}`,
-                    branch: currentName,
-                    entries,
-                });
-                strandedStashId = stash.id;
-                process.stderr.write(`warning: branch "${currentName}" no longer exists; auto-cut ${entries.length} entr${entries.length === 1 ? "y" : "ies"} as stash #${stash.id} and will auto-paste after switch\n`);
-            }
-            else {
-                process.stderr.write(`warning: branch "${currentName}" no longer exists; switching (working tree already matches "${name}")\n`);
-            }
-        }
-        await materializeTree(repo, root, tree);
-        await (0, branches_js_1.writeBranchFile)(root, name);
-        dlog("switch → %s", name);
-        if (strandedStashId != null) {
-            const stash = await (0, stash_js_1.takeStash)(root, String(strandedStashId));
-            if (stash) {
-                for (const entry of stash.entries) {
-                    const target = path.join(root, fromPosix(entry.path));
-                    if (entry.kind === "deleted") {
-                        try {
-                            await fs.unlink(target);
-                        }
-                        catch {
-                            // already gone
-                        }
-                        await pruneEmptyDirs(root, path.dirname(fromPosix(entry.path)));
-                    }
-                    else if (entry.contentBase64 != null) {
-                        const bytes = (0, stash_js_1.decodeBytes)(entry.contentBase64);
-                        await (0, fs_tree_js_1.writeFileAtomic)(target, bytes);
-                    }
-                }
-                dlog("stranded auto-paste applied stash #%d (%d entries)", stash.id, stash.entries.length);
-            }
-        }
-    }
-    finally {
-        await repo.shutdown();
-    }
+async function showSnarfs(cwd) {
+    return (0, snarf_js_1.listSnarfs)(path.resolve(cwd));
 }
 function stampLastSyncAt(handle) {
     handle.change((d) => {
@@ -938,17 +578,13 @@ async function pushFiles(repo, fsFiles, previous, artifactDirs) {
         }
         else if (prev) {
            // Changed path: mutate the existing file doc in place. This keeps
-           // the file URL stable
+           // the file URL stable across edits and avoids the propagation
            // race where a brand-new file doc URL is referenced by the folder
            // before its bytes have reached the sync server.
            //
            // For string content (text files) we use Automerge.updateText so
            // concurrent character-level edits merge correctly. Bytes and
            // ImmutableString are atomic — last writer wins on the field.
-           //
-           // Branch isolation is enforced separately: `createBranch` deep
-           // clones every file doc the source branch references, so two
-           // branches never share a UnixFileEntry doc identity.
            const refreshUrl = (0, index_js_1.stripHeads)(prev.url);
            const handle = await repo.find(refreshUrl);
            handle.change((d) => {
@@ -989,6 +625,34 @@ async function pushFiles(repo, fsFiles, previous, artifactDirs) {
     dlog("pushFiles done: %d created, %d updated, %d unchanged", created, updated, unchanged);
     return root;
 }
+/**
+ * Re-pin every artifact leaf in the folder doc to its file doc's current
+ * heads. Bare (non-artifact) URLs are left as-is since they already track
+ * current heads implicitly. Returns true if any leaf URL was rewritten.
+ */
+async function refreshFolderPins(repo, folderHandle, shape, artifactDirs) {
+    const tree = await shape.decode({ repo, root: folderHandle });
+    const refreshed = (0, index_js_1.newDir)();
+    let changed = false;
+    for (const [posixPath, currentUrl] of (0, index_js_1.flattenLeaves)(tree)) {
+        const segments = posixPath.split("/").filter(Boolean);
+        let finalUrl = currentUrl;
+        if ((0, index_js_1.isInArtifactDir)(posixPath, artifactDirs)) {
+            const handle = await repo.find((0, index_js_1.stripHeads)(currentUrl));
+            const repinned = (0, index_js_1.pinUrl)(handle);
+            if (repinned !== currentUrl) {
+                finalUrl = repinned;
+                changed = true;
+            }
+        }
+        (0, index_js_1.setFileAt)(refreshed, segments, finalUrl);
+    }
+    if (changed) {
+        dlog("refreshFolderPins: re-pinned artifacts to current heads");
+        await shape.encode({ repo, tree: refreshed, previousRoot: folderHandle });
+    }
+    return changed;
+}
 async function readFileBytes(repo, tree) {
     const out = new Map();
     for (const [posixPath, fileUrl] of (0, index_js_1.flattenLeaves)(tree)) {