pushwork 2.0.0-preview → 2.0.0-preview.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/branches.d.ts +1 -0
- package/dist/branches.d.ts.map +1 -1
- package/dist/cli/commands.d.ts +71 -0
- package/dist/cli/commands.d.ts.map +1 -0
- package/dist/cli/commands.js +794 -0
- package/dist/cli/commands.js.map +1 -0
- package/dist/cli/index.d.ts +2 -0
- package/dist/cli/index.d.ts.map +1 -0
- package/dist/cli/index.js +19 -0
- package/dist/cli/index.js.map +1 -0
- package/dist/cli.js +67 -112
- package/dist/cli.js.map +1 -1
- package/dist/commands.d.ts +58 -0
- package/dist/commands.d.ts.map +1 -0
- package/dist/commands.js +975 -0
- package/dist/commands.js.map +1 -0
- package/dist/config/index.d.ts +71 -0
- package/dist/config/index.d.ts.map +1 -0
- package/dist/config/index.js +314 -0
- package/dist/config/index.js.map +1 -0
- package/dist/config.d.ts +1 -2
- package/dist/config.d.ts.map +1 -1
- package/dist/config.js +1 -2
- package/dist/config.js.map +1 -1
- package/dist/core/change-detection.d.ts +80 -0
- package/dist/core/change-detection.d.ts.map +1 -0
- package/dist/core/change-detection.js +560 -0
- package/dist/core/change-detection.js.map +1 -0
- package/dist/core/config.d.ts +81 -0
- package/dist/core/config.d.ts.map +1 -0
- package/dist/core/config.js +304 -0
- package/dist/core/config.js.map +1 -0
- package/dist/core/index.d.ts +6 -0
- package/dist/core/index.d.ts.map +1 -0
- package/dist/core/index.js +22 -0
- package/dist/core/index.js.map +1 -0
- package/dist/core/move-detection.d.ts +34 -0
- package/dist/core/move-detection.d.ts.map +1 -0
- package/dist/core/move-detection.js +128 -0
- package/dist/core/move-detection.js.map +1 -0
- package/dist/core/snapshot.d.ts +105 -0
- package/dist/core/snapshot.d.ts.map +1 -0
- package/dist/core/snapshot.js +254 -0
- package/dist/core/snapshot.js.map +1 -0
- package/dist/core/sync-engine.d.ts +177 -0
- package/dist/core/sync-engine.d.ts.map +1 -0
- package/dist/core/sync-engine.js +1471 -0
- package/dist/core/sync-engine.js.map +1 -0
- package/dist/index.d.ts +2 -4
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +4 -14
- package/dist/index.js.map +1 -1
- package/dist/pushwork.d.ts +28 -61
- package/dist/pushwork.d.ts.map +1 -1
- package/dist/pushwork.js +127 -445
- package/dist/pushwork.js.map +1 -1
- package/dist/shapes/types.d.ts +1 -0
- package/dist/shapes/types.d.ts.map +1 -1
- package/dist/shapes/types.js.map +1 -1
- package/dist/shapes/vfs.d.ts.map +1 -1
- package/dist/shapes/vfs.js +6 -2
- package/dist/shapes/vfs.js.map +1 -1
- package/dist/snarf.d.ts +21 -0
- package/dist/snarf.d.ts.map +1 -0
- package/dist/snarf.js +117 -0
- package/dist/snarf.js.map +1 -0
- package/dist/stash.d.ts +0 -2
- package/dist/stash.d.ts.map +1 -1
- package/dist/stash.js +0 -1
- package/dist/stash.js.map +1 -1
- package/dist/types/config.d.ts +102 -0
- package/dist/types/config.d.ts.map +1 -0
- package/dist/types/config.js +10 -0
- package/dist/types/config.js.map +1 -0
- package/dist/types/documents.d.ts +88 -0
- package/dist/types/documents.d.ts.map +1 -0
- package/dist/types/documents.js +23 -0
- package/dist/types/documents.js.map +1 -0
- package/dist/types/index.d.ts +4 -0
- package/dist/types/index.d.ts.map +1 -0
- package/dist/types/index.js +20 -0
- package/dist/types/index.js.map +1 -0
- package/dist/types/snapshot.d.ts +64 -0
- package/dist/types/snapshot.d.ts.map +1 -0
- package/dist/types/snapshot.js +3 -0
- package/dist/types/snapshot.js.map +1 -0
- package/dist/utils/content-similarity.d.ts +53 -0
- package/dist/utils/content-similarity.d.ts.map +1 -0
- package/dist/utils/content-similarity.js +155 -0
- package/dist/utils/content-similarity.js.map +1 -0
- package/dist/utils/content.d.ts +10 -0
- package/dist/utils/content.d.ts.map +1 -0
- package/dist/utils/content.js +35 -0
- package/dist/utils/content.js.map +1 -0
- package/dist/utils/directory.d.ts +24 -0
- package/dist/utils/directory.d.ts.map +1 -0
- package/dist/utils/directory.js +56 -0
- package/dist/utils/directory.js.map +1 -0
- package/dist/utils/fs.d.ts +74 -0
- package/dist/utils/fs.d.ts.map +1 -0
- package/dist/utils/fs.js +298 -0
- package/dist/utils/fs.js.map +1 -0
- package/dist/utils/index.d.ts +5 -0
- package/dist/utils/index.d.ts.map +1 -0
- package/dist/utils/index.js +21 -0
- package/dist/utils/index.js.map +1 -0
- package/dist/utils/mime-types.d.ts +13 -0
- package/dist/utils/mime-types.d.ts.map +1 -0
- package/dist/utils/mime-types.js +247 -0
- package/dist/utils/mime-types.js.map +1 -0
- package/dist/utils/network-sync.d.ts +30 -0
- package/dist/utils/network-sync.d.ts.map +1 -0
- package/dist/utils/network-sync.js +391 -0
- package/dist/utils/network-sync.js.map +1 -0
- package/dist/utils/node-polyfills.d.ts +9 -0
- package/dist/utils/node-polyfills.d.ts.map +1 -0
- package/dist/utils/node-polyfills.js +9 -0
- package/dist/utils/node-polyfills.js.map +1 -0
- package/dist/utils/output.d.ts +129 -0
- package/dist/utils/output.d.ts.map +1 -0
- package/dist/utils/output.js +375 -0
- package/dist/utils/output.js.map +1 -0
- package/dist/utils/repo-factory.d.ts +15 -0
- package/dist/utils/repo-factory.d.ts.map +1 -0
- package/dist/utils/repo-factory.js +156 -0
- package/dist/utils/repo-factory.js.map +1 -0
- package/dist/utils/string-similarity.d.ts +14 -0
- package/dist/utils/string-similarity.d.ts.map +1 -0
- package/dist/utils/string-similarity.js +43 -0
- package/dist/utils/string-similarity.js.map +1 -0
- package/dist/utils/text-diff.d.ts +37 -0
- package/dist/utils/text-diff.d.ts.map +1 -0
- package/dist/utils/text-diff.js +131 -0
- package/dist/utils/text-diff.js.map +1 -0
- package/dist/utils/trace.d.ts +19 -0
- package/dist/utils/trace.d.ts.map +1 -0
- package/dist/utils/trace.js +68 -0
- package/dist/utils/trace.js.map +1 -0
- package/dist/version.d.ts +11 -0
- package/dist/version.d.ts.map +1 -0
- package/dist/version.js +93 -0
- package/dist/version.js.map +1 -0
- package/package.json +5 -1
- package/.prettierrc +0 -9
- package/flake.lock +0 -128
- package/flake.nix +0 -66
- package/pnpm-workspace.yaml +0 -5
- package/src/branches.ts +0 -93
- package/src/cli.ts +0 -292
- package/src/config.ts +0 -64
- package/src/fs-tree.ts +0 -70
- package/src/ignore.ts +0 -33
- package/src/index.ts +0 -38
- package/src/log.ts +0 -8
- package/src/pushwork.ts +0 -1055
- package/src/repo.ts +0 -76
- package/src/shapes/custom.ts +0 -29
- package/src/shapes/file.ts +0 -115
- package/src/shapes/index.ts +0 -19
- package/src/shapes/patchwork-folder.ts +0 -156
- package/src/shapes/types.ts +0 -79
- package/src/shapes/vfs.ts +0 -93
- package/src/stash.ts +0 -106
- package/test/integration/branches.test.ts +0 -389
- package/test/integration/pushwork.test.ts +0 -547
- package/test/setup.ts +0 -29
- package/test/unit/doc-shape.test.ts +0 -612
- package/tsconfig.json +0 -22
- package/vitest.config.ts +0 -14
package/dist/commands.js
ADDED
|
@@ -0,0 +1,975 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
+
}) : function(o, v) {
|
|
16
|
+
o["default"] = v;
|
|
17
|
+
});
|
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
19
|
+
var ownKeys = function(o) {
|
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
21
|
+
var ar = [];
|
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
23
|
+
return ar;
|
|
24
|
+
};
|
|
25
|
+
return ownKeys(o);
|
|
26
|
+
};
|
|
27
|
+
return function (mod) {
|
|
28
|
+
if (mod && mod.__esModule) return mod;
|
|
29
|
+
var result = {};
|
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
31
|
+
__setModuleDefault(result, mod);
|
|
32
|
+
return result;
|
|
33
|
+
};
|
|
34
|
+
})();
|
|
35
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
36
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
37
|
+
};
|
|
38
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
39
|
+
exports.init = init;
|
|
40
|
+
exports.sync = sync;
|
|
41
|
+
exports.diff = diff;
|
|
42
|
+
exports.status = status;
|
|
43
|
+
exports.log = log;
|
|
44
|
+
exports.checkout = checkout;
|
|
45
|
+
exports.clone = clone;
|
|
46
|
+
exports.url = url;
|
|
47
|
+
exports.rm = rm;
|
|
48
|
+
exports.commit = commit;
|
|
49
|
+
exports.ls = ls;
|
|
50
|
+
exports.config = config;
|
|
51
|
+
exports.watch = watch;
|
|
52
|
+
exports.root = root;
|
|
53
|
+
const path = __importStar(require("path"));
|
|
54
|
+
const fs = __importStar(require("fs/promises"));
|
|
55
|
+
const fsSync = __importStar(require("fs"));
|
|
56
|
+
const diffLib = __importStar(require("diff"));
|
|
57
|
+
const child_process_1 = require("child_process");
|
|
58
|
+
const config_1 = require("./types/config");
|
|
59
|
+
const core_1 = require("./core");
|
|
60
|
+
const utils_1 = require("./utils");
|
|
61
|
+
const config_2 = require("./core/config");
|
|
62
|
+
const repo_factory_1 = require("./utils/repo-factory");
|
|
63
|
+
const output_1 = require("./utils/output");
|
|
64
|
+
const network_sync_1 = require("./utils/network-sync");
|
|
65
|
+
const chalk_1 = __importDefault(require("chalk"));
|
|
66
|
+
/**
 * Initialize repository directory structure and configuration.
 * Shared logic for init and clone commands.
 *
 * @param {string} resolvedPath - Absolute path of the directory to initialize.
 * @param {object} overrides - Config overrides gathered from CLI flags.
 * @param {boolean} [sub=false] - Whether the Subduction sync backend is in use.
 * @returns {Promise<{config: object, repo: object, syncEngine: object}>}
 */
async function initializeRepository(resolvedPath, overrides, sub = false) {
    // Create the .pushwork directory tree, including automerge storage.
    const configDir = path.join(resolvedPath, config_2.ConfigManager.CONFIG_DIR);
    await (0, utils_1.ensureDirectoryExists)(configDir);
    await (0, utils_1.ensureDirectoryExists)(path.join(configDir, "automerge"));
    // Persist Subduction mode + server in config so subsequent commands pick
    // them up. Without persisting sync_server here, `.pushwork/config.json`
    // would retain the default WebSocket server even in --sub mode, and
    // `pushwork config` / `status` would misreport the endpoint.
    let effectiveOverrides = overrides;
    if (sub) {
        const { sync_server_storage_id: _discarded, ...kept } = overrides;
        effectiveOverrides = {
            ...kept,
            subduction: true,
            sync_server: kept.sync_server ?? config_1.DEFAULT_SUBDUCTION_SERVER,
        };
    }
    // Write the configuration (with overrides applied) to disk.
    const configManager = new config_2.ConfigManager(resolvedPath);
    let config = await configManager.initializeWithOverrides(effectiveOverrides);
    if (sub && config.sync_server_storage_id !== undefined) {
        // Storage ids are a WebSocket-backend concept; scrub and re-save.
        config = { ...config, sync_server_storage_id: undefined };
        await configManager.save(config);
    }
    // Build the repo and its sync engine on top of the fresh config.
    const repo = await (0, repo_factory_1.createRepo)(resolvedPath, config, sub);
    const syncEngine = new core_1.SyncEngine(repo, resolvedPath, config);
    return { config, repo, syncEngine };
}
|
|
99
|
+
/**
 * Shared pre-action that ensures repository and sync engine are properly initialized.
 * This function always works, with or without network connectivity.
 *
 * @param {string} [workingDir=process.cwd()] - Directory to operate in.
 * @param {{forceDefaults?: boolean, syncEnabled?: boolean}} [options]
 *   forceDefaults resets all config to defaults except backend-selection keys;
 *   syncEnabled, when set, overrides the config's sync_enabled flag.
 * @returns {Promise<{repo: object, syncEngine: object, config: object, workingDir: string}>}
 * @throws {Error} If the directory was never initialized (`pushwork init`).
 */
async function setupCommandContext(workingDir = process.cwd(), options) {
    const resolvedPath = path.resolve(workingDir);
    // Check if initialized
    const syncToolDir = path.join(resolvedPath, config_2.ConfigManager.CONFIG_DIR);
    if (!(await (0, utils_1.pathExists)(syncToolDir))) {
        throw new Error('Directory not initialized for sync. Run "pushwork init" first.');
    }
    // Load configuration
    const configManager = new config_2.ConfigManager(resolvedPath);
    let config;
    if (options?.forceDefaults) {
        // Force mode: use defaults, only preserving backend-selection keys from
        // local config (root_directory_url, subduction flag, and the sync
        // endpoint the user originally chose). Everything else (exclude
        // patterns, artifact dirs, move threshold, etc.) is reset to defaults.
        const localConfig = await configManager.load();
        config = configManager.getDefaultDirectoryConfig();
        if (localConfig?.root_directory_url) {
            config.root_directory_url = localConfig.root_directory_url;
        }
        if (localConfig?.subduction) {
            config.subduction = localConfig.subduction;
            config.sync_server = localConfig.sync_server ?? config_1.DEFAULT_SUBDUCTION_SERVER;
            // sync_server_storage_id is meaningless in Subduction mode; drop it
            // so the in-memory config reflects reality.
            config.sync_server_storage_id = undefined;
        }
        else {
            // WebSocket mode: preserve the user's custom server + storage id
            // if they configured one. Without this, `pushwork sync` (default
            // force mode) would silently reset a custom --sync-server back to
            // DEFAULT_SYNC_SERVER on every run.
            if (localConfig?.sync_server) {
                config.sync_server = localConfig.sync_server;
            }
            if (localConfig?.sync_server_storage_id) {
                config.sync_server_storage_id = localConfig.sync_server_storage_id;
            }
        }
    }
    else {
        // Gentle mode: honor the merged (global + local) configuration as-is.
        config = await configManager.getMerged();
    }
    // Override sync_enabled if explicitly specified (e.g., for local-only operations)
    if (options?.syncEnabled !== undefined) {
        config = { ...config, sync_enabled: options.syncEnabled };
    }
    const sub = config.subduction ?? false;
    if (sub) {
        // Default to the Subduction endpoint only if the user hasn't
        // configured one. Respect any explicit sync_server value (including
        // custom Subduction endpoints set via `init --sub --sync-server ...`).
        if (!config.sync_server) {
            config.sync_server = config_1.DEFAULT_SUBDUCTION_SERVER;
        }
        // sync_server_storage_id is a WebSocket-mode concept; clear it so
        // the in-memory config reflects what waitForSync actually uses
        // (head-stability polling, not getSyncInfo verification).
        config.sync_server_storage_id = undefined;
    }
    // Create repo with config
    const repo = await (0, repo_factory_1.createRepo)(resolvedPath, config, sub);
    // Create sync engine
    const syncEngine = new core_1.SyncEngine(repo, resolvedPath, config);
    return {
        repo,
        syncEngine,
        config,
        workingDir: resolvedPath,
    };
}
|
|
174
|
+
/**
 * Safely shutdown a repository with proper error handling.
 *
 * Waits a short grace period (PUSHWORK_SYNC_GRACE_MS env var, default 3000ms)
 * before shutdown, suppresses the WebSocket teardown errors that commonly
 * fire at exit, and surfaces any other shutdown failure as a warning without
 * failing the calling command.
 *
 * @param {object} repo - Repository exposing an async `shutdown()` method.
 * @returns {Promise<void>} Resolves once shutdown has been attempted.
 */
async function safeRepoShutdown(repo) {
    // TEMPORARY WORKAROUND: pushwork's Subduction sync-verification only
    // watches local head stability, which doesn't actually confirm the
    // server received anything. Give any in-flight `syncWithAllPeers`
    // calls a chance to finish (and the scheduler time to heal transient
    // failures) before we tear the repo down. Remove once awaitSynced()
    // (or equivalent) lands in @automerge/automerge-repo@subduction.
    const graceMsEnv = process.env.PUSHWORK_SYNC_GRACE_MS;
    const graceMs = graceMsEnv !== undefined ? Number(graceMsEnv) : 3000;
    // Number.isFinite also rejects NaN from a malformed env value.
    if (Number.isFinite(graceMs) && graceMs > 0) {
        await new Promise((resolve) => setTimeout(resolve, graceMs));
    }
    // Handle uncaught WebSocket errors that occur during shutdown.
    const uncaughtErrorHandler = (err) => {
        if (err.message.includes("WebSocket")) {
            // Silently suppress WebSocket errors during shutdown.
            return;
        }
        // Re-throw non-WebSocket errors.
        throw err;
    };
    // Add the error handler before shutdown.
    process.on("uncaughtException", uncaughtErrorHandler);
    try {
        await repo.shutdown();
    }
    catch (shutdownError) {
        const errorMessage = shutdownError instanceof Error
            ? shutdownError.message
            : String(shutdownError);
        // WebSocket errors during shutdown are common and non-critical;
        // ignore them entirely - they don't affect data integrity.
        if (!errorMessage.includes("WebSocket")) {
            // FIX: previously non-WebSocket shutdown errors were swallowed
            // with no signal at all (the code computed errorMessage and did
            // nothing with it). Surface them as a warning — still best-effort,
            // never failing the command — so real shutdown problems are visible.
            console.warn(`Warning: repository shutdown failed: ${errorMessage}`);
        }
    }
    finally {
        process.off("uncaughtException", uncaughtErrorHandler);
    }
}
|
|
219
|
+
/**
 * Initialize sync in a directory.
 *
 * Creates the .pushwork structure, writes config (with CLI overrides),
 * creates the root folder document, optionally verifies it reached the
 * sync server, runs an initial sync, then exits the process.
 *
 * @param {string} targetPath - Directory to initialize.
 * @param {{sub?: boolean, syncServer?: string, syncServerStorageId?: string}} [options]
 */
async function init(targetPath, options = {}) {
    const resolvedPath = path.resolve(targetPath);
    const sub = options.sub ?? false;
    output_1.out.task(`Initializing`);
    if (sub) {
        output_1.out.taskLine("Using Subduction sync backend", true);
    }
    await (0, utils_1.ensureDirectoryExists)(resolvedPath);
    // Check if already initialized
    const syncToolDir = path.join(resolvedPath, config_2.ConfigManager.CONFIG_DIR);
    if (await (0, utils_1.pathExists)(syncToolDir)) {
        output_1.out.error("Directory already initialized for sync");
        output_1.out.exit(1);
    }
    // Initialize repository with optional CLI overrides
    output_1.out.update("Setting up repository");
    const { repo, syncEngine, config } = await initializeRepository(resolvedPath, {
        sync_server: options.syncServer,
        sync_server_storage_id: options.syncServerStorageId,
    }, sub);
    // Create new root directory document
    output_1.out.update("Creating root directory");
    const dirName = path.basename(resolvedPath);
    const rootDoc = {
        "@patchwork": { type: "folder" },
        name: dirName,
        title: dirName,
        docs: [],
    };
    const rootHandle = repo.create(rootDoc);
    // Set root directory URL in snapshot
    await syncEngine.setRootDirectoryUrl(rootHandle.url);
    // Wait for root document to sync to server if sync is enabled.
    // With Subduction, we skip StorageId-based sync verification —
    // the SubductionSource handles sync internally.
    if (config.sync_enabled && !sub) {
        if (config.sync_server_storage_id) {
            output_1.out.update("Syncing to server");
            const { failed } = await (0, network_sync_1.waitForSync)([rootHandle], config.sync_server_storage_id);
            if (failed.length > 0) {
                output_1.out.taskLine("Root document failed to sync to server", true);
                // Continue anyway - the document is created locally and will sync later
            }
        }
        else {
            // WebSocket mode without a storage id can't verify delivery via
            // getSyncInfo. Warn loudly so users don't silently end up with
            // data that never reached the server.
            output_1.out.taskLine("Warning: sync_server_storage_id is not set; skipping post-init sync verification", true);
        }
    }
    // Run initial sync to capture existing files
    output_1.out.update("Running initial sync");
    const result = await syncEngine.sync({ sub });
    output_1.out.update("Writing to disk");
    await safeRepoShutdown(repo);
    output_1.out.done("Initialized");
    output_1.out.successBlock("INITIALIZED", rootHandle.url);
    if (result.filesChanged > 0) {
        // `plural` is a helper defined elsewhere in this module.
        output_1.out.info(`Synced ${result.filesChanged} ${plural("file", result.filesChanged)}`);
    }
    // Hard exit: lingering WebSocket/timer handles would otherwise keep
    // the Node process alive after a successful init.
    process.exit();
}
|
|
285
|
+
/**
 * Run bidirectional sync.
 *
 * Modes: --nuclear wipes local sync state first; --gentle keeps the merged
 * local config instead of forcing defaults; --dry-run previews changes and
 * moves without applying anything. Always exits the process at the end
 * (except on the dry-run "already synced" early return).
 *
 * @param {string} [targetPath="."] - Directory to sync.
 * @param {{nuclear?: boolean, gentle?: boolean, dryRun?: boolean}} options
 */
async function sync(targetPath = ".", options) {
    output_1.out.task(options.nuclear
        ? "Nuclear syncing"
        : options.gentle
            ? "Gentle syncing"
            : "Syncing");
    // Default behavior (non-gentle) resets config to defaults, preserving
    // only backend-selection keys — see setupCommandContext.
    const { repo, syncEngine, config } = await setupCommandContext(targetPath, {
        forceDefaults: !options.gentle,
    });
    const sub = config.subduction ?? false;
    if (sub) {
        output_1.out.taskLine("Using Subduction sync backend (from config)", true);
    }
    if (options.nuclear) {
        await syncEngine.nuclearReset();
    }
    if (options.dryRun) {
        output_1.out.update("Analyzing changes");
        const preview = await syncEngine.previewChanges();
        if (preview.changes.length === 0 && preview.moves.length === 0) {
            output_1.out.done("Already synced");
            return;
        }
        output_1.out.done();
        output_1.out.infoBlock("CHANGES");
        output_1.out.obj({
            Changes: preview.changes.length.toString(),
            Moves: preview.moves.length > 0 ? preview.moves.length.toString() : undefined,
        });
        output_1.out.log("");
        output_1.out.log("Files:");
        // Cap the listing at 10 files / 5 moves to keep output readable.
        for (const change of preview.changes.slice(0, 10)) {
            const prefix = change.changeType === "local_only"
                ? "[local] "
                : change.changeType === "remote_only"
                    ? "[remote] "
                    : "[conflict]";
            output_1.out.log(` ${prefix} ${change.path}`);
        }
        if (preview.changes.length > 10) {
            output_1.out.log(` ... and ${preview.changes.length - 10} more`);
        }
        if (preview.moves.length > 0) {
            output_1.out.log("");
            output_1.out.log("Moves:");
            for (const move of preview.moves.slice(0, 5)) {
                output_1.out.log(` ${move.fromPath} → ${move.toPath}`);
            }
            if (preview.moves.length > 5) {
                output_1.out.log(` ... and ${preview.moves.length - 5} more`);
            }
        }
        output_1.out.log("");
        output_1.out.log("Run without --dry-run to apply these changes");
    }
    else {
        const result = await syncEngine.sync({ sub });
        output_1.out.taskLine("Writing to disk");
        await safeRepoShutdown(repo);
        if (result.success) {
            output_1.out.done("Synced");
            if (result.filesChanged === 0 && result.directoriesChanged === 0) {
                // Nothing changed: the "Synced" status line above is enough.
            }
            else {
                // `plural` is a helper defined elsewhere in this module.
                output_1.out.successBlock("SYNCED", `${result.filesChanged} ${plural("file", result.filesChanged)}`);
            }
            if (result.warnings.length > 0) {
                output_1.out.log("");
                output_1.out.warnBlock("WARNINGS", `${result.warnings.length} warnings`);
                for (const warning of result.warnings.slice(0, 5)) {
                    output_1.out.log(` ${warning}`);
                }
                if (result.warnings.length > 5) {
                    output_1.out.log(` ... and ${result.warnings.length - 5} more`);
                }
            }
        }
        else {
            // Partial failure: some files synced, some errored.
            output_1.out.done("partial", false);
            output_1.out.warnBlock("PARTIAL", `${result.filesChanged} updated, ${result.errors.length} errors`);
            output_1.out.obj({
                Files: result.filesChanged,
                Errors: result.errors.length,
            });
            result.errors
                .slice(0, 5)
                .forEach((error) => output_1.out.error(`${error.path}: ${error.error.message}`));
            if (result.errors.length > 5) {
                output_1.out.warn(`... and ${result.errors.length - 5} more errors`);
            }
        }
        // Always print the root URL
        const rootUrl = await syncEngine.getRootDirectoryUrl();
        if (rootUrl) {
            output_1.out.info(`Root: ${rootUrl}`);
        }
    }
    // Hard exit so lingering network handles don't keep the process alive.
    process.exit();
}
|
|
387
|
+
/**
 * Show differences between local and remote.
 *
 * Renders a colorized unified diff per changed file (via the `diff`
 * package's createPatch). With --name-only, prints just the changed paths.
 *
 * @param {string} [targetPath="."] - Directory to inspect.
 * @param {{nameOnly?: boolean}} options
 */
async function diff(targetPath = ".", options) {
    output_1.out.task("Analyzing changes");
    const { repo, syncEngine } = await setupCommandContext(targetPath, { syncEnabled: false });
    const preview = await syncEngine.previewChanges();
    output_1.out.done();
    if (options.nameOnly) {
        for (const change of preview.changes) {
            output_1.out.log(change.path);
        }
        // FIX: this early return previously skipped safeRepoShutdown,
        // leaking the repo — every other exit path in this function (and in
        // status/log) shuts it down.
        await safeRepoShutdown(repo);
        return;
    }
    if (preview.changes.length === 0) {
        output_1.out.success("No changes detected");
        await safeRepoShutdown(repo);
        output_1.out.exit();
        return;
    }
    output_1.out.warn(`${preview.changes.length} changes detected`);
    for (const change of preview.changes) {
        const prefix = change.changeType === "local_only"
            ? "[local] "
            : change.changeType === "remote_only"
                ? "[remote] "
                : "[conflict]";
        try {
            // Get old content (from snapshot/remote)
            const oldContent = change.remoteContent || "";
            // Get new content (current local)
            const newContent = change.localContent || "";
            // Convert binary content to string representation if needed
            const oldText = typeof oldContent === "string"
                ? oldContent
                : `<binary content: ${oldContent.length} bytes>`;
            const newText = typeof newContent === "string"
                ? newContent
                : `<binary content: ${newContent.length} bytes>`;
            // Generate unified diff
            const diffResult = diffLib.createPatch(change.path, oldText, newText, "previous", "current");
            // Skip the header lines and process the diff
            const lines = diffResult.split("\n").slice(4); // Skip index, ===, ---, +++ lines
            if (lines.length === 0 || (lines.length === 1 && lines[0] === "")) {
                output_1.out.log(`${prefix}${change.path} (content identical)`, "cyan");
                continue;
            }
            // Extract first hunk header and show inline with path
            let firstHunk = "";
            let diffLines = lines;
            if (lines[0]?.startsWith("@@")) {
                firstHunk = ` ${lines[0]}`;
                diffLines = lines.slice(1);
            }
            output_1.out.log(`${prefix}${change.path}${firstHunk}`, "cyan");
            for (const line of diffLines) {
                if (line.startsWith("@@")) {
                    // Additional hunk headers
                    output_1.out.log(line, "dim");
                }
                else if (line.startsWith("+")) {
                    // Added line
                    output_1.out.log(line, "green");
                }
                else if (line.startsWith("-")) {
                    // Removed line
                    output_1.out.log(line, "red");
                }
                else if (line.startsWith(" ") || line === "") {
                    // Context line or empty
                    output_1.out.log(line, "dim");
                }
            }
        }
        catch (error) {
            // Diff rendering is best-effort; report the failure inline and
            // keep going with the remaining files.
            output_1.out.log(`${prefix}${change.path} (diff error: ${error})`, "cyan");
        }
    }
    await safeRepoShutdown(repo);
}
|
|
467
|
+
/**
 * Show sync status.
 *
 * Prints tracked-file count, backend, sync server, and pending-change info;
 * with --verbose also loads the root document for entry counts, heads, and
 * the full tracked-file → URL table.
 *
 * @param {string} [targetPath="."] - Directory to inspect.
 * @param {{verbose?: boolean}} [options]
 */
async function status(targetPath = ".", options = {}) {
    const { repo, syncEngine, config } = await setupCommandContext(targetPath, { syncEnabled: false });
    const syncStatus = await syncEngine.getStatus();
    output_1.out.infoBlock("STATUS");
    // Build up the key/value table; undefined values are rows to omit.
    const statusInfo = {};
    const fileCount = syncStatus.snapshot?.files.size || 0;
    statusInfo["URL"] = syncStatus.snapshot?.rootDirectoryUrl;
    statusInfo["Files"] = syncStatus.snapshot
        ? `${fileCount} tracked`
        : undefined;
    statusInfo["Backend"] = config?.subduction ? "subduction" : "websocket";
    statusInfo["Sync"] = config?.sync_server;
    // Add more detailed info in verbose mode
    if (options.verbose && syncStatus.snapshot?.rootDirectoryUrl) {
        try {
            const rootHandle = await repo.find(syncStatus.snapshot.rootDirectoryUrl);
            const rootDoc = await rootHandle.doc();
            if (rootDoc) {
                statusInfo["Entries"] = rootDoc.docs.length;
                statusInfo["Directories"] = syncStatus.snapshot.directories.size;
                if (rootDoc.lastSyncAt) {
                    const lastSyncDate = new Date(rootDoc.lastSyncAt);
                    statusInfo["Last sync"] = lastSyncDate.toISOString();
                }
            }
        }
        catch (error) {
            // Detailed info is optional; degrade to the basic table.
            output_1.out.warn(`Warning: Could not load detailed info: ${error}`);
        }
    }
    statusInfo["Changes"] = syncStatus.hasChanges
        ? `${syncStatus.changeCount} pending`
        : undefined;
    statusInfo["Status"] = !syncStatus.hasChanges ? "up to date" : undefined;
    output_1.out.obj(statusInfo);
    // Show verbose details if requested
    // NOTE(review): this re-fetches the root handle already fetched above,
    // and unlike the first fetch it is not wrapped in try/catch — a find()
    // failure here would reject the command; confirm whether that's intended.
    if (options.verbose && syncStatus.snapshot?.rootDirectoryUrl) {
        const rootHandle = await repo.find(syncStatus.snapshot.rootDirectoryUrl);
        const rootDoc = await rootHandle.doc();
        if (rootDoc) {
            output_1.out.infoBlock("HEADS");
            output_1.out.arr(rootHandle.heads());
            if (syncStatus.snapshot && syncStatus.snapshot.files.size > 0) {
                output_1.out.infoBlock("TRACKED FILES");
                const filesObj = {};
                syncStatus.snapshot.files.forEach((entry, filePath) => {
                    filesObj[filePath] = entry.url;
                });
                output_1.out.obj(filesObj);
            }
        }
    }
    if (syncStatus.hasChanges && !options.verbose) {
        output_1.out.info("Run 'pushwork diff' to see changes");
    }
    await safeRepoShutdown(repo);
}
|
|
527
|
+
/**
 * Show sync history.
 *
 * Currently a stub: reports the snapshot file's mtime as the last sync
 * time, or a "no history" message when no snapshot exists.
 *
 * @param {string} [targetPath="."] - Directory to inspect.
 * @param {object} _options - Unused (reserved for future history options).
 */
async function log(targetPath = ".", _options) {
    const { repo: logRepo, workingDir } = await setupCommandContext(targetPath, { syncEnabled: false });
    // TODO: Implement history tracking
    const snapshotFile = path.join(workingDir, config_2.ConfigManager.CONFIG_DIR, "snapshot.json");
    const hasSnapshot = await (0, utils_1.pathExists)(snapshotFile);
    if (!hasSnapshot) {
        output_1.out.info("No sync history found");
    }
    else {
        const snapshotStats = await fs.stat(snapshotFile);
        output_1.out.infoBlock("HISTORY", "Sync history (stub)");
        output_1.out.obj({ "Last sync": snapshotStats.mtime.toISOString() });
    }
    await safeRepoShutdown(logRepo);
}
|
|
544
|
+
/**
 * Checkout/restore from previous sync.
 *
 * Currently a stub: resolves the working directory, then reports that
 * the feature is not implemented along with the requested sync id.
 */
async function checkout(syncId, targetPath = ".", _options) {
    const context = await setupCommandContext(targetPath);
    // TODO: Implement checkout functionality
    output_1.out.warnBlock("NOT IMPLEMENTED", "Checkout not yet implemented");
    const details = {
        "Sync ID": syncId,
        Path: context.workingDir,
    };
    output_1.out.obj(details);
}
|
|
556
|
+
/**
 * Clone an existing synced directory from an AutomergeUrl.
 *
 * Validates the URL, prepares the target directory (honoring --force),
 * initializes a local repository with optional CLI overrides, connects
 * to the remote root directory document, downloads everything via a
 * full sync, then shuts the repo down and exits the process.
 *
 * @param rootUrl    Automerge URL of the remote root directory document.
 * @param targetPath Local directory to clone into.
 * @param options    CLI flags: force, sub, syncServer, syncServerStorageId.
 */
async function clone(rootUrl, targetPath, options) {
    // Validate that rootUrl is actually an Automerge URL
    if (!rootUrl.startsWith("automerge:")) {
        output_1.out.error(`Invalid Automerge URL: ${rootUrl}\n` +
            `Expected format: automerge:XXXXX\n` +
            `Usage: pushwork clone <automerge-url> <path>`);
        // NOTE(review): no `return` after this — presumably out.exit(1)
        // terminates the process; confirm against the out helper.
        output_1.out.exit(1);
    }
    const resolvedPath = path.resolve(targetPath);
    // `sub` selects the Subduction sync backend instead of websocket.
    const sub = options.sub ?? false;
    output_1.out.task(`Cloning ${rootUrl}`);
    if (sub) {
        output_1.out.taskLine("Using Subduction sync backend", true);
    }
    // Check if directory exists and handle --force
    if (await (0, utils_1.pathExists)(resolvedPath)) {
        const files = await fs.readdir(resolvedPath);
        if (files.length > 0 && !options.force) {
            output_1.out.error("Target directory is not empty. Use --force to overwrite");
            output_1.out.exit(1);
        }
    }
    else {
        await (0, utils_1.ensureDirectoryExists)(resolvedPath);
    }
    // Check if already initialized; --force wipes the previous config dir.
    const syncToolDir = path.join(resolvedPath, config_2.ConfigManager.CONFIG_DIR);
    if (await (0, utils_1.pathExists)(syncToolDir)) {
        if (!options.force) {
            output_1.out.error("Directory already initialized. Use --force to overwrite");
            output_1.out.exit(1);
        }
        await fs.rm(syncToolDir, { recursive: true, force: true });
    }
    // Initialize repository with optional CLI overrides
    output_1.out.update("Setting up repository");
    const { config, repo, syncEngine } = await initializeRepository(resolvedPath, {
        sync_server: options.syncServer,
        sync_server_storage_id: options.syncServerStorageId,
    }, sub);
    // Connect to existing root directory and download files
    output_1.out.update("Downloading files");
    await syncEngine.setRootDirectoryUrl(rootUrl);
    const result = await syncEngine.sync({ sub });
    output_1.out.update("Writing to disk");
    // Shut down the repo before reporting so storage is flushed to disk.
    await safeRepoShutdown(repo);
    output_1.out.done();
    output_1.out.obj({
        Path: resolvedPath,
        Files: `${result.filesChanged} downloaded`,
        Backend: config.subduction ? "subduction" : "websocket",
        Sync: config.sync_server,
    });
    output_1.out.successBlock("CLONED", rootUrl);
    // Explicit exit: lingering network/watch handles would otherwise keep
    // the process alive.
    process.exit();
}
|
|
615
|
+
/**
 * Get the root URL for the current pushwork repository.
 *
 * Reads the root directory URL from the local snapshot and prints only
 * the URL (script-friendly output). Exits with an error if the directory
 * is not initialized, the snapshot is missing/corrupt, or it contains no
 * root URL.
 *
 * @param targetPath Directory to inspect (defaults to cwd).
 */
async function url(targetPath = ".") {
    const resolvedPath = path.resolve(targetPath);
    const syncToolDir = path.join(resolvedPath, config_2.ConfigManager.CONFIG_DIR);
    if (!(await (0, utils_1.pathExists)(syncToolDir))) {
        output_1.out.error("Directory not initialized for sync");
        output_1.out.exit(1);
    }
    const snapshotPath = path.join(syncToolDir, "snapshot.json");
    if (!(await (0, utils_1.pathExists)(snapshotPath))) {
        output_1.out.error("No snapshot found");
        output_1.out.exit(1);
    }
    let snapshot;
    try {
        const snapshotData = await fs.readFile(snapshotPath, "utf-8");
        snapshot = JSON.parse(snapshotData);
    }
    catch (error) {
        // A corrupt or unreadable snapshot previously escaped as an
        // unhandled exception; fail with a clear message instead
        // (matches the error handling used by `rm`).
        output_1.out.error(`Failed to read snapshot: ${error}`);
        output_1.out.exit(1);
        return;
    }
    if (snapshot.rootDirectoryUrl) {
        // Output just the URL for easy use in scripts
        output_1.out.log(snapshot.rootDirectoryUrl);
    }
    else {
        output_1.out.error("No root URL found in snapshot");
        output_1.out.exit(1);
    }
}
|
|
641
|
+
/**
 * Remove local pushwork data and log the root URL for recovery.
 *
 * Deletes the local config/storage directory. Before deleting, the root
 * directory URL is read from the snapshot and echoed in the REMOVED
 * banner so the directory can be re-cloned later.
 *
 * @param targetPath Directory whose pushwork data should be removed.
 */
async function rm(targetPath = ".") {
    const resolvedPath = path.resolve(targetPath);
    const syncToolDir = path.join(resolvedPath, config_2.ConfigManager.CONFIG_DIR);
    if (!(await (0, utils_1.pathExists)(syncToolDir))) {
        output_1.out.error("Directory not initialized for sync");
        output_1.out.exit(1);
    }
    // Read the URL before deletion for recovery
    let recoveryUrl = "";
    const snapshotPath = path.join(syncToolDir, "snapshot.json");
    if (await (0, utils_1.pathExists)(snapshotPath)) {
        try {
            const snapshotData = await fs.readFile(snapshotPath, "utf-8");
            const snapshot = JSON.parse(snapshotData);
            // Keep recoveryUrl a string: the previous `|| null` leaked a
            // null into the REMOVED banner when the snapshot had no URL.
            recoveryUrl = snapshot.rootDirectoryUrl || "";
        }
        catch (error) {
            output_1.out.error(`Remove failed: ${error}`);
            output_1.out.exit(1);
            return;
        }
    }
    output_1.out.task("Removing local pushwork data");
    await fs.rm(syncToolDir, { recursive: true, force: true });
    output_1.out.done();
    output_1.out.warnBlock("REMOVED", recoveryUrl);
    process.exit();
}
|
|
672
|
+
/**
 * Commit local changes to the local repository without network sync.
 *
 * Reports errors (exiting non-zero when any occurred), then a summary
 * of files/directories changed plus any warnings, and exits.
 */
async function commit(targetPath, _options = {}) {
    output_1.out.task("Committing local changes");
    const context = await setupCommandContext(targetPath, { syncEnabled: false });
    const result = await context.syncEngine.commitLocal();
    await safeRepoShutdown(context.repo);
    output_1.out.done();
    const errorCount = result.errors.length;
    if (errorCount > 0) {
        output_1.out.errorBlock("ERROR", `${errorCount} errors`);
        for (const error of result.errors) {
            output_1.out.error(error);
        }
        // NOTE(review): no `return` here — presumably out.exit(1)
        // terminates the process; confirm against the out helper.
        output_1.out.exit(1);
    }
    output_1.out.successBlock("COMMITTED", `${result.filesChanged} files`);
    output_1.out.obj({
        Files: result.filesChanged,
        Directories: result.directoriesChanged,
    });
    for (const warning of result.warnings) {
        output_1.out.warn(warning);
    }
    process.exit();
}
|
|
693
|
+
/**
 * List tracked files from the current snapshot.
 *
 * Prints one path per line, sorted; with --verbose each line also shows
 * the file's Automerge document URL.
 */
async function ls(targetPath = ".", options = {}) {
    const { repo, syncEngine } = await setupCommandContext(targetPath, { syncEnabled: false });
    const syncStatus = await syncEngine.getStatus();
    if (!syncStatus.snapshot) {
        output_1.out.error("No snapshot found");
        await safeRepoShutdown(repo);
        output_1.out.exit(1);
        return;
    }
    const entries = [...syncStatus.snapshot.files.entries()];
    entries.sort(([pathA], [pathB]) => pathA.localeCompare(pathB));
    if (entries.length === 0) {
        output_1.out.info("No tracked files");
        await safeRepoShutdown(repo);
        return;
    }
    for (const [filePath, entry] of entries) {
        if (options.verbose) {
            // Long format with document URLs
            const url = entry?.url || "unknown";
            output_1.out.log(`${filePath} -> ${url}`);
        }
        else {
            // Simple list
            output_1.out.log(filePath);
        }
    }
    await safeRepoShutdown(repo);
}
|
|
726
|
+
/**
 * View or edit configuration.
 *
 * With --list, dumps the full merged configuration as JSON. With --get,
 * resolves a dotted key path (e.g. "sync.server") and prints its value.
 * Otherwise shows a short summary of the most useful settings.
 */
async function config(targetPath = ".", options = {}) {
    const resolvedPath = path.resolve(targetPath);
    const syncToolDir = path.join(resolvedPath, config_2.ConfigManager.CONFIG_DIR);
    if (!(await (0, utils_1.pathExists)(syncToolDir))) {
        output_1.out.error("Directory not initialized for sync");
        output_1.out.exit(1);
    }
    const configManager = new config_2.ConfigManager(resolvedPath);
    const config = await configManager.getMerged();
    if (options.list) {
        // Dump the entire merged configuration
        output_1.out.infoBlock("CONFIGURATION", "Full configuration");
        output_1.out.log(JSON.stringify(config, null, 2));
        return;
    }
    if (options.get) {
        // Walk the dotted key path against the merged config
        const value = options.get
            .split(".")
            .reduce((current, key) => current?.[key], config);
        if (value === undefined) {
            output_1.out.error(`Config key not found: ${options.get}`);
            output_1.out.exit(1);
        }
        else {
            output_1.out.log(typeof value === "object" ? JSON.stringify(value, null, 2) : value);
        }
        return;
    }
    // Default: show a short configuration summary
    output_1.out.infoBlock("CONFIGURATION");
    output_1.out.obj({
        Backend: config.subduction ? "subduction" : "websocket",
        "Sync server": config.sync_server || "default",
        "Sync enabled": config.sync_enabled ? "yes" : "no",
        Exclusions: config.exclude_patterns?.length,
    });
    output_1.out.log("");
    output_1.out.log("Use --list to see full configuration");
}
|
|
771
|
+
/**
 * Watch a directory and sync after build script completes.
 *
 * Runs the build script and a sync on startup and again whenever any
 * file under the watch directory changes. Re-entrancy is serialized:
 * changes arriving mid-run set a pending flag and trigger exactly one
 * follow-up run. Runs until SIGINT/SIGTERM.
 *
 * @param targetPath Directory containing the pushwork repo (defaults to cwd).
 * @param options    CLI flags: script (build command, default "pnpm build"),
 *                   watchDir (default "src"), verbose.
 */
async function watch(targetPath = ".", options = {}) {
    const script = options.script || "pnpm build";
    const watchDir = options.watchDir || "src"; // Default to watching 'src' directory
    const verbose = options.verbose || false;
    const { repo, syncEngine, config, workingDir } = await setupCommandContext(targetPath);
    // Backend choice comes from persisted config, not a CLI flag.
    const sub = config.subduction ?? false;
    const absoluteWatchDir = path.resolve(workingDir, watchDir);
    // Check if watch directory exists
    if (!(await (0, utils_1.pathExists)(absoluteWatchDir))) {
        output_1.out.error(`Watch directory does not exist: ${watchDir}`);
        await safeRepoShutdown(repo);
        output_1.out.exit(1);
        return;
    }
    output_1.out.spicyBlock("WATCHING", `${chalk_1.default.underline((0, utils_1.formatRelativePath)(watchDir))} for changes...`);
    if (sub) {
        output_1.out.info("Using Subduction sync backend (from config)");
    }
    output_1.out.info(`Build script: ${script}`);
    output_1.out.info(`Working directory: ${workingDir}`);
    // isProcessing: a build+sync cycle is in flight.
    // pendingChange: a change arrived during the in-flight cycle; run again.
    let isProcessing = false;
    let pendingChange = false;
    // Function to run build and sync
    const runBuildAndSync = async () => {
        if (isProcessing) {
            pendingChange = true;
            return;
        }
        isProcessing = true;
        pendingChange = false;
        try {
            output_1.out.spicy(`[${new Date().toLocaleTimeString()}] Changes detected...`);
            // Run build script
            const buildResult = await runScript(script, workingDir, verbose);
            if (!buildResult.success) {
                output_1.out.warn("Build script failed");
                if (buildResult.output) {
                    output_1.out.log("");
                    output_1.out.log(buildResult.output);
                }
                // Early-return path must clear the flag and reschedule itself
                // because it bypasses the finally block's `return` semantics.
                isProcessing = false;
                if (pendingChange) {
                    setImmediate(() => runBuildAndSync());
                }
                return;
            }
            output_1.out.info("Build completed...");
            // Run sync
            output_1.out.task("Syncing");
            const result = await syncEngine.sync({ sub });
            if (result.success) {
                if (result.filesChanged === 0 && result.directoriesChanged === 0) {
                    output_1.out.done("Already synced");
                }
                else {
                    output_1.out.done(`Synced ${result.filesChanged} ${plural("file", result.filesChanged)}`);
                }
            }
            else {
                // Partial failure: show at most 3 errors, then a count.
                output_1.out.warn(`⚠ Partial sync: ${result.filesChanged} updated, ${result.errors.length} errors`);
                result.errors
                    .slice(0, 3)
                    .forEach((error) => output_1.out.error(` ${error.path}: ${error.error.message}`));
                if (result.errors.length > 3) {
                    output_1.out.warn(` ... and ${result.errors.length - 3} more errors`);
                }
            }
            if (result.warnings.length > 0) {
                result.warnings
                    .slice(0, 3)
                    .forEach((warning) => output_1.out.warn(` ${warning}`));
                if (result.warnings.length > 3) {
                    output_1.out.warn(` ... and ${result.warnings.length - 3} more warnings`);
                }
            }
        }
        catch (error) {
            output_1.out.error(`Error during build/sync: ${error}`);
        }
        finally {
            isProcessing = false;
            // If changes occurred while we were processing, run again
            if (pendingChange) {
                setImmediate(() => runBuildAndSync());
            }
        }
    };
    // Set up file watcher - watches everything in the specified directory
    const watcher = fsSync.watch(absoluteWatchDir, { recursive: true }, (_eventType, filename) => {
        if (filename) {
            runBuildAndSync();
        }
    });
    // Handle graceful shutdown
    const shutdown = async () => {
        output_1.out.log("");
        output_1.out.info("Shutting down...");
        watcher.close();
        await safeRepoShutdown(repo);
        output_1.out.rainbow("Goodbye!");
        process.exit(0);
    };
    process.on("SIGINT", shutdown);
    process.on("SIGTERM", shutdown);
    // Run initial build and sync
    await runBuildAndSync();
    // Keep process alive
    await new Promise(() => { }); // Never resolves, keeps watching
}
|
|
883
|
+
/**
 * Run a shell command and wait for it to finish.
 *
 * @param {string} script - Command line to run; split on spaces and
 *   executed through the system shell.
 * @param {string} cwd - Working directory for the child process.
 * @param {boolean} verbose - When true, stream output straight to the
 *   terminal; when false, capture it so it can be shown on failure.
 * @returns {Promise<{success: boolean, output: string|undefined}>}
 *   Never rejects; spawn failures resolve with success=false.
 */
async function runScript(script, cwd, verbose) {
    return new Promise((resolve) => {
        const parts = script.split(" ");
        const command = parts[0];
        const args = parts.slice(1);
        const child = (0, child_process_1.spawn)(command, args, {
            cwd,
            stdio: verbose ? "inherit" : "pipe", // Show output directly if verbose, otherwise capture
            shell: true,
        });
        let captured = "";
        const finish = (success) => {
            resolve({
                success,
                output: !verbose ? captured : undefined,
            });
        };
        // Capture output if not verbose (so we can show it on error)
        if (!verbose) {
            const append = (data) => {
                captured += data.toString();
            };
            child.stdout?.on("data", append);
            child.stderr?.on("data", append);
        }
        child.on("close", (code) => finish(code === 0));
        child.on("error", (error) => {
            output_1.out.error(`Failed to run script: ${error.message}`);
            finish(false);
        });
    });
}
|
|
919
|
+
/**
 * Set root directory URL for an existing or new pushwork directory.
 *
 * Creates the config/storage directories, writes a minimal snapshot
 * containing only the root URL (no file entries — those are populated by
 * a later sync), and ensures a config file exists. In Subduction mode
 * the backend choice and server are persisted so subsequent `sync` runs
 * use the right endpoint.
 *
 * @param rootUrl    Automerge URL to install as the root directory.
 * @param targetPath Directory to initialize (defaults to cwd).
 * @param options    CLI flags: force, sub.
 */
async function root(rootUrl, targetPath = ".", options = {}) {
    if (!rootUrl.startsWith("automerge:")) {
        output_1.out.error(`Invalid Automerge URL: ${rootUrl}\n` +
            `Expected format: automerge:XXXXX`);
        // NOTE(review): no `return` here — presumably out.exit(1) terminates
        // the process; confirm against the out helper.
        output_1.out.exit(1);
    }
    const resolvedPath = path.resolve(targetPath);
    const syncToolDir = path.join(resolvedPath, config_2.ConfigManager.CONFIG_DIR);
    const sub = options.sub ?? false;
    // Refuse to clobber an existing pushwork setup unless --force was given.
    if (await (0, utils_1.pathExists)(syncToolDir)) {
        if (!options.force) {
            output_1.out.error("Directory already initialized for pushwork. Use --force to overwrite");
            output_1.out.exit(1);
        }
    }
    await (0, utils_1.ensureDirectoryExists)(syncToolDir);
    await (0, utils_1.ensureDirectoryExists)(path.join(syncToolDir, "automerge"));
    // Create minimal snapshot with just the root URL
    const snapshotPath = path.join(syncToolDir, "snapshot.json");
    const snapshot = {
        timestamp: Date.now(),
        rootPath: resolvedPath,
        rootDirectoryUrl: rootUrl,
        files: [],
        directories: [],
    };
    await fs.writeFile(snapshotPath, JSON.stringify(snapshot, null, 2), "utf-8");
    // Ensure config exists. In Subduction mode, persist the backend choice
    // and the correct server so subsequent `sync` runs use the right endpoint.
    const configManager = new config_2.ConfigManager(resolvedPath);
    if (sub) {
        let cfg = await configManager.initializeWithOverrides({
            subduction: true,
            sync_server: config_1.DEFAULT_SUBDUCTION_SERVER,
        });
        // Strip dead-baggage storage_id that getDefaultDirectoryConfig seeded.
        if (cfg.sync_server_storage_id !== undefined) {
            cfg = { ...cfg, sync_server_storage_id: undefined };
            await configManager.save(cfg);
        }
    }
    else {
        await configManager.initializeWithOverrides({});
    }
    output_1.out.successBlock("ROOT SET", rootUrl);
    if (sub) {
        output_1.out.info("Using Subduction sync backend");
    }
    process.exit();
}
|
|
972
|
+
/**
 * Naively pluralize a word: append "s" unless the count is exactly 1.
 *
 * @param {string} word - Singular form of the word.
 * @param {number} count - Quantity being described.
 * @returns {string} The word, pluralized when count !== 1.
 */
function plural(word, count) {
    if (count === 1) {
        return word;
    }
    return `${word}s`;
}
|
|
975
|
+
//# sourceMappingURL=commands.js.map
|