pushwork 1.0.4 → 1.0.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +87 -328
- package/dist/.pushwork/automerge/3P/Dm3ekE2pmjGnWvDaG3vSR7ww98/snapshot/aa2349c94955ea561f698720142f9d884a6872d9f82dc332d578c216beb0df0e +0 -0
- package/dist/.pushwork/automerge/st/orage-adapter-id +1 -0
- package/dist/.pushwork/config.json +15 -0
- package/dist/.pushwork/snapshot.json +7 -0
- package/dist/cli.js +231 -170
- package/dist/cli.js.map +1 -1
- package/dist/commands.d.ts +51 -0
- package/dist/commands.d.ts.map +1 -0
- package/dist/commands.js +799 -0
- package/dist/commands.js.map +1 -0
- package/dist/core/change-detection.d.ts +6 -19
- package/dist/core/change-detection.d.ts.map +1 -1
- package/dist/core/change-detection.js +101 -80
- package/dist/core/change-detection.js.map +1 -1
- package/dist/{config/index.d.ts → core/config.d.ts} +13 -3
- package/dist/core/config.d.ts.map +1 -0
- package/dist/{config/index.js → core/config.js} +55 -73
- package/dist/core/config.js.map +1 -0
- package/dist/core/index.d.ts +1 -0
- package/dist/core/index.d.ts.map +1 -1
- package/dist/core/index.js +1 -1
- package/dist/core/index.js.map +1 -1
- package/dist/core/move-detection.d.ts +12 -50
- package/dist/core/move-detection.d.ts.map +1 -1
- package/dist/core/move-detection.js +58 -139
- package/dist/core/move-detection.js.map +1 -1
- package/dist/core/snapshot.d.ts +0 -4
- package/dist/core/snapshot.d.ts.map +1 -1
- package/dist/core/snapshot.js +2 -11
- package/dist/core/snapshot.js.map +1 -1
- package/dist/core/sync-engine.d.ts +5 -11
- package/dist/core/sync-engine.d.ts.map +1 -1
- package/dist/core/sync-engine.js +220 -362
- package/dist/core/sync-engine.js.map +1 -1
- package/dist/index.d.ts +0 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +0 -6
- package/dist/index.js.map +1 -1
- package/dist/types/config.d.ts +43 -67
- package/dist/types/config.d.ts.map +1 -1
- package/dist/types/config.js +6 -0
- package/dist/types/config.js.map +1 -1
- package/dist/types/documents.d.ts +15 -3
- package/dist/types/documents.d.ts.map +1 -1
- package/dist/types/documents.js.map +1 -1
- package/dist/types/index.d.ts.map +1 -1
- package/dist/types/index.js +0 -3
- package/dist/types/index.js.map +1 -1
- package/dist/types/snapshot.d.ts +3 -21
- package/dist/types/snapshot.d.ts.map +1 -1
- package/dist/types/snapshot.js +0 -14
- package/dist/types/snapshot.js.map +1 -1
- package/dist/utils/content.d.ts.map +1 -1
- package/dist/utils/content.js +2 -6
- package/dist/utils/content.js.map +1 -1
- package/dist/utils/directory.d.ts +10 -0
- package/dist/utils/directory.d.ts.map +1 -0
- package/dist/utils/directory.js +37 -0
- package/dist/utils/directory.js.map +1 -0
- package/dist/utils/fs.d.ts +15 -2
- package/dist/utils/fs.d.ts.map +1 -1
- package/dist/utils/fs.js +63 -53
- package/dist/utils/fs.js.map +1 -1
- package/dist/utils/index.d.ts +1 -1
- package/dist/utils/index.d.ts.map +1 -1
- package/dist/utils/index.js +1 -4
- package/dist/utils/index.js.map +1 -1
- package/dist/utils/mime-types.d.ts.map +1 -1
- package/dist/utils/mime-types.js +11 -4
- package/dist/utils/mime-types.js.map +1 -1
- package/dist/utils/network-sync.d.ts +0 -6
- package/dist/utils/network-sync.d.ts.map +1 -1
- package/dist/utils/network-sync.js +55 -99
- package/dist/utils/network-sync.js.map +1 -1
- package/dist/utils/output.d.ts +129 -0
- package/dist/utils/output.d.ts.map +1 -0
- package/dist/utils/output.js +375 -0
- package/dist/utils/output.js.map +1 -0
- package/dist/utils/repo-factory.d.ts +2 -6
- package/dist/utils/repo-factory.d.ts.map +1 -1
- package/dist/utils/repo-factory.js +8 -22
- package/dist/utils/repo-factory.js.map +1 -1
- package/dist/utils/string-similarity.d.ts +14 -0
- package/dist/utils/string-similarity.d.ts.map +1 -0
- package/dist/utils/string-similarity.js +43 -0
- package/dist/utils/string-similarity.js.map +1 -0
- package/dist/utils/trace.d.ts +19 -0
- package/dist/utils/trace.d.ts.map +1 -0
- package/dist/utils/trace.js +68 -0
- package/dist/utils/trace.js.map +1 -0
- package/package.json +17 -12
- package/src/cli.ts +326 -252
- package/src/commands.ts +988 -0
- package/src/core/change-detection.ts +199 -162
- package/src/{config/index.ts → core/config.ts} +65 -82
- package/src/core/index.ts +1 -1
- package/src/core/move-detection.ts +74 -180
- package/src/core/snapshot.ts +2 -12
- package/src/core/sync-engine.ts +248 -499
- package/src/index.ts +0 -10
- package/src/types/config.ts +50 -72
- package/src/types/documents.ts +16 -3
- package/src/types/index.ts +0 -5
- package/src/types/snapshot.ts +1 -23
- package/src/utils/content.ts +2 -6
- package/src/utils/directory.ts +50 -0
- package/src/utils/fs.ts +67 -56
- package/src/utils/index.ts +1 -6
- package/src/utils/mime-types.ts +12 -4
- package/src/utils/network-sync.ts +79 -137
- package/src/utils/output.ts +450 -0
- package/src/utils/repo-factory.ts +13 -31
- package/src/utils/string-similarity.ts +54 -0
- package/src/utils/trace.ts +70 -0
- package/test/integration/exclude-patterns.test.ts +6 -15
- package/test/integration/fuzzer.test.ts +308 -391
- package/test/integration/init-sync.test.ts +89 -0
- package/test/integration/sync-deletion.test.ts +2 -61
- package/test/integration/sync-flow.test.ts +4 -24
- package/test/jest.setup.ts +34 -0
- package/test/unit/deletion-behavior.test.ts +3 -14
- package/test/unit/enhanced-mime-detection.test.ts +0 -22
- package/test/unit/snapshot.test.ts +2 -29
- package/test/unit/sync-convergence.test.ts +3 -198
- package/test/unit/sync-timing.test.ts +0 -44
- package/test/unit/utils.test.ts +0 -2
- package/tsconfig.json +3 -3
- package/dist/browser/browser-sync-engine.d.ts +0 -64
- package/dist/browser/browser-sync-engine.d.ts.map +0 -1
- package/dist/browser/browser-sync-engine.js +0 -303
- package/dist/browser/browser-sync-engine.js.map +0 -1
- package/dist/browser/filesystem-adapter.d.ts +0 -84
- package/dist/browser/filesystem-adapter.d.ts.map +0 -1
- package/dist/browser/filesystem-adapter.js +0 -413
- package/dist/browser/filesystem-adapter.js.map +0 -1
- package/dist/browser/index.d.ts +0 -36
- package/dist/browser/index.d.ts.map +0 -1
- package/dist/browser/index.js +0 -90
- package/dist/browser/index.js.map +0 -1
- package/dist/browser/types.d.ts +0 -70
- package/dist/browser/types.d.ts.map +0 -1
- package/dist/browser/types.js +0 -6
- package/dist/browser/types.js.map +0 -1
- package/dist/cli/commands.d.ts +0 -77
- package/dist/cli/commands.d.ts.map +0 -1
- package/dist/cli/commands.js +0 -904
- package/dist/cli/commands.js.map +0 -1
- package/dist/cli/index.d.ts +0 -2
- package/dist/cli/index.d.ts.map +0 -1
- package/dist/cli/index.js +0 -19
- package/dist/cli/index.js.map +0 -1
- package/dist/config/index.d.ts.map +0 -1
- package/dist/config/index.js.map +0 -1
- package/dist/core/isomorphic-snapshot.d.ts +0 -58
- package/dist/core/isomorphic-snapshot.d.ts.map +0 -1
- package/dist/core/isomorphic-snapshot.js +0 -204
- package/dist/core/isomorphic-snapshot.js.map +0 -1
- package/dist/platform/browser-filesystem.d.ts +0 -26
- package/dist/platform/browser-filesystem.d.ts.map +0 -1
- package/dist/platform/browser-filesystem.js +0 -91
- package/dist/platform/browser-filesystem.js.map +0 -1
- package/dist/platform/filesystem.d.ts +0 -29
- package/dist/platform/filesystem.d.ts.map +0 -1
- package/dist/platform/filesystem.js +0 -65
- package/dist/platform/filesystem.js.map +0 -1
- package/dist/platform/node-filesystem.d.ts +0 -21
- package/dist/platform/node-filesystem.d.ts.map +0 -1
- package/dist/platform/node-filesystem.js +0 -93
- package/dist/platform/node-filesystem.js.map +0 -1
- package/dist/utils/content-similarity.d.ts +0 -53
- package/dist/utils/content-similarity.d.ts.map +0 -1
- package/dist/utils/content-similarity.js +0 -155
- package/dist/utils/content-similarity.js.map +0 -1
- package/dist/utils/fs-browser.d.ts +0 -57
- package/dist/utils/fs-browser.d.ts.map +0 -1
- package/dist/utils/fs-browser.js +0 -311
- package/dist/utils/fs-browser.js.map +0 -1
- package/dist/utils/fs-node.d.ts +0 -53
- package/dist/utils/fs-node.d.ts.map +0 -1
- package/dist/utils/fs-node.js +0 -220
- package/dist/utils/fs-node.js.map +0 -1
- package/dist/utils/isomorphic.d.ts +0 -29
- package/dist/utils/isomorphic.d.ts.map +0 -1
- package/dist/utils/isomorphic.js +0 -139
- package/dist/utils/isomorphic.js.map +0 -1
- package/dist/utils/pure.d.ts +0 -25
- package/dist/utils/pure.d.ts.map +0 -1
- package/dist/utils/pure.js +0 -112
- package/dist/utils/pure.js.map +0 -1
- package/src/cli/commands.ts +0 -1207
- package/src/cli/index.ts +0 -2
- package/src/utils/content-similarity.ts +0 -194
- package/test/README-TESTING-GAPS.md +0 -174
- package/test/unit/content-similarity.test.ts +0 -236
package/src/commands.ts
ADDED
|
@@ -0,0 +1,988 @@
|
|
|
1
|
+
import * as path from "path";
|
|
2
|
+
import * as fs from "fs/promises";
|
|
3
|
+
import * as fsSync from "fs";
|
|
4
|
+
import { Repo, AutomergeUrl } from "@automerge/automerge-repo";
|
|
5
|
+
import * as diffLib from "diff";
|
|
6
|
+
import { spawn } from "child_process";
|
|
7
|
+
import {
|
|
8
|
+
CloneOptions,
|
|
9
|
+
SyncOptions,
|
|
10
|
+
DiffOptions,
|
|
11
|
+
LogOptions,
|
|
12
|
+
CheckoutOptions,
|
|
13
|
+
InitOptions,
|
|
14
|
+
ConfigOptions,
|
|
15
|
+
StatusOptions,
|
|
16
|
+
WatchOptions,
|
|
17
|
+
DirectoryConfig,
|
|
18
|
+
DirectoryDocument,
|
|
19
|
+
CommandOptions,
|
|
20
|
+
} from "./types";
|
|
21
|
+
import { SyncEngine } from "./core";
|
|
22
|
+
import { pathExists, ensureDirectoryExists, formatRelativePath } from "./utils";
|
|
23
|
+
import { ConfigManager } from "./core/config";
|
|
24
|
+
import { createRepo } from "./utils/repo-factory";
|
|
25
|
+
import { out } from "./utils/output";
|
|
26
|
+
import { waitForSync } from "./utils/network-sync";
|
|
27
|
+
import chalk from "chalk";
|
|
28
|
+
|
|
29
|
+
/**
 * Shared context that commands can use.
 * Produced by setupCommandContext() and consumed by every command that
 * operates on an already-initialized directory.
 */
interface CommandContext {
  // Automerge repo backing this working directory
  repo: Repo;
  // Engine that detects and applies changes between disk and the repo
  syncEngine: SyncEngine;
  // Merged configuration (global + local, with any per-command overrides)
  config: DirectoryConfig;
  // Absolute path of the initialized directory
  workingDir: string;
}
|
|
38
|
+
|
|
39
|
+
/**
|
|
40
|
+
* Initialize repository directory structure and configuration
|
|
41
|
+
* Shared logic for init and clone commands
|
|
42
|
+
*/
|
|
43
|
+
async function initializeRepository(
|
|
44
|
+
resolvedPath: string,
|
|
45
|
+
overrides: Partial<DirectoryConfig>
|
|
46
|
+
): Promise<{ config: DirectoryConfig; repo: Repo; syncEngine: SyncEngine }> {
|
|
47
|
+
// Create .pushwork directory structure
|
|
48
|
+
const syncToolDir = path.join(resolvedPath, ConfigManager.CONFIG_DIR);
|
|
49
|
+
await ensureDirectoryExists(syncToolDir);
|
|
50
|
+
await ensureDirectoryExists(path.join(syncToolDir, "automerge"));
|
|
51
|
+
|
|
52
|
+
// Create configuration with overrides
|
|
53
|
+
const configManager = new ConfigManager(resolvedPath);
|
|
54
|
+
const config = await configManager.initializeWithOverrides(overrides);
|
|
55
|
+
|
|
56
|
+
// Create repository and sync engine
|
|
57
|
+
const repo = await createRepo(resolvedPath, config);
|
|
58
|
+
const syncEngine = new SyncEngine(repo, resolvedPath, config);
|
|
59
|
+
|
|
60
|
+
return { config, repo, syncEngine };
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
/**
|
|
64
|
+
* Shared pre-action that ensures repository and sync engine are properly initialized
|
|
65
|
+
* This function always works, with or without network connectivity
|
|
66
|
+
*/
|
|
67
|
+
async function setupCommandContext(
|
|
68
|
+
workingDir: string = process.cwd(),
|
|
69
|
+
syncEnabled?: boolean
|
|
70
|
+
): Promise<CommandContext> {
|
|
71
|
+
const resolvedPath = path.resolve(workingDir);
|
|
72
|
+
|
|
73
|
+
// Check if initialized
|
|
74
|
+
const syncToolDir = path.join(resolvedPath, ConfigManager.CONFIG_DIR);
|
|
75
|
+
if (!(await pathExists(syncToolDir))) {
|
|
76
|
+
throw new Error(
|
|
77
|
+
'Directory not initialized for sync. Run "pushwork init" first.'
|
|
78
|
+
);
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
// Load configuration
|
|
82
|
+
const configManager = new ConfigManager(resolvedPath);
|
|
83
|
+
let config = await configManager.getMerged();
|
|
84
|
+
|
|
85
|
+
// Override sync_enabled if explicitly specified (e.g., for local-only operations)
|
|
86
|
+
if (syncEnabled !== undefined) {
|
|
87
|
+
config = { ...config, sync_enabled: syncEnabled };
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
// Create repo with config
|
|
91
|
+
const repo = await createRepo(resolvedPath, config);
|
|
92
|
+
|
|
93
|
+
// Create sync engine
|
|
94
|
+
const syncEngine = new SyncEngine(repo, resolvedPath, config);
|
|
95
|
+
|
|
96
|
+
return {
|
|
97
|
+
repo,
|
|
98
|
+
syncEngine,
|
|
99
|
+
config,
|
|
100
|
+
workingDir: resolvedPath,
|
|
101
|
+
};
|
|
102
|
+
}
|
|
103
|
+
/**
 * Safely shutdown a repository with proper error handling.
 *
 * WebSocket connections may still be closing while shutdown() runs; their
 * failures can surface either as a rejected promise (caught below) or as a
 * process-level uncaughtException (intercepted by the temporary handler).
 * Both paths suppress WebSocket errors only — they do not affect data
 * integrity.
 */
async function safeRepoShutdown(repo: Repo): Promise<void> {
  // Handle uncaught WebSocket errors that occur during shutdown
  const uncaughtErrorHandler = (err: Error) => {
    if (err.message.includes("WebSocket")) {
      // Silently suppress WebSocket errors during shutdown
      return;
    }
    // Re-throw non-WebSocket errors
    throw err;
  };

  // Add the error handler before shutdown
  process.on("uncaughtException", uncaughtErrorHandler);

  try {
    await repo.shutdown();
  } catch (shutdownError) {
    // WebSocket errors during shutdown are common and non-critical
    // Silently ignore them - they don't affect data integrity
    const errorMessage =
      shutdownError instanceof Error
        ? shutdownError.message
        : String(shutdownError);

    // Ignore WebSocket-related errors entirely
    if (errorMessage.includes("WebSocket")) {
      // Silently ignore WebSocket shutdown errors
      return;
    }
    // NOTE(review): non-WebSocket shutdown errors also fall through here and
    // are swallowed without a rethrow or log — confirm this is intentional.
  } finally {
    // Always detach the temporary handler so later uncaught exceptions
    // behave normally.
    process.off("uncaughtException", uncaughtErrorHandler);
  }
}
|
|
139
|
+
|
|
140
|
+
/**
 * Initialize sync in a directory.
 *
 * Creates the .pushwork structure and configuration, creates a fresh root
 * directory document, records its URL in the snapshot, optionally waits for
 * the document to reach the sync server, then runs an initial sync to
 * capture any pre-existing files. Exits the process when done.
 *
 * @param targetPath directory to initialize (created if missing)
 * @param options    CLI overrides (sync server URL / storage id)
 */
export async function init(
  targetPath: string,
  options: InitOptions = {}
): Promise<void> {
  const resolvedPath = path.resolve(targetPath);

  out.task(`Initializing`);

  await ensureDirectoryExists(resolvedPath);

  // Check if already initialized
  const syncToolDir = path.join(resolvedPath, ConfigManager.CONFIG_DIR);
  if (await pathExists(syncToolDir)) {
    out.error("Directory already initialized for sync");
    out.exit(1);
  }

  // Initialize repository with optional CLI overrides
  out.update("Setting up repository");
  const { repo, syncEngine, config } = await initializeRepository(resolvedPath, {
    sync_server: options.syncServer,
    sync_server_storage_id: options.syncServerStorageId,
  });

  // Create new root directory document
  out.update("Creating root directory");
  const rootDoc: DirectoryDocument = {
    "@patchwork": { type: "folder" },
    docs: [],
  };
  const rootHandle = repo.create(rootDoc);

  // Set root directory URL in snapshot
  await syncEngine.setRootDirectoryUrl(rootHandle.url);

  // Wait for root document to sync to server if sync is enabled
  // This ensures the document is uploaded before we exit
  // waitForSync() verifies the server has the document by comparing local and remote heads
  if (config.sync_enabled && config.sync_server_storage_id) {
    try {
      out.update("Syncing to server");
      await waitForSync([rootHandle], config.sync_server_storage_id);
    } catch (error) {
      out.taskLine(`Network sync failed: ${error}`, true);
      // Continue anyway - the document is created locally and will sync later
    }
  }

  // Run initial sync to capture existing files
  out.update("Running initial sync");
  const result = await syncEngine.sync();

  out.update("Writing to disk");
  await safeRepoShutdown(repo);

  out.done("Initialized");
  out.successBlock("INITIALIZED", rootHandle.url);
  if (result.filesChanged > 0) {
    // NOTE(review): `plural` is not among this file's visible imports —
    // presumably defined later in the file; confirm it is in scope.
    out.info(`Synced ${result.filesChanged} ${plural("file", result.filesChanged)}`);
  }

  // Explicit process exit rather than returning to the CLI caller.
  process.exit();
}
|
|
206
|
+
|
|
207
|
+
/**
 * Run bidirectional sync.
 *
 * With --dry-run, prints a preview of pending changes and moves (capped at
 * 10 files / 5 moves) without applying anything. Otherwise performs the
 * sync, shuts the repo down, and reports success, warnings, or a partial
 * result with errors. Exits the process when done.
 *
 * @param targetPath directory to sync (default: current directory)
 * @param options    sync options (dryRun)
 */
export async function sync(
  targetPath = ".",
  options: SyncOptions
): Promise<void> {
  out.task("Syncing");

  const { repo, syncEngine } = await setupCommandContext(targetPath);

  if (options.dryRun) {
    // Preview mode: report what would change, apply nothing.
    out.update("Analyzing changes");
    const preview = await syncEngine.previewChanges();

    if (preview.changes.length === 0 && preview.moves.length === 0) {
      out.done("Already synced");
      return;
    }

    out.done();
    out.infoBlock("CHANGES");
    out.obj({
      Changes: preview.changes.length.toString(),
      Moves:
        preview.moves.length > 0 ? preview.moves.length.toString() : undefined,
    });

    out.log("");
    out.log("Files:");
    // Cap the file listing at 10 entries to keep output readable.
    for (const change of preview.changes.slice(0, 10)) {
      const prefix =
        change.changeType === "local_only"
          ? "[local] "
          : change.changeType === "remote_only"
          ? "[remote] "
          : "[conflict]";
      out.log(`  ${prefix} ${change.path}`);
    }
    if (preview.changes.length > 10) {
      out.log(`  ... and ${preview.changes.length - 10} more`);
    }

    if (preview.moves.length > 0) {
      out.log("");
      out.log("Moves:");
      // Cap the move listing at 5 entries.
      for (const move of preview.moves.slice(0, 5)) {
        out.log(`  ${move.fromPath} → ${move.toPath}`);
      }
      if (preview.moves.length > 5) {
        out.log(`  ... and ${preview.moves.length - 5} more`);
      }
    }

    out.log("");
    out.log("Run without --dry-run to apply these changes");
  } else {
    // Real sync: apply changes, then flush everything to disk.
    const result = await syncEngine.sync();

    out.taskLine("Writing to disk");
    await safeRepoShutdown(repo);

    if (result.success) {
      out.done("Synced");
      // NOTE(review): the empty branch suppresses the SYNCED block when
      // nothing changed — presumably intentional; confirm.
      if (result.filesChanged === 0 && result.directoriesChanged === 0) {
      } else {
        out.successBlock(
          "SYNCED",
          `${result.filesChanged} ${plural("file", result.filesChanged)}`
        );
      }

      // Show at most 5 warnings.
      if (result.warnings.length > 0) {
        out.log("");
        out.warnBlock("WARNINGS", `${result.warnings.length} warnings`);
        for (const warning of result.warnings.slice(0, 5)) {
          out.log(`  ${warning}`);
        }
        if (result.warnings.length > 5) {
          out.log(`  ... and ${result.warnings.length - 5} more`);
        }
      }
    } else {
      // Partial sync: some files updated, some errored. Show at most 5 errors.
      out.done("partial", false);
      out.warnBlock(
        "PARTIAL",
        `${result.filesChanged} updated, ${result.errors.length} errors`
      );
      out.obj({
        Files: result.filesChanged,
        Errors: result.errors.length,
      });

      result.errors
        .slice(0, 5)
        .forEach((error) => out.error(`${error.path}: ${error.error.message}`));
      if (result.errors.length > 5) {
        out.warn(`... and ${result.errors.length - 5} more errors`);
      }
    }
  }

  // Explicit process exit rather than returning to the CLI caller.
  process.exit();
}
|
|
311
|
+
|
|
312
|
+
/**
|
|
313
|
+
* Show differences between local and remote
|
|
314
|
+
*/
|
|
315
|
+
export async function diff(
|
|
316
|
+
targetPath = ".",
|
|
317
|
+
options: DiffOptions
|
|
318
|
+
): Promise<void> {
|
|
319
|
+
out.task("Analyzing changes");
|
|
320
|
+
|
|
321
|
+
const { repo, syncEngine } = await setupCommandContext(targetPath, false);
|
|
322
|
+
const preview = await syncEngine.previewChanges();
|
|
323
|
+
|
|
324
|
+
out.done();
|
|
325
|
+
|
|
326
|
+
if (options.nameOnly) {
|
|
327
|
+
for (const change of preview.changes) {
|
|
328
|
+
out.log(change.path);
|
|
329
|
+
}
|
|
330
|
+
return;
|
|
331
|
+
}
|
|
332
|
+
|
|
333
|
+
if (preview.changes.length === 0) {
|
|
334
|
+
out.success("No changes detected");
|
|
335
|
+
await safeRepoShutdown(repo);
|
|
336
|
+
out.exit();
|
|
337
|
+
return;
|
|
338
|
+
}
|
|
339
|
+
|
|
340
|
+
out.warn(`${preview.changes.length} changes detected`);
|
|
341
|
+
|
|
342
|
+
for (const change of preview.changes) {
|
|
343
|
+
const prefix =
|
|
344
|
+
change.changeType === "local_only"
|
|
345
|
+
? "[local] "
|
|
346
|
+
: change.changeType === "remote_only"
|
|
347
|
+
? "[remote] "
|
|
348
|
+
: "[conflict]";
|
|
349
|
+
|
|
350
|
+
try {
|
|
351
|
+
// Get old content (from snapshot/remote)
|
|
352
|
+
const oldContent = change.remoteContent || "";
|
|
353
|
+
// Get new content (current local)
|
|
354
|
+
const newContent = change.localContent || "";
|
|
355
|
+
|
|
356
|
+
// Convert binary content to string representation if needed
|
|
357
|
+
const oldText =
|
|
358
|
+
typeof oldContent === "string"
|
|
359
|
+
? oldContent
|
|
360
|
+
: `<binary content: ${oldContent.length} bytes>`;
|
|
361
|
+
const newText =
|
|
362
|
+
typeof newContent === "string"
|
|
363
|
+
? newContent
|
|
364
|
+
: `<binary content: ${newContent.length} bytes>`;
|
|
365
|
+
|
|
366
|
+
// Generate unified diff
|
|
367
|
+
const diffResult = diffLib.createPatch(
|
|
368
|
+
change.path,
|
|
369
|
+
oldText,
|
|
370
|
+
newText,
|
|
371
|
+
"previous",
|
|
372
|
+
"current"
|
|
373
|
+
);
|
|
374
|
+
|
|
375
|
+
// Skip the header lines and process the diff
|
|
376
|
+
const lines = diffResult.split("\n").slice(4); // Skip index, ===, ---, +++ lines
|
|
377
|
+
|
|
378
|
+
if (lines.length === 0 || (lines.length === 1 && lines[0] === "")) {
|
|
379
|
+
out.log(`${prefix}${change.path} (content identical)`, "cyan");
|
|
380
|
+
continue;
|
|
381
|
+
}
|
|
382
|
+
|
|
383
|
+
// Extract first hunk header and show inline with path
|
|
384
|
+
let firstHunk = "";
|
|
385
|
+
let diffLines = lines;
|
|
386
|
+
if (lines[0]?.startsWith("@@")) {
|
|
387
|
+
firstHunk = ` ${lines[0]}`;
|
|
388
|
+
diffLines = lines.slice(1);
|
|
389
|
+
}
|
|
390
|
+
|
|
391
|
+
out.log(`${prefix}${change.path}${firstHunk}`, "cyan");
|
|
392
|
+
|
|
393
|
+
for (const line of diffLines) {
|
|
394
|
+
if (line.startsWith("@@")) {
|
|
395
|
+
// Additional hunk headers
|
|
396
|
+
out.log(line, "dim");
|
|
397
|
+
} else if (line.startsWith("+")) {
|
|
398
|
+
// Added line
|
|
399
|
+
out.log(line, "green");
|
|
400
|
+
} else if (line.startsWith("-")) {
|
|
401
|
+
// Removed line
|
|
402
|
+
out.log(line, "red");
|
|
403
|
+
} else if (line.startsWith(" ") || line === "") {
|
|
404
|
+
// Context line or empty
|
|
405
|
+
out.log(line, "dim");
|
|
406
|
+
}
|
|
407
|
+
}
|
|
408
|
+
} catch (error) {
|
|
409
|
+
out.log(`${prefix}${change.path} (diff error: ${error})`, "cyan");
|
|
410
|
+
}
|
|
411
|
+
}
|
|
412
|
+
|
|
413
|
+
await safeRepoShutdown(repo);
|
|
414
|
+
}
|
|
415
|
+
|
|
416
|
+
/**
 * Show sync status.
 *
 * Prints the root URL, tracked-file count, sync server, and pending-change
 * summary. In verbose mode, additionally loads the root directory document
 * to report entry counts, last-sync time, document heads, and the full
 * tracked-file map.
 *
 * @param targetPath directory to inspect (default: current directory)
 * @param options    status options (verbose)
 */
export async function status(
  targetPath: string = ".",
  options: StatusOptions = {}
): Promise<void> {
  // Sync disabled: status is a purely local, read-only operation.
  const { repo, syncEngine, config } = await setupCommandContext(
    targetPath,
    false
  );
  const syncStatus = await syncEngine.getStatus();

  out.infoBlock("STATUS");

  const statusInfo: Record<string, any> = {};
  const fileCount = syncStatus.snapshot?.files.size || 0;

  statusInfo["URL"] = syncStatus.snapshot?.rootDirectoryUrl;
  statusInfo["Files"] = syncStatus.snapshot
    ? `${fileCount} tracked`
    : undefined;
  statusInfo["Sync"] = config?.sync_server;

  // Add more detailed info in verbose mode
  if (options.verbose && syncStatus.snapshot?.rootDirectoryUrl) {
    try {
      const rootHandle = await repo.find<DirectoryDocument>(
        syncStatus.snapshot.rootDirectoryUrl
      );
      const rootDoc = await rootHandle.doc();

      if (rootDoc) {
        statusInfo["Entries"] = rootDoc.docs.length;
        statusInfo["Directories"] = syncStatus.snapshot.directories.size;
        if (rootDoc.lastSyncAt) {
          const lastSyncDate = new Date(rootDoc.lastSyncAt);
          statusInfo["Last sync"] = lastSyncDate.toISOString();
        }
      }
    } catch (error) {
      out.warn(`Warning: Could not load detailed info: ${error}`);
    }
  }

  statusInfo["Changes"] = syncStatus.hasChanges
    ? `${syncStatus.changeCount} pending`
    : undefined;
  statusInfo["Status"] = !syncStatus.hasChanges ? "up to date" : undefined;

  out.obj(statusInfo);

  // Show verbose details if requested
  // NOTE(review): this re-fetches the same root document found above, and
  // unlike the first verbose section it has no try/catch — a find() failure
  // here would propagate. Confirm whether that is intentional.
  if (options.verbose && syncStatus.snapshot?.rootDirectoryUrl) {
    const rootHandle = await repo.find<DirectoryDocument>(
      syncStatus.snapshot.rootDirectoryUrl
    );
    const rootDoc = await rootHandle.doc();

    if (rootDoc) {
      out.infoBlock("HEADS");
      out.arr(rootHandle.heads());

      if (syncStatus.snapshot && syncStatus.snapshot.files.size > 0) {
        out.infoBlock("TRACKED FILES");
        const filesObj: Record<string, string> = {};
        syncStatus.snapshot.files.forEach((entry, filePath) => {
          filesObj[filePath] = entry.url;
        });
        out.obj(filesObj);
      }
    }
  }

  if (syncStatus.hasChanges && !options.verbose) {
    out.info("Run 'pushwork diff' to see changes");
  }

  await safeRepoShutdown(repo);
}
|
|
496
|
+
|
|
497
|
+
/**
|
|
498
|
+
* Show sync history
|
|
499
|
+
*/
|
|
500
|
+
export async function log(
|
|
501
|
+
targetPath = ".",
|
|
502
|
+
_options: LogOptions
|
|
503
|
+
): Promise<void> {
|
|
504
|
+
const { repo: logRepo, workingDir } = await setupCommandContext(
|
|
505
|
+
targetPath,
|
|
506
|
+
false
|
|
507
|
+
);
|
|
508
|
+
|
|
509
|
+
// TODO: Implement history tracking
|
|
510
|
+
const snapshotPath = path.join(
|
|
511
|
+
workingDir,
|
|
512
|
+
ConfigManager.CONFIG_DIR,
|
|
513
|
+
"snapshot.json"
|
|
514
|
+
);
|
|
515
|
+
if (await pathExists(snapshotPath)) {
|
|
516
|
+
const stats = await fs.stat(snapshotPath);
|
|
517
|
+
out.infoBlock("HISTORY", "Sync history (stub)");
|
|
518
|
+
out.obj({ "Last sync": stats.mtime.toISOString() });
|
|
519
|
+
} else {
|
|
520
|
+
out.info("No sync history found");
|
|
521
|
+
}
|
|
522
|
+
|
|
523
|
+
await safeRepoShutdown(logRepo);
|
|
524
|
+
}
|
|
525
|
+
|
|
526
|
+
/**
|
|
527
|
+
* Checkout/restore from previous sync
|
|
528
|
+
*/
|
|
529
|
+
export async function checkout(
|
|
530
|
+
syncId: string,
|
|
531
|
+
targetPath = ".",
|
|
532
|
+
_options: CheckoutOptions
|
|
533
|
+
): Promise<void> {
|
|
534
|
+
const { workingDir } = await setupCommandContext(targetPath);
|
|
535
|
+
|
|
536
|
+
// TODO: Implement checkout functionality
|
|
537
|
+
out.warnBlock("NOT IMPLEMENTED", "Checkout not yet implemented");
|
|
538
|
+
out.obj({
|
|
539
|
+
"Sync ID": syncId,
|
|
540
|
+
Path: workingDir,
|
|
541
|
+
});
|
|
542
|
+
}
|
|
543
|
+
|
|
544
|
+
/**
 * Clone an existing synced directory from an AutomergeUrl.
 *
 * Validates the target (refusing non-empty or already-initialized
 * directories unless --force), sets up the .pushwork structure, points the
 * sync engine at the given root URL, and runs a sync to download the files.
 * Exits the process when done.
 *
 * @param rootUrl    Automerge URL of the root directory document to clone
 * @param targetPath directory to clone into (created if missing)
 * @param options    clone options (force, sync server overrides)
 */
export async function clone(
  rootUrl: string,
  targetPath: string,
  options: CloneOptions
): Promise<void> {
  const resolvedPath = path.resolve(targetPath);

  out.task(`Cloning ${rootUrl}`);

  // Check if directory exists and handle --force
  if (await pathExists(resolvedPath)) {
    const files = await fs.readdir(resolvedPath);
    if (files.length > 0 && !options.force) {
      out.error("Target directory is not empty. Use --force to overwrite");
      out.exit(1);
    }
  } else {
    await ensureDirectoryExists(resolvedPath);
  }

  // Check if already initialized
  const syncToolDir = path.join(resolvedPath, ConfigManager.CONFIG_DIR);
  if (await pathExists(syncToolDir)) {
    if (!options.force) {
      out.error("Directory already initialized. Use --force to overwrite");
      out.exit(1);
    }
    // --force: wipe the previous pushwork state before re-initializing.
    await fs.rm(syncToolDir, { recursive: true, force: true });
  }

  // Initialize repository with optional CLI overrides
  out.update("Setting up repository");
  const { config, repo, syncEngine } = await initializeRepository(
    resolvedPath,
    {
      sync_server: options.syncServer,
      sync_server_storage_id: options.syncServerStorageId,
    }
  );

  // Connect to existing root directory and download files
  out.update("Downloading files");
  // NOTE(review): rootUrl is cast without validation — an invalid URL
  // surfaces later from the sync engine; consider validating here.
  await syncEngine.setRootDirectoryUrl(rootUrl as AutomergeUrl);
  const result = await syncEngine.sync();

  out.update("Writing to disk");
  await safeRepoShutdown(repo);

  out.done();

  out.obj({
    Path: resolvedPath,
    Files: `${result.filesChanged} downloaded`,
    Sync: config.sync_server,
  });
  out.successBlock("CLONED", rootUrl);
  // Explicit process exit rather than returning to the CLI caller.
  process.exit();
}
|
|
605
|
+
|
|
606
|
+
/**
|
|
607
|
+
* Get the root URL for the current pushwork repository
|
|
608
|
+
*/
|
|
609
|
+
export async function url(targetPath: string = "."): Promise<void> {
|
|
610
|
+
const resolvedPath = path.resolve(targetPath);
|
|
611
|
+
const syncToolDir = path.join(resolvedPath, ConfigManager.CONFIG_DIR);
|
|
612
|
+
|
|
613
|
+
if (!(await pathExists(syncToolDir))) {
|
|
614
|
+
out.error("Directory not initialized for sync");
|
|
615
|
+
out.exit(1);
|
|
616
|
+
}
|
|
617
|
+
|
|
618
|
+
const snapshotPath = path.join(syncToolDir, "snapshot.json");
|
|
619
|
+
if (!(await pathExists(snapshotPath))) {
|
|
620
|
+
out.error("No snapshot found");
|
|
621
|
+
out.exit(1);
|
|
622
|
+
}
|
|
623
|
+
|
|
624
|
+
const snapshotData = await fs.readFile(snapshotPath, "utf-8");
|
|
625
|
+
const snapshot = JSON.parse(snapshotData);
|
|
626
|
+
|
|
627
|
+
if (snapshot.rootDirectoryUrl) {
|
|
628
|
+
// Output just the URL for easy use in scripts
|
|
629
|
+
out.log(snapshot.rootDirectoryUrl);
|
|
630
|
+
} else {
|
|
631
|
+
out.error("No root URL found in snapshot");
|
|
632
|
+
out.exit(1);
|
|
633
|
+
}
|
|
634
|
+
}
|
|
635
|
+
|
|
636
|
+
/**
|
|
637
|
+
* Remove local pushwork data and log URL for recovery
|
|
638
|
+
*/
|
|
639
|
+
export async function rm(targetPath: string = "."): Promise<void> {
|
|
640
|
+
const resolvedPath = path.resolve(targetPath);
|
|
641
|
+
const syncToolDir = path.join(resolvedPath, ConfigManager.CONFIG_DIR);
|
|
642
|
+
|
|
643
|
+
if (!(await pathExists(syncToolDir))) {
|
|
644
|
+
out.error("Directory not initialized for sync");
|
|
645
|
+
out.exit(1);
|
|
646
|
+
}
|
|
647
|
+
|
|
648
|
+
// Read the URL before deletion for recovery
|
|
649
|
+
let recoveryUrl = "";
|
|
650
|
+
const snapshotPath = path.join(syncToolDir, "snapshot.json");
|
|
651
|
+
if (await pathExists(snapshotPath)) {
|
|
652
|
+
try {
|
|
653
|
+
const snapshotData = await fs.readFile(snapshotPath, "utf-8");
|
|
654
|
+
const snapshot = JSON.parse(snapshotData);
|
|
655
|
+
recoveryUrl = snapshot.rootDirectoryUrl || null;
|
|
656
|
+
} catch (error) {
|
|
657
|
+
out.error(`Remove failed: ${error}`);
|
|
658
|
+
out.exit(1);
|
|
659
|
+
return;
|
|
660
|
+
}
|
|
661
|
+
}
|
|
662
|
+
|
|
663
|
+
out.task("Removing local pushwork data");
|
|
664
|
+
await fs.rm(syncToolDir, { recursive: true, force: true });
|
|
665
|
+
out.done();
|
|
666
|
+
|
|
667
|
+
out.warnBlock("REMOVED", recoveryUrl);
|
|
668
|
+
process.exit();
|
|
669
|
+
}
|
|
670
|
+
|
|
671
|
+
export async function commit(
|
|
672
|
+
targetPath: string,
|
|
673
|
+
_options: CommandOptions = {}
|
|
674
|
+
): Promise<void> {
|
|
675
|
+
out.task("Committing local changes");
|
|
676
|
+
|
|
677
|
+
const { repo, syncEngine } = await setupCommandContext(targetPath, false);
|
|
678
|
+
|
|
679
|
+
const result = await syncEngine.commitLocal();
|
|
680
|
+
await safeRepoShutdown(repo);
|
|
681
|
+
|
|
682
|
+
out.done();
|
|
683
|
+
|
|
684
|
+
if (result.errors.length > 0) {
|
|
685
|
+
out.errorBlock("ERROR", `${result.errors.length} errors`);
|
|
686
|
+
result.errors.forEach((error) => out.error(error));
|
|
687
|
+
out.exit(1);
|
|
688
|
+
}
|
|
689
|
+
|
|
690
|
+
out.successBlock("COMMITTED", `${result.filesChanged} files`);
|
|
691
|
+
out.obj({
|
|
692
|
+
Files: result.filesChanged,
|
|
693
|
+
Directories: result.directoriesChanged,
|
|
694
|
+
});
|
|
695
|
+
|
|
696
|
+
if (result.warnings.length > 0) {
|
|
697
|
+
result.warnings.forEach((warning) => out.warn(warning));
|
|
698
|
+
}
|
|
699
|
+
process.exit();
|
|
700
|
+
}
|
|
701
|
+
|
|
702
|
+
/**
|
|
703
|
+
* List tracked files
|
|
704
|
+
*/
|
|
705
|
+
export async function ls(
|
|
706
|
+
targetPath: string = ".",
|
|
707
|
+
options: CommandOptions = {}
|
|
708
|
+
): Promise<void> {
|
|
709
|
+
const { repo, syncEngine } = await setupCommandContext(targetPath, false);
|
|
710
|
+
const syncStatus = await syncEngine.getStatus();
|
|
711
|
+
|
|
712
|
+
if (!syncStatus.snapshot) {
|
|
713
|
+
out.error("No snapshot found");
|
|
714
|
+
await safeRepoShutdown(repo);
|
|
715
|
+
out.exit(1);
|
|
716
|
+
return;
|
|
717
|
+
}
|
|
718
|
+
|
|
719
|
+
const files = Array.from(syncStatus.snapshot.files.entries()).sort(
|
|
720
|
+
([pathA], [pathB]) => pathA.localeCompare(pathB)
|
|
721
|
+
);
|
|
722
|
+
|
|
723
|
+
if (files.length === 0) {
|
|
724
|
+
out.info("No tracked files");
|
|
725
|
+
await safeRepoShutdown(repo);
|
|
726
|
+
return;
|
|
727
|
+
}
|
|
728
|
+
|
|
729
|
+
if (options.verbose) {
|
|
730
|
+
// Long format with URLs
|
|
731
|
+
for (const [filePath, entry] of files) {
|
|
732
|
+
const url = entry?.url || "unknown";
|
|
733
|
+
out.log(`${filePath} -> ${url}`);
|
|
734
|
+
}
|
|
735
|
+
} else {
|
|
736
|
+
// Simple list
|
|
737
|
+
for (const [filePath] of files) {
|
|
738
|
+
out.log(filePath);
|
|
739
|
+
}
|
|
740
|
+
}
|
|
741
|
+
|
|
742
|
+
await safeRepoShutdown(repo);
|
|
743
|
+
}
|
|
744
|
+
|
|
745
|
+
/**
|
|
746
|
+
* View or edit configuration
|
|
747
|
+
*/
|
|
748
|
+
export async function config(
|
|
749
|
+
targetPath: string = ".",
|
|
750
|
+
options: ConfigOptions = {}
|
|
751
|
+
): Promise<void> {
|
|
752
|
+
const resolvedPath = path.resolve(targetPath);
|
|
753
|
+
const syncToolDir = path.join(resolvedPath, ConfigManager.CONFIG_DIR);
|
|
754
|
+
|
|
755
|
+
if (!(await pathExists(syncToolDir))) {
|
|
756
|
+
out.error("Directory not initialized for sync");
|
|
757
|
+
out.exit(1);
|
|
758
|
+
}
|
|
759
|
+
|
|
760
|
+
const configManager = new ConfigManager(resolvedPath);
|
|
761
|
+
const config = await configManager.getMerged();
|
|
762
|
+
|
|
763
|
+
if (options.list) {
|
|
764
|
+
// List all configuration
|
|
765
|
+
out.infoBlock("CONFIGURATION", "Full configuration");
|
|
766
|
+
out.log(JSON.stringify(config, null, 2));
|
|
767
|
+
} else if (options.get) {
|
|
768
|
+
// Get specific config value
|
|
769
|
+
const keys = options.get.split(".");
|
|
770
|
+
let value: any = config;
|
|
771
|
+
for (const key of keys) {
|
|
772
|
+
value = value?.[key];
|
|
773
|
+
}
|
|
774
|
+
if (value !== undefined) {
|
|
775
|
+
out.log(
|
|
776
|
+
typeof value === "object" ? JSON.stringify(value, null, 2) : value
|
|
777
|
+
);
|
|
778
|
+
} else {
|
|
779
|
+
out.error(`Config key not found: ${options.get}`);
|
|
780
|
+
out.exit(1);
|
|
781
|
+
}
|
|
782
|
+
} else {
|
|
783
|
+
// Show basic config info
|
|
784
|
+
out.infoBlock("CONFIGURATION");
|
|
785
|
+
out.obj({
|
|
786
|
+
"Sync server": config.sync_server || "default",
|
|
787
|
+
"Sync enabled": config.sync_enabled ? "yes" : "no",
|
|
788
|
+
Exclusions: config.exclude_patterns?.length,
|
|
789
|
+
});
|
|
790
|
+
out.log("");
|
|
791
|
+
out.log("Use --list to see full configuration");
|
|
792
|
+
}
|
|
793
|
+
}
|
|
794
|
+
|
|
795
|
+
/**
|
|
796
|
+
* Watch a directory and sync after build script completes
|
|
797
|
+
*/
|
|
798
|
+
export async function watch(
|
|
799
|
+
targetPath: string = ".",
|
|
800
|
+
options: WatchOptions = {}
|
|
801
|
+
): Promise<void> {
|
|
802
|
+
const script = options.script || "pnpm build";
|
|
803
|
+
const watchDir = options.watchDir || "src"; // Default to watching 'src' directory
|
|
804
|
+
const verbose = options.verbose || false;
|
|
805
|
+
const { repo, syncEngine, workingDir } = await setupCommandContext(
|
|
806
|
+
targetPath
|
|
807
|
+
);
|
|
808
|
+
|
|
809
|
+
const absoluteWatchDir = path.resolve(workingDir, watchDir);
|
|
810
|
+
|
|
811
|
+
// Check if watch directory exists
|
|
812
|
+
if (!(await pathExists(absoluteWatchDir))) {
|
|
813
|
+
out.error(`Watch directory does not exist: ${watchDir}`);
|
|
814
|
+
await safeRepoShutdown(repo);
|
|
815
|
+
out.exit(1);
|
|
816
|
+
return;
|
|
817
|
+
}
|
|
818
|
+
|
|
819
|
+
out.spicyBlock(
|
|
820
|
+
"WATCHING",
|
|
821
|
+
`${chalk.underline(formatRelativePath(watchDir))} for changes...`
|
|
822
|
+
);
|
|
823
|
+
out.info(`Build script: ${script}`);
|
|
824
|
+
out.info(`Working directory: ${workingDir}`);
|
|
825
|
+
|
|
826
|
+
let isProcessing = false;
|
|
827
|
+
let pendingChange = false;
|
|
828
|
+
|
|
829
|
+
// Function to run build and sync
|
|
830
|
+
const runBuildAndSync = async () => {
|
|
831
|
+
if (isProcessing) {
|
|
832
|
+
pendingChange = true;
|
|
833
|
+
return;
|
|
834
|
+
}
|
|
835
|
+
|
|
836
|
+
isProcessing = true;
|
|
837
|
+
pendingChange = false;
|
|
838
|
+
|
|
839
|
+
try {
|
|
840
|
+
out.spicy(`[${new Date().toLocaleTimeString()}] Changes detected...`);
|
|
841
|
+
// Run build script
|
|
842
|
+
const buildResult = await runScript(script, workingDir, verbose);
|
|
843
|
+
|
|
844
|
+
if (!buildResult.success) {
|
|
845
|
+
out.warn("Build script failed");
|
|
846
|
+
if (buildResult.output) {
|
|
847
|
+
out.log("");
|
|
848
|
+
out.log(buildResult.output);
|
|
849
|
+
}
|
|
850
|
+
isProcessing = false;
|
|
851
|
+
if (pendingChange) {
|
|
852
|
+
setImmediate(() => runBuildAndSync());
|
|
853
|
+
}
|
|
854
|
+
return;
|
|
855
|
+
}
|
|
856
|
+
|
|
857
|
+
out.info("Build completed...");
|
|
858
|
+
|
|
859
|
+
// Run sync
|
|
860
|
+
out.task("Syncing");
|
|
861
|
+
const result = await syncEngine.sync();
|
|
862
|
+
|
|
863
|
+
if (result.success) {
|
|
864
|
+
if (result.filesChanged === 0 && result.directoriesChanged === 0) {
|
|
865
|
+
out.done("Already synced");
|
|
866
|
+
} else {
|
|
867
|
+
out.done(
|
|
868
|
+
`Synced ${result.filesChanged} ${plural(
|
|
869
|
+
"file",
|
|
870
|
+
result.filesChanged
|
|
871
|
+
)}`
|
|
872
|
+
);
|
|
873
|
+
}
|
|
874
|
+
} else {
|
|
875
|
+
out.warn(
|
|
876
|
+
`⚠ Partial sync: ${result.filesChanged} updated, ${result.errors.length} errors`
|
|
877
|
+
);
|
|
878
|
+
result.errors
|
|
879
|
+
.slice(0, 3)
|
|
880
|
+
.forEach((error) =>
|
|
881
|
+
out.error(` ${error.path}: ${error.error.message}`)
|
|
882
|
+
);
|
|
883
|
+
if (result.errors.length > 3) {
|
|
884
|
+
out.warn(` ... and ${result.errors.length - 3} more errors`);
|
|
885
|
+
}
|
|
886
|
+
}
|
|
887
|
+
|
|
888
|
+
if (result.warnings.length > 0) {
|
|
889
|
+
result.warnings
|
|
890
|
+
.slice(0, 3)
|
|
891
|
+
.forEach((warning) => out.warn(` ${warning}`));
|
|
892
|
+
if (result.warnings.length > 3) {
|
|
893
|
+
out.warn(` ... and ${result.warnings.length - 3} more warnings`);
|
|
894
|
+
}
|
|
895
|
+
}
|
|
896
|
+
} catch (error) {
|
|
897
|
+
out.error(`Error during build/sync: ${error}`);
|
|
898
|
+
} finally {
|
|
899
|
+
isProcessing = false;
|
|
900
|
+
|
|
901
|
+
// If changes occurred while we were processing, run again
|
|
902
|
+
if (pendingChange) {
|
|
903
|
+
setImmediate(() => runBuildAndSync());
|
|
904
|
+
}
|
|
905
|
+
}
|
|
906
|
+
};
|
|
907
|
+
|
|
908
|
+
// Set up file watcher - watches everything in the specified directory
|
|
909
|
+
const watcher = fsSync.watch(
|
|
910
|
+
absoluteWatchDir,
|
|
911
|
+
{ recursive: true },
|
|
912
|
+
(_eventType, filename) => {
|
|
913
|
+
if (filename) {
|
|
914
|
+
runBuildAndSync();
|
|
915
|
+
}
|
|
916
|
+
}
|
|
917
|
+
);
|
|
918
|
+
|
|
919
|
+
// Handle graceful shutdown
|
|
920
|
+
const shutdown = async () => {
|
|
921
|
+
out.log("");
|
|
922
|
+
out.info("Shutting down...");
|
|
923
|
+
watcher.close();
|
|
924
|
+
await safeRepoShutdown(repo);
|
|
925
|
+
out.rainbow("Goodbye!");
|
|
926
|
+
process.exit(0);
|
|
927
|
+
};
|
|
928
|
+
|
|
929
|
+
process.on("SIGINT", shutdown);
|
|
930
|
+
process.on("SIGTERM", shutdown);
|
|
931
|
+
|
|
932
|
+
// Run initial build and sync
|
|
933
|
+
await runBuildAndSync();
|
|
934
|
+
|
|
935
|
+
// Keep process alive
|
|
936
|
+
await new Promise(() => {}); // Never resolves, keeps watching
|
|
937
|
+
}
|
|
938
|
+
|
|
939
|
+
/**
|
|
940
|
+
* Run a shell script and wait for completion
|
|
941
|
+
*/
|
|
942
|
+
async function runScript(
|
|
943
|
+
script: string,
|
|
944
|
+
cwd: string,
|
|
945
|
+
verbose: boolean
|
|
946
|
+
): Promise<{ success: boolean; output?: string }> {
|
|
947
|
+
return new Promise((resolve) => {
|
|
948
|
+
const [command, ...args] = script.split(" ");
|
|
949
|
+
const child = spawn(command, args, {
|
|
950
|
+
cwd,
|
|
951
|
+
stdio: verbose ? "inherit" : "pipe", // Show output directly if verbose, otherwise capture
|
|
952
|
+
shell: true,
|
|
953
|
+
});
|
|
954
|
+
|
|
955
|
+
let output = "";
|
|
956
|
+
|
|
957
|
+
// Capture output if not verbose (so we can show it on error)
|
|
958
|
+
if (!verbose) {
|
|
959
|
+
child.stdout?.on("data", (data) => {
|
|
960
|
+
output += data.toString();
|
|
961
|
+
});
|
|
962
|
+
child.stderr?.on("data", (data) => {
|
|
963
|
+
output += data.toString();
|
|
964
|
+
});
|
|
965
|
+
}
|
|
966
|
+
|
|
967
|
+
child.on("close", (code) => {
|
|
968
|
+
resolve({
|
|
969
|
+
success: code === 0,
|
|
970
|
+
output: !verbose ? output : undefined,
|
|
971
|
+
});
|
|
972
|
+
});
|
|
973
|
+
|
|
974
|
+
child.on("error", (error) => {
|
|
975
|
+
out.error(`Failed to run script: ${error.message}`);
|
|
976
|
+
resolve({
|
|
977
|
+
success: false,
|
|
978
|
+
output: !verbose ? output : undefined,
|
|
979
|
+
});
|
|
980
|
+
});
|
|
981
|
+
});
|
|
982
|
+
}
|
|
983
|
+
|
|
984
|
+
// TODO: Add push and pull commands later
|
|
985
|
+
|
|
986
|
+
function plural(word: string, count: number): string {
|
|
987
|
+
return count === 1 ? word : `${word}s`;
|
|
988
|
+
}
|