pushwork 1.0.4 → 1.0.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +87 -328
- package/dist/.pushwork/automerge/3P/Dm3ekE2pmjGnWvDaG3vSR7ww98/snapshot/aa2349c94955ea561f698720142f9d884a6872d9f82dc332d578c216beb0df0e +0 -0
- package/dist/.pushwork/automerge/st/orage-adapter-id +1 -0
- package/dist/.pushwork/config.json +15 -0
- package/dist/.pushwork/snapshot.json +7 -0
- package/dist/cli.js +231 -170
- package/dist/cli.js.map +1 -1
- package/dist/commands.d.ts +51 -0
- package/dist/commands.d.ts.map +1 -0
- package/dist/commands.js +799 -0
- package/dist/commands.js.map +1 -0
- package/dist/core/change-detection.d.ts +6 -19
- package/dist/core/change-detection.d.ts.map +1 -1
- package/dist/core/change-detection.js +101 -80
- package/dist/core/change-detection.js.map +1 -1
- package/dist/{config/index.d.ts → core/config.d.ts} +13 -3
- package/dist/core/config.d.ts.map +1 -0
- package/dist/{config/index.js → core/config.js} +55 -73
- package/dist/core/config.js.map +1 -0
- package/dist/core/index.d.ts +1 -0
- package/dist/core/index.d.ts.map +1 -1
- package/dist/core/index.js +1 -1
- package/dist/core/index.js.map +1 -1
- package/dist/core/move-detection.d.ts +12 -50
- package/dist/core/move-detection.d.ts.map +1 -1
- package/dist/core/move-detection.js +58 -139
- package/dist/core/move-detection.js.map +1 -1
- package/dist/core/snapshot.d.ts +0 -4
- package/dist/core/snapshot.d.ts.map +1 -1
- package/dist/core/snapshot.js +2 -11
- package/dist/core/snapshot.js.map +1 -1
- package/dist/core/sync-engine.d.ts +5 -11
- package/dist/core/sync-engine.d.ts.map +1 -1
- package/dist/core/sync-engine.js +220 -362
- package/dist/core/sync-engine.js.map +1 -1
- package/dist/index.d.ts +0 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +0 -6
- package/dist/index.js.map +1 -1
- package/dist/types/config.d.ts +43 -67
- package/dist/types/config.d.ts.map +1 -1
- package/dist/types/config.js +6 -0
- package/dist/types/config.js.map +1 -1
- package/dist/types/documents.d.ts +15 -3
- package/dist/types/documents.d.ts.map +1 -1
- package/dist/types/documents.js.map +1 -1
- package/dist/types/index.d.ts.map +1 -1
- package/dist/types/index.js +0 -3
- package/dist/types/index.js.map +1 -1
- package/dist/types/snapshot.d.ts +3 -21
- package/dist/types/snapshot.d.ts.map +1 -1
- package/dist/types/snapshot.js +0 -14
- package/dist/types/snapshot.js.map +1 -1
- package/dist/utils/content.d.ts.map +1 -1
- package/dist/utils/content.js +2 -6
- package/dist/utils/content.js.map +1 -1
- package/dist/utils/directory.d.ts +10 -0
- package/dist/utils/directory.d.ts.map +1 -0
- package/dist/utils/directory.js +37 -0
- package/dist/utils/directory.js.map +1 -0
- package/dist/utils/fs.d.ts +15 -2
- package/dist/utils/fs.d.ts.map +1 -1
- package/dist/utils/fs.js +63 -53
- package/dist/utils/fs.js.map +1 -1
- package/dist/utils/index.d.ts +1 -1
- package/dist/utils/index.d.ts.map +1 -1
- package/dist/utils/index.js +1 -4
- package/dist/utils/index.js.map +1 -1
- package/dist/utils/mime-types.d.ts.map +1 -1
- package/dist/utils/mime-types.js +11 -4
- package/dist/utils/mime-types.js.map +1 -1
- package/dist/utils/network-sync.d.ts +0 -6
- package/dist/utils/network-sync.d.ts.map +1 -1
- package/dist/utils/network-sync.js +55 -99
- package/dist/utils/network-sync.js.map +1 -1
- package/dist/utils/output.d.ts +129 -0
- package/dist/utils/output.d.ts.map +1 -0
- package/dist/utils/output.js +375 -0
- package/dist/utils/output.js.map +1 -0
- package/dist/utils/repo-factory.d.ts +2 -6
- package/dist/utils/repo-factory.d.ts.map +1 -1
- package/dist/utils/repo-factory.js +8 -22
- package/dist/utils/repo-factory.js.map +1 -1
- package/dist/utils/string-similarity.d.ts +14 -0
- package/dist/utils/string-similarity.d.ts.map +1 -0
- package/dist/utils/string-similarity.js +43 -0
- package/dist/utils/string-similarity.js.map +1 -0
- package/dist/utils/trace.d.ts +19 -0
- package/dist/utils/trace.d.ts.map +1 -0
- package/dist/utils/trace.js +68 -0
- package/dist/utils/trace.js.map +1 -0
- package/package.json +17 -12
- package/src/cli.ts +326 -252
- package/src/commands.ts +988 -0
- package/src/core/change-detection.ts +199 -162
- package/src/{config/index.ts → core/config.ts} +65 -82
- package/src/core/index.ts +1 -1
- package/src/core/move-detection.ts +74 -180
- package/src/core/snapshot.ts +2 -12
- package/src/core/sync-engine.ts +248 -499
- package/src/index.ts +0 -10
- package/src/types/config.ts +50 -72
- package/src/types/documents.ts +16 -3
- package/src/types/index.ts +0 -5
- package/src/types/snapshot.ts +1 -23
- package/src/utils/content.ts +2 -6
- package/src/utils/directory.ts +50 -0
- package/src/utils/fs.ts +67 -56
- package/src/utils/index.ts +1 -6
- package/src/utils/mime-types.ts +12 -4
- package/src/utils/network-sync.ts +79 -137
- package/src/utils/output.ts +450 -0
- package/src/utils/repo-factory.ts +13 -31
- package/src/utils/string-similarity.ts +54 -0
- package/src/utils/trace.ts +70 -0
- package/test/integration/exclude-patterns.test.ts +6 -15
- package/test/integration/fuzzer.test.ts +308 -391
- package/test/integration/init-sync.test.ts +89 -0
- package/test/integration/sync-deletion.test.ts +2 -61
- package/test/integration/sync-flow.test.ts +4 -24
- package/test/jest.setup.ts +34 -0
- package/test/unit/deletion-behavior.test.ts +3 -14
- package/test/unit/enhanced-mime-detection.test.ts +0 -22
- package/test/unit/snapshot.test.ts +2 -29
- package/test/unit/sync-convergence.test.ts +3 -198
- package/test/unit/sync-timing.test.ts +0 -44
- package/test/unit/utils.test.ts +0 -2
- package/tsconfig.json +3 -3
- package/dist/browser/browser-sync-engine.d.ts +0 -64
- package/dist/browser/browser-sync-engine.d.ts.map +0 -1
- package/dist/browser/browser-sync-engine.js +0 -303
- package/dist/browser/browser-sync-engine.js.map +0 -1
- package/dist/browser/filesystem-adapter.d.ts +0 -84
- package/dist/browser/filesystem-adapter.d.ts.map +0 -1
- package/dist/browser/filesystem-adapter.js +0 -413
- package/dist/browser/filesystem-adapter.js.map +0 -1
- package/dist/browser/index.d.ts +0 -36
- package/dist/browser/index.d.ts.map +0 -1
- package/dist/browser/index.js +0 -90
- package/dist/browser/index.js.map +0 -1
- package/dist/browser/types.d.ts +0 -70
- package/dist/browser/types.d.ts.map +0 -1
- package/dist/browser/types.js +0 -6
- package/dist/browser/types.js.map +0 -1
- package/dist/cli/commands.d.ts +0 -77
- package/dist/cli/commands.d.ts.map +0 -1
- package/dist/cli/commands.js +0 -904
- package/dist/cli/commands.js.map +0 -1
- package/dist/cli/index.d.ts +0 -2
- package/dist/cli/index.d.ts.map +0 -1
- package/dist/cli/index.js +0 -19
- package/dist/cli/index.js.map +0 -1
- package/dist/config/index.d.ts.map +0 -1
- package/dist/config/index.js.map +0 -1
- package/dist/core/isomorphic-snapshot.d.ts +0 -58
- package/dist/core/isomorphic-snapshot.d.ts.map +0 -1
- package/dist/core/isomorphic-snapshot.js +0 -204
- package/dist/core/isomorphic-snapshot.js.map +0 -1
- package/dist/platform/browser-filesystem.d.ts +0 -26
- package/dist/platform/browser-filesystem.d.ts.map +0 -1
- package/dist/platform/browser-filesystem.js +0 -91
- package/dist/platform/browser-filesystem.js.map +0 -1
- package/dist/platform/filesystem.d.ts +0 -29
- package/dist/platform/filesystem.d.ts.map +0 -1
- package/dist/platform/filesystem.js +0 -65
- package/dist/platform/filesystem.js.map +0 -1
- package/dist/platform/node-filesystem.d.ts +0 -21
- package/dist/platform/node-filesystem.d.ts.map +0 -1
- package/dist/platform/node-filesystem.js +0 -93
- package/dist/platform/node-filesystem.js.map +0 -1
- package/dist/utils/content-similarity.d.ts +0 -53
- package/dist/utils/content-similarity.d.ts.map +0 -1
- package/dist/utils/content-similarity.js +0 -155
- package/dist/utils/content-similarity.js.map +0 -1
- package/dist/utils/fs-browser.d.ts +0 -57
- package/dist/utils/fs-browser.d.ts.map +0 -1
- package/dist/utils/fs-browser.js +0 -311
- package/dist/utils/fs-browser.js.map +0 -1
- package/dist/utils/fs-node.d.ts +0 -53
- package/dist/utils/fs-node.d.ts.map +0 -1
- package/dist/utils/fs-node.js +0 -220
- package/dist/utils/fs-node.js.map +0 -1
- package/dist/utils/isomorphic.d.ts +0 -29
- package/dist/utils/isomorphic.d.ts.map +0 -1
- package/dist/utils/isomorphic.js +0 -139
- package/dist/utils/isomorphic.js.map +0 -1
- package/dist/utils/pure.d.ts +0 -25
- package/dist/utils/pure.d.ts.map +0 -1
- package/dist/utils/pure.js +0 -112
- package/dist/utils/pure.js.map +0 -1
- package/src/cli/commands.ts +0 -1207
- package/src/cli/index.ts +0 -2
- package/src/utils/content-similarity.ts +0 -194
- package/test/README-TESTING-GAPS.md +0 -174
- package/test/unit/content-similarity.test.ts +0 -236
package/dist/commands.js
ADDED
|
@@ -0,0 +1,799 @@
|
|
|
"use strict";
// --- TypeScript-generated CommonJS interop helpers (emitted by tsc; do not edit by hand) ---
// __createBinding: re-export property `k` of module `m` onto `o` (under name `k2`),
// using a live getter when property descriptors are supported so the binding
// stays in sync with the source module.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        // Fall back to a getter when the source has no usable descriptor.
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// __setModuleDefault: expose the raw CommonJS module object as the `default` export.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// __importStar: implements `import * as ns from "..."` semantics for CommonJS
// modules — copies every own property except `default`, then attaches the
// module itself as `default`. Real ES modules are passed through unchanged.
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        // Lazily pick the best own-key enumerator once, then reuse it.
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
// __importDefault: implements `import x from "..."` semantics — wraps plain
// CommonJS modules in `{ default: mod }` so `.default` access is uniform.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
// Public command surface of the pushwork CLI; cli.js wires these to argv.
exports.init = init;
exports.sync = sync;
exports.diff = diff;
exports.status = status;
exports.log = log;
exports.checkout = checkout;
exports.clone = clone;
exports.url = url;
exports.rm = rm;
exports.commit = commit;
exports.ls = ls;
exports.config = config;
exports.watch = watch;
// Node built-ins.
const path = __importStar(require("path"));
const fs = __importStar(require("fs/promises"));
const fsSync = __importStar(require("fs"));
// jsdiff: unified-diff generation for the `diff` command.
const diffLib = __importStar(require("diff"));
const child_process_1 = require("child_process");
// Project modules: sync engine, fs helpers, config, repo construction,
// formatted CLI output, and server-sync utilities.
const core_1 = require("./core");
const utils_1 = require("./utils");
const config_1 = require("./core/config");
const repo_factory_1 = require("./utils/repo-factory");
const output_1 = require("./utils/output");
const network_sync_1 = require("./utils/network-sync");
const chalk_1 = __importDefault(require("chalk"));
/**
 * Set up the on-disk repository layout and configuration for a directory.
 * Shared by the `init` and `clone` commands.
 *
 * @param {string} resolvedPath - Absolute path of the directory to initialize.
 * @param {object} overrides - Config values that take precedence over defaults
 *   (e.g. sync server settings supplied on the command line).
 * @returns {Promise<{config: object, repo: object, syncEngine: object}>}
 */
async function initializeRepository(resolvedPath, overrides) {
    // Lay down the .pushwork directory and its automerge storage subdirectory.
    const configDir = path.join(resolvedPath, config_1.ConfigManager.CONFIG_DIR);
    await (0, utils_1.ensureDirectoryExists)(configDir);
    await (0, utils_1.ensureDirectoryExists)(path.join(configDir, "automerge"));
    // Persist configuration, applying any caller-supplied overrides.
    const manager = new config_1.ConfigManager(resolvedPath);
    const config = await manager.initializeWithOverrides(overrides);
    // Build the Automerge repo and the engine that syncs it with the filesystem.
    const repo = await (0, repo_factory_1.createRepo)(resolvedPath, config);
    const syncEngine = new core_1.SyncEngine(repo, resolvedPath, config);
    return { config, repo, syncEngine };
}
/**
 * Build the shared execution context (repo, sync engine, merged config) that
 * every command needs before doing any work. Runs entirely against local
 * state, so it succeeds with or without network connectivity.
 *
 * @param {string} [workingDir=process.cwd()] - Directory to operate in.
 * @param {boolean} [syncEnabled] - When provided, forces `config.sync_enabled`
 *   (commands that are local-only pass `false`).
 * @returns {Promise<{repo: object, syncEngine: object, config: object, workingDir: string}>}
 * @throws {Error} If the directory was never initialized with `pushwork init`.
 */
async function setupCommandContext(workingDir = process.cwd(), syncEnabled) {
    const resolvedPath = path.resolve(workingDir);
    // Refuse to run in a directory that carries no .pushwork metadata.
    const configDir = path.join(resolvedPath, config_1.ConfigManager.CONFIG_DIR);
    const initialized = await (0, utils_1.pathExists)(configDir);
    if (!initialized) {
        throw new Error('Directory not initialized for sync. Run "pushwork init" first.');
    }
    // Merge the persisted config layers, then apply the caller's override.
    let config = await new config_1.ConfigManager(resolvedPath).getMerged();
    if (syncEnabled !== undefined) {
        config = { ...config, sync_enabled: syncEnabled };
    }
    // Wire up the repo and the sync engine the command will drive.
    const repo = await (0, repo_factory_1.createRepo)(resolvedPath, config);
    const syncEngine = new core_1.SyncEngine(repo, resolvedPath, config);
    return { repo, syncEngine, config, workingDir: resolvedPath };
}
/**
 * Shut down a repository while suppressing the WebSocket errors its network
 * adapters commonly raise during teardown (they do not affect data integrity).
 *
 * NOTE(review): non-WebSocket errors thrown by `repo.shutdown()` are also
 * swallowed here — the catch block returns early only for WebSocket messages
 * and then falls through without rethrowing. Presumably intentional
 * (shutdown is best-effort), but worth confirming.
 *
 * @param {object} repo - Repository instance exposing an async `shutdown()`.
 */
async function safeRepoShutdown(repo) {
    // Handler for uncaught WebSocket errors that surface asynchronously
    // (outside the try/catch) while shutdown is in progress.
    const uncaughtErrorHandler = (err) => {
        if (err.message.includes("WebSocket")) {
            // Silently suppress WebSocket errors during shutdown
            return;
        }
        // Re-throw non-WebSocket errors
        throw err;
    };
    // Add the error handler before shutdown
    process.on("uncaughtException", uncaughtErrorHandler);
    try {
        await repo.shutdown();
    }
    catch (shutdownError) {
        // WebSocket errors during shutdown are common and non-critical.
        // Normalize to a string so non-Error throwables are handled too.
        const errorMessage = shutdownError instanceof Error
            ? shutdownError.message
            : String(shutdownError);
        // Ignore WebSocket-related errors entirely
        if (errorMessage.includes("WebSocket")) {
            // Silently ignore WebSocket shutdown errors
            return;
        }
    }
    finally {
        // Always detach the handler so later uncaught exceptions behave normally.
        process.off("uncaughtException", uncaughtErrorHandler);
    }
}
/**
 * Initialize sync in a directory: create .pushwork metadata, create the root
 * folder document, optionally push it to the sync server, then run an initial
 * sync to capture any files already present.
 *
 * @param {string} targetPath - Directory to initialize (created if missing).
 * @param {{syncServer?: string, syncServerStorageId?: string}} [options={}]
 *   CLI overrides for the sync-server configuration.
 */
async function init(targetPath, options = {}) {
    const resolvedPath = path.resolve(targetPath);
    output_1.out.task(`Initializing`);
    await (0, utils_1.ensureDirectoryExists)(resolvedPath);
    // Check if already initialized
    const syncToolDir = path.join(resolvedPath, config_1.ConfigManager.CONFIG_DIR);
    if (await (0, utils_1.pathExists)(syncToolDir)) {
        output_1.out.error("Directory already initialized for sync");
        output_1.out.exit(1);
    }
    // Initialize repository with optional CLI overrides
    output_1.out.update("Setting up repository");
    const { repo, syncEngine, config } = await initializeRepository(resolvedPath, {
        sync_server: options.syncServer,
        sync_server_storage_id: options.syncServerStorageId,
    });
    // Create new root directory document (a patchwork "folder" with no entries yet).
    output_1.out.update("Creating root directory");
    const rootDoc = {
        "@patchwork": { type: "folder" },
        docs: [],
    };
    const rootHandle = repo.create(rootDoc);
    // Set root directory URL in snapshot
    await syncEngine.setRootDirectoryUrl(rootHandle.url);
    // Wait for root document to sync to server if sync is enabled.
    // This ensures the document is uploaded before we exit;
    // waitForSync() verifies the server has the document by comparing local and remote heads.
    if (config.sync_enabled && config.sync_server_storage_id) {
        try {
            output_1.out.update("Syncing to server");
            await (0, network_sync_1.waitForSync)([rootHandle], config.sync_server_storage_id);
        }
        catch (error) {
            output_1.out.taskLine(`Network sync failed: ${error}`, true);
            // Continue anyway - the document is created locally and will sync later
        }
    }
    // Run initial sync to capture existing files
    output_1.out.update("Running initial sync");
    const result = await syncEngine.sync();
    output_1.out.update("Writing to disk");
    await safeRepoShutdown(repo);
    output_1.out.done("Initialized");
    output_1.out.successBlock("INITIALIZED", rootHandle.url);
    if (result.filesChanged > 0) {
        // NOTE(review): `plural` is not defined in the visible portion of this
        // file — presumably declared further down; verify it exists.
        output_1.out.info(`Synced ${result.filesChanged} ${plural("file", result.filesChanged)}`);
    }
    // Hard exit: presumably lingering network/storage handles would otherwise
    // keep the event loop alive — TODO confirm.
    process.exit();
}
/**
 * Run bidirectional sync between the local directory and the remote repo.
 *
 * With `--dry-run`, prints a preview of pending changes and moves without
 * touching anything; otherwise applies the sync and reports the outcome
 * (success, warnings, or a partial result with per-file errors).
 *
 * Fixes vs previous version: `options` is now defaulted to `{}` (previously
 * `sync(".")` threw a TypeError on `options.dryRun`; siblings `init`/`status`
 * already default their options), and the empty
 * `if (filesChanged === 0 && directoriesChanged === 0) {}` branch was
 * inverted away.
 *
 * @param {string} [targetPath="."] - Directory to sync.
 * @param {{dryRun?: boolean}} [options={}] - Command flags.
 */
async function sync(targetPath = ".", options = {}) {
    output_1.out.task("Syncing");
    const { repo, syncEngine } = await setupCommandContext(targetPath);
    if (options.dryRun) {
        // Preview only: compute what would change and print it.
        output_1.out.update("Analyzing changes");
        const preview = await syncEngine.previewChanges();
        if (preview.changes.length === 0 && preview.moves.length === 0) {
            output_1.out.done("Already synced");
            return;
        }
        output_1.out.done();
        output_1.out.infoBlock("CHANGES");
        output_1.out.obj({
            Changes: preview.changes.length.toString(),
            Moves: preview.moves.length > 0 ? preview.moves.length.toString() : undefined,
        });
        output_1.out.log("");
        output_1.out.log("Files:");
        // Show at most 10 changed paths, tagged by where the change originated.
        for (const change of preview.changes.slice(0, 10)) {
            const prefix = change.changeType === "local_only"
                ? "[local] "
                : change.changeType === "remote_only"
                    ? "[remote] "
                    : "[conflict]";
            output_1.out.log(` ${prefix} ${change.path}`);
        }
        if (preview.changes.length > 10) {
            output_1.out.log(` ... and ${preview.changes.length - 10} more`);
        }
        if (preview.moves.length > 0) {
            output_1.out.log("");
            output_1.out.log("Moves:");
            for (const move of preview.moves.slice(0, 5)) {
                output_1.out.log(` ${move.fromPath} → ${move.toPath}`);
            }
            if (preview.moves.length > 5) {
                output_1.out.log(` ... and ${preview.moves.length - 5} more`);
            }
        }
        output_1.out.log("");
        output_1.out.log("Run without --dry-run to apply these changes");
    }
    else {
        const result = await syncEngine.sync();
        output_1.out.taskLine("Writing to disk");
        await safeRepoShutdown(repo);
        if (result.success) {
            output_1.out.done("Synced");
            // Only announce a summary when something actually changed.
            if (result.filesChanged !== 0 || result.directoriesChanged !== 0) {
                output_1.out.successBlock("SYNCED", `${result.filesChanged} ${plural("file", result.filesChanged)}`);
            }
            if (result.warnings.length > 0) {
                output_1.out.log("");
                output_1.out.warnBlock("WARNINGS", `${result.warnings.length} warnings`);
                for (const warning of result.warnings.slice(0, 5)) {
                    output_1.out.log(` ${warning}`);
                }
                if (result.warnings.length > 5) {
                    output_1.out.log(` ... and ${result.warnings.length - 5} more`);
                }
            }
        }
        else {
            // Partial failure: report counts, then up to 5 per-file errors.
            output_1.out.done("partial", false);
            output_1.out.warnBlock("PARTIAL", `${result.filesChanged} updated, ${result.errors.length} errors`);
            output_1.out.obj({
                Files: result.filesChanged,
                Errors: result.errors.length,
            });
            result.errors
                .slice(0, 5)
                .forEach((error) => output_1.out.error(`${error.path}: ${error.error.message}`));
            if (result.errors.length > 5) {
                output_1.out.warn(`... and ${result.errors.length - 5} more errors`);
            }
        }
    }
    // Hard exit: presumably lingering network/storage handles would otherwise
    // keep the event loop alive — TODO confirm.
    process.exit();
}
/**
 * Show differences between local and remote as colorized unified diffs.
 *
 * @param {string} [targetPath="."] - Directory to diff.
 * @param {{nameOnly?: boolean}} options - With `nameOnly`, print only the
 *   changed paths (NOTE(review): options has no default here, unlike
 *   `status`/`init` — callers must pass an object).
 */
async function diff(targetPath = ".", options) {
    output_1.out.task("Analyzing changes");
    // sync disabled (false): diff is a read-only, local-only operation.
    const { repo, syncEngine } = await setupCommandContext(targetPath, false);
    const preview = await syncEngine.previewChanges();
    output_1.out.done();
    if (options.nameOnly) {
        for (const change of preview.changes) {
            output_1.out.log(change.path);
        }
        return;
    }
    if (preview.changes.length === 0) {
        output_1.out.success("No changes detected");
        await safeRepoShutdown(repo);
        output_1.out.exit();
        return;
    }
    output_1.out.warn(`${preview.changes.length} changes detected`);
    for (const change of preview.changes) {
        // Tag each entry by the side that changed.
        const prefix = change.changeType === "local_only"
            ? "[local] "
            : change.changeType === "remote_only"
                ? "[remote] "
                : "[conflict]";
        try {
            // Get old content (from snapshot/remote)
            const oldContent = change.remoteContent || "";
            // Get new content (current local)
            const newContent = change.localContent || "";
            // Convert binary content to string representation if needed
            const oldText = typeof oldContent === "string"
                ? oldContent
                : `<binary content: ${oldContent.length} bytes>`;
            const newText = typeof newContent === "string"
                ? newContent
                : `<binary content: ${newContent.length} bytes>`;
            // Generate unified diff
            const diffResult = diffLib.createPatch(change.path, oldText, newText, "previous", "current");
            // Skip the header lines and process the diff
            const lines = diffResult.split("\n").slice(4); // Skip index, ===, ---, +++ lines
            if (lines.length === 0 || (lines.length === 1 && lines[0] === "")) {
                output_1.out.log(`${prefix}${change.path} (content identical)`, "cyan");
                continue;
            }
            // Extract first hunk header and show inline with path
            let firstHunk = "";
            let diffLines = lines;
            if (lines[0]?.startsWith("@@")) {
                firstHunk = ` ${lines[0]}`;
                diffLines = lines.slice(1);
            }
            output_1.out.log(`${prefix}${change.path}${firstHunk}`, "cyan");
            for (const line of diffLines) {
                if (line.startsWith("@@")) {
                    // Additional hunk headers
                    output_1.out.log(line, "dim");
                }
                else if (line.startsWith("+")) {
                    // Added line
                    output_1.out.log(line, "green");
                }
                else if (line.startsWith("-")) {
                    // Removed line
                    output_1.out.log(line, "red");
                }
                else if (line.startsWith(" ") || line === "") {
                    // Context line or empty
                    output_1.out.log(line, "dim");
                }
            }
        }
        catch (error) {
            // A failed diff for one file should not abort the others.
            output_1.out.log(`${prefix}${change.path} (diff error: ${error})`, "cyan");
        }
    }
    await safeRepoShutdown(repo);
}
/**
 * Show sync status: root URL, tracked-file count, sync server, pending
 * changes, and — with `--verbose` — document heads and the full tracked-file
 * table.
 *
 * NOTE(review): in verbose mode the root document is loaded twice (once for
 * the summary fields, once for the HEADS/TRACKED FILES sections); the second
 * load has no try/catch — presumably acceptable, but worth confirming.
 *
 * @param {string} [targetPath="."] - Directory to report on.
 * @param {{verbose?: boolean}} [options={}] - Command flags.
 */
async function status(targetPath = ".", options = {}) {
    // sync disabled (false): status is a read-only, local-only operation.
    const { repo, syncEngine, config } = await setupCommandContext(targetPath, false);
    const syncStatus = await syncEngine.getStatus();
    output_1.out.infoBlock("STATUS");
    // `out.obj` presumably skips undefined values — fields below are set to
    // undefined when not applicable.
    const statusInfo = {};
    const fileCount = syncStatus.snapshot?.files.size || 0;
    statusInfo["URL"] = syncStatus.snapshot?.rootDirectoryUrl;
    statusInfo["Files"] = syncStatus.snapshot
        ? `${fileCount} tracked`
        : undefined;
    statusInfo["Sync"] = config?.sync_server;
    // Add more detailed info in verbose mode
    if (options.verbose && syncStatus.snapshot?.rootDirectoryUrl) {
        try {
            const rootHandle = await repo.find(syncStatus.snapshot.rootDirectoryUrl);
            const rootDoc = await rootHandle.doc();
            if (rootDoc) {
                statusInfo["Entries"] = rootDoc.docs.length;
                statusInfo["Directories"] = syncStatus.snapshot.directories.size;
                if (rootDoc.lastSyncAt) {
                    const lastSyncDate = new Date(rootDoc.lastSyncAt);
                    statusInfo["Last sync"] = lastSyncDate.toISOString();
                }
            }
        }
        catch (error) {
            output_1.out.warn(`Warning: Could not load detailed info: ${error}`);
        }
    }
    statusInfo["Changes"] = syncStatus.hasChanges
        ? `${syncStatus.changeCount} pending`
        : undefined;
    statusInfo["Status"] = !syncStatus.hasChanges ? "up to date" : undefined;
    output_1.out.obj(statusInfo);
    // Show verbose details if requested
    if (options.verbose && syncStatus.snapshot?.rootDirectoryUrl) {
        const rootHandle = await repo.find(syncStatus.snapshot.rootDirectoryUrl);
        const rootDoc = await rootHandle.doc();
        if (rootDoc) {
            output_1.out.infoBlock("HEADS");
            output_1.out.arr(rootHandle.heads());
            if (syncStatus.snapshot && syncStatus.snapshot.files.size > 0) {
                output_1.out.infoBlock("TRACKED FILES");
                // Map of tracked path -> document URL.
                const filesObj = {};
                syncStatus.snapshot.files.forEach((entry, filePath) => {
                    filesObj[filePath] = entry.url;
                });
                output_1.out.obj(filesObj);
            }
        }
    }
    if (syncStatus.hasChanges && !options.verbose) {
        output_1.out.info("Run 'pushwork diff' to see changes");
    }
    await safeRepoShutdown(repo);
}
/**
 * Show sync history. Currently a stub: reports the snapshot file's
 * modification time as the last-sync timestamp.
 *
 * @param {string} [targetPath="."] - Directory whose history to show.
 * @param {object} _options - Unused; reserved for future history filters.
 */
async function log(targetPath = ".", _options) {
    const { repo: logRepo, workingDir } = await setupCommandContext(targetPath, false);
    // TODO: Implement history tracking
    const snapshotFile = path.join(workingDir, config_1.ConfigManager.CONFIG_DIR, "snapshot.json");
    const hasSnapshot = await (0, utils_1.pathExists)(snapshotFile);
    if (hasSnapshot) {
        // Approximate "last sync" via the snapshot file's mtime.
        const { mtime } = await fs.stat(snapshotFile);
        output_1.out.infoBlock("HISTORY", "Sync history (stub)");
        output_1.out.obj({ "Last sync": mtime.toISOString() });
    }
    else {
        output_1.out.info("No sync history found");
    }
    await safeRepoShutdown(logRepo);
}
/**
 * Checkout/restore the working tree from a previous sync. Not implemented
 * yet; validates the directory and reports what would have been restored.
 *
 * @param {string} syncId - Identifier of the sync to restore.
 * @param {string} [targetPath="."] - Directory to restore into.
 * @param {object} _options - Unused; reserved for future checkout flags.
 */
async function checkout(syncId, targetPath = ".", _options) {
    const { workingDir: dir } = await setupCommandContext(targetPath);
    // TODO: Implement checkout functionality
    output_1.out.warnBlock("NOT IMPLEMENTED", "Checkout not yet implemented");
    output_1.out.obj({ "Sync ID": syncId, Path: dir });
}
/**
 * Clone an existing synced directory from an AutomergeUrl: initialize local
 * metadata, point the sync engine at the remote root document, and download
 * its files into the target directory.
 *
 * @param {string} rootUrl - AutomergeUrl of the remote root folder document.
 * @param {string} targetPath - Destination directory (created if missing).
 * @param {{force?: boolean, syncServer?: string, syncServerStorageId?: string}} options
 *   `force` overwrites a non-empty or already-initialized directory.
 */
async function clone(rootUrl, targetPath, options) {
    const resolvedPath = path.resolve(targetPath);
    output_1.out.task(`Cloning ${rootUrl}`);
    // Check if directory exists and handle --force
    if (await (0, utils_1.pathExists)(resolvedPath)) {
        const files = await fs.readdir(resolvedPath);
        if (files.length > 0 && !options.force) {
            output_1.out.error("Target directory is not empty. Use --force to overwrite");
            output_1.out.exit(1);
        }
    }
    else {
        await (0, utils_1.ensureDirectoryExists)(resolvedPath);
    }
    // Check if already initialized
    const syncToolDir = path.join(resolvedPath, config_1.ConfigManager.CONFIG_DIR);
    if (await (0, utils_1.pathExists)(syncToolDir)) {
        if (!options.force) {
            output_1.out.error("Directory already initialized. Use --force to overwrite");
            output_1.out.exit(1);
        }
        // --force: wipe the stale metadata before re-initializing.
        await fs.rm(syncToolDir, { recursive: true, force: true });
    }
    // Initialize repository with optional CLI overrides
    output_1.out.update("Setting up repository");
    const { config, repo, syncEngine } = await initializeRepository(resolvedPath, {
        sync_server: options.syncServer,
        sync_server_storage_id: options.syncServerStorageId,
    });
    // Connect to existing root directory and download files
    output_1.out.update("Downloading files");
    await syncEngine.setRootDirectoryUrl(rootUrl);
    const result = await syncEngine.sync();
    output_1.out.update("Writing to disk");
    await safeRepoShutdown(repo);
    output_1.out.done();
    output_1.out.obj({
        Path: resolvedPath,
        Files: `${result.filesChanged} downloaded`,
        Sync: config.sync_server,
    });
    output_1.out.successBlock("CLONED", rootUrl);
    // Hard exit: presumably lingering network/storage handles would otherwise
    // keep the event loop alive — TODO confirm.
    process.exit();
}
/**
 * Print the root Automerge URL of the current pushwork repository.
 * Emits the bare URL (no decoration) so it can be consumed by scripts.
 *
 * @param {string} [targetPath="."] - Directory whose root URL to print.
 */
async function url(targetPath = ".") {
    const dir = path.resolve(targetPath);
    const configDir = path.join(dir, config_1.ConfigManager.CONFIG_DIR);
    const initialized = await (0, utils_1.pathExists)(configDir);
    if (!initialized) {
        output_1.out.error("Directory not initialized for sync");
        output_1.out.exit(1);
    }
    const snapshotFile = path.join(configDir, "snapshot.json");
    const hasSnapshot = await (0, utils_1.pathExists)(snapshotFile);
    if (!hasSnapshot) {
        output_1.out.error("No snapshot found");
        output_1.out.exit(1);
    }
    const snapshot = JSON.parse(await fs.readFile(snapshotFile, "utf-8"));
    if (!snapshot.rootDirectoryUrl) {
        output_1.out.error("No root URL found in snapshot");
        output_1.out.exit(1);
        return;
    }
    // Output just the URL for easy use in scripts
    output_1.out.log(snapshot.rootDirectoryUrl);
}
522
|
+
/**
 * Remove local pushwork data and log URL for recovery
 *
 * Reads the root directory URL out of the snapshot (when one exists) before
 * deleting the `.pushwork` metadata directory, then prints it in the REMOVED
 * banner so the user can re-clone later. Exits non-zero when the directory
 * was never initialized or the snapshot cannot be read/parsed.
 * @param {string} [targetPath="."] - Directory containing the repository.
 */
async function rm(targetPath = ".") {
    const resolvedPath = path.resolve(targetPath);
    const syncToolDir = path.join(resolvedPath, config_1.ConfigManager.CONFIG_DIR);
    if (!(await (0, utils_1.pathExists)(syncToolDir))) {
        output_1.out.error("Directory not initialized for sync");
        output_1.out.exit(1);
    }
    // Read the URL before deletion for recovery
    let recoveryUrl = "";
    const snapshotPath = path.join(syncToolDir, "snapshot.json");
    if (await (0, utils_1.pathExists)(snapshotPath)) {
        try {
            const snapshotData = await fs.readFile(snapshotPath, "utf-8");
            const snapshot = JSON.parse(snapshotData);
            // Fall back to "" (not null) so recoveryUrl stays a string,
            // consistent with its initializer above and with what
            // warnBlock receives below.
            recoveryUrl = snapshot.rootDirectoryUrl || "";
        }
        catch (error) {
            // Abort rather than delete data whose recovery URL we could not read.
            output_1.out.error(`Remove failed: ${error}`);
            output_1.out.exit(1);
            return;
        }
    }
    output_1.out.task("Removing local pushwork data");
    await fs.rm(syncToolDir, { recursive: true, force: true });
    output_1.out.done();
    output_1.out.warnBlock("REMOVED", recoveryUrl);
    process.exit();
}
|
|
553
|
+
/**
 * Commit local changes without syncing to a remote.
 *
 * Builds the command context, runs `syncEngine.commitLocal()`, and reports
 * the number of files/directories changed. On errors, prints each error and
 * exits non-zero; warnings are printed but do not fail the command.
 * @param {string} targetPath - Directory containing the repository.
 * @param {object} [_options={}] - Reserved for future flags (unused).
 */
async function commit(targetPath, _options = {}) {
    output_1.out.task("Committing local changes");
    const { repo, syncEngine } = await setupCommandContext(targetPath, false);
    const result = await syncEngine.commitLocal();
    await safeRepoShutdown(repo);
    output_1.out.done();
    if (result.errors.length > 0) {
        output_1.out.errorBlock("ERROR", `${result.errors.length} errors`);
        result.errors.forEach((error) => output_1.out.error(error));
        output_1.out.exit(1);
        // Guard against fall-through (matches ls/rm): never print the
        // COMMITTED banner after reporting errors.
        return;
    }
    output_1.out.successBlock("COMMITTED", `${result.filesChanged} files`);
    output_1.out.obj({
        Files: result.filesChanged,
        Directories: result.directoriesChanged,
    });
    if (result.warnings.length > 0) {
        result.warnings.forEach((warning) => output_1.out.warn(warning));
    }
    process.exit();
}
|
|
574
|
+
/**
 * List tracked files
 *
 * Prints the files recorded in the current snapshot, sorted by path. With
 * `options.verbose`, each line also shows the file's document URL
 * ("unknown" when absent). Exits non-zero when no snapshot exists.
 * @param {string} [targetPath="."] - Directory containing the repository.
 * @param {{verbose?: boolean}} [options={}] - Output options.
 */
async function ls(targetPath = ".", options = {}) {
    const { repo, syncEngine } = await setupCommandContext(targetPath, false);
    const status = await syncEngine.getStatus();
    if (!status.snapshot) {
        output_1.out.error("No snapshot found");
        await safeRepoShutdown(repo);
        output_1.out.exit(1);
        return;
    }
    const entries = [...status.snapshot.files.entries()].sort((a, b) => a[0].localeCompare(b[0]));
    if (entries.length === 0) {
        output_1.out.info("No tracked files");
        await safeRepoShutdown(repo);
        return;
    }
    for (const [trackedPath, entry] of entries) {
        if (options.verbose) {
            // Long format with URLs
            output_1.out.log(`${trackedPath} -> ${entry?.url || "unknown"}`);
        }
        else {
            // Simple list
            output_1.out.log(trackedPath);
        }
    }
    await safeRepoShutdown(repo);
}
|
|
607
|
+
/**
 * View or edit configuration
 *
 * With `--list`, dumps the full merged configuration as JSON. With
 * `--get a.b.c`, resolves a dotted key path and prints the value (exits
 * non-zero when the key is missing). Otherwise prints a short summary.
 * @param {string} [targetPath="."] - Directory containing the repository.
 * @param {{list?: boolean, get?: string}} [options={}] - Mode selection.
 */
async function config(targetPath = ".", options = {}) {
    const baseDir = path.resolve(targetPath);
    const metaDir = path.join(baseDir, config_1.ConfigManager.CONFIG_DIR);
    if (!(await (0, utils_1.pathExists)(metaDir))) {
        output_1.out.error("Directory not initialized for sync");
        output_1.out.exit(1);
    }
    const merged = await new config_1.ConfigManager(baseDir).getMerged();
    if (options.list) {
        // List all configuration
        output_1.out.infoBlock("CONFIGURATION", "Full configuration");
        output_1.out.log(JSON.stringify(merged, null, 2));
        return;
    }
    if (options.get) {
        // Walk the dotted key path, tolerating missing intermediates.
        const value = options.get
            .split(".")
            .reduce((acc, key) => acc?.[key], merged);
        if (value === undefined) {
            output_1.out.error(`Config key not found: ${options.get}`);
            output_1.out.exit(1);
            return;
        }
        output_1.out.log(typeof value === "object" ? JSON.stringify(value, null, 2) : value);
        return;
    }
    // Show basic config info
    output_1.out.infoBlock("CONFIGURATION");
    output_1.out.obj({
        "Sync server": merged.sync_server || "default",
        "Sync enabled": merged.sync_enabled ? "yes" : "no",
        Exclusions: merged.exclude_patterns?.length,
    });
    output_1.out.log("");
    output_1.out.log("Use --list to see full configuration");
}
|
|
651
|
+
/**
 * Watch a directory and sync after build script completes
 *
 * Runs `options.script` (default "pnpm build") every time a file under
 * `options.watchDir` (default "src") changes, then runs a sync and reports
 * the result. Bursts of fs events are coalesced via the
 * isProcessing/pendingChange flags: events arriving mid-run set
 * pendingChange and trigger exactly one follow-up run. Installs
 * SIGINT/SIGTERM handlers for graceful shutdown and never returns
 * (the final promise intentionally never resolves).
 * @param {string} [targetPath="."] - Directory containing the repository.
 * @param {{script?: string, watchDir?: string, verbose?: boolean}} [options={}]
 */
async function watch(targetPath = ".", options = {}) {
    const script = options.script || "pnpm build";
    const watchDir = options.watchDir || "src"; // Default to watching 'src' directory
    const verbose = options.verbose || false;
    const { repo, syncEngine, workingDir } = await setupCommandContext(targetPath);
    const absoluteWatchDir = path.resolve(workingDir, watchDir);
    // Check if watch directory exists
    if (!(await (0, utils_1.pathExists)(absoluteWatchDir))) {
        output_1.out.error(`Watch directory does not exist: ${watchDir}`);
        await safeRepoShutdown(repo);
        output_1.out.exit(1);
        return;
    }
    output_1.out.spicyBlock("WATCHING", `${chalk_1.default.underline((0, utils_1.formatRelativePath)(watchDir))} for changes...`);
    output_1.out.info(`Build script: ${script}`);
    output_1.out.info(`Working directory: ${workingDir}`);
    // Coalescing state: only one build/sync runs at a time; events that
    // arrive during a run set pendingChange so exactly one re-run follows.
    let isProcessing = false;
    let pendingChange = false;
    // Function to run build and sync
    const runBuildAndSync = async () => {
        if (isProcessing) {
            pendingChange = true;
            return;
        }
        isProcessing = true;
        pendingChange = false;
        try {
            output_1.out.spicy(`[${new Date().toLocaleTimeString()}] Changes detected...`);
            // Run build script
            const buildResult = await runScript(script, workingDir, verbose);
            if (!buildResult.success) {
                output_1.out.warn("Build script failed");
                // Show captured output (only present in non-verbose mode) so
                // the failure cause is visible.
                if (buildResult.output) {
                    output_1.out.log("");
                    output_1.out.log(buildResult.output);
                }
                // Skip the sync on build failure, but still honor any change
                // that arrived while building.
                isProcessing = false;
                if (pendingChange) {
                    setImmediate(() => runBuildAndSync());
                }
                return;
            }
            output_1.out.info("Build completed...");
            // Run sync
            output_1.out.task("Syncing");
            const result = await syncEngine.sync();
            if (result.success) {
                if (result.filesChanged === 0 && result.directoriesChanged === 0) {
                    output_1.out.done("Already synced");
                }
                else {
                    output_1.out.done(`Synced ${result.filesChanged} ${plural("file", result.filesChanged)}`);
                }
            }
            else {
                // Partial failure: show at most 3 errors, then a count of the rest.
                output_1.out.warn(`⚠ Partial sync: ${result.filesChanged} updated, ${result.errors.length} errors`);
                result.errors
                    .slice(0, 3)
                    .forEach((error) => output_1.out.error(` ${error.path}: ${error.error.message}`));
                if (result.errors.length > 3) {
                    output_1.out.warn(` ... and ${result.errors.length - 3} more errors`);
                }
            }
            if (result.warnings.length > 0) {
                // Same truncation policy as errors: at most 3 shown.
                result.warnings
                    .slice(0, 3)
                    .forEach((warning) => output_1.out.warn(` ${warning}`));
                if (result.warnings.length > 3) {
                    output_1.out.warn(` ... and ${result.warnings.length - 3} more warnings`);
                }
            }
        }
        catch (error) {
            output_1.out.error(`Error during build/sync: ${error}`);
        }
        finally {
            isProcessing = false;
            // If changes occurred while we were processing, run again
            if (pendingChange) {
                setImmediate(() => runBuildAndSync());
            }
        }
    };
    // Set up file watcher - watches everything in the specified directory
    // NOTE(review): fs.watch { recursive: true } support varies by
    // platform/Node version — confirm for the deployment targets.
    const watcher = fsSync.watch(absoluteWatchDir, { recursive: true }, (_eventType, filename) => {
        if (filename) {
            // Fire-and-forget: coalescing above guarantees a single active run.
            runBuildAndSync();
        }
    });
    // Handle graceful shutdown
    const shutdown = async () => {
        output_1.out.log("");
        output_1.out.info("Shutting down...");
        watcher.close();
        await safeRepoShutdown(repo);
        output_1.out.rainbow("Goodbye!");
        process.exit(0);
    };
    process.on("SIGINT", shutdown);
    process.on("SIGTERM", shutdown);
    // Run initial build and sync
    await runBuildAndSync();
    // Keep process alive
    await new Promise(() => { }); // Never resolves, keeps watching
}
|
|
759
|
+
/**
 * Run a shell script and wait for completion
 *
 * Spawns `script` through the system shell in `cwd`. When `verbose`, child
 * output is inherited (streamed to the terminal); otherwise stdout/stderr
 * are captured and returned so callers can display them on failure.
 * Never rejects: spawn errors resolve with `success: false`.
 * @param {string} script - Shell command line to execute.
 * @param {string} cwd - Working directory for the child process.
 * @param {boolean} verbose - Stream output instead of capturing it.
 * @returns {Promise<{success: boolean, output?: string}>} Exit status and
 *   captured output (only when not verbose).
 */
async function runScript(script, cwd, verbose) {
    return new Promise((resolve) => {
        // With `shell: true` the shell parses the command line itself, so pass
        // the script verbatim. (Pre-splitting on spaces would break quoted
        // arguments such as `npm run "build all"`.)
        const child = (0, child_process_1.spawn)(script, [], {
            cwd,
            stdio: verbose ? "inherit" : "pipe", // Show output directly if verbose, otherwise capture
            shell: true,
        });
        let output = "";
        // Capture output if not verbose (so we can show it on error)
        if (!verbose) {
            child.stdout?.on("data", (data) => {
                output += data.toString();
            });
            child.stderr?.on("data", (data) => {
                output += data.toString();
            });
        }
        child.on("close", (code) => {
            resolve({
                success: code === 0,
                output: !verbose ? output : undefined,
            });
        });
        child.on("error", (error) => {
            // Spawn failure (e.g. shell missing): report and resolve as failed.
            // A second resolve after "close" is a harmless no-op.
            output_1.out.error(`Failed to run script: ${error.message}`);
            resolve({
                success: false,
                output: !verbose ? output : undefined,
            });
        });
    });
}
|
|
795
|
+
// TODO: Add push and pull commands later
|
|
796
|
+
/**
 * Naively pluralize an English word by appending "s" unless the count is
 * exactly 1.
 * @param {string} word - Singular form of the word.
 * @param {number} count - Number of items being described.
 * @returns {string} `word` unchanged for a count of 1, otherwise `word` + "s".
 */
function plural(word, count) {
    if (count === 1) {
        return word;
    }
    return `${word}s`;
}
|
|
799
|
+
//# sourceMappingURL=commands.js.map
|