@mod-computer/cli 0.1.0 → 0.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +72 -0
- package/dist/cli.bundle.js +24633 -13744
- package/dist/cli.bundle.js.map +4 -4
- package/dist/cli.js +23 -12
- package/dist/commands/add.js +245 -0
- package/dist/commands/auth.js +129 -21
- package/dist/commands/comment.js +568 -0
- package/dist/commands/diff.js +182 -0
- package/dist/commands/index.js +33 -3
- package/dist/commands/init.js +545 -326
- package/dist/commands/ls.js +135 -0
- package/dist/commands/members.js +687 -0
- package/dist/commands/mv.js +282 -0
- package/dist/commands/rm.js +257 -0
- package/dist/commands/status.js +273 -306
- package/dist/commands/sync.js +99 -75
- package/dist/commands/trace.js +1752 -0
- package/dist/commands/workspace.js +354 -330
- package/dist/config/features.js +8 -3
- package/dist/config/release-profiles/development.json +4 -1
- package/dist/config/release-profiles/mvp.json +4 -2
- package/dist/daemon/conflict-resolution.js +172 -0
- package/dist/daemon/content-hash.js +31 -0
- package/dist/daemon/file-sync.js +985 -0
- package/dist/daemon/index.js +203 -0
- package/dist/daemon/mime-types.js +166 -0
- package/dist/daemon/offline-queue.js +211 -0
- package/dist/daemon/path-utils.js +64 -0
- package/dist/daemon/share-policy.js +83 -0
- package/dist/daemon/wasm-errors.js +189 -0
- package/dist/daemon/worker.js +557 -0
- package/dist/daemon-worker.js +3 -2
- package/dist/errors/workspace-errors.js +48 -0
- package/dist/lib/auth-server.js +89 -26
- package/dist/lib/browser.js +1 -1
- package/dist/lib/diff.js +284 -0
- package/dist/lib/formatters.js +204 -0
- package/dist/lib/git.js +137 -0
- package/dist/lib/local-fs.js +201 -0
- package/dist/lib/prompts.js +56 -0
- package/dist/lib/storage.js +11 -1
- package/dist/lib/trace-formatters.js +314 -0
- package/dist/services/add-service.js +554 -0
- package/dist/services/add-validation.js +124 -0
- package/dist/services/mod-config.js +8 -2
- package/dist/services/modignore-service.js +2 -0
- package/dist/stores/use-workspaces-store.js +36 -14
- package/dist/types/add-types.js +99 -0
- package/dist/types/config.js +1 -1
- package/dist/types/workspace-connection.js +53 -2
- package/package.json +7 -5
- package/commands/execute.md +0 -156
- package/commands/overview.md +0 -233
- package/commands/review.md +0 -151
- package/commands/spec.md +0 -169
|
@@ -0,0 +1,985 @@
|
|
|
1
|
+
// glassware[type="implementation", id="cli-file-sync--2133c3ea", requirements="requirement-cli-files-app-2--6765b2c1,requirement-cli-files-app-3--552aece6,requirement-cli-files-app-4--c5ef157e,requirement-cli-files-app-5--89893346,requirement-cli-files-folder-4--4a430204,requirement-cli-files-folder-5--5eeb9173"]
|
|
2
|
+
// glassware[type="implementation", id="impl-file-sync-distributed--049867a6", specifications="specification-spec-explicit-find--6f6cb47d,specification-spec-workspace-first--957d888f,specification-spec-load-all-files--ce785102,specification-spec-write-to-filesystem--e5c4ecac,specification-spec-file-watch--ec0b56ec,specification-spec-file-to-doc--00985bce,specification-spec-doc-to-file--2cfa261a"]
|
|
3
|
+
// glassware[type="implementation", id="impl-file-sync-content-hash--864987f0", requirements="requirement-cli-sync-content-hash--cc8aff1f"]
|
|
4
|
+
// glassware[type="implementation", id="impl-file-sync-conflict--f8fde34d", requirements="requirement-cli-sync-conflict-lww--626f73f4,requirement-cli-sync-conflict-log--7ac92a94"]
|
|
5
|
+
// glassware[type="implementation", id="impl-file-sync-branch--a4e5b6c7", specifications="specification-spec-apply-override--5ca6c6cb,specification-spec-resolve-file--abf4f572,specification-spec-get-tree--0de14051,specification-spec-cow-inherit--35d6d54a,specification-spec-cow-automatic--2f90e325"]
|
|
6
|
+
/**
|
|
7
|
+
* File sync service - syncs local files to workspace Automerge documents.
|
|
8
|
+
* Creates hierarchical folder structure matching local directory structure.
|
|
9
|
+
* Supports bidirectional sync: local→remote and remote→local.
|
|
10
|
+
*
|
|
11
|
+
* Features:
|
|
12
|
+
* - Content hash: Skip syncing unchanged files
|
|
13
|
+
* - Conflict resolution: Last-write-wins with conflict logging
|
|
14
|
+
*/
|
|
15
|
+
import fs from 'fs';
|
|
16
|
+
import path from 'path';
|
|
17
|
+
import { createModWorkspace, detectMimeType, shouldIgnore, getTextContent, setTextContent, GitBranchService, mimeTypeToCanvasType } from '@mod/mod-core';
|
|
18
|
+
import { addWorkspaceToSharePolicy, addFilesToSharePolicy, addFileToSharePolicy, removeFileFromSharePolicy } from './share-policy.js';
|
|
19
|
+
import { computeContentHash } from './content-hash.js';
|
|
20
|
+
import { ConflictResolver } from './conflict-resolution.js';
|
|
21
|
+
// glassware[type="implementation", id="impl-file-sync-binary-size--ce349854", requirements="requirement-cli-sync-content-binary--5f017e47"]
// Binary files larger than this are skipped entirely rather than inlined
// (base64-encoded) into an Automerge document.
const MAX_INLINE_BINARY_SIZE = 5 * 1024 * 1024; // 5MB
// Timeout for pending writes - how long to ignore chokidar events after a remote write
// (prevents a remote->local write from echoing back as a local change).
const PENDING_WRITE_TIMEOUT_MS = 2000;
// Text-syncable MIME type prefixes.
// A file whose MIME type starts with any of these is synced as text content;
// anything else is treated as binary (base64).
const TEXT_MIME_PREFIXES = [
    'text/',
    'application/json',
    'application/javascript',
    'application/typescript',
    'application/xml',
    'application/yaml',
    'application/x-yaml',
];
|
|
35
|
+
/**
 * Initialize file sync context for a workspace connection.
 *
 * Performs one-time setup in a deliberate order:
 *   1. Open the workspace for the connection.
 *   2. If a git branch is given, initialize git state and acknowledge the branch
 *      (best-effort: failure only disables git integration).
 *   3. Build the folder path cache and register folders with the share policy.
 *   4. Build forward (path -> docId) and reverse (docId -> path) file caches.
 *   5. Map files not yet in the branch tree onto the current branch.
 *
 * @param repo Automerge repo instance
 * @param connection Workspace connection ({ workspaceId, path, ... })
 * @param gitBranch Current git branch name, or falsy when git is not in use
 * @param log Logger function (defaults to console.log)
 * @returns The mutable sync context consumed by the handleFile* functions
 */
export async function createFileSyncContext(repo, connection, gitBranch, log = console.log) {
    const modWorkspace = createModWorkspace(repo);
    const workspace = await modWorkspace.openWorkspace(connection.workspaceId);
    // Initialize git integration if this is a git repo
    // Use GitBranchService for initialization, then workspace.gitBranch for operations
    let gitBranchService = null;
    if (gitBranch) {
        try {
            // Initialize git state via GitBranchService (one-time setup)
            gitBranchService = new GitBranchService(repo);
            await gitBranchService.ensureGitState(connection.workspaceId, {
                seedBranch: gitBranch,
                defaultBranch: gitBranch
            });
            // glassware[type="implementation", id="impl-cli-acknowledge--c13c825c", specifications="specification-spec-acknowledge--de7ee6ef"]
            // Record branch detection via workspace.gitBranch API (RFC 0003).
            // clientId is the workspaceId stripped to safe chars and truncated to 20.
            const clientId = connection.workspaceId.replace(/[^a-zA-Z0-9_-]/g, '').slice(0, 20);
            const sourceId = `cli:${clientId}`;
            await workspace.gitBranch.acknowledge(sourceId, gitBranch);
            log(`[file-sync] Git integration enabled for branch: ${gitBranch}`);
        }
        catch (error) {
            // Git setup is best-effort: log and continue without git integration.
            log(`[file-sync] Failed to initialize git state: ${error?.message || error}`);
            gitBranchService = null;
        }
    }
    // Build folder cache from existing folders (folder path -> logical folder id)
    const folderCache = new Map();
    const existingFolders = await workspace.folder.list();
    const existingFolderDocIds = [];
    for (const folder of existingFolders) {
        if (folder.path) {
            folderCache.set(folder.path, folder.id);
        }
        // Add both logical ID and documentId to share policy
        existingFolderDocIds.push(folder.id);
        if (folder.documentId) {
            existingFolderDocIds.push(folder.documentId);
        }
    }
    // Register workspace with sharePolicy
    addWorkspaceToSharePolicy(connection.workspaceId);
    // Register existing folders with sharePolicy
    if (existingFolderDocIds.length > 0) {
        addFilesToSharePolicy(existingFolderDocIds);
        log(`[file-sync] Added ${existingFolders.length} existing folders to share policy`);
    }
    // Build file path caches - forward (path -> docId) and reverse (docId -> path)
    const filePathCache = new Map();
    const docIdToPathCache = new Map();
    const existingFiles = await workspace.file.list();
    // Register existing file IDs with sharePolicy
    const existingFileIds = existingFiles.map(f => f.id);
    if (existingFileIds.length > 0) {
        addFilesToSharePolicy(existingFileIds);
    }
    for (const fileRef of existingFiles) {
        try {
            const handle = await workspace.file.get(fileRef.id);
            if (handle) {
                const doc = handle.doc();
                // Try to get path from multiple locations:
                // 1. doc.metadata.path (CLI-created files)
                // 2. doc.path (legacy)
                // 3. fileRef.path (workspace reference)
                // 4. fileRef.name (fallback to filename only)
                const filePath = doc?.metadata?.path ||
                    doc?.path ||
                    fileRef?.path ||
                    fileRef?.name ||
                    null;
                if (filePath && typeof filePath === 'string') {
                    filePathCache.set(filePath, fileRef.id);
                    docIdToPathCache.set(fileRef.id, filePath);
                    log(`[file-sync] Cached file: ${fileRef.id} -> ${filePath}`);
                }
                else {
                    log(`[file-sync] No path found for file: ${fileRef.id} (name: ${fileRef?.name || 'unknown'})`);
                }
            }
        }
        catch (error) {
            // Skip files we can't load (best-effort cache warm-up).
        }
    }
    // 3. Map NEW files to current git branch (only files not already in branch tree)
    // Skip files that are inherited from parent branches to avoid incorrectly marking them as overrides
    if (gitBranch && filePathCache.size > 0) {
        let mappedCount = 0;
        let skippedInherited = 0;
        // Get the current branch tree to check which files already exist
        const branchTree = await workspace.gitBranch.listTree({ branch: gitBranch });
        const existingPaths = new Set([
            ...Object.keys(branchTree.overrides ?? {}),
            ...Object.keys(branchTree.inherited ?? {})
        ]);
        for (const [relativePath, docId] of filePathCache.entries()) {
            // Skip .git directory files - they shouldn't be in the branch tree
            if (relativePath.startsWith('.git/') || relativePath === '.git') {
                continue;
            }
            // Skip files that already exist in the branch tree (either as override or inherited)
            // This prevents incorrectly marking inherited files as overrides
            if (existingPaths.has(relativePath)) {
                skippedInherited++;
                continue;
            }
            try {
                await workspace.gitBranch.applyOverride(gitBranch, relativePath, docId);
                mappedCount++;
            }
            catch (error) {
                // Silently skip - file may already be mapped
            }
        }
        // NOTE(review): the final count also includes files whose applyOverride
        // threw, not only .git files — confirm whether that distinction matters.
        log(`[file-sync] Mapped ${mappedCount} new files to branch: ${gitBranch} (skipped ${skippedInherited} existing, ${filePathCache.size - mappedCount - skippedInherited} .git files)`);
    }
    // The context object shared by all sync handlers.
    return {
        repo,
        connection,
        workspace,
        gitBranch,
        gitBranchService,
        folderCache,
        filePathCache,
        docIdToPathCache,
        pendingWrites: new Map(),       // relativePath -> expiry timestamp (ms)
        subscriptionCleanups: [],
        log,
        contentHashCache: new Map(),    // relativePath -> last synced content hash
        lastSyncCache: new Map(),       // relativePath -> ISO timestamp of last sync
        conflictResolver: new ConflictResolver(log),
    };
}
|
|
172
|
+
/**
 * Check if a file is text-based (syncable as text content).
 * A MIME type is considered text when it starts with any known text prefix.
 */
function isTextFile(mimeType) {
    for (const prefix of TEXT_MIME_PREFIXES) {
        if (mimeType.startsWith(prefix)) {
            return true;
        }
    }
    return false;
}
|
|
178
|
+
// glassware[type="implementation", id="impl-file-sync-binary-skip--a894dca3", requirements="requirement-cli-sync-binary-skip-limit--9b0b0cef"]
/**
 * Decide whether a file is too large to inline as binary.
 * Text files are never skipped; binary files are skipped (and logged)
 * once they exceed MAX_INLINE_BINARY_SIZE.
 */
function shouldSkipLargeBinary(isText, size, relativePath, log) {
    const tooLarge = !isText && size > MAX_INLINE_BINARY_SIZE;
    if (!tooLarge) {
        return false;
    }
    log(`[file-sync] Skipping large binary file (${size} bytes): ${relativePath}`);
    return true;
}
|
|
185
|
+
/**
 * Normalize a file path to forward slashes (for cross-platform consistency).
 * Returns the path of `absolutePath` relative to `connectionRoot`,
 * with OS-specific separators replaced by '/'.
 */
function normalizePath(absolutePath, connectionRoot) {
    return path
        .relative(connectionRoot, absolutePath)
        .split(path.sep)
        .join('/');
}
|
|
192
|
+
/**
 * Get the parent folder path from a file path.
 * Returns null when the file lives in the root (no '/' in the path).
 */
function getParentPath(relativePath) {
    const lastSlash = relativePath.lastIndexOf('/');
    if (lastSlash === -1) {
        return null;
    }
    return relativePath.slice(0, lastSlash);
}
|
|
200
|
+
/**
 * Get directory components from a file path.
 * Splits on '/' and drops the final segment (the filename).
 */
function getDirectoryParts(relativePath) {
    return relativePath.split('/').slice(0, -1);
}
|
|
208
|
+
/**
 * Ensure folder hierarchy exists for a file path.
 * Walks each directory segment of `relativePath` from the top down, reusing
 * cached folder ids where possible and creating missing folders with the
 * correct parent linkage. Newly created folders are cached and registered
 * with the share policy (both logical id and documentId).
 * Returns the logical id of the file's containing folder, or null for root.
 */
async function ensureFolderHierarchy(ctx, relativePath) {
    const segments = getDirectoryParts(relativePath);
    if (segments.length === 0) {
        // File lives directly in the connection root.
        return null;
    }
    let accumulatedPath = '';
    let parentId = null;
    let lastFolderId = null;
    for (const segment of segments) {
        accumulatedPath = accumulatedPath ? `${accumulatedPath}/${segment}` : segment;
        // Reuse a previously seen folder when possible.
        const known = ctx.folderCache.get(accumulatedPath);
        if (known) {
            parentId = known;
            lastFolderId = known;
            continue;
        }
        // Create the folder, linked to its parent for proper nesting.
        const created = await ctx.workspace.folder.create(accumulatedPath, {
            name: segment,
            parentFolderId: parentId,
        });
        ctx.folderCache.set(accumulatedPath, created.id);
        // CRITICAL: register BOTH the logical ID and documentId with the share
        // policy — the logical ID is used for folder operations, documentId for sync.
        ctx.log(`[file-sync] Created folder: ${accumulatedPath} (id: ${created.id}, docId: ${created.documentId})`);
        addFileToSharePolicy(created.id);
        if (created.documentId) {
            addFileToSharePolicy(created.documentId);
        }
        parentId = created.id;
        lastFolderId = created.id;
    }
    return lastFolderId;
}
|
|
250
|
+
/**
 * Handle file creation - create FileDocument and add to folder hierarchy.
 *
 * Reads the file from disk, builds a text or binary FileDocument depending
 * on MIME type, waits for the document to reach the sync server, updates
 * the path caches and share policy, and maps the file onto the current git
 * branch when one is active. Already-cached paths are delegated to
 * handleFileChange.
 *
 * Fix: use the injected ctx.log consistently (the original mixed console.log
 * and ctx.log, so a custom logger missed several messages). Also deduplicates
 * the text/binary metadata literals.
 *
 * @param ctx File sync context from createFileSyncContext
 * @param absolutePath Absolute path of the newly created local file
 */
export async function handleFileCreate(ctx, absolutePath) {
    const relativePath = normalizePath(absolutePath, ctx.connection.path);
    const fileName = path.basename(absolutePath);
    // Check if file already exists in cache
    if (ctx.filePathCache.has(relativePath)) {
        // File already synced, treat as change
        return handleFileChange(ctx, absolutePath);
    }
    // Read file content
    let rawContent;
    try {
        rawContent = fs.readFileSync(absolutePath);
    }
    catch (error) {
        if (error.code === 'EACCES') {
            ctx.log(`[file-sync] Permission denied: ${relativePath}`);
            return;
        }
        throw error;
    }
    const mimeType = detectMimeType(fileName, rawContent);
    const isText = isTextFile(mimeType);
    if (shouldSkipLargeBinary(isText, rawContent.length, relativePath, ctx.log)) {
        return;
    }
    // Ensure folder hierarchy exists - returns folder's logical ID
    const folderId = await ensureFolderHierarchy(ctx, relativePath);
    const now = new Date().toISOString();
    const textContent = isText ? rawContent.toString('utf-8') : '';
    const canvasType = mimeTypeToCanvasType(mimeType);
    const isCodeFile = canvasType === 'code';
    // Create file document matching web app's expected structure:
    // - Code files: store plain text directly (no prosemirror formatting)
    // - Text/markdown files: create with empty text first, then apply setTextContent
    // - Binary files: { binary: base64, metadata: {...} }
    // Metadata shared by both document shapes (only `type` differs).
    const baseMetadata = {
        name: fileName,
        description: `Synced from ${relativePath}`,
        createdAt: now,
        updatedAt: now,
        path: relativePath,
        mimeType,
        size: rawContent.length,
        gitBranch: ctx.gitBranch,
        createdBy: 'cli',
    };
    const fileData = isText
        ? {
            text: isCodeFile ? textContent : '', // Code files get content directly, text files use setTextContent
            metadata: { type: 'text', ...baseMetadata },
        }
        : {
            binary: rawContent.toString('base64'),
            metadata: { type: 'binary', ...baseMetadata },
        };
    const fileHandle = await ctx.workspace.file.create(fileData, {
        name: fileName,
        mimeType,
        folderId: folderId ? folderId : undefined,
    });
    // Apply proper richtext formatting for text/markdown files using setTextContent.
    // This converts markdown to automerge-prosemirror spans/marks format.
    // Code files should NOT use setTextContent - they need plain text for CodeMirror.
    if (isText && !isCodeFile && textContent.length > 0) {
        await setTextContent(fileHandle, ['text'], textContent);
    }
    // Wait for document to sync to server. The sync server needs time to:
    // 1. Receive the workspace update with fileRef
    // 2. Subscribe to the new file document
    // 3. Request and receive the file content from CLI
    ctx.log(`[file-sync] Waiting for document sync: ${fileHandle.documentId}`);
    // Trigger a find() to ensure the document is in the repo's handles;
    // this can help the sync protocol recognize the document.
    try {
        const docHandle = await ctx.repo.find(fileHandle.documentId);
        if (docHandle) {
            // Poll until the document has heads (meaning it has content).
            let attempts = 0;
            const maxAttempts = 20; // 20 * 200ms = 4 seconds max
            while (attempts < maxAttempts) {
                const heads = docHandle.heads();
                if (heads && heads.length > 0) {
                    ctx.log(`[file-sync] Document has heads after ${attempts * 200}ms: ${heads[0]?.slice(0, 12)}`);
                    break;
                }
                await new Promise(resolve => setTimeout(resolve, 200));
                attempts++;
            }
        }
    }
    catch (error) {
        ctx.log(`[file-sync] Could not verify document sync: ${error}`);
    }
    // Additional wait for network propagation
    await new Promise(resolve => setTimeout(resolve, 1500));
    // Cache the new file (both directions)
    ctx.filePathCache.set(relativePath, fileHandle.documentId);
    ctx.docIdToPathCache.set(fileHandle.documentId, relativePath);
    // Register file with sharePolicy
    addFileToSharePolicy(fileHandle.documentId);
    // Also register folder if it was created
    if (folderId) {
        addFileToSharePolicy(folderId);
    }
    // Map file to current git branch via workspace.gitBranch API (RFC 0003);
    // .git internals are never mapped onto the branch tree.
    if (ctx.gitBranch && !relativePath.startsWith('.git/') && relativePath !== '.git') {
        try {
            ctx.log(`[file-sync] Applying override: branch=${ctx.gitBranch}, path=${relativePath}, docId=${fileHandle.documentId}`);
            await ctx.workspace.gitBranch.applyOverride(ctx.gitBranch, relativePath, fileHandle.documentId);
            ctx.log(`[file-sync] Override applied successfully: ${relativePath}`);
        }
        catch (error) {
            ctx.log(`[file-sync] Failed to apply override: ${relativePath} - ${error?.message || error}`);
        }
    }
    else {
        ctx.log(`[file-sync] Skipping applyOverride: gitBranch=${ctx.gitBranch}, path=${relativePath}`);
    }
    ctx.log(`[file-sync] Created: ${relativePath} (id: ${fileHandle.documentId})`);
}
|
|
385
|
+
/**
 * Handle file change - update existing FileDocument.
 * Uses content hash to skip syncing unchanged files.
 * Implements copy-on-write for inherited files (spec-cow-first-edit--wip):
 * a first edit to a file inherited from a parent branch creates a new
 * branch-scoped document instead of mutating the shared one.
 *
 * Fix: removed the unused local `parentDir` in the copy-on-write path.
 *
 * @param ctx File sync context from createFileSyncContext
 * @param absolutePath Absolute path of the changed local file
 */
// glassware[type="implementation", id="impl-file-change-cow--0a222408", specifications="specification-spec-cow-first-edit--08a99329,specification-spec-cow-transparent--15f0b363"]
export async function handleFileChange(ctx, absolutePath) {
    const relativePath = normalizePath(absolutePath, ctx.connection.path);
    const fileName = path.basename(absolutePath);
    // Read file content
    let rawContent;
    try {
        rawContent = fs.readFileSync(absolutePath);
    }
    catch (error) {
        if (error.code === 'EACCES') {
            ctx.log(`[file-sync] Permission denied: ${relativePath}`);
            return;
        }
        if (error.code === 'ENOENT') {
            // File was deleted before we could read it
            return;
        }
        throw error;
    }
    const mimeType = detectMimeType(fileName, rawContent);
    const isText = isTextFile(mimeType);
    if (shouldSkipLargeBinary(isText, rawContent.length, relativePath, ctx.log)) {
        return;
    }
    // Check content hash - skip if unchanged
    const newHash = computeContentHash(rawContent);
    const existingHash = ctx.contentHashCache.get(relativePath);
    if (existingHash && existingHash === newHash) {
        ctx.log(`[file-sync] Skipping unchanged file (hash match): ${relativePath}`);
        return;
    }
    // Check cache for existing file
    let existingFileId = ctx.filePathCache.get(relativePath);
    if (!existingFileId) {
        // File doesn't exist in cache, create it.
        // Remove from cache first to avoid infinite loop.
        ctx.filePathCache.delete(relativePath);
        return handleFileCreate(ctx, absolutePath);
    }
    // BD-8: Check if file needs copy-on-write (inherited from parent branch)
    if (ctx.gitBranch && !relativePath.startsWith('.git/') && relativePath !== '.git') {
        try {
            const cowResult = await ctx.workspace.gitBranch.checkCopyOnWrite(relativePath, { branch: ctx.gitBranch });
            if (cowResult.needsCopyOnWrite && cowResult.sourceDocId) {
                ctx.log(`[file-sync] Copy-on-write needed for inherited file: ${relativePath}`);
                // Create a new document with the updated content
                const folderId = await ensureFolderHierarchy(ctx, relativePath);
                const textContent = isText ? rawContent.toString('utf-8') : '';
                const cowCanvasType = mimeTypeToCanvasType(mimeType);
                const cowIsCodeFile = cowCanvasType === 'code';
                // Code files: store plain text directly.
                // Text/markdown files: create with empty text, then apply setTextContent.
                const fileData = isText
                    ? { text: cowIsCodeFile ? textContent : '' }
                    : { binary: rawContent.toString('base64') };
                const newFileHandle = await ctx.workspace.file.create({
                    ...fileData,
                    metadata: {
                        path: relativePath,
                        mimeType,
                        size: rawContent.length,
                        gitBranch: ctx.gitBranch,
                        createdBy: 'cli',
                        contentHash: newHash,
                    },
                }, {
                    name: fileName,
                    mimeType,
                    folderId: folderId ? folderId : undefined,
                });
                // Apply proper richtext formatting for text/markdown files only (not code files)
                if (isText && !cowIsCodeFile && textContent.length > 0) {
                    await setTextContent(newFileHandle, ['text'], textContent);
                }
                // Apply the override to track the branch-specific document
                await ctx.workspace.gitBranch.applyOverride(ctx.gitBranch, relativePath, newFileHandle.documentId);
                // Update caches with new document
                ctx.filePathCache.set(relativePath, newFileHandle.documentId);
                ctx.docIdToPathCache.delete(existingFileId);
                ctx.docIdToPathCache.set(newFileHandle.documentId, relativePath);
                existingFileId = newFileHandle.documentId;
                // Register with sharePolicy
                addFileToSharePolicy(newFileHandle.documentId);
                ctx.log(`[file-sync] Copy-on-write complete: ${relativePath} -> ${newFileHandle.documentId}`);
                // Update hash/sync caches
                ctx.contentHashCache.set(relativePath, newHash);
                ctx.lastSyncCache.set(relativePath, new Date().toISOString());
                return; // Done - new document created with content
            }
        }
        catch (error) {
            ctx.log(`[file-sync] Copy-on-write check failed: ${error?.message || error}`);
            // Fall through to normal update
        }
    }
    // Update file content in place
    const now = new Date().toISOString();
    const updateCanvasType = mimeTypeToCanvasType(mimeType);
    const updateIsCodeFile = updateCanvasType === 'code';
    if (isText) {
        const textContent = rawContent.toString('utf-8');
        const handle = await ctx.workspace.file.get(existingFileId);
        if (handle) {
            if (updateIsCodeFile) {
                // Code files: use simple text update (no prosemirror formatting)
                await ctx.workspace.file.update(existingFileId, {
                    text: textContent,
                    metadata: {
                        updatedAt: now,
                        size: rawContent.length,
                        contentHash: newHash,
                    },
                });
                ctx.log(`[file-sync] Updated (code): ${relativePath}`);
            }
            else {
                // Text/markdown files: use setTextContent for automerge-prosemirror format,
                // then update metadata separately with the content hash.
                await setTextContent(handle, ['text'], textContent);
                await ctx.workspace.file.update(existingFileId, {
                    metadata: {
                        updatedAt: now,
                        size: rawContent.length,
                        contentHash: newHash,
                    },
                });
                ctx.log(`[file-sync] Updated (richtext): ${relativePath}`);
            }
        }
        else {
            ctx.log(`[file-sync] Could not get handle for: ${relativePath}`);
        }
    }
    else {
        // Binary files use simple update
        await ctx.workspace.file.update(existingFileId, {
            binary: rawContent.toString('base64'),
            metadata: {
                updatedAt: now,
                size: rawContent.length,
                contentHash: newHash,
            },
        });
        ctx.log(`[file-sync] Updated (binary): ${relativePath}`);
    }
    // Update hash/sync caches
    ctx.contentHashCache.set(relativePath, newHash);
    ctx.lastSyncCache.set(relativePath, now);
}
|
|
541
|
+
/**
 * Handle file deletion - soft-delete FileDocument.
 * For branch-scoped files, marks as deleted on the current branch
 * (spec-mark-deleted--wip) so the file is preserved on parent branches
 * while hidden on this one, then deletes the file document and cleans up
 * caches and the share policy.
 *
 * Fix: use the injected ctx.log for the final message instead of console.log,
 * consistent with the rest of the module.
 *
 * @param ctx File sync context from createFileSyncContext
 * @param absolutePath Absolute path of the deleted local file
 */
// glassware[type="implementation", id="impl-file-delete-branch--caf49639", specifications="specification-spec-mark-deleted--4b162505"]
export async function handleFileDelete(ctx, absolutePath) {
    const relativePath = normalizePath(absolutePath, ctx.connection.path);
    // Check cache for existing file
    const existingFileId = ctx.filePathCache.get(relativePath);
    if (!existingFileId) {
        return; // File wasn't synced
    }
    // For git-enabled workspaces, mark the file as deleted on the current branch
    // (.git internals are excluded from branch bookkeeping).
    if (ctx.gitBranch && !relativePath.startsWith('.git/') && relativePath !== '.git') {
        try {
            await ctx.workspace.gitBranch.markDeleted(ctx.gitBranch, relativePath);
            ctx.log(`[file-sync] Marked deleted on branch ${ctx.gitBranch}: ${relativePath}`);
        }
        catch (error) {
            // Best-effort: a failed branch marking still allows document deletion.
            ctx.log(`[file-sync] Could not mark deleted on branch: ${error?.message || error}`);
        }
    }
    // Delete file document
    await ctx.workspace.file.delete(existingFileId);
    // Remove from both path caches
    ctx.filePathCache.delete(relativePath);
    ctx.docIdToPathCache.delete(existingFileId);
    // Remove from sharePolicy
    removeFileFromSharePolicy(existingFileId);
    ctx.log(`[file-sync] Deleted: ${relativePath}`);
}
|
|
573
|
+
/**
 * Perform initial sync of all files in the connected directory.
 * Recursively walks the connection root, honoring ignore patterns, skipping
 * files already present in the path cache, and importing the rest via
 * handleFileCreate. Returns counts of synced and skipped entries.
 */
export async function performInitialSync(ctx, ignorePatterns) {
    const stats = { synced: 0, skipped: 0 };
    // Depth-first, sequential walk so files sync one at a time.
    const walk = async (dirPath) => {
        for (const entry of fs.readdirSync(dirPath, { withFileTypes: true })) {
            const fullPath = path.join(dirPath, entry.name);
            const relativePath = normalizePath(fullPath, ctx.connection.path);
            // Honor ignore patterns for both files and directories.
            if (shouldIgnore(relativePath, ignorePatterns)) {
                ctx.log(`[initial-sync] Skipping ignored: ${relativePath}`);
                stats.skipped += 1;
                continue;
            }
            if (entry.isDirectory()) {
                ctx.log(`[initial-sync] Entering directory: ${relativePath}`);
                await walk(fullPath);
                continue;
            }
            if (!entry.isFile()) {
                // Symlinks, sockets, etc. are not synced.
                continue;
            }
            // Avoid duplicating files already known to the workspace.
            if (ctx.filePathCache.has(relativePath)) {
                ctx.log(`[initial-sync] Skipping existing file: ${relativePath}`);
                stats.skipped += 1;
                continue;
            }
            ctx.log(`[initial-sync] Importing file: ${relativePath}`);
            try {
                await handleFileCreate(ctx, fullPath);
                stats.synced += 1;
            }
            catch (error) {
                ctx.log(`[initial-sync] Error syncing ${relativePath}: ${error}`);
                stats.skipped += 1;
            }
        }
    };
    ctx.log(`[initial-sync] Starting directory walk from ${ctx.connection.path}`);
    await walk(ctx.connection.path);
    ctx.log(`[initial-sync] Directory walk complete: ${stats.synced} synced, ${stats.skipped} skipped`);
    return stats;
}
|
|
617
|
+
// shouldIgnore and matchPattern imported from @mod/mod-core
|
|
618
|
+
/**
 * Report whether a path has a pending local write, used to break the
 * local-write -> remote-change -> local-write feedback loop.
 *
 * Expired markers are removed lazily on lookup.
 *
 * @param ctx File sync context holding the pendingWrites map
 * @param relativePath Workspace-relative file path
 * @returns {boolean} true when a non-expired pending write exists
 */
export function isPendingWrite(ctx, relativePath) {
    const deadline = ctx.pendingWrites.get(relativePath);
    if (!deadline) {
        return false;
    }
    const stillPending = Date.now() <= deadline;
    if (!stillPending) {
        // Marker has expired — clean it up so the map does not grow.
        ctx.pendingWrites.delete(relativePath);
    }
    return stillPending;
}
|
|
632
|
+
/**
 * Record that we are about to write relativePath locally, so the filesystem
 * watcher can ignore the resulting event for PENDING_WRITE_TIMEOUT_MS.
 *
 * @param ctx File sync context holding the pendingWrites map
 * @param relativePath Workspace-relative file path being written
 */
function markPendingWrite(ctx, relativePath) {
    const expiresAt = Date.now() + PENDING_WRITE_TIMEOUT_MS;
    ctx.pendingWrites.set(relativePath, expiresAt);
}
|
|
638
|
+
/**
 * Write remote file content to local filesystem.
 * Uses conflict resolution to handle concurrent local/remote edits.
 *
 * Flow: honour remote deletes, ensure the parent directory exists, extract
 * text (via getTextContent, which handles automerge-prosemirror spans) or
 * base64 binary content from the document, then — if a local copy exists —
 * skip identical content or run the conflict resolver before overwriting.
 * Every local write/delete is marked as pending so the file watcher ignores
 * the resulting filesystem event.
 *
 * @param ctx File sync context
 * @param doc The file document from Automerge
 * @param relativePath The relative path of the file
 */
async function writeRemoteFileToFilesystem(ctx, doc, relativePath) {
    const absolutePath = path.join(ctx.connection.path, relativePath);
    // Check if file was deleted remotely: remove the local copy and stop.
    if (doc.metadata?.deletedAt) {
        if (fs.existsSync(absolutePath)) {
            ctx.log(`[file-sync] Remote delete: ${relativePath}`);
            markPendingWrite(ctx, relativePath);
            fs.unlinkSync(absolutePath);
        }
        return;
    }
    // Ensure the parent directory exists before any write.
    const dirPath = path.dirname(absolutePath);
    if (!fs.existsSync(dirPath)) {
        fs.mkdirSync(dirPath, { recursive: true });
    }
    // Determine content - use getTextContent which handles automerge-prosemirror spans
    let content = null;
    // Try text content first (handles both plain text and automerge-prosemirror spans)
    const textContent = getTextContent(doc, ['text']);
    if (textContent) {
        content = textContent;
    }
    else if (doc.content?.text !== undefined) {
        // ModFile structure with content.text
        const nestedTextContent = getTextContent(doc, ['content', 'text']);
        if (nestedTextContent) {
            content = nestedTextContent;
        }
    }
    // Binary content: base64-encoded in either doc.binary or doc.content.binary.
    if (!content) {
        if (doc.binary) {
            content = Buffer.from(doc.binary, 'base64');
        }
        else if (doc.content?.binary) {
            content = Buffer.from(doc.content.binary, 'base64');
        }
    }
    if (content === null) {
        ctx.log(`[file-sync] No content found for remote file: ${relativePath}`);
        return;
    }
    const remoteContent = typeof content === 'string' ? Buffer.from(content, 'utf-8') : content;
    const remoteHash = computeContentHash(remoteContent);
    // Check for conflicts with an existing local file before overwriting.
    if (fs.existsSync(absolutePath)) {
        try {
            const localContent = fs.readFileSync(absolutePath);
            // If content is identical, skip the write entirely.
            if (localContent.equals(remoteContent)) {
                // Update hash cache to track this state
                ctx.contentHashCache.set(relativePath, remoteHash);
                return;
            }
            // Check for conflict using conflict resolver
            const localStat = fs.statSync(absolutePath);
            const localHash = computeContentHash(localContent);
            const cachedHash = ctx.contentHashCache.get(relativePath);
            const lastSyncTime = ctx.lastSyncCache.get(relativePath) || '1970-01-01T00:00:00Z';
            // Build state for conflict resolution
            const localState = {
                path: relativePath,
                modified: cachedHash !== undefined && cachedHash !== localHash, // Changed since last sync
                localMtime: localStat.mtime.toISOString(),
                lastSyncedAt: lastSyncTime,
                contentHash: localHash,
            };
            const remoteState = {
                path: relativePath,
                updatedAt: doc.metadata?.updatedAt || new Date().toISOString(),
                contentHash: remoteHash,
            };
            // Resolve conflict
            const { resolution, reason, isConflict } = ctx.conflictResolver.checkAndResolve(localState, remoteState);
            if (isConflict) {
                ctx.log(`[file-sync] Conflict detected for ${relativePath}: ${reason}`);
            }
            if (resolution === 'use-local') {
                ctx.log(`[file-sync] Keeping local version: ${relativePath}`);
                return;
            }
            // resolution === 'use-remote', proceed with write
            ctx.log(`[file-sync] Using remote version: ${relativePath}`);
        }
        catch (error) {
            // Local file could not be read/compared; log it (rather than
            // swallowing silently) and let the remote version win.
            ctx.log(`[file-sync] Could not compare local file ${relativePath}: ${error?.message || error}`);
        }
    }
    ctx.log(`[file-sync] Writing remote change: ${relativePath}`);
    markPendingWrite(ctx, relativePath);
    if (typeof content === 'string') {
        fs.writeFileSync(absolutePath, content, 'utf-8');
    }
    else {
        fs.writeFileSync(absolutePath, content);
    }
    // Update caches after successful write so future comparisons see this
    // content as the last-synced state.
    ctx.contentHashCache.set(relativePath, remoteHash);
    ctx.lastSyncCache.set(relativePath, new Date().toISOString());
}
|
|
746
|
+
/**
 * Subscribe to remote changes for all files in the workspace.
 * Writes remote changes back to the local filesystem.
 * Also watches the workspace document for new files added remotely.
 *
 * @param ctx File sync context
 */
export async function subscribeToRemoteChanges(ctx) {
    ctx.log(`[file-sync] Setting up remote change subscriptions...`);
    // Subscribe to every file currently known to the workspace.
    const fileRefs = await ctx.workspace.file.list();
    const subscribedFileIds = new Set();
    for (const { id } of fileRefs) {
        try {
            await subscribeToFileChanges(ctx, id);
            subscribedFileIds.add(id);
        }
        catch (error) {
            ctx.log(`[file-sync] Failed to subscribe to file ${id}: ${error?.message || error}`);
        }
    }
    ctx.log(`[file-sync] Subscribed to ${ctx.subscriptionCleanups.length} file documents`);
    // Files created later by other clients arrive through the workspace
    // document, so watch it too.
    await subscribeToWorkspaceChanges(ctx, subscribedFileIds);
}
|
|
769
|
+
/**
 * Subscribe to workspace document changes to detect new files added by other clients.
 * When a new file is detected, subscribe to it and write it to the local filesystem.
 * Also detects deleted files and removes them from cache.
 * Uses WorkspaceHandle's built-in subscription API for cleaner abstraction.
 *
 * @param ctx File sync context
 * @param subscribedFileIds Set of file ids already subscribed to; mutated
 *   here as files are added and removed
 */
async function subscribeToWorkspaceChanges(ctx, subscribedFileIds) {
    ctx.log(`[file-sync] Setting up workspace subscription for new files...`);
    try {
        // Use WorkspaceHandle's on() method for file-added events
        ctx.workspace.on('file-added', async (event) => {
            const fileId = event.fileId;
            const fileName = event.fileName;
            if (!fileId || subscribedFileIds.has(fileId))
                return;
            ctx.log(`[file-sync] Detected new remote file: ${fileId} (${fileName || 'unnamed'})`);
            subscribedFileIds.add(fileId);
            try {
                // Subscribe to the new file for future changes
                await subscribeToFileChanges(ctx, fileId);
                // Add to sharePolicy
                addFilesToSharePolicy([fileId]);
                // Immediately fetch and write the file to local filesystem
                const fileHandle = await ctx.repo.find(fileId);
                if (fileHandle) {
                    await fileHandle.whenReady?.();
                    const fileDoc = fileHandle.doc?.();
                    if (fileDoc) {
                        // Get file path from metadata, falling back to the event name
                        const filePath = fileDoc.metadata?.path || fileName;
                        if (filePath && typeof filePath === 'string') {
                            // Cache the path mappings
                            ctx.filePathCache.set(filePath, fileId);
                            ctx.docIdToPathCache.set(fileId, filePath);
                            // Write file to local filesystem
                            ctx.log(`[file-sync] Writing new remote file to local: ${filePath}`);
                            await writeRemoteFileToFilesystem(ctx, fileDoc, filePath);
                        }
                        else {
                            ctx.log(`[file-sync] No path found for new file ${fileId}`);
                        }
                    }
                }
            }
            catch (error) {
                ctx.log(`[file-sync] Failed to process new file ${fileId}: ${error?.message || error}`);
            }
        });
        ctx.log(`[file-sync] ✓ Subscribed to workspace file-added events`);
        // Subscribe to file-removed events to clean up cache
        ctx.workspace.on('file-removed', async (event) => {
            const fileId = event.fileId;
            ctx.log(`[file-sync] File removed remotely: ${fileId}`);
            // Remove from tracking sets
            subscribedFileIds.delete(fileId);
            // Remove from caches
            const filePath = ctx.docIdToPathCache.get(fileId);
            if (filePath) {
                ctx.filePathCache.delete(filePath);
                ctx.docIdToPathCache.delete(fileId);
                ctx.log(`[file-sync] Cleared cache for deleted file: ${filePath}`);
            }
            // Remove from share policy
            removeFileFromSharePolicy(fileId);
        });
        // Also subscribe to change events to sync caches
        ctx.workspace.on('change', (event) => {
            // Guard against missing refs: the summary log below already treats
            // fileRefs/folderRefs as optional, so do the same here instead of
            // throwing before the cache sync runs.
            const fileRefs = event.fileRefs ?? [];
            const folderRefs = event.folderRefs ?? [];
            const currentFileIds = new Set(fileRefs.map((f) => f.id));
            const currentFolderIds = new Set(folderRefs.map((f) => f.id));
            // Detect files that were in cache but no longer in workspace
            for (const [filePath, fileId] of ctx.filePathCache.entries()) {
                if (!currentFileIds.has(fileId)) {
                    ctx.log(`[file-sync] Detected deleted file from cache sync: ${filePath}`);
                    ctx.filePathCache.delete(filePath);
                    ctx.docIdToPathCache.delete(fileId);
                    subscribedFileIds.delete(fileId);
                    removeFileFromSharePolicy(fileId);
                }
            }
            // Detect folders that were deleted
            for (const [folderPath, folderId] of ctx.folderCache.entries()) {
                if (!currentFolderIds.has(folderId)) {
                    ctx.log(`[file-sync] Detected deleted folder from cache sync: ${folderPath}`);
                    ctx.folderCache.delete(folderPath);
                    removeFileFromSharePolicy(folderId);
                }
            }
            ctx.log(`[file-sync] Workspace changed: ${fileRefs.length} files, ${folderRefs.length} folders`);
        });
    }
    catch (error) {
        ctx.log(`[file-sync] Failed to subscribe to workspace changes: ${error?.message || error}`);
    }
}
|
|
863
|
+
// Debounce delay for remote change events (ms): coalesces rapid successive
// document change events into a single filesystem write.
const REMOTE_CHANGE_DEBOUNCE_MS = 300;
|
|
865
|
+
/**
 * Subscribe to changes on a single file document.
 *
 * Sets up a debounced 'change' listener on the document's Automerge handle:
 * each event resets a REMOTE_CHANGE_DEBOUNCE_MS timer, and only after a
 * quiet period is the document read and written to disk via
 * writeRemoteFileToFilesystem. A writeInProgress flag prevents overlapping
 * writes by rescheduling processChange while a write is running. A cleanup
 * function (clears the timer, detaches the listener) is pushed onto
 * ctx.subscriptionCleanups.
 *
 * @param ctx File sync context
 * @param fileId Document id of the file to watch
 */
async function subscribeToFileChanges(ctx, fileId) {
    // Get the raw Automerge handle
    const handle = await ctx.repo.find(fileId);
    if (!handle) {
        ctx.log(`[file-sync] Could not find handle for ${fileId}`);
        return;
    }
    // Wait for document to be ready
    await handle.whenReady?.();
    // Get initial doc info for logging - use cached path if doc doesn't have it
    const initialDoc = handle.doc?.();
    const initialPath = initialDoc?.metadata?.path ||
        initialDoc?.path ||
        ctx.docIdToPathCache.get(fileId) ||
        'unknown';
    ctx.log(`[file-sync] Subscribing to file: ${fileId} (path: ${initialPath})`);
    // Track debounce timer for this file (one timer per subscription closure)
    let debounceTimer = null;
    // Track if a write is in progress to avoid concurrent writes
    let writeInProgress = false;
    // Reads the current document state and writes it to disk; called only
    // after the debounce window elapses.
    const processChange = async () => {
        if (writeInProgress) {
            ctx.log(`[file-sync] Write in progress for ${fileId}, scheduling retry`);
            // Schedule another check after current write completes
            debounceTimer = setTimeout(processChange, REMOTE_CHANGE_DEBOUNCE_MS);
            return;
        }
        try {
            // Get fresh document state after debounce
            const doc = handle.doc?.();
            if (!doc) {
                ctx.log(`[file-sync] No doc in change event for ${fileId}`);
                return;
            }
            // Get the relative path - try document metadata first, then fall back to cached path
            let relativePath = doc.metadata?.path || doc.path;
            // If not in doc, use the cached path from when we set up subscriptions
            if (!relativePath || typeof relativePath !== 'string') {
                relativePath = ctx.docIdToPathCache.get(fileId);
            }
            if (!relativePath || typeof relativePath !== 'string') {
                ctx.log(`[file-sync] No valid path found for ${fileId} (not in doc or cache)`);
                return;
            }
            // Skip if this is a pending write (we just wrote this file);
            // prevents the local-write -> remote-change echo loop.
            if (isPendingWrite(ctx, relativePath)) {
                ctx.log(`[file-sync] Skipping pending write for ${relativePath}`);
                return;
            }
            // Write to filesystem
            ctx.log(`[file-sync] Processing remote change for ${relativePath}`);
            writeInProgress = true;
            try {
                await writeRemoteFileToFilesystem(ctx, doc, relativePath);
            }
            finally {
                // Always release the flag, even when the write throws.
                writeInProgress = false;
            }
        }
        catch (error) {
            ctx.log(`[file-sync] Error handling remote change: ${error?.message || error}`);
            writeInProgress = false;
        }
    };
    // Raw change listener: only arms/re-arms the debounce timer.
    const onChange = (payload) => {
        ctx.log(`[file-sync] Change event received for ${fileId}`);
        // Debounce: clear existing timer and set a new one
        // This ensures we process changes after a quiet period, avoiding
        // processing intermediate states during rapid sync updates
        if (debounceTimer) {
            clearTimeout(debounceTimer);
        }
        debounceTimer = setTimeout(processChange, REMOTE_CHANGE_DEBOUNCE_MS);
    };
    // Subscribe to changes (handle may not expose an event API)
    if (typeof handle.on === 'function') {
        handle.on('change', onChange);
        ctx.log(`[file-sync] ✓ Subscribed to change events for ${fileId}`);
    }
    else {
        ctx.log(`[file-sync] ✗ Handle has no .on() method for ${fileId}`);
    }
    // Store cleanup function for cleanupRemoteSubscriptions()
    ctx.subscriptionCleanups.push(() => {
        if (debounceTimer) {
            clearTimeout(debounceTimer);
        }
        handle.off?.('change', onChange);
    });
}
|
|
958
|
+
/**
 * Subscribe to a newly created file for remote changes.
 * Call this after creating a new file document; failures are logged
 * instead of thrown so file creation is never aborted by a bad subscription.
 *
 * @param ctx File sync context
 * @param fileId Document id of the newly created file
 */
export async function subscribeToNewFile(ctx, fileId) {
    try {
        await subscribeToFileChanges(ctx, fileId);
    }
    catch (error) {
        const detail = error?.message || error;
        ctx.log(`[file-sync] Failed to subscribe to new file ${fileId}: ${detail}`);
    }
}
|
|
970
|
+
/**
 * Cleanup all remote change subscriptions.
 *
 * Runs every stored cleanup callback (ignoring individual failures so one
 * bad handle cannot block shutdown), then resets the cleanup list and the
 * pending-write markers.
 *
 * @param ctx File sync context
 */
export function cleanupRemoteSubscriptions(ctx) {
    ctx.log(`[file-sync] Cleaning up ${ctx.subscriptionCleanups.length} subscriptions...`);
    ctx.subscriptionCleanups.forEach((cleanup) => {
        try {
            cleanup();
        }
        catch (error) {
            // Ignore cleanup errors — best-effort teardown.
        }
    });
    ctx.subscriptionCleanups = [];
    ctx.pendingWrites.clear();
}
|