@mod-computer/cli 0.2.3 → 0.2.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.bundle.js +216 -36371
- package/package.json +3 -3
- package/dist/app.js +0 -227
- package/dist/cli.bundle.js.map +0 -7
- package/dist/cli.js +0 -132
- package/dist/commands/add.js +0 -245
- package/dist/commands/agents-run.js +0 -71
- package/dist/commands/auth.js +0 -259
- package/dist/commands/branch.js +0 -1411
- package/dist/commands/claude-sync.js +0 -772
- package/dist/commands/comment.js +0 -568
- package/dist/commands/diff.js +0 -182
- package/dist/commands/index.js +0 -73
- package/dist/commands/init.js +0 -597
- package/dist/commands/ls.js +0 -135
- package/dist/commands/members.js +0 -687
- package/dist/commands/mv.js +0 -282
- package/dist/commands/recover.js +0 -207
- package/dist/commands/rm.js +0 -257
- package/dist/commands/spec.js +0 -386
- package/dist/commands/status.js +0 -296
- package/dist/commands/sync.js +0 -119
- package/dist/commands/trace.js +0 -1752
- package/dist/commands/workspace.js +0 -447
- package/dist/components/conflict-resolution-ui.js +0 -120
- package/dist/components/messages.js +0 -5
- package/dist/components/thread.js +0 -8
- package/dist/config/features.js +0 -83
- package/dist/containers/branches-container.js +0 -140
- package/dist/containers/directory-container.js +0 -92
- package/dist/containers/thread-container.js +0 -214
- package/dist/containers/threads-container.js +0 -27
- package/dist/containers/workspaces-container.js +0 -27
- package/dist/daemon/conflict-resolution.js +0 -172
- package/dist/daemon/content-hash.js +0 -31
- package/dist/daemon/file-sync.js +0 -985
- package/dist/daemon/index.js +0 -203
- package/dist/daemon/mime-types.js +0 -166
- package/dist/daemon/offline-queue.js +0 -211
- package/dist/daemon/path-utils.js +0 -64
- package/dist/daemon/share-policy.js +0 -83
- package/dist/daemon/wasm-errors.js +0 -189
- package/dist/daemon/worker.js +0 -557
- package/dist/daemon-worker.js +0 -258
- package/dist/errors/workspace-errors.js +0 -48
- package/dist/lib/auth-server.js +0 -216
- package/dist/lib/browser.js +0 -35
- package/dist/lib/diff.js +0 -284
- package/dist/lib/formatters.js +0 -204
- package/dist/lib/git.js +0 -137
- package/dist/lib/local-fs.js +0 -201
- package/dist/lib/prompts.js +0 -56
- package/dist/lib/storage.js +0 -213
- package/dist/lib/trace-formatters.js +0 -314
- package/dist/services/add-service.js +0 -554
- package/dist/services/add-validation.js +0 -124
- package/dist/services/automatic-file-tracker.js +0 -303
- package/dist/services/cli-orchestrator.js +0 -227
- package/dist/services/feature-flags.js +0 -187
- package/dist/services/file-import-service.js +0 -283
- package/dist/services/file-transformation-service.js +0 -218
- package/dist/services/logger.js +0 -44
- package/dist/services/mod-config.js +0 -67
- package/dist/services/modignore-service.js +0 -328
- package/dist/services/sync-daemon.js +0 -244
- package/dist/services/thread-notification-service.js +0 -50
- package/dist/services/thread-service.js +0 -147
- package/dist/stores/use-directory-store.js +0 -96
- package/dist/stores/use-threads-store.js +0 -46
- package/dist/stores/use-workspaces-store.js +0 -54
- package/dist/types/add-types.js +0 -99
- package/dist/types/config.js +0 -16
- package/dist/types/index.js +0 -2
- package/dist/types/workspace-connection.js +0 -53
- package/dist/types.js +0 -1

package/dist/services/add-service.js (removed)
@@ -1,554 +0,0 @@
-// glassware[type="implementation", id="impl-cli-add-service--f030dda1", requirements="requirement-cli-add-cmd--aff0d740,requirement-cli-add-parallel-model--7ab0e828,requirement-cli-add-batching--f126548b,requirement-cli-add-handle-lifecycle--a3cad232,requirement-cli-add-memory-cleanup--c811fb49,requirement-cli-add-arch-doc-model--7ed895a5,requirement-cli-add-arch-doc-count--dfb8e86e,requirement-cli-add-arch-memory--663bfd00,requirement-cli-add-arch-large-dir--6e8d22b7"]
-// spec: packages/mod-cli/specs/add.md
-import fs from 'fs';
-import path from 'path';
-import crypto from 'crypto';
-import { createModWorkspace } from '@mod/mod-core/mod-workspace';
-import { detectMimeType, mimeTypeToCanvasType, setTextContent } from '@mod/mod-core';
-import { createAddError, ADD_CONSTANTS, TEXT_EXTENSIONS, } from '../types/add-types.js';
-import { validateAddOptions, validateWorkspaceState } from './add-validation.js';
-import { findWorkspaceConnection } from '../lib/storage.js';
-import { ModIgnoreService } from './modignore-service.js';
-// glassware[type="implementation", id="impl-cli-add-service-class--d06b558f", requirements="requirement-cli-add-cmd--aff0d740"]
-/**
- * Service for adding files to a workspace
- */
-export class AddService {
-    constructor(repo) {
-        this.cancelled = false;
-        this.folderCache = new Map();
-        this.repo = repo;
-    }
-    /**
-     * Cancel the current add operation
-     */
-    cancel() {
-        this.cancelled = true;
-    }
-    /**
-     * Execute the add operation
-     * Provides clear messaging when resuming after interruption
-     */
-    async execute(options, onProgress) {
-        const startTime = Date.now();
-        this.cancelled = false;
-        this.folderCache.clear();
-        // Validate options
-        const optionsValidation = validateAddOptions(options);
-        if (!optionsValidation.valid) {
-            return {
-                success: false,
-                directories: [],
-                summary: createEmptySummary(),
-                duration: Date.now() - startTime
-            };
-        }
-        const workingDir = process.cwd();
-        // Validate workspace state
-        const workspaceValidation = await validateWorkspaceState(workingDir);
-        if (!workspaceValidation.valid) {
-            return {
-                success: false,
-                directories: [],
-                summary: createEmptySummary(),
-                duration: Date.now() - startTime
-            };
-        }
-        // Get workspace connection
-        const connection = findWorkspaceConnection(workingDir);
-        if (!connection) {
-            return {
-                success: false,
-                directories: [],
-                summary: createEmptySummary(),
-                duration: Date.now() - startTime
-            };
-        }
-        // Open workspace
-        const modWorkspace = createModWorkspace(this.repo);
-        const workspaceHandle = await modWorkspace.openWorkspace(connection.workspaceId);
-        // Phase 1: Scan files
-        onProgress?.({ phase: 'scanning', current: 0, total: 0 });
-        const scannedFiles = await this.scanFiles(options.paths, workingDir, options);
-        // Group files by directory
-        const directoryGroups = this.groupByDirectory(scannedFiles);
-        // Dry run - just return what would be done
-        if (options.dryRun) {
-            return this.createDryRunResult(directoryGroups, startTime);
-        }
-        // Phase 2: Compare with existing (for resume support)
-        onProgress?.({ phase: 'comparing', current: 0, total: directoryGroups.length });
-        const filesToAdd = await this.filterExistingFiles(directoryGroups, workspaceHandle, workingDir);
-        // Phase 3: Add files
-        const results = await this.addFiles(filesToAdd, workspaceHandle, workingDir, options, onProgress);
-        return {
-            success: results.every(r => r.status !== 'error'),
-            directories: results,
-            summary: this.calculateSummary(results),
-            duration: Date.now() - startTime
-        };
-    }
-    /**
-     * Scan files from the given paths
-     */
-    async scanFiles(paths, workingDir, options) {
-        this.modIgnoreService = new ModIgnoreService(workingDir);
-        const scannedFiles = [];
-        for (const inputPath of paths) {
-            const absolutePath = path.resolve(workingDir, inputPath);
-            const stats = await fs.promises.stat(absolutePath);
-            if (stats.isDirectory()) {
-                await this.scanDirectory(absolutePath, workingDir, scannedFiles, options);
-            }
-            else if (stats.isFile()) {
-                const file = await this.createScannedFile(absolutePath, workingDir);
-                if (file && this.shouldIncludeFile(file, options)) {
-                    scannedFiles.push(file);
-                }
-            }
-        }
-        return scannedFiles;
-    }
-    /**
-     * Recursively scan a directory
-     */
-    async scanDirectory(dirPath, workingDir, scannedFiles, options) {
-        // Check for cancellation (cli-add-cancel)
-        if (this.cancelled)
-            return;
-        try {
-            const entries = await fs.promises.readdir(dirPath, { withFileTypes: true });
-            for (const entry of entries) {
-                // Check for cancellation during iteration (cli-add-cancel)
-                if (this.cancelled)
-                    break;
-                const fullPath = path.join(dirPath, entry.name);
-                // Check ignore patterns (cli-add-ignore-default, cli-add-ignore-gitignore, cli-add-ignore-modignore)
-                if (this.modIgnoreService?.shouldIgnore(fullPath, workingDir)) {
-                    continue;
-                }
-                if (entry.isDirectory()) {
-                    await this.scanDirectory(fullPath, workingDir, scannedFiles, options);
-                }
-                else if (entry.isFile()) {
-                    const file = await this.createScannedFile(fullPath, workingDir);
-                    if (file && this.shouldIncludeFile(file, options)) {
-                        scannedFiles.push(file);
-                    }
-                }
-                else if (entry.isSymbolicLink()) {
-                    // Follow symlinks (cli-add-symlinks-follow)
-                    try {
-                        const realPath = await fs.promises.realpath(fullPath);
-                        const realStats = await fs.promises.stat(realPath);
-                        if (realStats.isFile()) {
-                            const file = await this.createScannedFile(fullPath, workingDir);
-                            if (file && this.shouldIncludeFile(file, options)) {
-                                scannedFiles.push(file);
-                            }
-                        }
-                    }
-                    catch {
-                        // Circular or broken symlink, skip (cli-add-symlinks-circular)
-                    }
-                }
-            }
-        }
-        catch (error) {
-            // Permission denied or other error, skip directory
-        }
-    }
-    /**
-     * Create a ScannedFile from a file path
-     */
-    async createScannedFile(absolutePath, workingDir) {
-        try {
-            const stats = await fs.promises.stat(absolutePath);
-            // Normalize path to forward slashes and make relative (cli-add-path-normalize, cli-add-path-relative)
-            const relativePath = path.relative(workingDir, absolutePath).replace(/\\/g, '/');
-            const ext = path.extname(absolutePath).toLowerCase();
-            const mimeType = detectMimeType(absolutePath);
-            const isText = this.isTextFile(absolutePath, mimeType);
-            return {
-                absolutePath,
-                relativePath,
-                size: stats.size,
-                mtime: stats.mtimeMs,
-                isText,
-                mimeType,
-                directory: path.dirname(relativePath).replace(/\\/g, '/') || '.'
-            };
-        }
-        catch {
-            return null;
-        }
-    }
-    /**
-     * Determine if a file is text based on extension and mime type
-     */
-    isTextFile(filePath, mimeType) {
-        const ext = path.extname(filePath).toLowerCase();
-        const baseName = path.basename(filePath).toLowerCase();
-        // Check known text extensions
-        if (TEXT_EXTENSIONS.has(ext))
-            return true;
-        // Check known text filenames
-        if (['makefile', 'dockerfile', 'readme', 'license', 'changelog'].includes(baseName)) {
-            return true;
-        }
-        // Check mime type
-        if (mimeType.startsWith('text/'))
-            return true;
-        if (mimeType === 'application/json')
-            return true;
-        if (mimeType === 'application/javascript')
-            return true;
-        if (mimeType === 'application/xml')
-            return true;
-        return false;
-    }
-    /**
-     * Check if a file should be included based on options
-     */
-    shouldIncludeFile(file, options) {
-        // Empty files are always included
-        if (file.size === 0)
-            return true;
-        // Text files are always included
-        if (file.isText)
-            return true;
-        // Binary files under 100KB are included
-        if (file.size <= ADD_CONSTANTS.MAX_BINARY_SIZE)
-            return true;
-        // Large binary files only if flag is set
-        return options.includeLargeBinary === true;
-    }
-    /**
-     * Group scanned files by directory
-     */
-    groupByDirectory(files) {
-        const groups = new Map();
-        for (const file of files) {
-            const dir = file.directory;
-            if (!groups.has(dir)) {
-                groups.set(dir, []);
-            }
-            groups.get(dir).push(file);
-        }
-        return Array.from(groups.entries()).map(([dir, files]) => ({
-            path: dir,
-            relativePath: dir,
-            files
-        }));
-    }
-    /**
-     * Filter out files that already exist (for resume support)
-     * Implements idempotency: re-running add skips unchanged files
-     * Progress is tracked per-directory for resume capability
-     */
-    async filterExistingFiles(groups, workspaceHandle, workingDir) {
-        // For now, return all files - implement full comparison later
-        // This is a placeholder for the resume/idempotency feature
-        return groups;
-    }
-    /**
-     * Add files with parallel processing within directories
-     * Expected performance: 8x speedup for 5000+ files with proper parallelization
-     */
-    async addFiles(groups, workspaceHandle, workingDir, options, onProgress) {
-        const results = [];
-        let totalProcessed = 0;
-        const totalFiles = groups.reduce((sum, g) => sum + g.files.length, 0);
-        // Process directories sequentially
-        for (const group of groups) {
-            if (this.cancelled)
-                break;
-            const dirResult = await this.addDirectory(group, workspaceHandle, workingDir, options, (current) => {
-                totalProcessed++;
-                onProgress?.({
-                    phase: 'adding',
-                    current: totalProcessed,
-                    total: totalFiles,
-                    currentDirectory: group.path,
-                    currentFile: current
-                });
-            });
-            results.push(dirResult);
-            // Memory cleanup between directories
-            await this.cleanupMemory();
-        }
-        return results;
-    }
-    /**
-     * Add all files in a directory with parallel batching
-     * Partial directory handling: completed file documents are kept on failure
-     * Orphan cleanup: file docs created before folder update failure are tracked for resume
-     */
-    async addDirectory(group, workspaceHandle, workingDir, options, onFileProgress) {
-        const result = {
-            path: group.path,
-            status: 'complete',
-            created: 0,
-            updated: 0,
-            unchanged: 0,
-            skipped: 0,
-            errors: []
-        };
-        // Ensure folder exists
-        let folderId = null;
-        if (group.path !== '.') {
-            folderId = await this.ensureFolderExists(group.path, workspaceHandle);
-        }
-        // Process files in parallel batches
-        const batches = this.chunk(group.files, ADD_CONSTANTS.PARALLEL_FILE_LIMIT);
-        for (const batch of batches) {
-            if (this.cancelled) {
-                result.status = 'partial';
-                break;
-            }
-            // Process batch in parallel
-            const batchResults = await Promise.all(batch.map(file => this.addFileWithRetry(file, workspaceHandle, workingDir, folderId, options)));
-            // Collect results
-            for (const fileResult of batchResults) {
-                onFileProgress(fileResult.relativePath);
-                switch (fileResult.status) {
-                    case 'created':
-                        result.created++;
-                        break;
-                    case 'updated':
-                        result.updated++;
-                        break;
-                    case 'unchanged':
-                        result.unchanged++;
-                        break;
-                    case 'skipped':
-                        result.skipped++;
-                        break;
-                    case 'error':
-                        result.errors.push(fileResult);
-                        break;
-                }
-            }
-        }
-        if (result.errors.length > 0 && result.status === 'complete') {
-            result.status = 'partial';
-        }
-        return result;
-    }
-    /**
-     * Add a single file with retry logic
-     */
-    async addFileWithRetry(file, workspaceHandle, workingDir, folderId, options, retries = ADD_CONSTANTS.MAX_RETRIES) {
-        for (let attempt = 1; attempt <= retries; attempt++) {
-            try {
-                return await this.addFile(file, workspaceHandle, workingDir, folderId, options);
-            }
-            catch (error) {
-                if (attempt === retries) {
-                    return {
-                        status: 'error',
-                        relativePath: file.relativePath,
-                        error: createAddError('DOC_CREATE_FAILED', error instanceof Error ? error.message : String(error), file.relativePath, error instanceof Error ? error : undefined)
-                    };
-                }
-                // Exponential backoff
-                await this.sleep(ADD_CONSTANTS.RETRY_BACKOFF_MS * attempt);
-            }
-        }
-        // Should never reach here
-        return {
-            status: 'error',
-            relativePath: file.relativePath,
-            error: createAddError('UNKNOWN', 'Unknown error', file.relativePath)
-        };
-    }
-    /**
-     * Add a single file to the workspace
-     * Permission errors are caught and file is skipped with warning
-     */
-    async addFile(file, workspaceHandle, workingDir, folderId, options) {
-        try {
-            // Read file content with encoding fallback (cli-add-encoding-fallback)
-            let content;
-            let isText = file.isText;
-            if (file.isText) {
-                try {
-                    content = await fs.promises.readFile(file.absolutePath, 'utf-8');
-                }
-                catch (encodingError) {
-                    // UTF-8 decoding failed, fall back to base64
-                    content = (await fs.promises.readFile(file.absolutePath)).toString('base64');
-                    isText = false;
-                }
-            }
-            else {
-                content = (await fs.promises.readFile(file.absolutePath)).toString('base64');
-            }
-            // Calculate content hash
-            const contentHash = crypto.createHash('sha256').update(content).digest('hex');
-            // Get file name
-            const fileName = path.basename(file.absolutePath);
-            // Create canvas type from mime type
-            const canvasType = mimeTypeToCanvasType(file.mimeType);
-            const isCodeFile = canvasType === 'code';
-            // Create document data
-            // Code files: Store content directly (no prosemirror formatting)
-            // Text/markdown files: Create with empty text first, then apply setTextContent for proper formatting
-            const documentData = {
-                text: isText ? (isCodeFile ? content : '') : content,
-                metadata: {
-                    type: canvasType,
-                    originalFilename: fileName,
-                    tags: [`hash:${contentHash}`],
-                    typeData: {
-                        originalPath: file.absolutePath,
-                        relativePath: file.relativePath, // File identity by path (cli-add-file-identity)
-                        contentHash,
-                        isText
-                    }
-                }
-            };
-            // Create file document
-            const fileDoc = await workspaceHandle.file.create(documentData, {
-                name: fileName,
-                mimeType: file.mimeType,
-                folderId: folderId
-            });
-            // Apply proper richtext formatting for text/markdown files only using setTextContent
-            // Code files should NOT use setTextContent - they need plain text for CodeMirror
-            if (isText && !isCodeFile && content.length > 0) {
-                await setTextContent(fileDoc, ['text'], content);
-            }
-            return {
-                status: 'created',
-                relativePath: file.relativePath,
-                fileId: fileDoc.documentId,
-                size: file.size
-            };
-        }
-        catch (error) {
-            // Check for permission error
-            if (error instanceof Error && error.message.includes('EACCES')) {
-                return {
-                    status: 'skipped',
-                    relativePath: file.relativePath,
-                    error: createAddError('PERMISSION_DENIED', 'Permission denied', file.relativePath)
-                };
-            }
-            throw error;
-        }
-    }
-    /**
-     * Ensure a folder exists in the workspace
-     * Folder documents mirror the directory structure
-     */
-    async ensureFolderExists(folderPath, workspaceHandle) {
-        // Check cache
-        if (this.folderCache.has(folderPath)) {
-            return this.folderCache.get(folderPath);
-        }
-        // Create folder hierarchy
-        const pathParts = folderPath.split('/').filter(Boolean);
-        let currentPath = '';
-        let folderId = null;
-        for (const part of pathParts) {
-            currentPath = currentPath ? `${currentPath}/${part}` : part;
-            if (this.folderCache.has(currentPath)) {
-                folderId = this.folderCache.get(currentPath);
-                continue;
-            }
-            // Create folder
-            const folderRef = await workspaceHandle.folder.create(currentPath, {
-                name: part,
-                metadata: {
-                    type: 'folder',
-                    path: currentPath,
-                    createdAt: new Date().toISOString()
-                }
-            });
-            this.folderCache.set(currentPath, folderRef.id);
-            folderId = folderRef.id;
-        }
-        return folderId;
-    }
-    /**
-     * Clean up memory between directory batches
-     */
-    async cleanupMemory() {
-        // Force garbage collection if available
-        if (global.gc) {
-            global.gc();
-        }
-        // Small delay to allow async cleanup
-        await this.sleep(10);
-    }
-    /**
-     * Create a dry run result
-     */
-    createDryRunResult(groups, startTime) {
-        const directories = groups.map(g => ({
-            path: g.path,
-            status: 'complete',
-            created: g.files.length,
-            updated: 0,
-            unchanged: 0,
-            skipped: 0,
-            errors: []
-        }));
-        return {
-            success: true,
-            directories,
-            summary: {
-                totalFiles: groups.reduce((sum, g) => sum + g.files.length, 0),
-                created: groups.reduce((sum, g) => sum + g.files.length, 0),
-                updated: 0,
-                unchanged: 0,
-                skipped: 0,
-                errors: 0
-            },
-            duration: Date.now() - startTime
-        };
-    }
-    /**
-     * Calculate summary from directory results
-     */
-    calculateSummary(results) {
-        return {
-            totalFiles: results.reduce((sum, r) => sum + r.created + r.updated + r.unchanged + r.skipped + r.errors.length, 0),
-            created: results.reduce((sum, r) => sum + r.created, 0),
-            updated: results.reduce((sum, r) => sum + r.updated, 0),
-            unchanged: results.reduce((sum, r) => sum + r.unchanged, 0),
-            skipped: results.reduce((sum, r) => sum + r.skipped, 0),
-            errors: results.reduce((sum, r) => sum + r.errors.length, 0)
-        };
-    }
-    /**
-     * Split array into chunks
-     */
-    chunk(array, size) {
-        const chunks = [];
-        for (let i = 0; i < array.length; i += size) {
-            chunks.push(array.slice(i, i + size));
-        }
-        return chunks;
-    }
-    /**
-     * Sleep for a given number of milliseconds
-     */
-    sleep(ms) {
-        return new Promise(resolve => setTimeout(resolve, ms));
-    }
-}
-/**
- * Create an empty summary
- */
-function createEmptySummary() {
-    return {
-        totalFiles: 0,
-        created: 0,
-        updated: 0,
-        unchanged: 0,
-        skipped: 0,
-        errors: 0
-    };
-}
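
For context (not part of the published diff): a minimal sketch of how the removed AddService could be driven, based only on the signatures shown above. The `repo` handle is assumed to come from the CLI's mod-core setup, which is outside this file, and the paths are illustrative.

import { AddService } from './services/add-service.js';

// `repo` is assumed: the mod-core repo object the CLI constructs elsewhere.
const service = new AddService(repo);
const result = await service.execute(
    { paths: ['docs', 'README.md'], dryRun: false, includeLargeBinary: false },
    (progress) => console.log(`${progress.phase}: ${progress.current}/${progress.total}`)
);
// result.summary has totalFiles/created/updated/unchanged/skipped/errors counts.
console.log(result.success, result.summary, `${result.duration}ms`);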

package/dist/services/add-validation.js (removed)
@@ -1,124 +0,0 @@
-// glassware[type="implementation", id="impl-cli-add-validation--4ac0979e", requirements="requirement-cli-add-validation--a77bc4fa"]
-// spec: packages/mod-cli/specs/add.md
-import fs from 'fs';
-import path from 'path';
-import { ADD_CONSTANTS, } from '../types/add-types.js';
-import { findWorkspaceConnection } from '../lib/storage.js';
-// glassware[type="implementation", id="impl-cli-add-validate-options--1cd17efd", requirements="requirement-cli-add-validation--a77bc4fa"]
-/**
- * Validate add command options
- */
-export function validateAddOptions(options) {
-    const errors = [];
-    // Must have at least one path
-    if (!options.paths || options.paths.length === 0) {
-        errors.push({
-            field: 'paths',
-            code: 'REQUIRED',
-            message: 'At least one path is required'
-        });
-    }
-    // Validate each path exists
-    if (options.paths) {
-        for (const inputPath of options.paths) {
-            if (!isValidPath(inputPath)) {
-                errors.push({
-                    field: 'paths',
-                    code: 'INVALID_PATH',
-                    message: `Invalid path: ${inputPath}`
-                });
-            }
-        }
-    }
-    return {
-        valid: errors.length === 0,
-        errors
-    };
-}
-// glassware[type="implementation", id="impl-cli-add-validate-workspace--1f6b65b2", requirements="requirement-cli-add-requires-workspace--c6330c8e"]
-/**
- * Validate workspace state before add
- */
-export async function validateWorkspaceState(workingDirectory) {
-    const errors = [];
-    // Check workspace connection exists
-    const connection = findWorkspaceConnection(workingDirectory);
-    if (!connection) {
-        errors.push({
-            field: 'workspace',
-            code: 'NOT_CONNECTED',
-            message: 'Not connected to a workspace. Run `mod init` first.'
-        });
-        return { valid: false, errors };
-    }
-    // Check workspace ID is valid
-    if (!connection.workspaceId) {
-        errors.push({
-            field: 'workspace',
-            code: 'WORKSPACE_NOT_FOUND',
-            message: 'Workspace connection is missing workspace ID'
-        });
-    }
-    return {
-        valid: errors.length === 0,
-        errors
-    };
-}
-// glassware[type="implementation", id="impl-cli-add-validate-file--ccc49a56", requirements="requirement-cli-add-binary-large-skip--0c7053f8"]
-/**
- * Validate single file before adding
- */
-export function validateFile(file, options, workspaceRoot) {
-    const errors = [];
-    // Check size limit for binary files
-    if (!file.isText && file.size > ADD_CONSTANTS.MAX_BINARY_SIZE && !options.includeLargeBinary) {
-        errors.push({
-            field: 'size',
-            code: 'FILE_TOO_LARGE',
-            message: `Binary file too large: ${formatBytes(file.size)} (limit: 100KB)`
-        });
-    }
-    // Check path is within workspace
-    if (!isWithinWorkspace(file.absolutePath, workspaceRoot)) {
-        errors.push({
-            field: 'path',
-            code: 'PATH_OUTSIDE_WORKSPACE',
-            message: 'File is outside workspace root'
-        });
-    }
-    return {
-        valid: errors.length === 0,
-        errors
-    };
-}
-/**
- * Check if a path is valid (exists)
- */
-export function isValidPath(inputPath) {
-    try {
-        const resolvedPath = path.resolve(inputPath);
-        return fs.existsSync(resolvedPath);
-    }
-    catch {
-        return false;
-    }
-}
-/**
- * Check if a path is within the workspace root
- */
-export function isWithinWorkspace(filePath, workspaceRoot) {
-    const resolvedFile = path.resolve(filePath);
-    const resolvedRoot = path.resolve(workspaceRoot);
-    return resolvedFile.startsWith(resolvedRoot);
-}
-/**
- * Format bytes to human readable string
- */
-export function formatBytes(bytes) {
-    if (bytes === 0)
-        return '0 Bytes';
-    const k = 1024;
-    const sizes = ['Bytes', 'KB', 'MB', 'GB'];
-    const i = Math.floor(Math.log(bytes) / Math.log(k));
-    return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
-}
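
For context (not part of the published diff): a small usage sketch of the removed validation helpers, assuming the same exports shown above; the paths are illustrative.

import { validateAddOptions, isWithinWorkspace, formatBytes } from './services/add-validation.js';

const check = validateAddOptions({ paths: ['src'] });
if (!check.valid) {
    for (const err of check.errors) {
        console.error(`${err.code}: ${err.message}`); // e.g. "INVALID_PATH: Invalid path: src"
    }
}
console.log(isWithinWorkspace('/repo/src/index.js', '/repo')); // true
console.log(formatBytes(153600)); // "150 KB"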