@suco/su-auggie-mcp 0.1.0

This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects changes between package versions as they appear in the public registry.
@@ -0,0 +1,587 @@
+ /**
+  * FileIndexer - Centralized file indexing with file-level error handling.
+  *
+  * Single entry point for ALL file indexing operations:
+  * - Initial discovery indexing
+  * - File watcher changes
+  * - Retry (tier 1 & tier 2)
+  *
+  * Errors are handled at FILE level, not BATCH level.
+  */
+ import * as fs from 'node:fs';
+ import * as path from 'node:path';
+ import { BlobTooLargeError, APIError } from '@augmentcode/auggie-sdk';
+ import PQueue from 'p-queue';
+ import { INDEXING_CONFIG, PERSISTENCE_CONFIG, isPersistenceEnabled } from './config.js';
+ import { createLogger } from './logger.js';
+ import { sendLogToClient } from './mcp-notifications.js';
+ import { AdaptiveBatcher } from './adaptive-batcher.js';
+ const logger = createLogger('FileIndexer');
+ /**
+  * Classify an error at the FILE level.
+  * Returns: skip | retryable | permanent | fatal
+  * @internal Exported for testing
+  */
+ export function classifyFileError(err) {
+     // Skip - file cannot be indexed (not a failure)
+     if (err instanceof BlobTooLargeError)
+         return 'skip';
+     const msg = err instanceof Error ? err.message.toLowerCase() : String(err).toLowerCase();
+     if (msg.includes('too large'))
+         return 'skip';
+     if (msg.includes('enoent') || msg.includes('not found'))
+         return 'skip';
+     if (msg.includes('eacces') || msg.includes('permission denied'))
+         return 'skip';
+     if (msg.includes('eisdir') || msg.includes('is a directory'))
+         return 'skip';
+     // Fatal - stop everything
+     if (err instanceof APIError) {
+         if (err.status === 401 || err.status === 403)
+             return 'fatal';
+         // Rate limit or server errors are retryable
+         if (err.status === 429 || err.status >= 500)
+             return 'retryable';
+         // Other client errors are permanent
+         if (err.status >= 400)
+             return 'permanent';
+     }
+     // Network/timeout errors are retryable
+     if (msg.includes('fetch failed') || msg.includes('network'))
+         return 'retryable';
+     if (msg.includes('timeout') || msg.includes('econnreset'))
+         return 'retryable';
+     if (msg.includes('rate') || msg.includes('throttl'))
+         return 'retryable';
+     // Default to retryable (optimistic)
+     return 'retryable';
+ }
+ /**
+  * Check if a file is binary (non-text).
+  * Uses null byte detection in first 8KB.
+  * @internal Exported for testing
+  */
+ export function isBinaryFile(buffer) {
+     const checkLength = Math.min(buffer.length, 8192);
+     for (let i = 0; i < checkLength; i++) {
+         if (buffer[i] === 0)
+             return true;
+     }
+     return false;
+ }
+ /**
+  * Calculate exponential backoff delay with jitter.
+  * @internal Exported for testing
+  */
+ export function calculateBackoff(attempt) {
+     const base = INDEXING_CONFIG.RETRY_BASE_DELAY_MS;
+     const max = INDEXING_CONFIG.RETRY_MAX_DELAY_MS;
+     const exponential = base * Math.pow(2, attempt);
+     const jitter = Math.random() * base * 0.5;
+     return Math.min(exponential + jitter, max);
+ }
+ /**
+  * FileIndexer - centralized indexing with file-level error handling.
+  */
+ export class FileIndexer {
+     context;
+     root;
+     batcher;
+     indexQueue;
+     // Tier 1: Immediate retry queue (in-memory)
+     retryQueue = [];
+     // Tier 2: Cooldown queue (persisted)
+     cooldownQueue = [];
+     cooldownTimer = null;
+     cooldownNextRetryAt = null;
+     // Statistics
+     stats = {
+         indexed: 0,
+         skipped: 0,
+         recovered: 0,
+         failed: 0,
+     };
+     constructor(context, root) {
+         this.context = context;
+         this.root = root;
+         this.batcher = new AdaptiveBatcher();
+         this.indexQueue = new PQueue({ concurrency: this.batcher.getConcurrency() });
+         this.loadCooldownQueue();
+     }
+     /** Get current statistics */
+     getStats() {
+         const currentCycle = this.cooldownQueue.length > 0
+             ? Math.max(...this.cooldownQueue.map(e => e.cooldownCycle))
+             : 0;
+         return {
+             indexed: this.stats.indexed,
+             skipped: this.stats.skipped,
+             pendingRetry: this.retryQueue.length,
+             pendingCooldown: this.cooldownQueue.length,
+             recovered: this.stats.recovered,
+             failed: this.stats.failed,
+             cooldownCycle: currentCycle,
+             cooldownNextRetryAt: this.cooldownNextRetryAt?.toISOString() ?? null,
+         };
+     }
+     /** Get batcher metrics */
+     getBatcherMetrics() {
+         return this.batcher.getMetrics();
+     }
+     /** Get batcher for external access */
+     getBatcher() {
+         return this.batcher;
+     }
+     /**
+      * Pre-filter a file before reading contents.
+      * Returns null if file should be skipped, or file entry if valid.
+      */
+     preFilterFile(absolutePath) {
+         try {
+             const stat = fs.statSync(absolutePath);
+             // Size check
+             if (stat.size > INDEXING_CONFIG.MAX_FILE_SIZE) {
+                 logger.debug(`Skip: too large (${stat.size} bytes): ${absolutePath}`);
+                 this.stats.skipped++;
+                 return null;
+             }
+             // Empty file check
+             if (stat.size === 0) {
+                 logger.debug(`Skip: empty file: ${absolutePath}`);
+                 this.stats.skipped++;
+                 return null;
+             }
+             // Read file
+             const buffer = fs.readFileSync(absolutePath);
+             // Binary check
+             if (isBinaryFile(buffer)) {
+                 logger.debug(`Skip: binary file: ${absolutePath}`);
+                 this.stats.skipped++;
+                 return null;
+             }
+             const contents = buffer.toString('utf-8');
+             const relativePath = path.relative(this.root, absolutePath);
+             return {
+                 entry: { path: relativePath, contents },
+                 metadata: { mtimeMs: stat.mtimeMs, size: stat.size },
+             };
+         }
+         catch (err) {
+             const errorType = classifyFileError(err);
+             if (errorType === 'skip') {
+                 logger.debug(`Skip: ${err instanceof Error ? err.message : err}: ${absolutePath}`);
+                 this.stats.skipped++;
+             }
+             return null;
+         }
+     }
+     /**
+      * Index a batch of files with error isolation.
+      * On batch failure, isolates problematic files and retries good ones.
+      */
+     async indexBatchWithIsolation(files, _metadata) {
+         const succeeded = [];
+         const failed = [];
+         if (files.length === 0)
+             return { succeeded, failed };
+         const batchStart = Date.now();
+         try {
+             await this.context.addToIndex(files, { waitForIndexing: false });
+             // All succeeded
+             succeeded.push(...files.map(f => f.path));
+             this.stats.indexed += files.length;
+             const batchTime = Date.now() - batchStart;
+             this.batcher.recordBatchComplete(batchTime, files.length, files.reduce((sum, f) => sum + Buffer.byteLength(f.contents, 'utf-8'), 0), false, false);
+             this.indexQueue.concurrency = this.batcher.getConcurrency();
+         }
+         catch (err) {
+             const errorType = classifyFileError(err);
+             const errorMsg = err instanceof Error ? err.message : String(err);
+             if (errorType === 'fatal') {
+                 // Fatal error - mark all as failed and re-throw
+                 for (const f of files) {
+                     failed.push({ path: f.path, error: 'fatal', message: errorMsg });
+                 }
+                 throw err;
+             }
+             // Batch failed - isolate problematic files
+             if (files.length === 1) {
+                 // Single-file batch - this file is the problem
+                 failed.push({ path: files[0].path, error: errorType, message: errorMsg });
+             }
+             else {
+                 // Multi-file batch - try one by one to isolate
+                 logger.info(`Batch failed, isolating ${files.length} files...`);
+                 for (const file of files) {
+                     try {
+                         await this.context.addToIndex([file], { waitForIndexing: false });
+                         succeeded.push(file.path);
+                         this.stats.indexed++;
+                     }
+                     catch (fileErr) {
+                         const fileErrorType = classifyFileError(fileErr);
+                         const fileErrorMsg = fileErr instanceof Error ? fileErr.message : String(fileErr);
+                         if (fileErrorType === 'fatal') {
+                             // Fatal during isolation - record this file and abort
+                             failed.push({ path: file.path, error: 'fatal', message: fileErrorMsg });
+                             throw fileErr;
+                         }
+                         failed.push({ path: file.path, error: fileErrorType, message: fileErrorMsg });
+                     }
+                 }
+             }
+             const batchTime = Date.now() - batchStart;
+             this.batcher.recordBatchComplete(batchTime, files.length, files.reduce((sum, f) => sum + Buffer.byteLength(f.contents, 'utf-8'), 0), true, errorType === 'retryable');
+             this.indexQueue.concurrency = this.batcher.getConcurrency();
+         }
+         return { succeeded, failed };
+     }
+     /**
+      * Route failed files to appropriate tier based on error type.
+      */
+     routeFailedFiles(failures) {
+         for (const f of failures) {
+             if (f.error === 'skip') {
+                 // Already counted in preFilter, nothing to do
+                 continue;
+             }
+             if (f.error === 'permanent') {
+                 this.stats.failed++;
+                 logger.warn(`Permanent failure: ${f.path}: ${f.message}`);
+                 continue;
+             }
+             if (f.error === 'retryable') {
+                 // Check if already in retry queue
+                 const existing = this.retryQueue.find(e => e.path === f.path);
+                 if (existing) {
+                     existing.attempts++;
+                     existing.lastError = f.message;
+                     existing.nextRetryAt = new Date(Date.now() + calculateBackoff(existing.attempts));
+                     // Check if exhausted tier 1
+                     if (existing.attempts > INDEXING_CONFIG.MAX_RETRIES) {
+                         this.retryQueue = this.retryQueue.filter(e => e.path !== f.path);
+                         this.addToCooldown(f.path, f.message);
+                     }
+                 }
+                 else {
+                     // New retry entry
+                     this.retryQueue.push({
+                         path: f.path,
+                         attempts: 1,
+                         lastError: f.message,
+                         nextRetryAt: new Date(Date.now() + calculateBackoff(0)),
+                     });
+                 }
+             }
+         }
+     }
+     /**
+      * Add a file to cooldown queue (tier 2).
+      */
+     addToCooldown(filePath, lastError) {
+         const existing = this.cooldownQueue.find(e => e.path === filePath);
+         if (existing) {
+             existing.lastError = lastError;
+             existing.lastRetryAt = new Date().toISOString();
+         }
+         else {
+             this.cooldownQueue.push({
+                 path: filePath,
+                 cooldownCycle: 0,
+                 lastError,
+                 addedAt: new Date().toISOString(),
+                 lastRetryAt: null,
+             });
+         }
+         this.saveCooldownQueue();
+         this.scheduleCooldownRetry();
+     }
+     /**
+      * Process tier 1 retry queue.
+      */
+     async processRetryQueue() {
+         if (this.retryQueue.length === 0)
+             return;
+         const pending = [...this.retryQueue];
+         this.retryQueue = [];
+         logger.info(`Processing ${pending.length} files in retry queue`);
+         for (const entry of pending) {
+             // Wait until retry time
+             const now = Date.now();
+             if (entry.nextRetryAt.getTime() > now) {
+                 await new Promise(resolve => setTimeout(resolve, entry.nextRetryAt.getTime() - now));
+             }
+             // Re-read file
+             const absolutePath = path.join(this.root, entry.path);
+             const filtered = this.preFilterFile(absolutePath);
+             if (!filtered) {
+                 // File no longer valid - skip
+                 continue;
+             }
+             const { succeeded, failed } = await this.indexBatchWithIsolation([filtered.entry], { [filtered.entry.path]: filtered.metadata });
+             if (succeeded.length > 0) {
+                 this.stats.recovered++;
+                 logger.info(`Recovered: ${entry.path}`);
+             }
+             if (failed.length > 0) {
+                 // Update attempt count and re-route
+                 const updatedFailure = { ...failed[0], attempts: entry.attempts + 1 };
+                 if (updatedFailure.attempts > INDEXING_CONFIG.MAX_RETRIES) {
+                     this.addToCooldown(updatedFailure.path, updatedFailure.message);
+                     logger.info(`Moved to cooldown: ${updatedFailure.path}`);
+                 }
+                 else {
+                     this.retryQueue.push({
+                         path: updatedFailure.path,
+                         attempts: updatedFailure.attempts,
+                         lastError: updatedFailure.message,
+                         nextRetryAt: new Date(Date.now() + calculateBackoff(updatedFailure.attempts)),
+                     });
+                 }
+             }
+         }
+         // Process any remaining retries
+         if (this.retryQueue.length > 0) {
+             await this.processRetryQueue();
+         }
+     }
+     /**
+      * Schedule cooldown retry if not already scheduled.
+      */
+     scheduleCooldownRetry() {
+         if (this.cooldownQueue.length === 0 || this.cooldownTimer)
+             return;
+         const delayMs = INDEXING_CONFIG.COOLDOWN_PERIOD_MS;
+         this.cooldownNextRetryAt = new Date(Date.now() + delayMs);
+         const delayMinutes = Math.round(delayMs / 60000);
+         logger.info(`Cooldown retry scheduled in ${delayMinutes} min (${this.cooldownQueue.length} files)`);
+         sendLogToClient('info', `Cooldown retry in ${delayMinutes} min for ${this.cooldownQueue.length} files`);
+         this.cooldownTimer = setTimeout(() => {
+             this.cooldownTimer = null;
+             this.processCooldownQueue().catch(err => {
+                 logger.error('Cooldown processing failed', err);
+             });
+         }, delayMs);
+     }
+     /**
+      * Process cooldown queue (tier 2).
+      */
+     async processCooldownQueue() {
+         if (this.cooldownQueue.length === 0)
+             return;
+         const currentCycle = Math.max(...this.cooldownQueue.map(e => e.cooldownCycle)) + 1;
+         logger.info(`Cooldown cycle ${currentCycle}/${INDEXING_CONFIG.MAX_COOLDOWN_CYCLES}: ${this.cooldownQueue.length} files`);
+         sendLogToClient('info', `Cooldown retry cycle ${currentCycle}/${INDEXING_CONFIG.MAX_COOLDOWN_CYCLES}: ${this.cooldownQueue.length} files`);
+         const succeeded = [];
+         const stillFailed = [];
+         for (const entry of this.cooldownQueue) {
+             // Re-read file
+             const absolutePath = path.join(this.root, entry.path);
+             const filtered = this.preFilterFile(absolutePath);
+             if (!filtered) {
+                 // File no longer valid - remove from cooldown
+                 continue;
+             }
+             try {
+                 await this.context.addToIndex([filtered.entry], { waitForIndexing: false });
+                 succeeded.push(entry.path);
+                 this.stats.recovered++;
+             }
+             catch (err) {
+                 const errorType = classifyFileError(err);
+                 const errorMsg = err instanceof Error ? err.message : String(err);
+                 if (errorType === 'fatal') {
+                     throw err;
+                 }
+                 if (errorType === 'skip' || errorType === 'permanent') {
+                     this.stats.failed++;
+                     logger.warn(`Cooldown permanent fail: ${entry.path}: ${errorMsg}`);
+                     continue;
+                 }
+                 // Still retryable - check cycle limit
+                 if (currentCycle >= INDEXING_CONFIG.MAX_COOLDOWN_CYCLES) {
+                     this.stats.failed++;
+                     logger.warn(`Cooldown exhausted: ${entry.path}`);
+                 }
+                 else {
+                     stillFailed.push({
+                         ...entry,
+                         cooldownCycle: currentCycle,
+                         lastError: errorMsg,
+                         lastRetryAt: new Date().toISOString(),
+                     });
+                 }
+             }
+         }
+         this.cooldownQueue = stillFailed;
+         this.cooldownNextRetryAt = null;
+         this.saveCooldownQueue();
+         if (succeeded.length > 0) {
+             logger.info(`Cooldown recovered ${succeeded.length} files`);
+             sendLogToClient('info', `Cooldown recovered ${succeeded.length} files`);
+         }
+         // Schedule next cycle if needed
+         this.scheduleCooldownRetry();
+     }
+     /**
+      * Main entry point: Index a list of file paths.
+      * Handles batching, pre-filtering, error isolation, and retry routing.
+      */
+     async indexFiles(absolutePaths, onProgress) {
+         const metadata = {};
+         const validFiles = [];
+         let skippedCount = 0;
+         // Pre-filter all files
+         for (const absPath of absolutePaths) {
+             const result = this.preFilterFile(absPath);
+             if (result) {
+                 validFiles.push(result.entry);
+                 metadata[result.entry.path] = result.metadata;
+             }
+             else {
+                 skippedCount++;
+             }
+         }
+         if (validFiles.length === 0) {
+             return { metadata, indexed: 0, skipped: skippedCount };
+         }
+         // Create batches (generator -> array)
+         const batches = [...this.batcher.createBatches(validFiles)];
+         let indexedCount = 0;
+         let batchNum = 0;
+         const totalBatches = batches.length;
+         // Process batches with concurrency
+         const batchPromises = batches.map(batch => {
+             return this.indexQueue.add(async () => {
+                 batchNum++;
+                 const { succeeded, failed } = await this.indexBatchWithIsolation(batch.files, metadata);
+                 indexedCount += succeeded.length;
+                 // Route failures
+                 this.routeFailedFiles(failed);
+                 if (onProgress) {
+                     onProgress(indexedCount, validFiles.length, `Batch ${batchNum}/${totalBatches}`);
+                 }
+             });
+         });
+         await Promise.all(batchPromises);
+         // Process any immediate retries
+         if (this.retryQueue.length > 0) {
+             await this.processRetryQueue();
+         }
+         return { metadata, indexed: indexedCount, skipped: skippedCount };
+     }
+     /**
+      * Index a single file (for watcher events).
+      */
+     async indexSingleFile(absolutePath) {
+         const result = this.preFilterFile(absolutePath);
+         if (!result)
+             return false;
+         const { succeeded, failed } = await this.indexBatchWithIsolation([result.entry], { [result.entry.path]: result.metadata });
+         this.routeFailedFiles(failed);
+         return succeeded.length > 0;
+     }
+     /**
+      * Remove files from index.
+      */
+     async removeFiles(relativePaths) {
+         if (relativePaths.length === 0)
+             return;
+         try {
+             await this.context.removeFromIndex(relativePaths);
+             logger.debug(`Removed ${relativePaths.length} files from index`);
+         }
+         catch (err) {
+             logger.error('Failed to remove files from index', err);
+         }
+     }
+     /**
+      * Load cooldown queue from disk.
+      */
+     loadCooldownQueue() {
+         if (!isPersistenceEnabled())
+             return;
+         const stateDir = path.join(this.root, PERSISTENCE_CONFIG.STATE_DIR);
+         const cooldownPath = path.join(stateDir, PERSISTENCE_CONFIG.COOLDOWN_FILE);
+         try {
+             if (fs.existsSync(cooldownPath)) {
+                 const data = JSON.parse(fs.readFileSync(cooldownPath, 'utf-8'));
+                 this.cooldownQueue = data.entries;
+                 if (data.nextRetryAt) {
+                     this.cooldownNextRetryAt = new Date(data.nextRetryAt);
+                 }
+                 logger.info(`Loaded ${this.cooldownQueue.length} files from cooldown queue`);
+                 this.scheduleCooldownRetry();
+             }
+         }
+         catch (err) {
+             logger.warn('Failed to load cooldown queue', err);
+         }
+     }
+     /**
+      * Save cooldown queue to disk.
+      */
+     saveCooldownQueue() {
+         if (!isPersistenceEnabled())
+             return;
+         const stateDir = path.join(this.root, PERSISTENCE_CONFIG.STATE_DIR);
+         const cooldownPath = path.join(stateDir, PERSISTENCE_CONFIG.COOLDOWN_FILE);
+         try {
+             if (!fs.existsSync(stateDir)) {
+                 fs.mkdirSync(stateDir, { recursive: true });
+             }
+             const data = {
+                 version: 1,
+                 savedAt: new Date().toISOString(),
+                 nextRetryAt: this.cooldownNextRetryAt?.toISOString() ?? null,
+                 entries: this.cooldownQueue,
+             };
+             fs.writeFileSync(cooldownPath, JSON.stringify(data, null, 2));
+         }
+         catch (err) {
+             logger.warn('Failed to save cooldown queue', err);
+         }
+     }
+     /**
+      * Stop the indexer and clean up.
+      */
+     stop() {
+         if (this.cooldownTimer) {
+             clearTimeout(this.cooldownTimer);
+             this.cooldownTimer = null;
+         }
+         this.indexQueue.clear();
+         this.saveCooldownQueue();
+     }
+     /**
+      * Reset statistics (for reindex).
+      */
+     resetStats() {
+         this.stats = { indexed: 0, skipped: 0, recovered: 0, failed: 0 };
+         this.retryQueue = [];
+         // Don't clear cooldown queue - it persists across reindex
+     }
+     /**
+      * Close the indexer (alias for stop).
+      */
+     close() {
+         this.stop();
+     }
+     /**
+      * Trigger retry if conditions are met (no pending work, has failed files).
+      * This is a "hit and run" call - triggers async retry and returns immediately.
+      * Used by tool calls to opportunistically retry failed files.
+      */
+     triggerRetryIfNeeded() {
+         // Check conditions: no pending work and has failed files in cooldown
+         const hasPending = this.retryQueue.length > 0 || this.indexQueue.pending > 0;
+         const hasFailedInCooldown = this.cooldownQueue.length > 0;
+         if (!hasPending && hasFailedInCooldown) {
+             logger.info(`Triggering retry: ${this.cooldownQueue.length} files in cooldown`);
+             // Fire and forget - don't await
+             this.processCooldownQueue().catch(err => {
+                 logger.error(`Cooldown retry failed: ${err instanceof Error ? err.message : err}`);
+             });
+         }
+     }
+ }
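
A minimal usage sketch of the module above (not part of the published package). The mock context object, sample errors, and the backoff numbers in the comments are illustrative assumptions; the real context comes from @augmentcode/auggie-sdk, and the real delays come from INDEXING_CONFIG in ./config.js, which is not shown in this diff.

// Sketch only - exercises the @internal helpers exported for testing.
import * as path from 'node:path';
import { FileIndexer, classifyFileError, isBinaryFile, calculateBackoff } from './file-indexer.js';

classifyFileError(new Error('ENOENT: no such file or directory')); // 'skip'
classifyFileError(new Error('fetch failed'));                      // 'retryable'

isBinaryFile(Buffer.from('plain text'));       // false (no null byte)
isBinaryFile(Buffer.from([0x50, 0x4b, 0x00])); // true (null byte in first 8KB)

// Assuming RETRY_BASE_DELAY_MS = 1000: attempt 0 is ~1s, attempt 1 ~2s,
// attempt 2 ~4s, each plus up to 500ms jitter, capped at RETRY_MAX_DELAY_MS.
calculateBackoff(2);

// Hypothetical mock implementing the two methods FileIndexer calls on its context.
const context = {
  addToIndex: async (_files, _opts) => { /* upload a batch */ },
  removeFromIndex: async (_paths) => { /* drop entries */ },
};
const indexer = new FileIndexer(context, process.cwd());
const { indexed, skipped } = await indexer.indexFiles(
  [path.join(process.cwd(), 'README.md')],
  (done, total, note) => console.log(`${done}/${total} ${note}`),
);
console.log(indexer.getStats(), { indexed, skipped });
indexer.stop();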
@@ -0,0 +1,8 @@
+ /**
+  * Ignore filter using .gitignore and .augmentignore patterns
+  */
+ import { Ignore } from 'ignore';
+ /** Create an ignore filter for a workspace */
+ export declare function createIgnoreFilter(workspaceRoot: string): Ignore;
+ /** Check if a relative path should be ignored */
+ export declare function shouldIgnore(ig: Ignore, relativePath: string): boolean;
@@ -0,0 +1,88 @@
+ /**
+  * Ignore filter using .gitignore and .augmentignore patterns
+  */
+ import * as fs from 'node:fs';
+ import * as path from 'node:path';
+ import ignore from 'ignore';
+ import { createLogger } from './logger.js';
+ const logger = createLogger('IgnoreFilter');
+ /** Load ignore patterns from a file if it exists */
+ function loadIgnoreFile(filePath) {
+     try {
+         if (fs.existsSync(filePath)) {
+             const content = fs.readFileSync(filePath, 'utf-8');
+             return content
+                 .split('\n')
+                 .map((line) => line.trim())
+                 .filter((line) => line && !line.startsWith('#'));
+         }
+     }
+     catch (err) {
+         logger.warn(`Failed to read ignore file: ${filePath}`, err);
+     }
+     return [];
+ }
+ /** Recursively find all .gitignore files in workspace */
+ function findGitignoreFiles(workspaceRoot) {
+     const gitignores = [];
+     function walk(dir) {
+         try {
+             const entries = fs.readdirSync(dir, { withFileTypes: true });
+             for (const entry of entries) {
+                 const fullPath = path.join(dir, entry.name);
+                 if (entry.isDirectory()) {
+                     // Skip .git directory
+                     if (entry.name !== '.git') {
+                         walk(fullPath);
+                     }
+                 }
+                 else if (entry.name === '.gitignore') {
+                     gitignores.push(fullPath);
+                 }
+             }
+         }
+         catch {
+             // Ignore permission errors
+         }
+     }
+     walk(workspaceRoot);
+     return gitignores;
+ }
+ /** Create an ignore filter for a workspace */
+ export function createIgnoreFilter(workspaceRoot) {
+     const ig = ignore();
+     // Always ignore .git directory and our own state directory
+     ig.add('.git');
+     ig.add('.suco-auggie');
+     // Load root .gitignore
+     const rootGitignore = path.join(workspaceRoot, '.gitignore');
+     const rootPatterns = loadIgnoreFile(rootGitignore);
+     if (rootPatterns.length > 0) {
+         logger.debug(`Loaded ${rootPatterns.length} patterns from ${rootGitignore}`);
+         ig.add(rootPatterns);
+     }
+     // Load nested .gitignore files
+     const nestedGitignores = findGitignoreFiles(workspaceRoot).filter((p) => p !== rootGitignore);
+     for (const gitignorePath of nestedGitignores) {
+         const relativePath = path.relative(workspaceRoot, path.dirname(gitignorePath));
+         const patterns = loadIgnoreFile(gitignorePath);
+         // Prefix patterns with relative directory
+         const prefixedPatterns = patterns.map((p) => p.startsWith('/') ? path.join(relativePath, p.slice(1)) : path.join(relativePath, p));
+         if (prefixedPatterns.length > 0) {
+             logger.debug(`Loaded ${prefixedPatterns.length} patterns from ${gitignorePath}`);
+             ig.add(prefixedPatterns);
+         }
+     }
+     // Load .augmentignore
+     const augmentignore = path.join(workspaceRoot, '.augmentignore');
+     const augmentPatterns = loadIgnoreFile(augmentignore);
+     if (augmentPatterns.length > 0) {
+         logger.debug(`Loaded ${augmentPatterns.length} patterns from ${augmentignore}`);
+         ig.add(augmentPatterns);
+     }
+     return ig;
+ }
+ /** Check if a relative path should be ignored */
+ export function shouldIgnore(ig, relativePath) {
+     return ig.ignores(relativePath);
+ }
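
A short usage sketch for the filter above; the workspace path, file names, and the example state-file name are hypothetical:

import { createIgnoreFilter, shouldIgnore } from './ignore-filter.js';

// Hypothetical workspace; patterns are read from its .gitignore/.augmentignore files.
const ig = createIgnoreFilter('/home/user/my-project');

shouldIgnore(ig, 'node_modules/lodash/index.js'); // true if a .gitignore lists node_modules
shouldIgnore(ig, 'src/index.ts');                 // false unless some pattern matches
shouldIgnore(ig, '.suco-auggie/state.json');      // true - the state dir is always ignored (file name hypothetical)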
@@ -0,0 +1,10 @@
+ #!/usr/bin/env node
+ /**
+  * suco-auggie-mcp CLI
+  * MCP server exposing Augment Code context engine capabilities
+  */
+ /** CLI options passed to server */
+ export interface CLIOptions {
+     workspaces: string[];
+     ignoreRoots: boolean;
+ }
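
A hedged sketch of how these options might look once parsed; the flag names and module path are assumptions, since the argument parser is not included in this diff:

import type { CLIOptions } from './cli.js'; // module path assumed

// Hypothetical invocation: suco-auggie-mcp --workspace /repo/a --workspace /repo/b
const options: CLIOptions = {
  workspaces: ['/repo/a', '/repo/b'],
  ignoreRoots: false, // semantics defined by the server, not shown in this diff
};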