@coderule/mcp 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
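
The hunk below adds the bundled mcp-cli.js entry point for the Coderule scanner MCP server. Going by the usage text embedded in the bundle (see printUsage near the end), a typical invocation would look like this, with the token and path as placeholders:

  coderule-mcp-server --token <token> --root /path/to/project

The token may also come from the CODERULE_TOKEN environment variable. The server runs an initial inventory and snapshot sync, then serves the check and query MCP tools over stdio.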
@@ -0,0 +1,1905 @@
1
+ import fs4 from 'fs/promises';
2
+ import path from 'path';
3
+ import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
4
+ import { createHash } from 'crypto';
5
+ import envPaths from 'env-paths';
6
+ import pino from 'pino';
7
+ import Database from 'better-sqlite3';
8
+ import { Qulite, enqueueFsEvent, JobStatus } from '@coderule/qulite';
9
+ import { CoderuleClients, ASTHttpClient, SyncHttpClient } from '@coderule/clients';
10
+ import fs2 from 'fs';
11
+ import { Worker } from 'worker_threads';
12
+ import chokidar from 'chokidar';
13
+ import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
14
+ import { z } from 'zod';
15
+
16
+ // src/mcp-cli.ts
17
+ var level = process.env.CODERULE_LOG_LEVEL ?? "info";
18
+ var baseLogger = pino(
19
+ {
20
+ level,
21
+ name: "coderule-scanner",
22
+ timestamp: pino.stdTimeFunctions.isoTime
23
+ },
24
+ process.stderr
25
+ );
26
+ var logger = baseLogger;
27
+
28
+ // src/config/Defaults.ts
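+ // Time values are milliseconds: 1e3 = 1 s snapshot debounce, 6e4 = 60 s
+ // heartbeat interval, 5e3 = 5 s heartbeat check, 3e4 = 30 s HTTP timeout.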
29
+ var DEFAULT_SNAPSHOT_DEBOUNCE_MS = 1e3;
30
+ var DEFAULT_HEARTBEAT_INTERVAL_MS = 6e4;
31
+ var DEFAULT_HEARTBEAT_CHECK_INTERVAL_MS = 5e3;
32
+ var DEFAULT_QUEUE_POLL_INTERVAL_MS = 500;
33
+ var DEFAULT_HASH_BATCH_SIZE = 32;
34
+ var DEFAULT_MAX_SNAPSHOT_ATTEMPTS = 5;
35
+ var DEFAULT_HTTP_TIMEOUT_MS = 3e4;
36
+
37
+ // src/config/Configurator.ts
38
+ var DEFAULT_RETRIEVAL_FORMATTER = "standard";
39
+ var DEFAULTS = {
40
+ snapshotDebounceMs: DEFAULT_SNAPSHOT_DEBOUNCE_MS,
41
+ heartbeatIntervalMs: DEFAULT_HEARTBEAT_INTERVAL_MS,
42
+ heartbeatCheckIntervalMs: DEFAULT_HEARTBEAT_CHECK_INTERVAL_MS,
43
+ queuePollIntervalMs: DEFAULT_QUEUE_POLL_INTERVAL_MS,
44
+ hashBatchSize: DEFAULT_HASH_BATCH_SIZE,
45
+ maxSnapshotAttempts: DEFAULT_MAX_SNAPSHOT_ATTEMPTS
46
+ };
47
+ function normalizeRoot(root) {
48
+ const resolved = path.resolve(root);
49
+ const normalized = path.normalize(resolved);
50
+ return normalized.split(path.sep).join("/");
51
+ }
52
+ function sha256(input) {
53
+ return createHash("sha256").update(input).digest("hex");
54
+ }
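+ // rootId is the sha256 of the POSIX-normalized absolute root path; it names
+ // the per-root SQLite database at <dataDir>/watch/<rootId>.sqlite (see
+ // resolveConfig below).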
55
+ function parseInteger(value, fallback) {
56
+ if (!value) return fallback;
57
+ const parsed = Number.parseInt(value, 10);
58
+ if (Number.isNaN(parsed) || parsed <= 0) {
59
+ throw new Error(`Invalid integer value: ${value}`);
60
+ }
61
+ return parsed;
62
+ }
63
+ function parseFormatter(value) {
64
+ if (!value) return DEFAULT_RETRIEVAL_FORMATTER;
65
+ const normalized = value.toLowerCase();
66
+ if (normalized === "standard" || normalized === "compact") {
67
+ return normalized;
68
+ }
69
+ throw new Error(
70
+ `Invalid CODERULE_RETRIEVAL_FORMATTER: ${value}. Expected "standard" or "compact"`
71
+ );
72
+ }
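+ // resolveConfig (below) merges the DEFAULTS above with CODERULE_*
+ // environment overrides; numeric overrides go through parseInteger, which
+ // rejects non-numeric and non-positive values.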
73
+ async function resolveConfig({
74
+ token
75
+ }) {
76
+ const resolvedToken = token ?? process.env.CODERULE_TOKEN;
77
+ if (!resolvedToken) {
78
+ throw new Error(
79
+ "Missing token: provide params.token or CODERULE_TOKEN env"
80
+ );
81
+ }
82
+ const rootCandidate = process.env.CODERULE_ROOT || process.cwd();
83
+ const rootPath = path.resolve(rootCandidate);
84
+ const normalized = normalizeRoot(rootPath);
85
+ const rootId = sha256(normalized);
86
+ const dataDir = process.env.CODERULE_DATA_DIR || envPaths("coderule").data;
87
+ const watchDir = path.join(dataDir, "watch");
88
+ await fs4.mkdir(watchDir, { recursive: true });
89
+ const dbPath = path.join(watchDir, `${rootId}.sqlite`);
90
+ const baseConfig = {
91
+ token: resolvedToken,
92
+ rootPath,
93
+ rootId,
94
+ dbPath,
95
+ dataDir,
96
+ authBaseUrl: process.env.CODERULE_AUTH_URL,
97
+ astBaseUrl: process.env.CODERULE_AST_URL,
98
+ syncBaseUrl: process.env.CODERULE_SYNC_URL,
99
+ retrievalBaseUrl: process.env.CODERULE_RETRIEVAL_URL,
100
+ httpTimeout: void 0,
101
+ snapshotDebounceMs: DEFAULTS.snapshotDebounceMs,
102
+ heartbeatIntervalMs: DEFAULTS.heartbeatIntervalMs,
103
+ heartbeatCheckIntervalMs: DEFAULTS.heartbeatCheckIntervalMs,
104
+ queuePollIntervalMs: DEFAULTS.queuePollIntervalMs,
105
+ hashBatchSize: DEFAULTS.hashBatchSize,
106
+ maxSnapshotAttempts: DEFAULTS.maxSnapshotAttempts,
107
+ retrievalFormatter: parseFormatter(
108
+ process.env.CODERULE_RETRIEVAL_FORMATTER
109
+ )
110
+ };
111
+ if (process.env.CODERULE_SNAPSHOT_DEBOUNCE_MS) {
112
+ baseConfig.snapshotDebounceMs = parseInteger(
113
+ process.env.CODERULE_SNAPSHOT_DEBOUNCE_MS,
114
+ baseConfig.snapshotDebounceMs
115
+ );
116
+ }
117
+ if (process.env.CODERULE_HEARTBEAT_INTERVAL_MS) {
118
+ baseConfig.heartbeatIntervalMs = parseInteger(
119
+ process.env.CODERULE_HEARTBEAT_INTERVAL_MS,
120
+ baseConfig.heartbeatIntervalMs
121
+ );
122
+ }
123
+ if (process.env.CODERULE_HEARTBEAT_CHECK_INTERVAL_MS) {
124
+ baseConfig.heartbeatCheckIntervalMs = parseInteger(
125
+ process.env.CODERULE_HEARTBEAT_CHECK_INTERVAL_MS,
126
+ baseConfig.heartbeatCheckIntervalMs
127
+ );
128
+ }
129
+ if (process.env.CODERULE_QUEUE_POLL_INTERVAL_MS) {
130
+ baseConfig.queuePollIntervalMs = parseInteger(
131
+ process.env.CODERULE_QUEUE_POLL_INTERVAL_MS,
132
+ baseConfig.queuePollIntervalMs
133
+ );
134
+ }
135
+ if (process.env.CODERULE_HASH_BATCH_SIZE) {
136
+ baseConfig.hashBatchSize = parseInteger(
137
+ process.env.CODERULE_HASH_BATCH_SIZE,
138
+ baseConfig.hashBatchSize
139
+ );
140
+ }
141
+ if (process.env.CODERULE_MAX_SNAPSHOT_ATTEMPTS) {
142
+ baseConfig.maxSnapshotAttempts = parseInteger(
143
+ process.env.CODERULE_MAX_SNAPSHOT_ATTEMPTS,
144
+ baseConfig.maxSnapshotAttempts
145
+ );
146
+ }
147
+ baseConfig.httpTimeout = parseInteger(
148
+ process.env.CODERULE_HTTP_TIMEOUT,
149
+ DEFAULT_HTTP_TIMEOUT_MS
150
+ );
151
+ logger.debug(
152
+ {
153
+ rootPath,
154
+ dbPath,
155
+ dataDir,
156
+ authBaseUrl: baseConfig.authBaseUrl,
157
+ astBaseUrl: baseConfig.astBaseUrl,
158
+ syncBaseUrl: baseConfig.syncBaseUrl
159
+ },
160
+ "Resolved configuration"
161
+ );
162
+ return baseConfig;
163
+ }
164
+
165
+ // src/db/Schema.ts
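+ // Each file row carries a hash_state lifecycle: 'dirty' (needs hashing) ->
+ // 'hashing' (claimed under a lease) -> 'clean' (both digests stored), with
+ // 'missing' for deleted paths. The hash_owner / hash_lease_expires_at /
+ // hash_started_at columns let claims from a crashed worker expire and be
+ // requeued.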
166
+ var FILES_SCHEMA = `
167
+ CREATE TABLE IF NOT EXISTS files (
168
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
169
+ rel_path TEXT NOT NULL,
170
+ display_path TEXT NOT NULL,
171
+ size INTEGER NOT NULL,
172
+ mtime_ns INTEGER NOT NULL,
173
+ mode INTEGER,
174
+ ino TEXT,
175
+ dev TEXT,
176
+ is_symlink INTEGER NOT NULL DEFAULT 0,
177
+ target TEXT,
178
+ content_sha256 TEXT,
179
+ service_file_hash TEXT,
180
+ last_seen_ts INTEGER NOT NULL,
181
+ hash_state TEXT NOT NULL,
182
+ hash_owner TEXT,
183
+ hash_lease_expires_at INTEGER,
184
+ hash_started_at INTEGER,
185
+ UNIQUE(rel_path)
186
+ );
187
+ CREATE INDEX IF NOT EXISTS idx_files_hash_state ON files(hash_state);
188
+ CREATE INDEX IF NOT EXISTS idx_files_content_sha ON files(content_sha256);
189
+ CREATE INDEX IF NOT EXISTS idx_files_service_hash ON files(service_file_hash);
190
+ `;
191
+ var SNAPSHOTS_SCHEMA = `
192
+ CREATE TABLE IF NOT EXISTS snapshots (
193
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
194
+ snapshot_hash TEXT NOT NULL,
195
+ files_count INTEGER NOT NULL,
196
+ total_size INTEGER NOT NULL,
197
+ created_at INTEGER NOT NULL
198
+ );
199
+ `;
200
+
201
+ // src/db/Database.ts
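+ // safeAlter makes the ALTER TABLE migrations idempotent by swallowing
+ // "duplicate column name" errors; WAL mode with synchronous = NORMAL favors
+ // write throughput, and busy_timeout retries locked statements for up to 5 s.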
202
+ function safeAlter(db, sql) {
203
+ try {
204
+ db.exec(sql);
205
+ } catch (error) {
206
+ if (typeof error?.message === "string" && error.message.includes("duplicate column name")) {
207
+ return;
208
+ }
209
+ throw error;
210
+ }
211
+ }
212
+ function applyMigrations(db, logger2) {
213
+ const alterations = [
214
+ "ALTER TABLE files ADD COLUMN hash_owner TEXT",
215
+ "ALTER TABLE files ADD COLUMN hash_lease_expires_at INTEGER",
216
+ "ALTER TABLE files ADD COLUMN hash_started_at INTEGER"
217
+ ];
218
+ for (const sql of alterations) {
219
+ try {
220
+ safeAlter(db, sql);
221
+ } catch (error) {
222
+ logger2.error({ err: error, sql }, "Database migration failed");
223
+ throw error;
224
+ }
225
+ }
226
+ db.exec(
227
+ "CREATE INDEX IF NOT EXISTS idx_files_hash_lease ON files(hash_state, hash_lease_expires_at)"
228
+ );
229
+ }
230
+ function openDatabase(dbPath, logger2) {
231
+ const db = new Database(dbPath, { verbose: void 0 });
232
+ logger2.info({ dbPath }, "Opened SQLite database");
233
+ db.pragma("journal_mode = WAL");
234
+ db.pragma("synchronous = NORMAL");
235
+ db.pragma("busy_timeout = 5000");
236
+ db.pragma("foreign_keys = ON");
237
+ db.exec("BEGIN");
238
+ try {
239
+ db.exec(FILES_SCHEMA);
240
+ db.exec(SNAPSHOTS_SCHEMA);
241
+ db.exec("COMMIT");
242
+ } catch (error) {
243
+ db.exec("ROLLBACK");
244
+ db.close();
245
+ throw error;
246
+ }
247
+ applyMigrations(db, logger2);
248
+ return db;
249
+ }
250
+
251
+ // src/db/FilesRepo.ts
252
+ var FilesRepo = class {
253
+ constructor(db) {
254
+ this.db = db;
255
+ this.selectByRelPath = this.db.prepare(
256
+ "SELECT * FROM files WHERE rel_path = ?"
257
+ );
258
+ this.insertStmt = this.db.prepare(
259
+ `INSERT INTO files (
260
+ rel_path,
261
+ display_path,
262
+ size,
263
+ mtime_ns,
264
+ mode,
265
+ ino,
266
+ dev,
267
+ is_symlink,
268
+ target,
269
+ content_sha256,
270
+ service_file_hash,
271
+ last_seen_ts,
272
+ hash_state,
273
+ hash_owner,
274
+ hash_lease_expires_at,
275
+ hash_started_at
276
+ ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, NULL, NULL, ?, ?, NULL, NULL, NULL)`
277
+ );
278
+ this.updateStmt = this.db.prepare(
279
+ `UPDATE files SET
280
+ display_path = ?,
281
+ size = ?,
282
+ mtime_ns = ?,
283
+ mode = ?,
284
+ ino = ?,
285
+ dev = ?,
286
+ is_symlink = ?,
287
+ target = ?,
288
+ content_sha256 = ?,
289
+ service_file_hash = ?,
290
+ last_seen_ts = ?,
291
+ hash_state = ?,
292
+ hash_owner = CASE WHEN ? = 'hashing' THEN hash_owner ELSE NULL END,
293
+ hash_lease_expires_at = CASE WHEN ? = 'hashing' THEN hash_lease_expires_at ELSE NULL END,
294
+ hash_started_at = CASE WHEN ? = 'hashing' THEN hash_started_at ELSE NULL END
295
+ WHERE id = ?`
296
+ );
297
+ this.markMissingStmt = this.db.prepare(
298
+ `UPDATE files
299
+ SET hash_state = 'missing', content_sha256 = NULL, service_file_hash = NULL, last_seen_ts = ?,
300
+ hash_owner = NULL, hash_lease_expires_at = NULL, hash_started_at = NULL
301
+ WHERE rel_path = ?`
302
+ );
303
+ this.markMissingPrefixStmt = this.db.prepare(
304
+ `UPDATE files
305
+ SET hash_state = 'missing', content_sha256 = NULL, service_file_hash = NULL,
306
+ hash_owner = NULL, hash_lease_expires_at = NULL, hash_started_at = NULL
307
+ WHERE (rel_path = ? OR rel_path LIKE (? || '/%')) AND hash_state != 'missing'`
308
+ );
309
+ this.markDirtyStmt = this.db.prepare(
310
+ `UPDATE files
311
+ SET hash_state = 'dirty', last_seen_ts = ?,
312
+ hash_owner = NULL, hash_lease_expires_at = NULL, hash_started_at = NULL
313
+ WHERE rel_path = ?`
314
+ );
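+ // claimDirtyStmt atomically flips up to @limit of the oldest 'dirty' rows
+ // to 'hashing', stamping the claiming owner and a lease expiry, and hands
+ // back the claimed rows via RETURNING.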
315
+ this.claimDirtyStmt = this.db.prepare(
316
+ `WITH candidates AS (
317
+ SELECT id
318
+ FROM files
319
+ WHERE hash_state = 'dirty'
320
+ ORDER BY last_seen_ts ASC, id ASC
321
+ LIMIT @limit
322
+ )
323
+ UPDATE files
324
+ SET hash_state = 'hashing',
325
+ hash_owner = @owner,
326
+ hash_lease_expires_at = @lease_expires_at,
327
+ hash_started_at = @now
328
+ WHERE id IN candidates
329
+ RETURNING *`
330
+ );
331
+ this.markDirtyByIdStmt = this.db.prepare(
332
+ `UPDATE files
333
+ SET hash_state = 'dirty', hash_owner = NULL, hash_lease_expires_at = NULL, hash_started_at = NULL
334
+ WHERE id = ?`
335
+ );
336
+ this.applyHashesStmt = this.db.prepare(
337
+ `UPDATE files
338
+ SET content_sha256 = ?, service_file_hash = ?, hash_state = 'clean',
339
+ hash_owner = NULL, hash_lease_expires_at = NULL, hash_started_at = NULL
340
+ WHERE id = ?`
341
+ );
342
+ this.requeueExpiredHashingStmt = this.db.prepare(
343
+ `UPDATE files
344
+ SET hash_state = 'dirty', hash_owner = NULL, hash_lease_expires_at = NULL, hash_started_at = NULL
345
+ WHERE hash_state = 'hashing'
346
+ AND hash_lease_expires_at IS NOT NULL
347
+ AND hash_lease_expires_at <= ?`
348
+ );
349
+ this.resetHashingStmt = this.db.prepare(
350
+ `UPDATE files
351
+ SET hash_state = 'dirty', hash_owner = NULL, hash_lease_expires_at = NULL, hash_started_at = NULL
352
+ WHERE hash_state = 'hashing'`
353
+ );
354
+ this.selectCleanSnapshotStmt = this.db.prepare(
355
+ `SELECT rel_path, service_file_hash, size
356
+ FROM files
357
+ WHERE hash_state = 'clean' AND service_file_hash IS NOT NULL
358
+ ORDER BY rel_path ASC`
359
+ );
360
+ this.totalsStmt = this.db.prepare(
361
+ `SELECT COUNT(*) AS files_count, COALESCE(SUM(size), 0) AS total_size
362
+ FROM files
363
+ WHERE hash_state = 'clean' AND service_file_hash IS NOT NULL`
364
+ );
365
+ this.markMissingBeforeStmt = this.db.prepare(
366
+ `UPDATE files
367
+ SET hash_state = 'missing', content_sha256 = NULL, service_file_hash = NULL,
368
+ hash_owner = NULL, hash_lease_expires_at = NULL, hash_started_at = NULL
369
+ WHERE last_seen_ts < ? AND hash_state != 'missing'`
370
+ );
371
+ this.countByStateStmt = this.db.prepare(
372
+ "SELECT COUNT(*) as count FROM files WHERE hash_state = ?"
373
+ );
374
+ }
375
+ getByRelPath(relPath) {
376
+ return this.selectByRelPath.get(relPath);
377
+ }
378
+ upsertFromStat(params) {
379
+ const now = Date.now();
380
+ const { relPath, displayPath, stats, isSymlink, symlinkTarget } = params;
381
+ const existing = this.getByRelPath(relPath);
382
+ const mtimeNs = Math.trunc(stats.mtimeMs * 1e6);
383
+ const ino = typeof stats.ino === "number" ? String(stats.ino) : null;
384
+ const dev = typeof stats.dev === "number" ? String(stats.dev) : null;
385
+ const mode = typeof stats.mode === "number" ? stats.mode : null;
386
+ const isSymlinkInt = isSymlink ? 1 : 0;
387
+ if (!existing) {
388
+ this.insertStmt.run(
389
+ relPath,
390
+ displayPath,
391
+ stats.size,
392
+ mtimeNs,
393
+ mode,
394
+ ino,
395
+ dev,
396
+ isSymlinkInt,
397
+ symlinkTarget,
398
+ now,
399
+ "dirty"
400
+ );
401
+ return "dirty";
402
+ }
403
+ let nextState = existing.hash_state;
404
+ let nextContent = existing.content_sha256;
405
+ let nextServiceHash = existing.service_file_hash;
406
+ const changed = existing.size !== stats.size || existing.mtime_ns !== mtimeNs || existing.mode !== mode || existing.ino !== ino || existing.dev !== dev || existing.is_symlink !== isSymlinkInt || existing.target !== symlinkTarget;
407
+ if (changed || existing.hash_state === "missing") {
408
+ nextState = "dirty";
409
+ nextContent = null;
410
+ nextServiceHash = null;
411
+ }
412
+ this.updateStmt.run(
413
+ displayPath,
414
+ stats.size,
415
+ mtimeNs,
416
+ mode,
417
+ ino,
418
+ dev,
419
+ isSymlinkInt,
420
+ symlinkTarget,
421
+ nextContent,
422
+ nextServiceHash,
423
+ now,
424
+ nextState,
425
+ nextState,
426
+ nextState,
427
+ nextState,
428
+ existing.id
429
+ );
430
+ return nextState;
431
+ }
432
+ markMissing(relPath) {
433
+ const now = Date.now();
434
+ const result = this.markMissingStmt.run(now, relPath);
435
+ return result.changes ?? 0;
436
+ }
437
+ markMissingByPrefix(prefix) {
438
+ const result = this.markMissingPrefixStmt.run(prefix, prefix);
439
+ return result.changes ?? 0;
440
+ }
441
+ markDirty(relPath) {
442
+ const now = Date.now();
443
+ this.markDirtyStmt.run(now, relPath);
444
+ }
445
+ markMissingBefore(timestamp) {
446
+ const result = this.markMissingBeforeStmt.run(timestamp);
447
+ return result.changes ?? 0;
448
+ }
449
+ claimDirty(limit, owner, leaseMs) {
450
+ if (limit <= 0) {
451
+ return [];
452
+ }
453
+ const now = Date.now();
454
+ return this.claimDirtyStmt.all({
455
+ limit,
456
+ owner,
457
+ lease_expires_at: now + leaseMs,
458
+ now
459
+ });
460
+ }
461
+ markDirtyByIds(ids) {
462
+ if (!ids.length) return;
463
+ const tx = this.db.transaction((batch) => {
464
+ for (const id of batch) {
465
+ this.markDirtyByIdStmt.run(id);
466
+ }
467
+ });
468
+ tx(ids);
469
+ }
470
+ applyHashResults(results) {
471
+ if (!results.length) return;
472
+ const tx = this.db.transaction((batch) => {
473
+ for (const { id, contentSha256, serviceFileHash } of batch) {
474
+ this.applyHashesStmt.run(contentSha256, serviceFileHash, id);
475
+ }
476
+ });
477
+ tx(results);
478
+ }
479
+ getCleanFilesForSnapshot() {
480
+ return this.selectCleanSnapshotStmt.all();
481
+ }
482
+ getTotalsForSnapshot() {
483
+ const row = this.totalsStmt.get();
484
+ return {
485
+ filesCount: row?.files_count ?? 0,
486
+ totalSize: row?.total_size ?? 0
487
+ };
488
+ }
489
+ countByState(state) {
490
+ const row = this.countByStateStmt.get(state);
491
+ return row?.count ?? 0;
492
+ }
493
+ requeueExpiredHashing(now) {
494
+ const result = this.requeueExpiredHashingStmt.run(now);
495
+ return result.changes ?? 0;
496
+ }
497
+ resetHashingStates() {
498
+ const result = this.resetHashingStmt.run();
499
+ return result.changes ?? 0;
500
+ }
501
+ };
502
+
503
+ // src/db/SnapshotsRepo.ts
504
+ var SnapshotsRepo = class {
505
+ constructor(db) {
506
+ this.db = db;
507
+ this.insertStmt = this.db.prepare(
508
+ `INSERT INTO snapshots (snapshot_hash, files_count, total_size, created_at)
509
+ VALUES (?, ?, ?, ?)`
510
+ );
511
+ this.selectLatestStmt = this.db.prepare(
512
+ `SELECT * FROM snapshots ORDER BY created_at DESC LIMIT 1`
513
+ );
514
+ }
515
+ insert(snapshotHash, filesCount, totalSize, createdAt) {
516
+ this.insertStmt.run(snapshotHash, filesCount, totalSize, createdAt);
517
+ }
518
+ getLatest() {
519
+ return this.selectLatestStmt.get();
520
+ }
521
+ };
522
+ var Outbox = class {
523
+ constructor(db, logger2) {
524
+ this.log = logger2.child({ scope: "outbox" });
525
+ this.queue = new Qulite(db, {
526
+ logger: this.log,
527
+ defaultLeaseMs: 3e4,
528
+ defaultMaxAttempts: 10
529
+ });
530
+ this.markKindStmt = db.prepare(
531
+ `UPDATE qulite_jobs SET kind = @kind WHERE dedupe_key = @dedupe_key`
532
+ );
533
+ this.purgeLegacyStmt = db.prepare(
534
+ `DELETE FROM qulite_jobs WHERE type = 'fs_control' AND (kind IS NULL OR kind = '')`
535
+ );
536
+ const purged = this.purgeLegacyStmt.run().changes ?? 0;
537
+ if (purged > 0) {
538
+ this.log.warn({ purged }, "Purged legacy fs_control jobs without kind");
539
+ }
540
+ }
541
+ getQueue() {
542
+ return this.queue;
543
+ }
544
+ markKind(dedupeKey, kind) {
545
+ this.markKindStmt.run({ dedupe_key: dedupeKey, kind });
546
+ }
547
+ enqueueSnapshot(rootId, delayMs = 0) {
548
+ const result = enqueueFsEvent(this.queue, {
549
+ root_id: rootId,
550
+ rel_path: "",
551
+ kind: "snapshot",
552
+ delayMs,
553
+ maxAttempts: 20,
554
+ data: { root_id: rootId, kind: "snapshot" }
555
+ });
556
+ this.markKind(`snapshot:${rootId}`, "snapshot");
557
+ if (result.changes > 0) {
558
+ this.log.debug({ rootId }, "Enqueued snapshot job");
559
+ }
560
+ }
561
+ enqueueHeartbeat(rootId, delayMs = 0) {
562
+ const result = enqueueFsEvent(this.queue, {
563
+ root_id: rootId,
564
+ rel_path: "",
565
+ kind: "heartbeat",
566
+ delayMs,
567
+ maxAttempts: 5,
568
+ data: { root_id: rootId, kind: "heartbeat" }
569
+ });
570
+ this.markKind(`heartbeat:${rootId}`, "heartbeat");
571
+ if (result.changes > 0) {
572
+ this.log.debug({ rootId }, "Enqueued heartbeat job");
573
+ }
574
+ }
575
+ claimFsControlJob(leaseOwner, leaseMs = 3e4) {
576
+ return this.queue.claimNext({ type: "fs_control", leaseOwner, leaseMs });
577
+ }
578
+ ack(jobId, leaseOwner) {
579
+ return this.queue.ack(jobId, leaseOwner);
580
+ }
581
+ retry(jobId, leaseOwner, delayMs) {
582
+ return this.queue.retry(jobId, leaseOwner, delayMs);
583
+ }
584
+ fail(jobId, leaseOwner, error) {
585
+ return this.queue.fail(jobId, leaseOwner, error);
586
+ }
587
+ requeueTimedOut() {
588
+ return this.queue.requeueTimedOut();
589
+ }
590
+ };
591
+ function serviceConfig(baseUrl, timeout) {
592
+ if (baseUrl === void 0 && timeout === void 0) {
593
+ return void 0;
594
+ }
595
+ const config = {};
596
+ if (baseUrl !== void 0) {
597
+ config.baseUrl = baseUrl;
598
+ }
599
+ if (timeout !== void 0) {
600
+ config.timeout = timeout;
601
+ }
602
+ return config;
603
+ }
604
+ function createClients(config, logger2) {
605
+ const clientLogger = logger2.child({ scope: "clients" });
606
+ const httpTimeout = config.httpTimeout;
607
+ const clients = new CoderuleClients({
608
+ token: config.token,
609
+ auth: serviceConfig(config.authBaseUrl, httpTimeout),
610
+ ast: serviceConfig(config.astBaseUrl, httpTimeout),
611
+ sync: serviceConfig(config.syncBaseUrl, httpTimeout),
612
+ retrieval: serviceConfig(config.retrievalBaseUrl, httpTimeout),
613
+ jwtFactory: {
614
+ onTokenRefreshed: (info) => {
615
+ clientLogger.debug(
616
+ {
617
+ expiresAt: new Date(info.expiresAt).toISOString(),
618
+ serverUrl: info.serverUrl
619
+ },
620
+ "JWT refreshed"
621
+ );
622
+ }
623
+ }
624
+ });
625
+ return clients;
626
+ }
627
+
628
+ // src/rules/RulesFetcher.ts
629
+ async function fetchVisitorRules(clients, logger2) {
630
+ const fetchLogger = logger2.child({ scope: "rules" });
631
+ fetchLogger.info("Fetching visitor rules v2 from AST service");
632
+ const rules = await clients.ast.getVisitorRulesV2();
633
+ fetchLogger.info(
634
+ {
635
+ include_extensions: rules.include_extensions.length,
636
+ include_filenames: rules.include_filenames.length,
637
+ exclude_dirnames: rules.exclude_dirnames.length
638
+ },
639
+ "Fetched visitor rules"
640
+ );
641
+ return rules;
642
+ }
643
+ function toPosix(input) {
644
+ return input.split(path.sep).join("/");
645
+ }
646
+ function getLowerBasename(input) {
647
+ const base = input.split("/").pop();
648
+ return (base ?? "").toLowerCase();
649
+ }
650
+ function getLowerExt(basename) {
651
+ const idx = basename.lastIndexOf(".");
652
+ if (idx < 0) return "";
653
+ return basename.slice(idx).toLowerCase();
654
+ }
655
+ function compileRulesBundle(rules) {
656
+ const compiled = ASTHttpClient.compileRulesV2(rules);
657
+ const basePredicate = ASTHttpClient.buildIgnoredPredicate(compiled);
658
+ const predicate = (fullPath, stats) => {
659
+ let info = stats;
660
+ if (!info) {
661
+ logger.debug({ path: fullPath }, "Predicate fallback lstat");
662
+ try {
663
+ info = fs2.lstatSync(fullPath);
664
+ } catch (error) {
665
+ logger.warn(
666
+ { err: error, path: fullPath },
667
+ "Failed to lstat path for rules predicate"
668
+ );
669
+ return false;
670
+ }
671
+ }
672
+ return basePredicate(fullPath, info);
673
+ };
674
+ return {
675
+ rules,
676
+ compiled,
677
+ predicate
678
+ };
679
+ }
680
+ function shouldIncludeFile(relPath, stats, bundle) {
681
+ if (stats.isDirectory()) return false;
682
+ const posixRel = toPosix(relPath);
683
+ if (bundle.compiled.dirRe.test(posixRel)) {
684
+ return false;
685
+ }
686
+ const basename = getLowerBasename(posixRel);
687
+ if (bundle.compiled.names.has(basename)) {
688
+ return true;
689
+ }
690
+ const ext = getLowerExt(basename);
691
+ return bundle.compiled.exts.has(ext);
692
+ }
693
+ function shouldPruneDirectory(relPath, bundle) {
694
+ const posixRel = toPosix(relPath);
695
+ return bundle.compiled.dirRe.test(posixRel);
696
+ }
697
+ function buildWatcherIgnored(bundle) {
698
+ return (fullPath, stats) => bundle.predicate(fullPath, stats);
699
+ }
700
+ var HashWorker = class {
701
+ constructor(logger2) {
702
+ this.pending = /* @__PURE__ */ new Map();
703
+ this.nextTaskId = 1;
704
+ this.terminating = false;
705
+ this.log = logger2.child({ scope: "hash-worker" });
706
+ const workerUrl = new URL("./hash/WorkerThread.js", import.meta.url);
707
+ const execArgv = process.execArgv.filter(
708
+ (arg) => !arg.startsWith("--input-type")
709
+ );
710
+ const workerOptions = {
711
+ name: "coderule-hasher",
712
+ execArgv
713
+ };
714
+ if (workerUrl.pathname.endsWith(".js")) {
715
+ workerOptions.type = "module";
716
+ }
717
+ this.worker = new Worker(workerUrl, workerOptions);
718
+ this.worker.on(
719
+ "message",
720
+ (message) => this.onMessage(message)
721
+ );
722
+ this.worker.on("error", (error) => this.handleWorkerError(error));
723
+ this.worker.on("exit", (code) => {
724
+ if (code !== 0 && !this.terminating) {
725
+ this.handleWorkerError(
726
+ new Error(`Hasher worker exited with code ${code}`)
727
+ );
728
+ }
729
+ });
730
+ }
731
+ async terminate() {
732
+ this.terminating = true;
733
+ for (const [, pending] of this.pending) {
734
+ pending.reject(new Error("Hasher worker terminated"));
735
+ }
736
+ this.pending.clear();
737
+ await this.worker.terminate();
738
+ }
739
+ onMessage(message) {
740
+ const pending = this.pending.get(message.taskId);
741
+ if (!pending) {
742
+ this.log.warn(
743
+ { taskId: message.taskId },
744
+ "Received message for unknown task"
745
+ );
746
+ return;
747
+ }
748
+ this.pending.delete(message.taskId);
749
+ if (message.type === "hash-result") {
750
+ pending.resolve({
751
+ contentSha256: message.contentSha256,
752
+ serviceFileHash: message.serviceFileHash
753
+ });
754
+ } else {
755
+ const error = new Error(message.error);
756
+ error.code = message.code;
757
+ pending.reject(error);
758
+ }
759
+ }
760
+ handleWorkerError(error) {
761
+ this.log.error({ err: error }, "Hasher worker error");
762
+ for (const [, pending] of this.pending) {
763
+ pending.reject(error);
764
+ }
765
+ this.pending.clear();
766
+ }
767
+ compute(absPath, relPath) {
768
+ const taskId = this.nextTaskId++;
769
+ const payload = {
770
+ type: "hash",
771
+ taskId,
772
+ absPath,
773
+ relPath
774
+ };
775
+ return new Promise((resolve, reject) => {
776
+ this.pending.set(taskId, { resolve, reject });
777
+ this.worker.postMessage(payload);
778
+ });
779
+ }
780
+ };
781
+ var Hasher = class {
782
+ constructor(options) {
783
+ this.options = options;
784
+ this.worker = null;
785
+ this.log = options.logger.child({ scope: "hasher" });
786
+ this.inlineMode = process.env.CODERULE_HASHER_INLINE === "1";
787
+ this.ownerId = `hasher-${process.pid}-${Date.now()}`;
788
+ const leaseFromEnv = process.env.CODERULE_HASH_LEASE_MS ? Number.parseInt(process.env.CODERULE_HASH_LEASE_MS, 10) : Number.NaN;
789
+ this.leaseDurationMs = Number.isFinite(leaseFromEnv) && leaseFromEnv > 0 ? leaseFromEnv : 3e4;
790
+ if (!this.inlineMode) {
791
+ try {
792
+ this.worker = new HashWorker(this.log);
793
+ } catch (error) {
794
+ this.log.warn(
795
+ { err: error },
796
+ "Failed to start hasher worker, falling back to inline hashing"
797
+ );
798
+ this.worker = null;
799
+ this.inlineMode = true;
800
+ }
801
+ }
802
+ }
803
+ async close() {
804
+ if (this.worker) {
805
+ await this.worker.terminate();
806
+ }
807
+ }
808
+ resolveAbsolutePath(record) {
809
+ if (path.isAbsolute(record.display_path)) {
810
+ return record.display_path;
811
+ }
812
+ return path.join(this.options.rootPath, record.rel_path);
813
+ }
814
+ async ensureExists(absPath, record) {
815
+ try {
816
+ await fs4.access(absPath);
817
+ return true;
818
+ } catch (error) {
819
+ this.log.warn(
820
+ { err: error, relPath: record.rel_path },
821
+ "File missing before hashing"
822
+ );
823
+ this.options.filesRepo.markMissing(record.rel_path);
824
+ return false;
825
+ }
826
+ }
827
+ async computeHash(absPath, relPath) {
828
+ if (this.inlineMode || !this.worker) {
829
+ return this.hashInline(absPath, relPath);
830
+ }
831
+ return this.worker.compute(absPath, relPath);
832
+ }
833
+ async hashInline(absPath, relPath) {
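+ // Streams the file once through two sha256 digests: contentSha256 over the
+ // raw bytes, and serviceFileHash over relPath + "\n" + the bytes, so the
+ // service hash changes when a file is moved as well as when it is edited.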
834
+ return new Promise((resolve, reject) => {
835
+ const content = createHash("sha256");
836
+ const service = createHash("sha256");
837
+ service.update(relPath);
838
+ service.update("\n");
839
+ const stream = fs2.createReadStream(absPath);
840
+ stream.on("data", (chunk) => {
841
+ content.update(chunk);
842
+ service.update(chunk);
843
+ });
844
+ stream.on("error", (error) => {
845
+ reject(error);
846
+ });
847
+ stream.on("end", () => {
848
+ resolve({
849
+ contentSha256: content.digest("hex"),
850
+ serviceFileHash: service.digest("hex")
851
+ });
852
+ });
853
+ });
854
+ }
855
+ async processBatch(limit) {
856
+ const now = Date.now();
857
+ const requeued = this.options.filesRepo.requeueExpiredHashing(now);
858
+ if (requeued > 0) {
859
+ this.log.debug({ requeued }, "Requeued expired hashing leases");
860
+ }
861
+ const dirty = this.options.filesRepo.claimDirty(
862
+ limit,
863
+ this.ownerId,
864
+ this.leaseDurationMs
865
+ );
866
+ if (dirty.length > 0) {
867
+ this.log.debug({ count: dirty.length }, "Hashing claimed files");
868
+ }
869
+ if (dirty.length === 0) {
870
+ return false;
871
+ }
872
+ const successes = [];
873
+ const failures = [];
874
+ for (const record of dirty) {
875
+ const absPath = this.resolveAbsolutePath(record);
876
+ const exists = await this.ensureExists(absPath, record);
877
+ if (!exists) {
878
+ continue;
879
+ }
880
+ try {
881
+ const result = await this.computeHash(absPath, record.rel_path);
882
+ successes.push({
883
+ id: record.id,
884
+ contentSha256: result.contentSha256,
885
+ serviceFileHash: result.serviceFileHash
886
+ });
887
+ } catch (error) {
888
+ if (error?.code === "ENOENT") {
889
+ this.log.debug(
890
+ { relPath: record.rel_path },
891
+ "File disappeared during hashing"
892
+ );
893
+ this.options.filesRepo.markMissing(record.rel_path);
894
+ } else {
895
+ this.log.warn(
896
+ { err: error, relPath: record.rel_path },
897
+ "Failed to hash file"
898
+ );
899
+ failures.push(record.id);
900
+ }
901
+ }
902
+ }
903
+ if (successes.length) {
904
+ this.log.debug({ count: successes.length }, "Hashing succeeded");
905
+ this.options.filesRepo.applyHashResults(successes);
906
+ }
907
+ if (failures.length) {
908
+ this.log.warn({ count: failures.length }, "Hashing failed for files");
909
+ this.options.filesRepo.markDirtyByIds(failures);
910
+ }
911
+ return true;
912
+ }
913
+ };
914
+
915
+ // src/service/Bootstrap.ts
916
+ function createServiceLogger() {
917
+ return logger.child({ scope: "service" });
918
+ }
919
+ async function bootstrap(params) {
920
+ const config = await resolveConfig(params);
921
+ const logger2 = createServiceLogger();
922
+ const db = openDatabase(config.dbPath, logger2.child({ scope: "db" }));
923
+ const filesRepo = new FilesRepo(db);
924
+ const recovered = filesRepo.resetHashingStates();
925
+ if (recovered > 0) {
926
+ logger2.info({ recovered }, "Recovered lingering hashing leases");
927
+ }
928
+ const snapshotsRepo = new SnapshotsRepo(db);
929
+ const outbox = new Outbox(db, logger2);
930
+ const clients = createClients(config, logger2);
931
+ const rules = await fetchVisitorRules(clients, logger2);
932
+ const compiled = compileRulesBundle(rules);
933
+ const hasher = new Hasher({ rootPath: config.rootPath, filesRepo, logger: logger2 });
934
+ const runtime = {
935
+ config,
936
+ logger: logger2,
937
+ db,
938
+ outbox,
939
+ clients,
940
+ rules: compiled,
941
+ filesRepo,
942
+ snapshotsRepo,
943
+ hasher
944
+ };
945
+ return runtime;
946
+ }
947
+ function toPosixRelative(root, target) {
948
+ const rel = path.relative(root, target);
949
+ if (!rel || rel === "") return "";
950
+ return rel.split(path.sep).join("/");
951
+ }
952
+ function isInsideRoot(root, target) {
953
+ const rel = path.relative(root, target);
954
+ return rel === "" || !rel.startsWith("..") && !path.isAbsolute(rel);
955
+ }
956
+
957
+ // src/fs/Walker.ts
958
+ var EMPTY_STATS = {
959
+ processed: 0,
960
+ skipped: 0,
961
+ dirtied: 0,
962
+ missing: 0
963
+ };
964
+ function cloneStats(stats) {
965
+ return { ...stats };
966
+ }
967
+ async function readSymlinkTarget(absPath, log) {
968
+ try {
969
+ return await fs4.readlink(absPath);
970
+ } catch (error) {
971
+ log.warn({ err: error, path: absPath }, "Failed to read symlink target");
972
+ return null;
973
+ }
974
+ }
975
+ async function walkDirectory(current, opts, stats) {
976
+ const dirLogger = opts.logger;
977
+ let dirents;
978
+ try {
979
+ dirents = await fs4.readdir(current, { withFileTypes: true });
980
+ } catch (error) {
981
+ dirLogger.warn({ err: error, path: current }, "Failed to read directory");
982
+ return;
983
+ }
984
+ for (const dirent of dirents) {
985
+ const absPath = path.join(current, dirent.name);
986
+ const relPath = toPosixRelative(opts.rootPath, absPath);
987
+ if (dirent.isDirectory()) {
988
+ if (shouldPruneDirectory(relPath, opts.bundle)) {
989
+ stats.skipped += 1;
990
+ continue;
991
+ }
992
+ await walkDirectory(absPath, opts, stats);
993
+ continue;
994
+ }
995
+ if (dirent.isSymbolicLink() || dirent.isFile()) {
996
+ let stat;
997
+ try {
998
+ stat = await fs4.lstat(absPath);
999
+ } catch (error) {
1000
+ dirLogger.warn({ err: error, path: absPath }, "Failed to stat file");
1001
+ continue;
1002
+ }
1003
+ stats.processed += 1;
1004
+ if (!shouldIncludeFile(relPath, stat, opts.bundle)) {
1005
+ stats.skipped += 1;
1006
+ continue;
1007
+ }
1008
+ const target = dirent.isSymbolicLink() ? await readSymlinkTarget(absPath, dirLogger) : null;
1009
+ const state = opts.filesRepo.upsertFromStat({
1010
+ relPath,
1011
+ displayPath: absPath,
1012
+ stats: stat,
1013
+ isSymlink: dirent.isSymbolicLink(),
1014
+ symlinkTarget: target
1015
+ });
1016
+ if (state === "dirty") {
1017
+ stats.dirtied += 1;
1018
+ }
1019
+ continue;
1020
+ }
1021
+ stats.skipped += 1;
1022
+ }
1023
+ }
1024
+ async function runInventory(opts) {
1025
+ const stats = cloneStats(EMPTY_STATS);
1026
+ const startedAt = Date.now();
1027
+ await walkDirectory(opts.rootPath, opts, stats);
1028
+ const missing = opts.filesRepo.markMissingBefore(startedAt);
1029
+ stats.missing = missing;
1030
+ opts.logger.info({ ...stats }, "Completed initial inventory");
1031
+ return stats;
1032
+ }
1033
+ async function sleep(ms) {
1034
+ return new Promise((resolve) => setTimeout(resolve, ms));
1035
+ }
1036
+ function computeSnapshot(filesRepo) {
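+ // The snapshot hash is derived from the sorted list of per-file service
+ // hashes (clean files only), so it is independent of enumeration order.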
1037
+ const files = filesRepo.getCleanFilesForSnapshot();
1038
+ const hashes = files.map((file) => file.service_file_hash).filter((hash) => typeof hash === "string");
1039
+ const snapshotHash = SyncHttpClient.calculateSnapshotHash([...hashes].sort());
1040
+ const { filesCount, totalSize } = filesRepo.getTotalsForSnapshot();
1041
+ return {
1042
+ snapshotHash,
1043
+ files: files.map((file) => ({
1044
+ file_path: file.rel_path,
1045
+ file_hash: file.service_file_hash
1046
+ })),
1047
+ filesCount,
1048
+ totalSize
1049
+ };
1050
+ }
1051
+ async function uploadMissing(rootPath, missing, syncClient, logger2) {
1052
+ if (!missing || missing.length === 0) return;
1053
+ const map = /* @__PURE__ */ new Map();
1054
+ for (const missingFile of missing) {
1055
+ const absPath = path.join(rootPath, missingFile.file_path);
1056
+ try {
1057
+ const buffer = await fs4.readFile(absPath);
1058
+ map.set(missingFile.file_hash, {
1059
+ path: missingFile.file_path,
1060
+ content: buffer
1061
+ });
1062
+ } catch (error) {
1063
+ logger2.warn(
1064
+ { err: error, relPath: missingFile.file_path },
1065
+ "Failed to read missing file content"
1066
+ );
1067
+ }
1068
+ }
1069
+ if (map.size === 0) return;
1070
+ await syncClient.uploadFileContent(map);
1071
+ }
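+ // Snapshot creation protocol: check status first; on NOT_FOUND or
+ // MISSING_CONTENT create the snapshot, upload whatever content the service
+ // reports missing, re-create, then poll with exponential backoff (capped at
+ // 5 s) until READY; a FAILED status aborts with an error.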
1072
+ async function ensureSnapshotCreated(rootPath, computation, syncClient, logger2) {
1073
+ const { snapshotHash, files } = computation;
1074
+ let status = await syncClient.checkSnapshotStatus(snapshotHash);
1075
+ if (status.status === "READY") {
1076
+ logger2.info({ snapshotHash }, "Snapshot already READY");
1077
+ return;
1078
+ }
1079
+ if (status.status === "NOT_FOUND" || status.status === "MISSING_CONTENT") {
1080
+ status = await syncClient.createSnapshot(snapshotHash, files);
1081
+ }
1082
+ if (status.status === "MISSING_CONTENT" && status.missing_files?.length) {
1083
+ logger2.info(
1084
+ { missing: status.missing_files.length },
1085
+ "Uploading missing file content"
1086
+ );
1087
+ await uploadMissing(rootPath, status.missing_files, syncClient, logger2);
1088
+ status = await syncClient.createSnapshot(snapshotHash, files);
1089
+ }
1090
+ let attempt = 0;
1091
+ while (status.status !== "READY") {
1092
+ if (status.status === "FAILED") {
1093
+ throw new Error(`Snapshot failed processing: ${JSON.stringify(status)}`);
1094
+ }
1095
+ const delay = Math.min(5e3, 1e3 * Math.max(1, 2 ** attempt));
1096
+ await sleep(delay);
1097
+ attempt += 1;
1098
+ status = await syncClient.checkSnapshotStatus(snapshotHash);
1099
+ }
1100
+ logger2.info({ snapshotHash }, "Snapshot READY");
1101
+ }
1102
+ async function publishSnapshot(rootPath, filesRepo, snapshotsRepo, syncClient, logger2) {
1103
+ const computation = computeSnapshot(filesRepo);
1104
+ await ensureSnapshotCreated(rootPath, computation, syncClient, logger2);
1105
+ const createdAt = Date.now();
1106
+ snapshotsRepo.insert(
1107
+ computation.snapshotHash,
1108
+ computation.filesCount,
1109
+ computation.totalSize,
1110
+ createdAt
1111
+ );
1112
+ return {
1113
+ snapshotHash: computation.snapshotHash,
1114
+ filesCount: computation.filesCount,
1115
+ totalSize: computation.totalSize,
1116
+ status: "READY",
1117
+ createdAt
1118
+ };
1119
+ }
1120
+
1121
+ // src/service/InitialSync.ts
1122
+ async function runInitialSyncPipeline(runtime) {
1123
+ const inventoryLogger = runtime.logger.child({ scope: "inventory" });
1124
+ await runInventory({
1125
+ rootPath: runtime.config.rootPath,
1126
+ bundle: runtime.rules,
1127
+ filesRepo: runtime.filesRepo,
1128
+ logger: inventoryLogger
1129
+ });
1130
+ const hashLogger = runtime.logger.child({ scope: "hash" });
1131
+ let hadWork = true;
1132
+ while (hadWork) {
1133
+ hadWork = await runtime.hasher.processBatch(runtime.config.hashBatchSize);
1134
+ if (hadWork) {
1135
+ hashLogger.debug("Hasher processed batch");
1136
+ }
1137
+ }
1138
+ const syncLogger = runtime.logger.child({ scope: "snapshot" });
1139
+ const result = await publishSnapshot(
1140
+ runtime.config.rootPath,
1141
+ runtime.filesRepo,
1142
+ runtime.snapshotsRepo,
1143
+ runtime.clients.sync,
1144
+ syncLogger
1145
+ );
1146
+ return result;
1147
+ }
1148
+ async function createChokidarWatcher(options, usePolling) {
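+ // awaitWriteFinish holds events until a file's size has been stable for
+ // 1.5 s, and atomic: true coalesces the rename dance editors use for atomic
+ // saves; startWatcher (below) retries with polling if the native watcher
+ // fails.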
1149
+ const log = options.logger.child({
1150
+ scope: "watcher",
1151
+ mode: usePolling ? "polling" : "native"
1152
+ });
1153
+ const watcher = chokidar.watch(options.rootPath, {
1154
+ ignored: options.ignored,
1155
+ ignoreInitial: true,
1156
+ persistent: true,
1157
+ awaitWriteFinish: {
1158
+ stabilityThreshold: 1500,
1159
+ pollInterval: 100
1160
+ },
1161
+ atomic: true,
1162
+ usePolling,
1163
+ interval: usePolling ? 200 : void 0,
1164
+ binaryInterval: usePolling ? 200 : void 0,
1165
+ alwaysStat: true,
1166
+ cwd: void 0,
1167
+ depth: void 0
1168
+ });
1169
+ await new Promise((resolve, reject) => {
1170
+ const onReady = () => {
1171
+ watcher.off("error", onError);
1172
+ log.info("Watcher ready");
1173
+ resolve();
1174
+ };
1175
+ const onError = (err) => {
1176
+ watcher.off("ready", onReady);
1177
+ reject(err);
1178
+ };
1179
+ watcher.once("ready", onReady);
1180
+ watcher.once("error", onError);
1181
+ });
1182
+ return { watcher, mode: usePolling ? "polling" : "native" };
1183
+ }
1184
+ function attachHandlers(watcher, options) {
1185
+ const { handlers, logger: logger2 } = options;
1186
+ const handle = (event, filePath, stats) => {
1187
+ logger2.debug({ event, filePath }, "Watcher raw event");
1188
+ Promise.resolve(handlers.onEvent(event, filePath, stats)).catch((error) => {
1189
+ logger2.error(
1190
+ { err: error, event, path: filePath },
1191
+ "Watcher handler failed"
1192
+ );
1193
+ });
1194
+ };
1195
+ watcher.on("add", (filePath, stats) => handle("add", filePath, stats));
1196
+ watcher.on("change", (filePath, stats) => handle("change", filePath, stats));
1197
+ watcher.on("unlink", (filePath) => handle("unlink", filePath));
1198
+ watcher.on("addDir", (dirPath) => handle("addDir", dirPath));
1199
+ watcher.on("unlinkDir", (dirPath) => handle("unlinkDir", dirPath));
1200
+ watcher.on("error", (error) => {
1201
+ logger2.error({ err: error }, "Watcher error");
1202
+ });
1203
+ if (handlers.onReady) {
1204
+ handlers.onReady();
1205
+ logger2.debug("Watcher ready callback executed");
1206
+ }
1207
+ }
1208
+ async function startWatcher(options) {
1209
+ try {
1210
+ const result = await createChokidarWatcher(options, false);
1211
+ attachHandlers(result.watcher, options);
1212
+ return result;
1213
+ } catch (error) {
1214
+ options.logger.warn(
1215
+ { err: error },
1216
+ "Native watcher failed, falling back to polling"
1217
+ );
1218
+ const result = await createChokidarWatcher(options, true);
1219
+ attachHandlers(result.watcher, options);
1220
+ return result;
1221
+ }
1222
+ }
1223
+
1224
+ // src/sync/HeartbeatProtocol.ts
1225
+ async function sendHeartbeat(rootId, snapshotsRepo, syncClient, logger2) {
1226
+ const latest = snapshotsRepo.getLatest();
1227
+ if (latest) {
1228
+ await syncClient.checkSnapshotStatus(latest.snapshot_hash);
1229
+ }
1230
+ logger2.debug({ rootId }, "Heartbeat sent");
1231
+ }
1232
+
1233
+ // src/service/State.ts
1234
+ var ServiceState = class {
1235
+ constructor() {
1236
+ this.lastChangeAt = Date.now();
1237
+ this.lastSnapshotReadyAt = Date.now();
1238
+ this.lastHeartbeatEnqueuedAt = 0;
1239
+ }
1240
+ updateChange(timestamp = Date.now()) {
1241
+ this.lastChangeAt = timestamp;
1242
+ }
1243
+ updateSnapshotReady(timestamp = Date.now()) {
1244
+ this.lastSnapshotReadyAt = timestamp;
1245
+ }
1246
+ updateHeartbeat(timestamp = Date.now()) {
1247
+ this.lastHeartbeatEnqueuedAt = timestamp;
1248
+ }
1249
+ };
1250
+
1251
+ // src/service/ServiceLoops.ts
1252
+ function safeParse(input) {
1253
+ try {
1254
+ return JSON.parse(input);
1255
+ } catch {
1256
+ return void 0;
1257
+ }
1258
+ }
1259
+ async function sleep2(ms) {
1260
+ return new Promise((resolve) => setTimeout(resolve, ms));
1261
+ }
1262
+ function computeBackoff(attempts) {
1263
+ const base = attempts ?? 0;
1264
+ const delay = 1e3 * 2 ** base;
1265
+ return Math.min(delay, 6e4);
1266
+ }
1267
+ async function readSymlinkTarget2(absPath) {
1268
+ try {
1269
+ return await fs4.readlink(absPath);
1270
+ } catch {
1271
+ return null;
1272
+ }
1273
+ }
1274
+ var SnapshotScheduler = class {
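+ // Trailing-edge debounce: every trigger() resets the timer, so a snapshot
+ // job is enqueued only after debounceMs with no further file events.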
1275
+ constructor(rootId, debounceMs, outbox, state) {
1276
+ this.rootId = rootId;
1277
+ this.debounceMs = debounceMs;
1278
+ this.outbox = outbox;
1279
+ this.state = state;
1280
+ this.timer = null;
1281
+ }
1282
+ trigger() {
1283
+ this.state.updateChange();
1284
+ if (this.timer) {
1285
+ clearTimeout(this.timer);
1286
+ }
1287
+ this.timer = setTimeout(() => {
1288
+ this.outbox.enqueueSnapshot(this.rootId);
1289
+ this.timer = null;
1290
+ }, this.debounceMs);
1291
+ }
1292
+ cancel() {
1293
+ if (this.timer) {
1294
+ clearTimeout(this.timer);
1295
+ this.timer = null;
1296
+ }
1297
+ }
1298
+ };
1299
+ var ServiceRunner = class {
1300
+ constructor(runtime) {
1301
+ this.runtime = runtime;
1302
+ this.state = new ServiceState();
1303
+ this.watcher = null;
1304
+ this.running = false;
1305
+ this.fsControlLeaseOwner = `fs-control-${process.pid}-${Date.now()}`;
1306
+ this.tasks = /* @__PURE__ */ new Set();
1307
+ this.buffering = false;
1308
+ this.bufferedEvents = [];
1309
+ this.scheduler = new SnapshotScheduler(
1310
+ runtime.config.rootId,
1311
+ runtime.config.snapshotDebounceMs,
1312
+ runtime.outbox,
1313
+ this.state
1314
+ );
1315
+ this.ignoredPredicate = buildWatcherIgnored(runtime.rules);
1316
+ }
1317
+ recordInitialSnapshot(timestamp) {
1318
+ this.state.updateSnapshotReady(timestamp);
1319
+ this.state.updateChange(timestamp);
1320
+ this.state.updateHeartbeat(timestamp);
1321
+ }
1322
+ async start() {
1323
+ if (this.running) return;
1324
+ await this.prepareWatcher(false);
1325
+ await this.startLoops();
1326
+ }
1327
+ async stop() {
1328
+ if (!this.running) return;
1329
+ this.running = false;
1330
+ this.scheduler.cancel();
1331
+ if (this.watcher) {
1332
+ await this.watcher.close();
1333
+ this.watcher = null;
1334
+ }
1335
+ await Promise.all([...this.tasks]);
1336
+ await this.runtime.hasher.close();
1337
+ this.runtime.clients.close();
1338
+ this.runtime.db.close();
1339
+ }
1340
+ getServiceStateSnapshot() {
1341
+ return {
1342
+ lastChangeAt: this.state.lastChangeAt,
1343
+ lastSnapshotReadyAt: this.state.lastSnapshotReadyAt,
1344
+ lastHeartbeatEnqueuedAt: this.state.lastHeartbeatEnqueuedAt,
1345
+ watcherReady: this.watcher !== null,
1346
+ buffering: this.buffering
1347
+ };
1348
+ }
1349
+ runBackground(fn) {
1350
+ const task = fn();
1351
+ this.tasks.add(task);
1352
+ task.catch((error) => {
1353
+ if (this.running) {
1354
+ this.runtime.logger.error({ err: error }, "Background task failed");
1355
+ }
1356
+ }).finally(() => {
1357
+ this.tasks.delete(task);
1358
+ });
1359
+ }
1360
+ async prepareWatcher(bufferOnly) {
1361
+ const { rootPath } = this.runtime.config;
1362
+ const logger2 = this.runtime.logger.child({ scope: "watcher" });
1363
+ this.buffering = bufferOnly;
1364
+ const { watcher, mode } = await startWatcher({
1365
+ rootPath,
1366
+ ignored: this.ignoredPredicate,
1367
+ logger: logger2,
1368
+ handlers: {
1369
+ onEvent: (event, absPath, stats) => {
1370
+ logger2.debug({ event, absPath }, "Watcher event received");
1371
+ if (this.buffering) {
1372
+ this.bufferedEvents.push({ event, absPath, stats });
1373
+ return Promise.resolve();
1374
+ }
1375
+ return this.handleEvent(event, absPath, stats);
1376
+ }
1377
+ }
1378
+ });
1379
+ this.watcher = watcher;
1380
+ logger2.debug({ watched: watcher.getWatched() }, "Watcher targets");
1381
+ logger2.info({ mode, buffering: bufferOnly }, "File watcher started");
1382
+ }
1383
+ async enableWatcherProcessing() {
1384
+ if (!this.buffering) {
1385
+ return;
1386
+ }
1387
+ this.buffering = false;
1388
+ this.runtime.logger.debug(
1389
+ { buffered: this.bufferedEvents.length },
1390
+ "Watcher buffering disabled"
1391
+ );
1392
+ if (this.bufferedEvents.length === 0) return;
1393
+ for (const buffered of this.bufferedEvents) {
1394
+ await this.handleEvent(buffered.event, buffered.absPath, buffered.stats);
1395
+ }
1396
+ this.bufferedEvents = [];
1397
+ }
1398
+ async startLoops() {
1399
+ if (this.running) return;
1400
+ this.running = true;
1401
+ this.runBackground(() => this.hashLoop());
1402
+ this.runBackground(() => this.fsControlLoop());
1403
+ this.runBackground(() => this.heartbeatLoop());
1404
+ this.runBackground(() => this.requeueLoop());
1405
+ this.runtime.logger.debug("Background loops started");
1406
+ }
1407
+ async handleEvent(event, absPath, stats) {
1408
+ if (!this.running) return;
1409
+ const root = this.runtime.config.rootPath;
1410
+ const absolute = path.isAbsolute(absPath) ? absPath : path.join(root, absPath);
1411
+ if (!isInsideRoot(root, absolute)) {
1412
+ return;
1413
+ }
1414
+ switch (event) {
1415
+ case "add":
1416
+ case "change":
1417
+ await this.handleAddChange(absolute, stats);
1418
+ break;
1419
+ case "unlink":
1420
+ await this.handleUnlink(absolute);
1421
+ break;
1422
+ case "unlinkDir":
1423
+ await this.handleUnlinkDir(absolute);
1424
+ break;
1425
+ }
1426
+ }
1427
+ async handleAddChange(absPath, _stats) {
1428
+ let fileStats;
1429
+ try {
1430
+ fileStats = await fs4.lstat(absPath);
1431
+ } catch (error) {
1432
+ this.runtime.logger.warn(
1433
+ { err: error, path: absPath },
1434
+ "Failed to lstat path"
1435
+ );
1436
+ return;
1437
+ }
1438
+ const relPath = toPosixRelative(this.runtime.config.rootPath, absPath);
1439
+ if (!shouldIncludeFile(relPath, fileStats, this.runtime.rules)) {
1440
+ this.runtime.logger.debug({ relPath }, "Watcher event ignored by rules");
1441
+ return;
1442
+ }
1443
+ const isSymlink = fileStats.isSymbolicLink();
1444
+ const target = isSymlink ? await readSymlinkTarget2(absPath) : null;
1445
+ const state = this.runtime.filesRepo.upsertFromStat({
1446
+ relPath,
1447
+ displayPath: absPath,
1448
+ stats: fileStats,
1449
+ isSymlink,
1450
+ symlinkTarget: target
1451
+ });
1452
+ if (state === "dirty") {
1453
+ this.scheduler.trigger();
1454
+ }
1455
+ }
1456
+ async handleUnlink(absPath) {
1457
+ const relPath = toPosixRelative(this.runtime.config.rootPath, absPath);
1458
+ const changed = this.runtime.filesRepo.markMissing(relPath);
1459
+ if (changed > 0) {
1460
+ this.scheduler.trigger();
1461
+ }
1462
+ }
1463
+ async handleUnlinkDir(absPath) {
1464
+ const relPath = toPosixRelative(this.runtime.config.rootPath, absPath);
1465
+ const changed = this.runtime.filesRepo.markMissingByPrefix(relPath);
1466
+ if (changed > 0) {
1467
+ this.scheduler.trigger();
1468
+ }
1469
+ }
1470
+ async hashLoop() {
1471
+ while (this.running) {
1472
+ const processed = await this.runtime.hasher.processBatch(
1473
+ this.runtime.config.hashBatchSize
1474
+ );
1475
+ if (!processed) {
1476
+ await sleep2(500);
1477
+ }
1478
+ }
1479
+ }
1480
+ async fsControlLoop() {
1481
+ const log = this.runtime.logger.child({ scope: "fs-control-worker" });
1482
+ while (this.running) {
1483
+ const job = this.runtime.outbox.claimFsControlJob(
1484
+ this.fsControlLeaseOwner
1485
+ );
1486
+ if (!job) {
1487
+ await sleep2(this.runtime.config.queuePollIntervalMs);
1488
+ continue;
1489
+ }
1490
+ const payload = job.data ? safeParse(job.data) ?? {} : {};
1491
+ const jobKind = job.kind ?? (typeof payload.kind === "string" ? payload.kind : void 0);
1492
+ if (!jobKind) {
1493
+ log.warn(
1494
+ { jobId: job.id },
1495
+ "fs_control job missing kind, acknowledging"
1496
+ );
1497
+ this.runtime.outbox.ack(job.id, this.fsControlLeaseOwner);
1498
+ continue;
1499
+ }
1500
+ if (jobKind === "snapshot") {
1501
+ await this.handleSnapshotJob(job, log);
1502
+ } else if (jobKind === "heartbeat") {
1503
+ await this.handleHeartbeatJob(job, log);
1504
+ } else {
1505
+ log.warn({ jobId: job.id, kind: jobKind }, "Unknown fs_control job");
1506
+ this.runtime.outbox.ack(job.id, this.fsControlLeaseOwner);
1507
+ }
1508
+ }
1509
+ }
1510
+ async heartbeatLoop() {
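+ // Enqueues a heartbeat only when the tree has been quiet AND no heartbeat
+ // has been enqueued for heartbeatIntervalMs, re-checking every
+ // heartbeatCheckIntervalMs.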
1511
+ const log = this.runtime.logger.child({ scope: "heartbeat-loop" });
1512
+ while (this.running) {
1513
+ const now = Date.now();
1514
+ const sinceChange = now - this.state.lastChangeAt;
1515
+ const sinceHeartbeat = now - this.state.lastHeartbeatEnqueuedAt;
1516
+ if (sinceChange >= this.runtime.config.heartbeatIntervalMs && sinceHeartbeat >= this.runtime.config.heartbeatIntervalMs) {
1517
+ this.runtime.outbox.enqueueHeartbeat(this.runtime.config.rootId);
1518
+ this.state.updateHeartbeat(now);
1519
+ log.debug("Heartbeat enqueued");
1520
+ }
1521
+ await sleep2(this.runtime.config.heartbeatCheckIntervalMs);
1522
+ }
1523
+ log.info("Heartbeat loop stopped");
1524
+ }
1525
+ async requeueLoop() {
1526
+ while (this.running) {
1527
+ const count = this.runtime.outbox.requeueTimedOut();
1528
+ if (count > 0) {
1529
+ this.runtime.logger.info({ count }, "Requeued timed-out jobs");
1530
+ }
1531
+ await sleep2(this.runtime.config.heartbeatCheckIntervalMs);
1532
+ }
1533
+ }
1534
+ async handleSnapshotJob(job, log) {
1535
+ if (this.runtime.filesRepo.countByState("dirty") > 0 || this.runtime.filesRepo.countByState("hashing") > 0) {
1536
+ const delay = computeBackoff(job.attempts);
1537
+ this.runtime.outbox.retry(job.id, this.fsControlLeaseOwner, delay);
1538
+ await sleep2(200);
1539
+ return;
1540
+ }
1541
+ try {
1542
+ const result = await publishSnapshot(
1543
+ this.runtime.config.rootPath,
1544
+ this.runtime.filesRepo,
1545
+ this.runtime.snapshotsRepo,
1546
+ this.runtime.clients.sync,
1547
+ log
1548
+ );
1549
+ this.runtime.outbox.ack(job.id, this.fsControlLeaseOwner);
1550
+ this.state.updateSnapshotReady(result.createdAt);
1551
+ log.info({ snapshotHash: result.snapshotHash }, "Snapshot job completed");
1552
+ } catch (error) {
1553
+ log.warn({ err: error }, "Snapshot job failed");
1554
+ const delay = computeBackoff(job.attempts);
1555
+ this.runtime.outbox.retry(job.id, this.fsControlLeaseOwner, delay);
1556
+ await sleep2(delay);
1557
+ }
1558
+ }
1559
+ async handleHeartbeatJob(job, log) {
1560
+ try {
1561
+ await sendHeartbeat(
1562
+ this.runtime.config.rootId,
1563
+ this.runtime.snapshotsRepo,
1564
+ this.runtime.clients.sync,
1565
+ log
1566
+ );
1567
+ this.runtime.outbox.ack(job.id, this.fsControlLeaseOwner);
1568
+ this.state.updateHeartbeat(Date.now());
1569
+ } catch (error) {
1570
+ const delay = computeBackoff(job.attempts);
1571
+ this.runtime.outbox.retry(job.id, this.fsControlLeaseOwner, delay);
1572
+ log.warn({ err: error }, "Heartbeat failed; retry scheduled");
1573
+ }
1574
+ }
1575
+ };
1576
+ var HASH_STATES = ["clean", "dirty", "hashing", "missing"];
1577
+ function collectIndexingStatus(runtime, runner) {
1578
+ const byState = {};
1579
+ for (const state of HASH_STATES) {
1580
+ byState[state] = runtime.filesRepo.countByState(state);
1581
+ }
1582
+ const total = HASH_STATES.reduce((acc, state) => acc + byState[state], 0);
1583
+ const queue = runtime.outbox.getQueue();
1584
+ const queueCounts = {
1585
+ pending: queue.countByStatus(JobStatus.Pending),
1586
+ processing: queue.countByStatus(JobStatus.Processing),
1587
+ done: queue.countByStatus(JobStatus.Done),
1588
+ failed: queue.countByStatus(JobStatus.Failed)
1589
+ };
1590
+ return {
1591
+ timestamp: Date.now(),
1592
+ root: {
1593
+ id: runtime.config.rootId,
1594
+ path: runtime.config.rootPath
1595
+ },
1596
+ files: {
1597
+ total,
1598
+ byState
1599
+ },
1600
+ latestSnapshot: runtime.snapshotsRepo.getLatest() ?? null,
1601
+ queue: queueCounts,
1602
+ service: runner.getServiceStateSnapshot()
1603
+ };
1604
+ }
1605
+
1606
+ // src/mcp/server.ts
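+ // Registers two MCP tools: "check" returns indexing, snapshot, and queue
+ // status as JSON; "query" runs a retrieval query against the latest snapshot
+ // with a token budget defaulting to 3000 and floored at 100.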
1607
+ var SERVER_NAME = "coderule-scanner-mcp";
1608
+ var SERVER_VERSION = process.env.npm_package_version ?? "0.0.0";
1609
+ function createMcpServer({
1610
+ runtime,
1611
+ runner
1612
+ }) {
1613
+ const server = new McpServer({
1614
+ name: SERVER_NAME,
1615
+ version: SERVER_VERSION,
1616
+ description: "Coderule file indexing MCP server"
1617
+ });
1618
+ server.registerTool(
1619
+ "check",
1620
+ {
1621
+ title: "Indexer status",
1622
+ description: "Inspect the current indexing state, snapshot, and queue metrics",
1623
+ inputSchema: {}
1624
+ },
1625
+ async () => {
1626
+ const status = collectIndexingStatus(runtime, runner);
1627
+ const text = JSON.stringify(status, null, 2);
1628
+ return {
1629
+ content: [{ type: "text", text }]
1630
+ };
1631
+ }
1632
+ );
1633
+ const queryInputSchema = {
1634
+ query: z.string().min(1, "Query text is required"),
1635
+ budgetTokens: z.number().int().positive().optional().describe("Token budget for retrieval (default 3000)")
1636
+ };
1637
+ server.registerTool(
1638
+ "query",
1639
+ {
1640
+ title: "Snapshot retrieval query",
1641
+ description: "Execute a retrieval query against the most recent indexed snapshot",
1642
+ inputSchema: queryInputSchema
1643
+ },
1644
+ async ({
1645
+ query,
1646
+ budgetTokens
1647
+ }) => {
1648
+ const latest = runtime.snapshotsRepo.getLatest();
1649
+ if (!latest) {
1650
+ const message = "No snapshots available yet. Run indexing first.";
1651
+ return {
1652
+ content: [{ type: "text", text: message }],
1653
+ isError: true
1654
+ };
1655
+ }
1656
+ const effectiveBudget = Math.max(100, budgetTokens ?? 3e3);
1657
+ try {
1658
+ const result = await runtime.clients.retrieval.query(
1659
+ latest.snapshot_hash,
1660
+ query,
1661
+ effectiveBudget,
1662
+ {
1663
+ formatter: runtime.config.retrievalFormatter
1664
+ }
1665
+ );
1666
+ const summary = {
1667
+ snapshotHash: latest.snapshot_hash,
1668
+ budgetTokens: effectiveBudget,
1669
+ formatter: runtime.config.retrievalFormatter
1670
+ };
1671
+ return {
1672
+ content: [
1673
+ {
1674
+ type: "text",
1675
+ text: result.formatted_output ?? "(no formatted output)"
1676
+ },
1677
+ {
1678
+ type: "text",
1679
+ text: JSON.stringify({ summary, result }, null, 2)
1680
+ }
1681
+ ]
1682
+ };
1683
+ } catch (error) {
1684
+ const message = error instanceof Error ? error.message : "Unknown retrieval error";
1685
+ runtime.logger.error({ err: error }, "Retrieval query failed");
1686
+ return {
1687
+ content: [{ type: "text", text: `Retrieval error: ${message}` }],
1688
+ isError: true
1689
+ };
1690
+ }
1691
+ }
1692
+ );
1693
+ return server;
1694
+ }
1695
+
1696
+ // src/mcp-cli.ts
1697
+ var ENV_FLAG_MAP = {
1698
+ root: "CODERULE_ROOT",
1699
+ "data-dir": "CODERULE_DATA_DIR",
1700
+ "auth-url": "CODERULE_AUTH_URL",
1701
+ "sync-url": "CODERULE_SYNC_URL",
1702
+ "ast-url": "CODERULE_AST_URL",
1703
+ "retrieval-url": "CODERULE_RETRIEVAL_URL",
1704
+ "retrieval-formatter": "CODERULE_RETRIEVAL_FORMATTER",
1705
+ "http-timeout": "CODERULE_HTTP_TIMEOUT",
1706
+ "snapshot-debounce": "CODERULE_SNAPSHOT_DEBOUNCE_MS",
1707
+ "heartbeat-interval": "CODERULE_HEARTBEAT_INTERVAL_MS",
1708
+ "heartbeat-check": "CODERULE_HEARTBEAT_CHECK_INTERVAL_MS",
1709
+ "queue-poll": "CODERULE_QUEUE_POLL_INTERVAL_MS",
1710
+ "hash-batch": "CODERULE_HASH_BATCH_SIZE",
1711
+ "hash-lease": "CODERULE_HASH_LEASE_MS",
1712
+ "max-snapshot-attempts": "CODERULE_MAX_SNAPSHOT_ATTEMPTS"
1713
+ };
1714
+ function printUsage() {
1715
+ console.log(`Usage: coderule-mcp-server [token] [options]
1716
+ `);
1717
+ console.log("Options:");
1718
+ console.log(" --token <token> Override CODERULE_TOKEN");
1719
+ console.log(
1720
+ " --clean, --reindex Remove existing local state before running"
1721
+ );
1722
+ console.log(
1723
+ " --inline-hasher Force inline hashing (debug only)"
1724
+ );
1725
+ console.log(" --root <path> Override CODERULE_ROOT");
1726
+ console.log(" --data-dir <path> Override CODERULE_DATA_DIR");
1727
+ console.log(" --auth-url <url> Override CODERULE_AUTH_URL");
1728
+ console.log(" --sync-url <url> Override CODERULE_SYNC_URL");
1729
+ console.log(" --ast-url <url> Override CODERULE_AST_URL");
1730
+ console.log(" --retrieval-url <url> Override CODERULE_RETRIEVAL_URL");
1731
+ console.log(
1732
+ " --retrieval-formatter <val> Override CODERULE_RETRIEVAL_FORMATTER (standard | compact)"
1733
+ );
1734
+ console.log(" --http-timeout <ms> Override CODERULE_HTTP_TIMEOUT");
1735
+ console.log(
1736
+ " --snapshot-debounce <ms> Override CODERULE_SNAPSHOT_DEBOUNCE_MS"
1737
+ );
1738
+ console.log(
1739
+ " --heartbeat-interval <ms> Override CODERULE_HEARTBEAT_INTERVAL_MS"
1740
+ );
1741
+ console.log(
1742
+ " --heartbeat-check <ms> Override CODERULE_HEARTBEAT_CHECK_INTERVAL_MS"
1743
+ );
1744
+ console.log(
1745
+ " --queue-poll <ms> Override CODERULE_QUEUE_POLL_INTERVAL_MS"
1746
+ );
1747
+ console.log(
1748
+ " --hash-batch <n> Override CODERULE_HASH_BATCH_SIZE"
1749
+ );
1750
+ console.log(" --hash-lease <ms> Override CODERULE_HASH_LEASE_MS");
1751
+ console.log(
1752
+ " --max-snapshot-attempts <n> Override CODERULE_MAX_SNAPSHOT_ATTEMPTS"
1753
+ );
1754
+ console.log(
1755
+ " KEY=value Set arbitrary environment variable"
1756
+ );
1757
+ console.log(" -h, --help Show this help message");
1758
+ }
1759
+ function parseArgs(argv) {
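+ // Accepts a positional token, --token, --clean/--reindex, --inline-hasher,
+ // the --flag <value> options mapped to CODERULE_* variables in ENV_FLAG_MAP,
+ // and raw KEY=value pairs applied directly to the environment.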
1760
+ let token = process.env.CODERULE_TOKEN;
1761
+ let clean = false;
1762
+ let inlineHasher = false;
1763
+ const env = {};
1764
+ const args = [...argv];
1765
+ while (args.length > 0) {
1766
+ const arg = args.shift();
1767
+ if (arg === "--help" || arg === "-h") {
1768
+ printUsage();
1769
+ return null;
1770
+ }
1771
+ if (arg === "--clean" || arg === "--reindex") {
1772
+ clean = true;
1773
+ continue;
1774
+ }
1775
+ if (arg === "--inline-hasher") {
1776
+ inlineHasher = true;
1777
+ continue;
1778
+ }
1779
+ if (arg === "--token") {
1780
+ const value = args.shift();
1781
+ if (!value) {
1782
+ throw new Error("Missing value for --token");
1783
+ }
1784
+ token = value;
1785
+ continue;
1786
+ }
1787
+ if (arg.startsWith("--token=")) {
1788
+ token = arg.slice("--token=".length);
1789
+ continue;
1790
+ }
1791
+ if (arg.startsWith("--")) {
1792
+ const flag = arg.slice(2);
1793
+ const envKey = ENV_FLAG_MAP[flag];
1794
+ if (!envKey) {
1795
+ throw new Error(`Unknown option: ${arg}`);
1796
+ }
1797
+ const value = args.shift();
1798
+ if (!value) {
1799
+ throw new Error(`Option ${arg} requires a value`);
1800
+ }
1801
+ env[envKey] = value;
1802
+ continue;
1803
+ }
1804
+ if (arg.includes("=")) {
1805
+ const [key, value] = arg.split("=", 2);
1806
+ if (!key || value === void 0) {
1807
+ throw new Error(`Invalid KEY=value argument: ${arg}`);
1808
+ }
1809
+ env[key] = value;
1810
+ continue;
1811
+ }
1812
+ if (!token) {
1813
+ token = arg;
1814
+ continue;
1815
+ }
1816
+ throw new Error(`Unexpected argument: ${arg}`);
1817
+ }
1818
+ if (!token) {
1819
+ throw new Error(
1820
+ "Missing token. Provide via argument or CODERULE_TOKEN environment variable."
1821
+ );
1822
+ }
1823
+ return { token, clean, inlineHasher, env };
1824
+ }
1825
+ async function ensureClean(configToken) {
1826
+ const config = await resolveConfig({ token: configToken });
1827
+ const targets = [
1828
+ config.dbPath,
1829
+ `${config.dbPath}-shm`,
1830
+ `${config.dbPath}-wal`
1831
+ ];
1832
+ await Promise.all(targets.map((target) => fs4.rm(target, { force: true })));
1833
+ await fs4.rm(path.join(config.dataDir, "watch", `${config.rootId}.sqlite-shm`), {
1834
+ force: true
1835
+ }).catch(() => {
1836
+ });
1837
+ const dir = path.dirname(config.dbPath);
1838
+ await fs4.mkdir(dir, { recursive: true });
1839
+ console.log(`Removed scanner database at ${config.dbPath}`);
1840
+ }
1841
+ function awaitShutdownSignals() {
1842
+ return new Promise((resolve) => {
1843
+ const signals = ["SIGINT", "SIGTERM"];
1844
+ const handler = (signal) => {
1845
+ for (const sig of signals) {
1846
+ process.off(sig, handler);
1847
+ }
1848
+ resolve(signal);
1849
+ };
1850
+ for (const sig of signals) {
1851
+ process.on(sig, handler);
1852
+ }
1853
+ });
1854
+ }
1855
+ async function main() {
1856
+ try {
1857
+ const options = parseArgs(process.argv.slice(2));
1858
+ if (!options) {
1859
+ return;
1860
+ }
1861
+ process.env.CODERULE_TOKEN = options.token;
1862
+ if (options.inlineHasher) {
1863
+ process.env.CODERULE_HASHER_INLINE = "1";
1864
+ }
1865
+ for (const [key, value] of Object.entries(options.env)) {
1866
+ process.env[key] = value;
1867
+ }
1868
+ if (options.clean) {
1869
+ await ensureClean(options.token);
1870
+ }
1871
+ const runtime = await bootstrap({ token: options.token });
1872
+ const runner = new ServiceRunner(runtime);
1873
+ try {
1874
+ await runner.prepareWatcher(true);
1875
+ const initial = await runInitialSyncPipeline(runtime);
1876
+ runtime.logger.info(
1877
+ {
1878
+ snapshotHash: initial.snapshotHash,
1879
+ filesCount: initial.filesCount
1880
+ },
1881
+ "Initial sync completed; starting MCP server"
1882
+ );
1883
+ runner.recordInitialSnapshot(initial.createdAt);
1884
+ await runner.startLoops();
1885
+ await runner.enableWatcherProcessing();
1886
+ const server = createMcpServer({ runtime, runner });
1887
+ const transport = new StdioServerTransport();
1888
+ await server.connect(transport);
1889
+ runtime.logger.info("MCP server connected via stdio");
1890
+ const signal = await awaitShutdownSignals();
1891
+ runtime.logger.info({ signal }, "Shutdown signal received");
1892
+ if (typeof transport.close === "function") {
1893
+ await transport.close();
1894
+ }
1895
+ } finally {
1896
+ await runner.stop();
1897
+ }
1898
+ } catch (error) {
1899
+ console.error("MCP server failed:", error);
1900
+ process.exitCode = 1;
1901
+ }
1902
+ }
1903
+ void main();
1904
+ //# sourceMappingURL=mcp-cli.js.map