@coderule/mcp 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js ADDED
@@ -0,0 +1,1646 @@
+ import pino from 'pino';
+ import { createHash } from 'crypto';
+ import fs4 from 'fs/promises';
+ import path from 'path';
+ import envPaths from 'env-paths';
+ import Database from 'better-sqlite3';
+ import { Qulite, enqueueFsEvent } from '@coderule/qulite';
+ import { CoderuleClients, ASTHttpClient, SyncHttpClient } from '@coderule/clients';
+ import fs2 from 'fs';
+ import { Worker } from 'worker_threads';
+ import chokidar from 'chokidar';
+
+ // src/log/logger.ts
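+ // Structured pino logger writing to stderr (stdout is presumably reserved
+ // for MCP protocol traffic); level comes from CODERULE_LOG_LEVEL.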
+ var level = process.env.CODERULE_LOG_LEVEL ?? "info";
+ var baseLogger = pino(
+   {
+     level,
+     name: "coderule-scanner",
+     timestamp: pino.stdTimeFunctions.isoTime
+   },
+   process.stderr
+ );
+ var logger = baseLogger;
+
+ // src/config/Defaults.ts
+ var DEFAULT_SNAPSHOT_DEBOUNCE_MS = 1e3;
+ var DEFAULT_HEARTBEAT_INTERVAL_MS = 6e4;
+ var DEFAULT_HEARTBEAT_CHECK_INTERVAL_MS = 5e3;
+ var DEFAULT_QUEUE_POLL_INTERVAL_MS = 500;
+ var DEFAULT_HASH_BATCH_SIZE = 32;
+ var DEFAULT_MAX_SNAPSHOT_ATTEMPTS = 5;
+ var DEFAULT_HTTP_TIMEOUT_MS = 3e4;
+
+ // src/config/Configurator.ts
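+ // Resolves runtime configuration from params and CODERULE_* environment
+ // variables. The watched root is identified by the SHA-256 of its
+ // normalized absolute path, which also names the per-root SQLite file.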
+ var DEFAULT_RETRIEVAL_FORMATTER = "standard";
+ var DEFAULTS = {
+   snapshotDebounceMs: DEFAULT_SNAPSHOT_DEBOUNCE_MS,
+   heartbeatIntervalMs: DEFAULT_HEARTBEAT_INTERVAL_MS,
+   heartbeatCheckIntervalMs: DEFAULT_HEARTBEAT_CHECK_INTERVAL_MS,
+   queuePollIntervalMs: DEFAULT_QUEUE_POLL_INTERVAL_MS,
+   hashBatchSize: DEFAULT_HASH_BATCH_SIZE,
+   maxSnapshotAttempts: DEFAULT_MAX_SNAPSHOT_ATTEMPTS
+ };
+ function normalizeRoot(root) {
+   const resolved = path.resolve(root);
+   const normalized = path.normalize(resolved);
+   return normalized.split(path.sep).join("/");
+ }
+ function sha256(input) {
+   return createHash("sha256").update(input).digest("hex");
+ }
+ function parseInteger(value, fallback) {
+   if (!value) return fallback;
+   const parsed = Number.parseInt(value, 10);
+   if (Number.isNaN(parsed) || parsed <= 0) {
+     throw new Error(`Invalid integer value: ${value}`);
+   }
+   return parsed;
+ }
+ function parseFormatter(value) {
+   if (!value) return DEFAULT_RETRIEVAL_FORMATTER;
+   const normalized = value.toLowerCase();
+   if (normalized === "standard" || normalized === "compact") {
+     return normalized;
+   }
+   throw new Error(
+     `Invalid CODERULE_RETRIEVAL_FORMATTER: ${value}. Expected "standard" or "compact"`
+   );
+ }
+ async function resolveConfig({
+   token
+ }) {
+   const resolvedToken = token ?? process.env.CODERULE_TOKEN;
+   if (!resolvedToken) {
+     throw new Error(
+       "Missing token: provide params.token or CODERULE_TOKEN env"
+     );
+   }
+   const rootCandidate = process.env.CODERULE_ROOT || process.cwd();
+   const rootPath = path.resolve(rootCandidate);
+   const normalized = normalizeRoot(rootPath);
+   const rootId = sha256(normalized);
+   const dataDir = process.env.CODERULE_DATA_DIR || envPaths("coderule").data;
+   const watchDir = path.join(dataDir, "watch");
+   await fs4.mkdir(watchDir, { recursive: true });
+   const dbPath = path.join(watchDir, `${rootId}.sqlite`);
+   const baseConfig = {
+     token: resolvedToken,
+     rootPath,
+     rootId,
+     dbPath,
+     dataDir,
+     authBaseUrl: process.env.CODERULE_AUTH_URL,
+     astBaseUrl: process.env.CODERULE_AST_URL,
+     syncBaseUrl: process.env.CODERULE_SYNC_URL,
+     retrievalBaseUrl: process.env.CODERULE_RETRIEVAL_URL,
+     httpTimeout: void 0,
+     snapshotDebounceMs: DEFAULTS.snapshotDebounceMs,
+     heartbeatIntervalMs: DEFAULTS.heartbeatIntervalMs,
+     heartbeatCheckIntervalMs: DEFAULTS.heartbeatCheckIntervalMs,
+     queuePollIntervalMs: DEFAULTS.queuePollIntervalMs,
+     hashBatchSize: DEFAULTS.hashBatchSize,
+     maxSnapshotAttempts: DEFAULTS.maxSnapshotAttempts,
+     retrievalFormatter: parseFormatter(
+       process.env.CODERULE_RETRIEVAL_FORMATTER
+     )
+   };
+   if (process.env.CODERULE_SNAPSHOT_DEBOUNCE_MS) {
+     baseConfig.snapshotDebounceMs = parseInteger(
+       process.env.CODERULE_SNAPSHOT_DEBOUNCE_MS,
+       baseConfig.snapshotDebounceMs
+     );
+   }
+   if (process.env.CODERULE_HEARTBEAT_INTERVAL_MS) {
+     baseConfig.heartbeatIntervalMs = parseInteger(
+       process.env.CODERULE_HEARTBEAT_INTERVAL_MS,
+       baseConfig.heartbeatIntervalMs
+     );
+   }
+   if (process.env.CODERULE_HEARTBEAT_CHECK_INTERVAL_MS) {
+     baseConfig.heartbeatCheckIntervalMs = parseInteger(
+       process.env.CODERULE_HEARTBEAT_CHECK_INTERVAL_MS,
+       baseConfig.heartbeatCheckIntervalMs
+     );
+   }
+   if (process.env.CODERULE_QUEUE_POLL_INTERVAL_MS) {
+     baseConfig.queuePollIntervalMs = parseInteger(
+       process.env.CODERULE_QUEUE_POLL_INTERVAL_MS,
+       baseConfig.queuePollIntervalMs
+     );
+   }
+   if (process.env.CODERULE_HASH_BATCH_SIZE) {
+     baseConfig.hashBatchSize = parseInteger(
+       process.env.CODERULE_HASH_BATCH_SIZE,
+       baseConfig.hashBatchSize
+     );
+   }
+   if (process.env.CODERULE_MAX_SNAPSHOT_ATTEMPTS) {
+     baseConfig.maxSnapshotAttempts = parseInteger(
+       process.env.CODERULE_MAX_SNAPSHOT_ATTEMPTS,
+       baseConfig.maxSnapshotAttempts
+     );
+   }
+   baseConfig.httpTimeout = parseInteger(
+     process.env.CODERULE_HTTP_TIMEOUT,
+     DEFAULT_HTTP_TIMEOUT_MS
+   );
+   logger.debug(
+     {
+       rootPath,
+       dbPath,
+       dataDir,
+       authBaseUrl: baseConfig.authBaseUrl,
+       astBaseUrl: baseConfig.astBaseUrl,
+       syncBaseUrl: baseConfig.syncBaseUrl
+     },
+     "Resolved configuration"
+   );
+   return baseConfig;
+ }
+
+ // src/db/Schema.ts
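+ // Per-file rows double as a hash-state machine: hash_state moves between
+ // 'dirty', 'hashing' (with an owner and lease), 'clean', and 'missing'.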
+ var FILES_SCHEMA = `
+ CREATE TABLE IF NOT EXISTS files (
+   id INTEGER PRIMARY KEY AUTOINCREMENT,
+   rel_path TEXT NOT NULL,
+   display_path TEXT NOT NULL,
+   size INTEGER NOT NULL,
+   mtime_ns INTEGER NOT NULL,
+   mode INTEGER,
+   ino TEXT,
+   dev TEXT,
+   is_symlink INTEGER NOT NULL DEFAULT 0,
+   target TEXT,
+   content_sha256 TEXT,
+   service_file_hash TEXT,
+   last_seen_ts INTEGER NOT NULL,
+   hash_state TEXT NOT NULL,
+   hash_owner TEXT,
+   hash_lease_expires_at INTEGER,
+   hash_started_at INTEGER,
+   UNIQUE(rel_path)
+ );
+ CREATE INDEX IF NOT EXISTS idx_files_hash_state ON files(hash_state);
+ CREATE INDEX IF NOT EXISTS idx_files_content_sha ON files(content_sha256);
+ CREATE INDEX IF NOT EXISTS idx_files_service_hash ON files(service_file_hash);
+ `;
+ var SNAPSHOTS_SCHEMA = `
+ CREATE TABLE IF NOT EXISTS snapshots (
+   id INTEGER PRIMARY KEY AUTOINCREMENT,
+   snapshot_hash TEXT NOT NULL,
+   files_count INTEGER NOT NULL,
+   total_size INTEGER NOT NULL,
+   created_at INTEGER NOT NULL
+ );
+ `;
+
+ // src/db/Database.ts
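+ // Opens the per-root SQLite database in WAL mode and applies idempotent
+ // schema creation plus additive column migrations.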
+ function safeAlter(db, sql) {
+   try {
+     db.exec(sql);
+   } catch (error) {
+     if (typeof error?.message === "string" && error.message.includes("duplicate column name")) {
+       return;
+     }
+     throw error;
+   }
+ }
+ function applyMigrations(db, logger2) {
+   const alterations = [
+     "ALTER TABLE files ADD COLUMN hash_owner TEXT",
+     "ALTER TABLE files ADD COLUMN hash_lease_expires_at INTEGER",
+     "ALTER TABLE files ADD COLUMN hash_started_at INTEGER"
+   ];
+   for (const sql of alterations) {
+     try {
+       safeAlter(db, sql);
+     } catch (error) {
+       logger2.error({ err: error, sql }, "Database migration failed");
+       throw error;
+     }
+   }
+   db.exec(
+     "CREATE INDEX IF NOT EXISTS idx_files_hash_lease ON files(hash_state, hash_lease_expires_at)"
+   );
+ }
+ function openDatabase(dbPath, logger2) {
+   const db = new Database(dbPath, { verbose: void 0 });
+   logger2.info({ dbPath }, "Opened SQLite database");
+   db.pragma("journal_mode = WAL");
+   db.pragma("synchronous = NORMAL");
+   db.pragma("busy_timeout = 5000");
+   db.pragma("foreign_keys = ON");
+   db.exec("BEGIN");
+   try {
+     db.exec(FILES_SCHEMA);
+     db.exec(SNAPSHOTS_SCHEMA);
+     db.exec("COMMIT");
+   } catch (error) {
+     db.exec("ROLLBACK");
+     db.close();
+     throw error;
+   }
+   applyMigrations(db, logger2);
+   return db;
+ }
+
+ // src/db/FilesRepo.ts
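+ // Prepared-statement repository for the files table. claimDirty atomically
+ // flips a batch of 'dirty' rows to 'hashing' with a lease (owner plus
+ // expiry timestamp), so a crashed hasher's claims can later be requeued.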
+ var FilesRepo = class {
+   constructor(db) {
+     this.db = db;
+     this.selectByRelPath = this.db.prepare(
+       "SELECT * FROM files WHERE rel_path = ?"
+     );
+     this.insertStmt = this.db.prepare(
+       `INSERT INTO files (
+         rel_path,
+         display_path,
+         size,
+         mtime_ns,
+         mode,
+         ino,
+         dev,
+         is_symlink,
+         target,
+         content_sha256,
+         service_file_hash,
+         last_seen_ts,
+         hash_state,
+         hash_owner,
+         hash_lease_expires_at,
+         hash_started_at
+       ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, NULL, NULL, ?, ?, NULL, NULL, NULL)`
+     );
+     this.updateStmt = this.db.prepare(
+       `UPDATE files SET
+         display_path = ?,
+         size = ?,
+         mtime_ns = ?,
+         mode = ?,
+         ino = ?,
+         dev = ?,
+         is_symlink = ?,
+         target = ?,
+         content_sha256 = ?,
+         service_file_hash = ?,
+         last_seen_ts = ?,
+         hash_state = ?,
+         hash_owner = CASE WHEN ? = 'hashing' THEN hash_owner ELSE NULL END,
+         hash_lease_expires_at = CASE WHEN ? = 'hashing' THEN hash_lease_expires_at ELSE NULL END,
+         hash_started_at = CASE WHEN ? = 'hashing' THEN hash_started_at ELSE NULL END
+       WHERE id = ?`
+     );
+     this.markMissingStmt = this.db.prepare(
+       `UPDATE files
+       SET hash_state = 'missing', content_sha256 = NULL, service_file_hash = NULL, last_seen_ts = ?,
+         hash_owner = NULL, hash_lease_expires_at = NULL, hash_started_at = NULL
+       WHERE rel_path = ?`
+     );
+     this.markMissingPrefixStmt = this.db.prepare(
+       `UPDATE files
+       SET hash_state = 'missing', content_sha256 = NULL, service_file_hash = NULL,
+         hash_owner = NULL, hash_lease_expires_at = NULL, hash_started_at = NULL
+       WHERE (rel_path = ? OR rel_path LIKE (? || '/%')) AND hash_state != 'missing'`
+     );
+     this.markDirtyStmt = this.db.prepare(
+       `UPDATE files
+       SET hash_state = 'dirty', last_seen_ts = ?,
+         hash_owner = NULL, hash_lease_expires_at = NULL, hash_started_at = NULL
+       WHERE rel_path = ?`
+     );
+     this.claimDirtyStmt = this.db.prepare(
+       `WITH candidates AS (
+         SELECT id
+         FROM files
+         WHERE hash_state = 'dirty'
+         ORDER BY last_seen_ts ASC, id ASC
+         LIMIT @limit
+       )
+       UPDATE files
+       SET hash_state = 'hashing',
+         hash_owner = @owner,
+         hash_lease_expires_at = @lease_expires_at,
+         hash_started_at = @now
+       WHERE id IN candidates
+       RETURNING *`
+     );
+     this.markDirtyByIdStmt = this.db.prepare(
+       `UPDATE files
+       SET hash_state = 'dirty', hash_owner = NULL, hash_lease_expires_at = NULL, hash_started_at = NULL
+       WHERE id = ?`
+     );
+     this.applyHashesStmt = this.db.prepare(
+       `UPDATE files
+       SET content_sha256 = ?, service_file_hash = ?, hash_state = 'clean',
+         hash_owner = NULL, hash_lease_expires_at = NULL, hash_started_at = NULL
+       WHERE id = ?`
+     );
+     this.requeueExpiredHashingStmt = this.db.prepare(
+       `UPDATE files
+       SET hash_state = 'dirty', hash_owner = NULL, hash_lease_expires_at = NULL, hash_started_at = NULL
+       WHERE hash_state = 'hashing'
+         AND hash_lease_expires_at IS NOT NULL
+         AND hash_lease_expires_at <= ?`
+     );
+     this.resetHashingStmt = this.db.prepare(
+       `UPDATE files
+       SET hash_state = 'dirty', hash_owner = NULL, hash_lease_expires_at = NULL, hash_started_at = NULL
+       WHERE hash_state = 'hashing'`
+     );
+     this.selectCleanSnapshotStmt = this.db.prepare(
+       `SELECT rel_path, service_file_hash, size
+       FROM files
+       WHERE hash_state = 'clean' AND service_file_hash IS NOT NULL
+       ORDER BY rel_path ASC`
+     );
+     this.totalsStmt = this.db.prepare(
+       `SELECT COUNT(*) AS files_count, COALESCE(SUM(size), 0) AS total_size
+       FROM files
+       WHERE hash_state = 'clean' AND service_file_hash IS NOT NULL`
+     );
+     this.markMissingBeforeStmt = this.db.prepare(
+       `UPDATE files
+       SET hash_state = 'missing', content_sha256 = NULL, service_file_hash = NULL,
+         hash_owner = NULL, hash_lease_expires_at = NULL, hash_started_at = NULL
+       WHERE last_seen_ts < ? AND hash_state != 'missing'`
+     );
+     this.countByStateStmt = this.db.prepare(
+       "SELECT COUNT(*) as count FROM files WHERE hash_state = ?"
+     );
+   }
+   getByRelPath(relPath) {
+     return this.selectByRelPath.get(relPath);
+   }
+   upsertFromStat(params) {
+     const now = Date.now();
+     const { relPath, displayPath, stats, isSymlink, symlinkTarget } = params;
+     const existing = this.getByRelPath(relPath);
+     const mtimeNs = Math.trunc(stats.mtimeMs * 1e6);
+     const ino = typeof stats.ino === "number" ? String(stats.ino) : null;
+     const dev = typeof stats.dev === "number" ? String(stats.dev) : null;
+     const mode = typeof stats.mode === "number" ? stats.mode : null;
+     const isSymlinkInt = isSymlink ? 1 : 0;
+     if (!existing) {
+       this.insertStmt.run(
+         relPath,
+         displayPath,
+         stats.size,
+         mtimeNs,
+         mode,
+         ino,
+         dev,
+         isSymlinkInt,
+         symlinkTarget,
+         now,
+         "dirty"
+       );
+       return "dirty";
+     }
+     let nextState = existing.hash_state;
+     let nextContent = existing.content_sha256;
+     let nextServiceHash = existing.service_file_hash;
+     const changed = existing.size !== stats.size || existing.mtime_ns !== mtimeNs || existing.mode !== mode || existing.ino !== ino || existing.dev !== dev || existing.is_symlink !== isSymlinkInt || existing.target !== symlinkTarget;
+     if (changed || existing.hash_state === "missing") {
+       nextState = "dirty";
+       nextContent = null;
+       nextServiceHash = null;
+     }
+     this.updateStmt.run(
+       displayPath,
+       stats.size,
+       mtimeNs,
+       mode,
+       ino,
+       dev,
+       isSymlinkInt,
+       symlinkTarget,
+       nextContent,
+       nextServiceHash,
+       now,
+       nextState,
+       nextState,
+       nextState,
+       nextState,
+       existing.id
+     );
+     return nextState;
+   }
+   markMissing(relPath) {
+     const now = Date.now();
+     const result = this.markMissingStmt.run(now, relPath);
+     return result.changes ?? 0;
+   }
+   markMissingByPrefix(prefix) {
+     const result = this.markMissingPrefixStmt.run(prefix, prefix);
+     return result.changes ?? 0;
+   }
+   markDirty(relPath) {
+     const now = Date.now();
+     this.markDirtyStmt.run(now, relPath);
+   }
+   markMissingBefore(timestamp) {
+     const result = this.markMissingBeforeStmt.run(timestamp);
+     return result.changes ?? 0;
+   }
+   claimDirty(limit, owner, leaseMs) {
+     if (limit <= 0) {
+       return [];
+     }
+     const now = Date.now();
+     return this.claimDirtyStmt.all({
+       limit,
+       owner,
+       lease_expires_at: now + leaseMs,
+       now
+     });
+   }
+   markDirtyByIds(ids) {
+     if (!ids.length) return;
+     const tx = this.db.transaction((batch) => {
+       for (const id of batch) {
+         this.markDirtyByIdStmt.run(id);
+       }
+     });
+     tx(ids);
+   }
+   applyHashResults(results) {
+     if (!results.length) return;
+     const tx = this.db.transaction((batch) => {
+       for (const { id, contentSha256, serviceFileHash } of batch) {
+         this.applyHashesStmt.run(contentSha256, serviceFileHash, id);
+       }
+     });
+     tx(results);
+   }
+   getCleanFilesForSnapshot() {
+     return this.selectCleanSnapshotStmt.all();
+   }
+   getTotalsForSnapshot() {
+     const row = this.totalsStmt.get();
+     return {
+       filesCount: row?.files_count ?? 0,
+       totalSize: row?.total_size ?? 0
+     };
+   }
+   countByState(state) {
+     const row = this.countByStateStmt.get(state);
+     return row?.count ?? 0;
+   }
+   requeueExpiredHashing(now) {
+     const result = this.requeueExpiredHashingStmt.run(now);
+     return result.changes ?? 0;
+   }
+   resetHashingStates() {
+     const result = this.resetHashingStmt.run();
+     return result.changes ?? 0;
+   }
+ };
+
+ // src/db/SnapshotsRepo.ts
+ var SnapshotsRepo = class {
+   constructor(db) {
+     this.db = db;
+     this.insertStmt = this.db.prepare(
+       `INSERT INTO snapshots (snapshot_hash, files_count, total_size, created_at)
+       VALUES (?, ?, ?, ?)`
+     );
+     this.selectLatestStmt = this.db.prepare(
+       `SELECT * FROM snapshots ORDER BY created_at DESC LIMIT 1`
+     );
+   }
+   insert(snapshotHash, filesCount, totalSize, createdAt) {
+     this.insertStmt.run(snapshotHash, filesCount, totalSize, createdAt);
+   }
+   getLatest() {
+     return this.selectLatestStmt.get();
+   }
+ };
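+ // Durable outbox built on @coderule/qulite: snapshot and heartbeat work is
+ // enqueued as 'fs_control' jobs, apparently de-duplicated per root via the
+ // `snapshot:<rootId>` / `heartbeat:<rootId>` dedupe keys, then claimed,
+ // acked, or retried by the service loops further below.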
+ var Outbox = class {
+   constructor(db, logger2) {
+     this.log = logger2.child({ scope: "outbox" });
+     this.queue = new Qulite(db, {
+       logger: this.log,
+       defaultLeaseMs: 3e4,
+       defaultMaxAttempts: 10
+     });
+     this.markKindStmt = db.prepare(
+       `UPDATE qulite_jobs SET kind = @kind WHERE dedupe_key = @dedupe_key`
+     );
+     this.purgeLegacyStmt = db.prepare(
+       `DELETE FROM qulite_jobs WHERE type = 'fs_control' AND (kind IS NULL OR kind = '')`
+     );
+     const purged = this.purgeLegacyStmt.run().changes ?? 0;
+     if (purged > 0) {
+       this.log.warn({ purged }, "Purged legacy fs_control jobs without kind");
+     }
+   }
+   getQueue() {
+     return this.queue;
+   }
+   markKind(dedupeKey, kind) {
+     this.markKindStmt.run({ dedupe_key: dedupeKey, kind });
+   }
+   enqueueSnapshot(rootId, delayMs = 0) {
+     const result = enqueueFsEvent(this.queue, {
+       root_id: rootId,
+       rel_path: "",
+       kind: "snapshot",
+       delayMs,
+       maxAttempts: 20,
+       data: { root_id: rootId, kind: "snapshot" }
+     });
+     this.markKind(`snapshot:${rootId}`, "snapshot");
+     if (result.changes > 0) {
+       this.log.debug({ rootId }, "Enqueued snapshot job");
+     }
+   }
+   enqueueHeartbeat(rootId, delayMs = 0) {
+     const result = enqueueFsEvent(this.queue, {
+       root_id: rootId,
+       rel_path: "",
+       kind: "heartbeat",
+       delayMs,
+       maxAttempts: 5,
+       data: { root_id: rootId, kind: "heartbeat" }
+     });
+     this.markKind(`heartbeat:${rootId}`, "heartbeat");
+     if (result.changes > 0) {
+       this.log.debug({ rootId }, "Enqueued heartbeat job");
+     }
+   }
+   claimFsControlJob(leaseOwner, leaseMs = 3e4) {
+     return this.queue.claimNext({ type: "fs_control", leaseOwner, leaseMs });
+   }
+   ack(jobId, leaseOwner) {
+     return this.queue.ack(jobId, leaseOwner);
+   }
+   retry(jobId, leaseOwner, delayMs) {
+     return this.queue.retry(jobId, leaseOwner, delayMs);
+   }
+   fail(jobId, leaseOwner, error) {
+     return this.queue.fail(jobId, leaseOwner, error);
+   }
+   requeueTimedOut() {
+     return this.queue.requeueTimedOut();
+   }
+ };
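+ // HTTP client factory: per-service base URLs and the shared timeout are
+ // passed through only when explicitly configured, so the client library's
+ // own defaults apply otherwise.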
+ function serviceConfig(baseUrl, timeout) {
+   if (baseUrl === void 0 && timeout === void 0) {
+     return void 0;
+   }
+   const config = {};
+   if (baseUrl !== void 0) {
+     config.baseUrl = baseUrl;
+   }
+   if (timeout !== void 0) {
+     config.timeout = timeout;
+   }
+   return config;
+ }
+ function createClients(config, logger2) {
+   const clientLogger = logger2.child({ scope: "clients" });
+   const httpTimeout = config.httpTimeout;
+   const clients = new CoderuleClients({
+     token: config.token,
+     auth: serviceConfig(config.authBaseUrl, httpTimeout),
+     ast: serviceConfig(config.astBaseUrl, httpTimeout),
+     sync: serviceConfig(config.syncBaseUrl, httpTimeout),
+     retrieval: serviceConfig(config.retrievalBaseUrl, httpTimeout),
+     jwtFactory: {
+       onTokenRefreshed: (info) => {
+         clientLogger.debug(
+           {
+             expiresAt: new Date(info.expiresAt).toISOString(),
+             serverUrl: info.serverUrl
+           },
+           "JWT refreshed"
+         );
+       }
+     }
+   });
+   return clients;
+ }
+
+ // src/rules/RulesFetcher.ts
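+ // Fetches include/exclude rules from the AST service and compiles them into
+ // fast predicates shared by the initial walker and the chokidar watcher.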
+ async function fetchVisitorRules(clients, logger2) {
+   const fetchLogger = logger2.child({ scope: "rules" });
+   fetchLogger.info("Fetching visitor rules v2 from AST service");
+   const rules = await clients.ast.getVisitorRulesV2();
+   fetchLogger.info(
+     {
+       include_extensions: rules.include_extensions.length,
+       include_filenames: rules.include_filenames.length,
+       exclude_dirnames: rules.exclude_dirnames.length
+     },
+     "Fetched visitor rules"
+   );
+   return rules;
+ }
+ function toPosix(input) {
+   return input.split(path.sep).join("/");
+ }
+ function getLowerBasename(input) {
+   const base = input.split("/").pop();
+   return (base ?? "").toLowerCase();
+ }
+ function getLowerExt(basename) {
+   const idx = basename.lastIndexOf(".");
+   if (idx < 0) return "";
+   return basename.slice(idx).toLowerCase();
+ }
+ function compileRulesBundle(rules) {
+   const compiled = ASTHttpClient.compileRulesV2(rules);
+   const basePredicate = ASTHttpClient.buildIgnoredPredicate(compiled);
+   const predicate = (fullPath, stats) => {
+     let info = stats;
+     if (!info) {
+       logger.debug({ path: fullPath }, "Predicate fallback lstat");
+       try {
+         info = fs2.lstatSync(fullPath);
+       } catch (error) {
+         logger.warn(
+           { err: error, path: fullPath },
+           "Failed to lstat path for rules predicate"
+         );
+         return false;
+       }
+     }
+     return basePredicate(fullPath, info);
+   };
+   return {
+     rules,
+     compiled,
+     predicate
+   };
+ }
+ function shouldIncludeFile(relPath, stats, bundle) {
+   if (stats.isDirectory()) return false;
+   const posixRel = toPosix(relPath);
+   if (bundle.compiled.dirRe.test(posixRel)) {
+     return false;
+   }
+   const basename = getLowerBasename(posixRel);
+   if (bundle.compiled.names.has(basename)) {
+     return true;
+   }
+   const ext = getLowerExt(basename);
+   return bundle.compiled.exts.has(ext);
+ }
+ function shouldPruneDirectory(relPath, bundle) {
+   const posixRel = toPosix(relPath);
+   return bundle.compiled.dirRe.test(posixRel);
+ }
+ function buildWatcherIgnored(bundle) {
+   return (fullPath, stats) => bundle.predicate(fullPath, stats);
+ }
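+ // Off-thread hashing: digests are computed in a worker_threads Worker and
+ // matched back to callers by a monotonically increasing taskId.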
+ var HashWorker = class {
+   constructor(logger2) {
+     this.pending = /* @__PURE__ */ new Map();
+     this.nextTaskId = 1;
+     this.terminating = false;
+     this.log = logger2.child({ scope: "hash-worker" });
+     const workerUrl = new URL("./hash/WorkerThread.js", import.meta.url);
+     const execArgv = process.execArgv.filter(
+       (arg) => !arg.startsWith("--input-type")
+     );
+     const workerOptions = {
+       name: "coderule-hasher",
+       execArgv
+     };
+     if (workerUrl.pathname.endsWith(".js")) {
+       workerOptions.type = "module";
+     }
+     this.worker = new Worker(workerUrl, workerOptions);
+     this.worker.on(
+       "message",
+       (message) => this.onMessage(message)
+     );
+     this.worker.on("error", (error) => this.handleWorkerError(error));
+     this.worker.on("exit", (code) => {
+       if (code !== 0 && !this.terminating) {
+         this.handleWorkerError(
+           new Error(`Hasher worker exited with code ${code}`)
+         );
+       }
+     });
+   }
+   async terminate() {
+     this.terminating = true;
+     for (const [, pending] of this.pending) {
+       pending.reject(new Error("Hasher worker terminated"));
+     }
+     this.pending.clear();
+     await this.worker.terminate();
+   }
+   onMessage(message) {
+     const pending = this.pending.get(message.taskId);
+     if (!pending) {
+       this.log.warn(
+         { taskId: message.taskId },
+         "Received message for unknown task"
+       );
+       return;
+     }
+     this.pending.delete(message.taskId);
+     if (message.type === "hash-result") {
+       pending.resolve({
+         contentSha256: message.contentSha256,
+         serviceFileHash: message.serviceFileHash
+       });
+     } else {
+       const error = new Error(message.error);
+       error.code = message.code;
+       pending.reject(error);
+     }
+   }
+   handleWorkerError(error) {
+     this.log.error({ err: error }, "Hasher worker error");
+     for (const [, pending] of this.pending) {
+       pending.reject(error);
+     }
+     this.pending.clear();
+   }
+   compute(absPath, relPath) {
+     const taskId = this.nextTaskId++;
+     const payload = {
+       type: "hash",
+       taskId,
+       absPath,
+       relPath
+     };
+     return new Promise((resolve, reject) => {
+       this.pending.set(taskId, { resolve, reject });
+       this.worker.postMessage(payload);
+     });
+   }
+ };
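+ // Hasher drains the dirty queue in lease-guarded batches. Each file gets two
+ // SHA-256 digests: content_sha256 over the raw bytes, and service_file_hash
+ // over rel_path + "\n" + bytes. It falls back to inline hashing when the
+ // worker cannot start, or when CODERULE_HASHER_INLINE=1.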
+ var Hasher = class {
+   constructor(options) {
+     this.options = options;
+     this.worker = null;
+     this.log = options.logger.child({ scope: "hasher" });
+     this.inlineMode = process.env.CODERULE_HASHER_INLINE === "1";
+     this.ownerId = `hasher-${process.pid}-${Date.now()}`;
+     const leaseFromEnv = process.env.CODERULE_HASH_LEASE_MS ? Number.parseInt(process.env.CODERULE_HASH_LEASE_MS, 10) : Number.NaN;
+     this.leaseDurationMs = Number.isFinite(leaseFromEnv) && leaseFromEnv > 0 ? leaseFromEnv : 3e4;
+     if (!this.inlineMode) {
+       try {
+         this.worker = new HashWorker(this.log);
+       } catch (error) {
+         this.log.warn(
+           { err: error },
+           "Failed to start hasher worker, falling back to inline hashing"
+         );
+         this.worker = null;
+         this.inlineMode = true;
+       }
+     }
+   }
+   async close() {
+     if (this.worker) {
+       await this.worker.terminate();
+     }
+   }
+   resolveAbsolutePath(record) {
+     if (path.isAbsolute(record.display_path)) {
+       return record.display_path;
+     }
+     return path.join(this.options.rootPath, record.rel_path);
+   }
+   async ensureExists(absPath, record) {
+     try {
+       await fs4.access(absPath);
+       return true;
+     } catch (error) {
+       this.log.warn(
+         { err: error, relPath: record.rel_path },
+         "File missing before hashing"
+       );
+       this.options.filesRepo.markMissing(record.rel_path);
+       return false;
+     }
+   }
+   async computeHash(absPath, relPath) {
+     if (this.inlineMode || !this.worker) {
+       return this.hashInline(absPath, relPath);
+     }
+     return this.worker.compute(absPath, relPath);
+   }
+   async hashInline(absPath, relPath) {
+     return new Promise((resolve, reject) => {
+       const content = createHash("sha256");
+       const service = createHash("sha256");
+       service.update(relPath);
+       service.update("\n");
+       const stream = fs2.createReadStream(absPath);
+       stream.on("data", (chunk) => {
+         content.update(chunk);
+         service.update(chunk);
+       });
+       stream.on("error", (error) => {
+         reject(error);
+       });
+       stream.on("end", () => {
+         resolve({
+           contentSha256: content.digest("hex"),
+           serviceFileHash: service.digest("hex")
+         });
+       });
+     });
+   }
+   async processBatch(limit) {
+     const now = Date.now();
+     const requeued = this.options.filesRepo.requeueExpiredHashing(now);
+     if (requeued > 0) {
+       this.log.debug({ requeued }, "Requeued expired hashing leases");
+     }
+     const dirty = this.options.filesRepo.claimDirty(
+       limit,
+       this.ownerId,
+       this.leaseDurationMs
+     );
+     if (dirty.length > 0) {
+       this.log.debug({ count: dirty.length }, "Hashing claimed files");
+     }
+     if (dirty.length === 0) {
+       return false;
+     }
+     const successes = [];
+     const failures = [];
+     for (const record of dirty) {
+       const absPath = this.resolveAbsolutePath(record);
+       const exists = await this.ensureExists(absPath, record);
+       if (!exists) {
+         continue;
+       }
+       try {
+         const result = await this.computeHash(absPath, record.rel_path);
+         successes.push({
+           id: record.id,
+           contentSha256: result.contentSha256,
+           serviceFileHash: result.serviceFileHash
+         });
+       } catch (error) {
+         if (error?.code === "ENOENT") {
+           this.log.debug(
+             { relPath: record.rel_path },
+             "File disappeared during hashing"
+           );
+           this.options.filesRepo.markMissing(record.rel_path);
+         } else {
+           this.log.warn(
+             { err: error, relPath: record.rel_path },
+             "Failed to hash file"
+           );
+           failures.push(record.id);
+         }
+       }
+     }
+     if (successes.length) {
+       this.log.debug({ count: successes.length }, "Hashing succeeded");
+       this.options.filesRepo.applyHashResults(successes);
+     }
+     if (failures.length) {
+       this.log.warn({ count: failures.length }, "Hashing failed for files");
+       this.options.filesRepo.markDirtyByIds(failures);
+     }
+     return true;
+   }
+ };
+
+ // src/service/Bootstrap.ts
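+ // Wires everything together: config, SQLite, repos, outbox, HTTP clients,
+ // compiled rules, and the hasher, returned as one runtime object.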
+ function createServiceLogger() {
+   return logger.child({ scope: "service" });
+ }
+ async function bootstrap(params) {
+   const config = await resolveConfig(params);
+   const logger2 = createServiceLogger();
+   const db = openDatabase(config.dbPath, logger2.child({ scope: "db" }));
+   const filesRepo = new FilesRepo(db);
+   const recovered = filesRepo.resetHashingStates();
+   if (recovered > 0) {
+     logger2.info({ recovered }, "Recovered lingering hashing leases");
+   }
+   const snapshotsRepo = new SnapshotsRepo(db);
+   const outbox = new Outbox(db, logger2);
+   const clients = createClients(config, logger2);
+   const rules = await fetchVisitorRules(clients, logger2);
+   const compiled = compileRulesBundle(rules);
+   const hasher = new Hasher({ rootPath: config.rootPath, filesRepo, logger: logger2 });
+   const runtime = {
+     config,
+     logger: logger2,
+     db,
+     outbox,
+     clients,
+     rules: compiled,
+     filesRepo,
+     snapshotsRepo,
+     hasher
+   };
+   return runtime;
+ }
+ function toPosixRelative(root, target) {
+   const rel = path.relative(root, target);
+   if (!rel || rel === "") return "";
+   return rel.split(path.sep).join("/");
+ }
+ function isInsideRoot(root, target) {
+   const rel = path.relative(root, target);
+   return rel === "" || !rel.startsWith("..") && !path.isAbsolute(rel);
+ }
+
+ // src/fs/Walker.ts
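+ // Initial inventory: recursive lstat-based walk that prunes excluded
+ // directories, upserts matching files, and finally marks anything not
+ // seen during the walk as missing.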
+ var EMPTY_STATS = {
+   processed: 0,
+   skipped: 0,
+   dirtied: 0,
+   missing: 0
+ };
+ function cloneStats(stats) {
+   return { ...stats };
+ }
+ async function readSymlinkTarget(absPath, log) {
+   try {
+     return await fs4.readlink(absPath);
+   } catch (error) {
+     log.warn({ err: error, path: absPath }, "Failed to read symlink target");
+     return null;
+   }
+ }
+ async function walkDirectory(current, opts, stats) {
+   const dirLogger = opts.logger;
+   let dirents;
+   try {
+     dirents = await fs4.readdir(current, { withFileTypes: true });
+   } catch (error) {
+     dirLogger.warn({ err: error, path: current }, "Failed to read directory");
+     return;
+   }
+   for (const dirent of dirents) {
+     const absPath = path.join(current, dirent.name);
+     const relPath = toPosixRelative(opts.rootPath, absPath);
+     if (dirent.isDirectory()) {
+       if (shouldPruneDirectory(relPath, opts.bundle)) {
+         stats.skipped += 1;
+         continue;
+       }
+       await walkDirectory(absPath, opts, stats);
+       continue;
+     }
+     if (dirent.isSymbolicLink() || dirent.isFile()) {
+       let stat;
+       try {
+         stat = await fs4.lstat(absPath);
+       } catch (error) {
+         dirLogger.warn({ err: error, path: absPath }, "Failed to stat file");
+         continue;
+       }
+       stats.processed += 1;
+       if (!shouldIncludeFile(relPath, stat, opts.bundle)) {
+         stats.skipped += 1;
+         continue;
+       }
+       const target = dirent.isSymbolicLink() ? await readSymlinkTarget(absPath, dirLogger) : null;
+       const state = opts.filesRepo.upsertFromStat({
+         relPath,
+         displayPath: absPath,
+         stats: stat,
+         isSymlink: dirent.isSymbolicLink(),
+         symlinkTarget: target
+       });
+       if (state === "dirty") {
+         stats.dirtied += 1;
+       }
+       continue;
+     }
+     stats.skipped += 1;
+   }
+ }
+ async function runInventory(opts) {
+   const stats = cloneStats(EMPTY_STATS);
+   const startedAt = Date.now();
+   await walkDirectory(opts.rootPath, opts, stats);
+   const missing = opts.filesRepo.markMissingBefore(startedAt);
+   stats.missing = missing;
+   opts.logger.info({ ...stats }, "Completed initial inventory");
+   return stats;
+ }
+ async function sleep(ms) {
+   return new Promise((resolve) => setTimeout(resolve, ms));
+ }
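+ // Snapshot pipeline: the snapshot hash is derived from the sorted
+ // service_file_hash values of all clean files; the sync service is asked to
+ // create the snapshot, missing file contents are uploaded on demand, and
+ // status is polled with capped exponential backoff until READY.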
+ function computeSnapshot(filesRepo) {
+   const files = filesRepo.getCleanFilesForSnapshot();
+   const hashes = files.map((file) => file.service_file_hash).filter((hash) => typeof hash === "string");
+   const snapshotHash = SyncHttpClient.calculateSnapshotHash([...hashes].sort());
+   const { filesCount, totalSize } = filesRepo.getTotalsForSnapshot();
+   return {
+     snapshotHash,
+     files: files.map((file) => ({
+       file_path: file.rel_path,
+       file_hash: file.service_file_hash
+     })),
+     filesCount,
+     totalSize
+   };
+ }
+ async function uploadMissing(rootPath, missing, syncClient, logger2) {
+   if (!missing || missing.length === 0) return;
+   const map = /* @__PURE__ */ new Map();
+   for (const missingFile of missing) {
+     const absPath = path.join(rootPath, missingFile.file_path);
+     try {
+       const buffer = await fs4.readFile(absPath);
+       map.set(missingFile.file_hash, {
+         path: missingFile.file_path,
+         content: buffer
+       });
+     } catch (error) {
+       logger2.warn(
+         { err: error, relPath: missingFile.file_path },
+         "Failed to read missing file content"
+       );
+     }
+   }
+   if (map.size === 0) return;
+   await syncClient.uploadFileContent(map);
+ }
+ async function ensureSnapshotCreated(rootPath, computation, syncClient, logger2) {
+   const { snapshotHash, files } = computation;
+   let status = await syncClient.checkSnapshotStatus(snapshotHash);
+   if (status.status === "READY") {
+     logger2.info({ snapshotHash }, "Snapshot already READY");
+     return;
+   }
+   if (status.status === "NOT_FOUND" || status.status === "MISSING_CONTENT") {
+     status = await syncClient.createSnapshot(snapshotHash, files);
+   }
+   if (status.status === "MISSING_CONTENT" && status.missing_files?.length) {
+     logger2.info(
+       { missing: status.missing_files.length },
+       "Uploading missing file content"
+     );
+     await uploadMissing(rootPath, status.missing_files, syncClient, logger2);
+     status = await syncClient.createSnapshot(snapshotHash, files);
+   }
+   let attempt = 0;
+   while (status.status !== "READY") {
+     if (status.status === "FAILED") {
+       throw new Error(`Snapshot failed processing: ${JSON.stringify(status)}`);
+     }
+     const delay = Math.min(5e3, 1e3 * Math.max(1, 2 ** attempt));
+     await sleep(delay);
+     attempt += 1;
+     status = await syncClient.checkSnapshotStatus(snapshotHash);
+   }
+   logger2.info({ snapshotHash }, "Snapshot READY");
+ }
+ async function publishSnapshot(rootPath, filesRepo, snapshotsRepo, syncClient, logger2) {
+   const computation = computeSnapshot(filesRepo);
+   await ensureSnapshotCreated(rootPath, computation, syncClient, logger2);
+   const createdAt = Date.now();
+   snapshotsRepo.insert(
+     computation.snapshotHash,
+     computation.filesCount,
+     computation.totalSize,
+     createdAt
+   );
+   return {
+     snapshotHash: computation.snapshotHash,
+     filesCount: computation.filesCount,
+     totalSize: computation.totalSize,
+     status: "READY",
+     createdAt
+   };
+ }
+
+ // src/service/InitialSync.ts
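+ // One-shot pipeline: full inventory walk, drain the hash queue to empty,
+ // then publish a snapshot of the resulting clean state.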
+ async function runInitialSyncPipeline(runtime) {
+   const inventoryLogger = runtime.logger.child({ scope: "inventory" });
+   await runInventory({
+     rootPath: runtime.config.rootPath,
+     bundle: runtime.rules,
+     filesRepo: runtime.filesRepo,
+     logger: inventoryLogger
+   });
+   const hashLogger = runtime.logger.child({ scope: "hash" });
+   let hadWork = true;
+   while (hadWork) {
+     hadWork = await runtime.hasher.processBatch(runtime.config.hashBatchSize);
+     if (hadWork) {
+       hashLogger.debug("Hasher processed batch");
+     }
+   }
+   const syncLogger = runtime.logger.child({ scope: "snapshot" });
+   const result = await publishSnapshot(
+     runtime.config.rootPath,
+     runtime.filesRepo,
+     runtime.snapshotsRepo,
+     runtime.clients.sync,
+     syncLogger
+   );
+   return result;
+ }
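+ // chokidar wrapper: native watching is tried first with a polling fallback;
+ // awaitWriteFinish holds events back until a file's size has been stable
+ // for 1.5 s, which debounces in-progress writes.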
+ async function createChokidarWatcher(options, usePolling) {
+   const log = options.logger.child({
+     scope: "watcher",
+     mode: usePolling ? "polling" : "native"
+   });
+   const watcher = chokidar.watch(options.rootPath, {
+     ignored: options.ignored,
+     ignoreInitial: true,
+     persistent: true,
+     awaitWriteFinish: {
+       stabilityThreshold: 1500,
+       pollInterval: 100
+     },
+     atomic: true,
+     usePolling,
+     interval: usePolling ? 200 : void 0,
+     binaryInterval: usePolling ? 200 : void 0,
+     alwaysStat: true,
+     cwd: void 0,
+     depth: void 0
+   });
+   await new Promise((resolve, reject) => {
+     const onReady = () => {
+       watcher.off("error", onError);
+       log.info("Watcher ready");
+       resolve();
+     };
+     const onError = (err) => {
+       watcher.off("ready", onReady);
+       reject(err);
+     };
+     watcher.once("ready", onReady);
+     watcher.once("error", onError);
+   });
+   return { watcher, mode: usePolling ? "polling" : "native" };
+ }
+ function attachHandlers(watcher, options) {
+   const { handlers, logger: logger2 } = options;
+   const handle = (event, filePath, stats) => {
+     logger2.debug({ event, filePath }, "Watcher raw event");
+     Promise.resolve(handlers.onEvent(event, filePath, stats)).catch((error) => {
+       logger2.error(
+         { err: error, event, path: filePath },
+         "Watcher handler failed"
+       );
+     });
+   };
+   watcher.on("add", (filePath, stats) => handle("add", filePath, stats));
+   watcher.on("change", (filePath, stats) => handle("change", filePath, stats));
+   watcher.on("unlink", (filePath) => handle("unlink", filePath));
+   watcher.on("addDir", (dirPath) => handle("addDir", dirPath));
+   watcher.on("unlinkDir", (dirPath) => handle("unlinkDir", dirPath));
+   watcher.on("error", (error) => {
+     logger2.error({ err: error }, "Watcher error");
+   });
+   if (handlers.onReady) {
+     handlers.onReady();
+     logger2.debug("Watcher ready callback executed");
+   }
+ }
+ async function startWatcher(options) {
+   try {
+     const result = await createChokidarWatcher(options, false);
+     attachHandlers(result.watcher, options);
+     return result;
+   } catch (error) {
+     options.logger.warn(
+       { err: error },
+       "Native watcher failed, falling back to polling"
+     );
+     const result = await createChokidarWatcher(options, true);
+     attachHandlers(result.watcher, options);
+     return result;
+   }
+ }
+
+ // src/sync/HeartbeatProtocol.ts
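+ // A heartbeat simply re-checks the status of the latest stored snapshot,
+ // presumably to keep the root's registration with the sync service fresh
+ // during quiet periods.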
+ async function sendHeartbeat(rootId, snapshotsRepo, syncClient, logger2) {
+   const latest = snapshotsRepo.getLatest();
+   if (latest) {
+     await syncClient.checkSnapshotStatus(latest.snapshot_hash);
+   }
+   logger2.debug({ rootId }, "Heartbeat sent");
+ }
+
+ // src/service/State.ts
+ var ServiceState = class {
+   constructor() {
+     this.lastChangeAt = Date.now();
+     this.lastSnapshotReadyAt = Date.now();
+     this.lastHeartbeatEnqueuedAt = 0;
+   }
+   updateChange(timestamp = Date.now()) {
+     this.lastChangeAt = timestamp;
+   }
+   updateSnapshotReady(timestamp = Date.now()) {
+     this.lastSnapshotReadyAt = timestamp;
+   }
+   updateHeartbeat(timestamp = Date.now()) {
+     this.lastHeartbeatEnqueuedAt = timestamp;
+   }
+ };
+
+ // src/service/ServiceLoops.ts
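+ // Continuous mode: watcher events mark rows dirty and arm a debounced
+ // snapshot job; four background loops (hashing, fs_control jobs, heartbeat
+ // scheduling, requeue of timed-out jobs) run until shutdown. Snapshot jobs
+ // are retried with backoff while any files are still dirty or hashing.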
+ function safeParse(input) {
+   try {
+     return JSON.parse(input);
+   } catch {
+     return void 0;
+   }
+ }
+ async function sleep2(ms) {
+   return new Promise((resolve) => setTimeout(resolve, ms));
+ }
+ function computeBackoff(attempts) {
+   const base = attempts ?? 0;
+   const delay = 1e3 * 2 ** base;
+   return Math.min(delay, 6e4);
+ }
+ async function readSymlinkTarget2(absPath) {
+   try {
+     return await fs4.readlink(absPath);
+   } catch {
+     return null;
+   }
+ }
+ var SnapshotScheduler = class {
+   constructor(rootId, debounceMs, outbox, state) {
+     this.rootId = rootId;
+     this.debounceMs = debounceMs;
+     this.outbox = outbox;
+     this.state = state;
+     this.timer = null;
+   }
+   trigger() {
+     this.state.updateChange();
+     if (this.timer) {
+       clearTimeout(this.timer);
+     }
+     this.timer = setTimeout(() => {
+       this.outbox.enqueueSnapshot(this.rootId);
+       this.timer = null;
+     }, this.debounceMs);
+   }
+   cancel() {
+     if (this.timer) {
+       clearTimeout(this.timer);
+       this.timer = null;
+     }
+   }
+ };
+ var ServiceRunner = class {
+   constructor(runtime) {
+     this.runtime = runtime;
+     this.state = new ServiceState();
+     this.watcher = null;
+     this.running = false;
+     this.fsControlLeaseOwner = `fs-control-${process.pid}-${Date.now()}`;
+     this.tasks = /* @__PURE__ */ new Set();
+     this.buffering = false;
+     this.bufferedEvents = [];
+     this.scheduler = new SnapshotScheduler(
+       runtime.config.rootId,
+       runtime.config.snapshotDebounceMs,
+       runtime.outbox,
+       this.state
+     );
+     this.ignoredPredicate = buildWatcherIgnored(runtime.rules);
+   }
+   recordInitialSnapshot(timestamp) {
+     this.state.updateSnapshotReady(timestamp);
+     this.state.updateChange(timestamp);
+     this.state.updateHeartbeat(timestamp);
+   }
+   async start() {
+     if (this.running) return;
+     await this.prepareWatcher(false);
+     await this.startLoops();
+   }
+   async stop() {
+     if (!this.running) return;
+     this.running = false;
+     this.scheduler.cancel();
+     if (this.watcher) {
+       await this.watcher.close();
+       this.watcher = null;
+     }
+     await Promise.all([...this.tasks]);
+     await this.runtime.hasher.close();
+     this.runtime.clients.close();
+     this.runtime.db.close();
+   }
+   getServiceStateSnapshot() {
+     return {
+       lastChangeAt: this.state.lastChangeAt,
+       lastSnapshotReadyAt: this.state.lastSnapshotReadyAt,
+       lastHeartbeatEnqueuedAt: this.state.lastHeartbeatEnqueuedAt,
+       watcherReady: this.watcher !== null,
+       buffering: this.buffering
+     };
+   }
+   runBackground(fn) {
+     const task = fn();
+     this.tasks.add(task);
+     task.catch((error) => {
+       if (this.running) {
+         this.runtime.logger.error({ err: error }, "Background task failed");
+       }
+     }).finally(() => {
+       this.tasks.delete(task);
+     });
+   }
+   async prepareWatcher(bufferOnly) {
+     const { rootPath } = this.runtime.config;
+     const logger2 = this.runtime.logger.child({ scope: "watcher" });
+     this.buffering = bufferOnly;
+     const { watcher, mode } = await startWatcher({
+       rootPath,
+       ignored: this.ignoredPredicate,
+       logger: logger2,
+       handlers: {
+         onEvent: (event, absPath, stats) => {
+           logger2.debug({ event, absPath }, "Watcher event received");
+           if (this.buffering) {
+             this.bufferedEvents.push({ event, absPath, stats });
+             return Promise.resolve();
+           }
+           return this.handleEvent(event, absPath, stats);
+         }
+       }
+     });
+     this.watcher = watcher;
+     logger2.debug({ watched: watcher.getWatched() }, "Watcher targets");
+     logger2.info({ mode, buffering: bufferOnly }, "File watcher started");
+   }
+   async enableWatcherProcessing() {
+     if (!this.buffering) {
+       return;
+     }
+     this.buffering = false;
+     this.runtime.logger.debug(
+       { buffered: this.bufferedEvents.length },
+       "Watcher buffering disabled"
+     );
+     if (this.bufferedEvents.length === 0) return;
+     for (const buffered of this.bufferedEvents) {
+       await this.handleEvent(buffered.event, buffered.absPath, buffered.stats);
+     }
+     this.bufferedEvents = [];
+   }
+   async startLoops() {
+     if (this.running) return;
+     this.running = true;
+     this.runBackground(() => this.hashLoop());
+     this.runBackground(() => this.fsControlLoop());
+     this.runBackground(() => this.heartbeatLoop());
+     this.runBackground(() => this.requeueLoop());
+     this.runtime.logger.debug("Background loops started");
+   }
+   async handleEvent(event, absPath, stats) {
+     if (!this.running) return;
+     const root = this.runtime.config.rootPath;
+     const absolute = path.isAbsolute(absPath) ? absPath : path.join(root, absPath);
+     if (!isInsideRoot(root, absolute)) {
+       return;
+     }
+     switch (event) {
+       case "add":
+       case "change":
+         await this.handleAddChange(absolute, stats);
+         break;
+       case "unlink":
+         await this.handleUnlink(absolute);
+         break;
+       case "unlinkDir":
+         await this.handleUnlinkDir(absolute);
+         break;
+     }
+   }
+   async handleAddChange(absPath, _stats) {
+     let fileStats;
+     try {
+       fileStats = await fs4.lstat(absPath);
+     } catch (error) {
+       this.runtime.logger.warn(
+         { err: error, path: absPath },
+         "Failed to lstat path"
+       );
+       return;
+     }
+     const relPath = toPosixRelative(this.runtime.config.rootPath, absPath);
+     if (!shouldIncludeFile(relPath, fileStats, this.runtime.rules)) {
+       this.runtime.logger.debug({ relPath }, "Watcher event ignored by rules");
+       return;
+     }
+     const isSymlink = fileStats.isSymbolicLink();
+     const target = isSymlink ? await readSymlinkTarget2(absPath) : null;
+     const state = this.runtime.filesRepo.upsertFromStat({
+       relPath,
+       displayPath: absPath,
+       stats: fileStats,
+       isSymlink,
+       symlinkTarget: target
+     });
+     if (state === "dirty") {
+       this.scheduler.trigger();
+     }
+   }
+   async handleUnlink(absPath) {
+     const relPath = toPosixRelative(this.runtime.config.rootPath, absPath);
+     const changed = this.runtime.filesRepo.markMissing(relPath);
+     if (changed > 0) {
+       this.scheduler.trigger();
+     }
+   }
+   async handleUnlinkDir(absPath) {
+     const relPath = toPosixRelative(this.runtime.config.rootPath, absPath);
+     const changed = this.runtime.filesRepo.markMissingByPrefix(relPath);
+     if (changed > 0) {
+       this.scheduler.trigger();
+     }
+   }
+   async hashLoop() {
+     while (this.running) {
+       const processed = await this.runtime.hasher.processBatch(
+         this.runtime.config.hashBatchSize
+       );
+       if (!processed) {
+         await sleep2(500);
+       }
+     }
+   }
+   async fsControlLoop() {
+     const log = this.runtime.logger.child({ scope: "fs-control-worker" });
+     while (this.running) {
+       const job = this.runtime.outbox.claimFsControlJob(
+         this.fsControlLeaseOwner
+       );
+       if (!job) {
+         await sleep2(this.runtime.config.queuePollIntervalMs);
+         continue;
+       }
+       const payload = job.data ? safeParse(job.data) ?? {} : {};
+       const jobKind = job.kind ?? (typeof payload.kind === "string" ? payload.kind : void 0);
+       if (!jobKind) {
+         log.warn(
+           { jobId: job.id },
+           "fs_control job missing kind, acknowledging"
+         );
+         this.runtime.outbox.ack(job.id, this.fsControlLeaseOwner);
+         continue;
+       }
+       if (jobKind === "snapshot") {
+         await this.handleSnapshotJob(job, log);
+       } else if (jobKind === "heartbeat") {
+         await this.handleHeartbeatJob(job, log);
+       } else {
+         log.warn({ jobId: job.id, kind: jobKind }, "Unknown fs_control job");
+         this.runtime.outbox.ack(job.id, this.fsControlLeaseOwner);
+       }
+     }
+   }
+   async heartbeatLoop() {
+     const log = this.runtime.logger.child({ scope: "heartbeat-loop" });
+     while (this.running) {
+       const now = Date.now();
+       const sinceChange = now - this.state.lastChangeAt;
+       const sinceHeartbeat = now - this.state.lastHeartbeatEnqueuedAt;
+       if (sinceChange >= this.runtime.config.heartbeatIntervalMs && sinceHeartbeat >= this.runtime.config.heartbeatIntervalMs) {
+         this.runtime.outbox.enqueueHeartbeat(this.runtime.config.rootId);
+         this.state.updateHeartbeat(now);
+         log.debug("Heartbeat enqueued");
+       }
+       await sleep2(this.runtime.config.heartbeatCheckIntervalMs);
+     }
+     log.info("Heartbeat loop stopped");
+   }
+   async requeueLoop() {
+     while (this.running) {
+       const count = this.runtime.outbox.requeueTimedOut();
+       if (count > 0) {
+         this.runtime.logger.info({ count }, "Requeued timed-out jobs");
+       }
+       await sleep2(this.runtime.config.heartbeatCheckIntervalMs);
+     }
+   }
+   async handleSnapshotJob(job, log) {
+     if (this.runtime.filesRepo.countByState("dirty") > 0 || this.runtime.filesRepo.countByState("hashing") > 0) {
+       const delay = computeBackoff(job.attempts);
+       this.runtime.outbox.retry(job.id, this.fsControlLeaseOwner, delay);
+       await sleep2(200);
+       return;
+     }
+     try {
+       const result = await publishSnapshot(
+         this.runtime.config.rootPath,
+         this.runtime.filesRepo,
+         this.runtime.snapshotsRepo,
+         this.runtime.clients.sync,
+         log
+       );
+       this.runtime.outbox.ack(job.id, this.fsControlLeaseOwner);
+       this.state.updateSnapshotReady(result.createdAt);
+       log.info({ snapshotHash: result.snapshotHash }, "Snapshot job completed");
+     } catch (error) {
+       log.warn({ err: error }, "Snapshot job failed");
+       const delay = computeBackoff(job.attempts);
+       this.runtime.outbox.retry(job.id, this.fsControlLeaseOwner, delay);
+       await sleep2(delay);
+     }
+   }
+   async handleHeartbeatJob(job, log) {
+     try {
+       await sendHeartbeat(
+         this.runtime.config.rootId,
+         this.runtime.snapshotsRepo,
+         this.runtime.clients.sync,
+         log
+       );
+       this.runtime.outbox.ack(job.id, this.fsControlLeaseOwner);
+       this.state.updateHeartbeat(Date.now());
+     } catch (error) {
+       const delay = computeBackoff(job.attempts);
+       this.runtime.outbox.retry(job.id, this.fsControlLeaseOwner, delay);
+       log.warn({ err: error }, "Heartbeat failed; retry scheduled");
+     }
+   }
+ };
+
+ // src/index.ts
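+ // Public entry points. runInitialSync performs a single scan-hash-publish
+ // pass and cleans up; runService additionally watches for changes until
+ // SIGINT/SIGTERM. Illustrative usage (hypothetical consumer snippet;
+ // params.token falls back to the CODERULE_TOKEN env var):
+ //   import { runService } from '@coderule/mcp';
+ //   await runService({ token: process.env.CODERULE_TOKEN });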
+ async function cleanupRuntime(runner, runtime) {
+   if (runner) {
+     await runner.stop();
+     return;
+   }
+   if (runtime) {
+     await runtime.hasher.close();
+     runtime.clients.close();
+     runtime.db.close();
+   }
+ }
+ function awaitShutdownSignals() {
+   return new Promise((resolve) => {
+     const signals = ["SIGINT", "SIGTERM"];
+     const handler = (signal) => {
+       for (const sig of signals) {
+         process.off(sig, handler);
+       }
+       resolve(signal);
+     };
+     for (const sig of signals) {
+       process.on(sig, handler);
+     }
+   });
+ }
+ async function runInitialSync(params) {
+   const runtime = await bootstrap(params);
+   try {
+     const result = await runInitialSyncPipeline(runtime);
+     runtime.logger.info(
+       {
+         snapshotHash: result.snapshotHash,
+         filesCount: result.filesCount,
+         totalSize: result.totalSize
+       },
+       "Initial sync completed"
+     );
+     return result;
+   } finally {
+     await runtime.hasher.close();
+     runtime.clients.close();
+     runtime.db.close();
+   }
+ }
+ async function runService(params) {
+   const runtime = await bootstrap(params);
+   let runner;
+   try {
+     runner = new ServiceRunner(runtime);
+     await runner.prepareWatcher(true);
+     const initial = await runInitialSyncPipeline(runtime);
+     runtime.logger.info(
+       {
+         snapshotHash: initial.snapshotHash,
+         filesCount: initial.filesCount
+       },
+       "Initial sync completed; entering continuous mode"
+     );
+     runner.recordInitialSnapshot(initial.createdAt);
+     await runner.startLoops();
+     await runner.enableWatcherProcessing();
+     runtime.logger.info("Coderule scanner service is running");
+     const signal = await awaitShutdownSignals();
+     runtime.logger.info({ signal }, "Shutdown signal received");
+   } finally {
+     await cleanupRuntime(runner, runtime);
+   }
+ }
+
+ export { runInitialSync, runService };
+ //# sourceMappingURL=index.js.map