@coderule/mcp 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs ADDED
@@ -0,0 +1,1661 @@
1
+ 'use strict';
2
+
3
+ var pino = require('pino');
4
+ var crypto = require('crypto');
5
+ var fs4 = require('fs/promises');
6
+ var path = require('path');
7
+ var envPaths = require('env-paths');
8
+ var Database = require('better-sqlite3');
9
+ var qulite = require('@coderule/qulite');
10
+ var clients = require('@coderule/clients');
11
+ var fs2 = require('fs');
12
+ var worker_threads = require('worker_threads');
13
+ var chokidar = require('chokidar');
14
+
15
function _interopDefault(e) {
  // CJS/ESM interop shim: a genuine ES-module namespace passes through
  // untouched; anything else is wrapped so `.default` access is uniform.
  if (e && e.__esModule) {
    return e;
  }
  return { default: e };
}
16
+
17
+ var pino__default = /*#__PURE__*/_interopDefault(pino);
18
+ var fs4__default = /*#__PURE__*/_interopDefault(fs4);
19
+ var path__default = /*#__PURE__*/_interopDefault(path);
20
+ var envPaths__default = /*#__PURE__*/_interopDefault(envPaths);
21
+ var Database__default = /*#__PURE__*/_interopDefault(Database);
22
+ var fs2__default = /*#__PURE__*/_interopDefault(fs2);
23
+ var chokidar__default = /*#__PURE__*/_interopDefault(chokidar);
24
+
25
// node_modules/tsup/assets/cjs_shims.js
// Polyfill for import.meta.url inside a CJS bundle: under Node (no
// `document`) derive it from __filename; in a browser fall back to the
// current script's src or a "main.js" URL relative to the document base.
var getImportMetaUrl = () => {
  if (typeof document === "undefined") {
    return new URL(`file:${__filename}`).href;
  }
  return (document.currentScript && document.currentScript.src) || new URL("main.js", document.baseURI).href;
};
var importMetaUrl = /* @__PURE__ */ getImportMetaUrl();
28
// Log level is tunable via CODERULE_LOG_LEVEL; defaults to "info".
var level = process.env.CODERULE_LOG_LEVEL ?? "info";
// Root pino logger for the whole process. Writes to stderr so stdout stays
// free for protocol traffic; ISO timestamps for cross-host correlation.
var baseLogger = pino__default.default(
  {
    level,
    name: "coderule-scanner",
    timestamp: pino__default.default.stdTimeFunctions.isoTime
  },
  process.stderr
);
// Module-wide logger alias used by the rest of this bundle.
var logger = baseLogger;
38
+
39
// src/config/Defaults.ts
// Service tuning defaults; each can be overridden through the matching
// CODERULE_* environment variable during resolveConfig().
var DEFAULT_SNAPSHOT_DEBOUNCE_MS = 1000;
var DEFAULT_HEARTBEAT_INTERVAL_MS = 60000;
var DEFAULT_HEARTBEAT_CHECK_INTERVAL_MS = 5000;
var DEFAULT_QUEUE_POLL_INTERVAL_MS = 500;
var DEFAULT_HASH_BATCH_SIZE = 32;
var DEFAULT_MAX_SNAPSHOT_ATTEMPTS = 5;
var DEFAULT_HTTP_TIMEOUT_MS = 30000;
47
+
48
// src/config/Configurator.ts
// Formatter used when CODERULE_RETRIEVAL_FORMATTER is unset.
var DEFAULT_RETRIEVAL_FORMATTER = "standard";
// Baseline tunables copied into every resolved config; resolveConfig()
// may override each field from its CODERULE_* environment variable.
var DEFAULTS = {
  snapshotDebounceMs: DEFAULT_SNAPSHOT_DEBOUNCE_MS,
  heartbeatIntervalMs: DEFAULT_HEARTBEAT_INTERVAL_MS,
  heartbeatCheckIntervalMs: DEFAULT_HEARTBEAT_CHECK_INTERVAL_MS,
  queuePollIntervalMs: DEFAULT_QUEUE_POLL_INTERVAL_MS,
  hashBatchSize: DEFAULT_HASH_BATCH_SIZE,
  maxSnapshotAttempts: DEFAULT_MAX_SNAPSHOT_ATTEMPTS
};
58
function normalizeRoot(root) {
  // Canonical root form: absolute, normalized, and always '/'-separated so
  // the derived rootId hash is stable regardless of platform separators.
  const absolute = path__default.default.normalize(
    path__default.default.resolve(root)
  );
  return absolute.split(path__default.default.sep).join("/");
}
63
function sha256(input) {
  // Hex-encoded SHA-256 digest of the given string/buffer.
  const hash = crypto.createHash("sha256");
  hash.update(input);
  return hash.digest("hex");
}
66
function parseInteger(value, fallback) {
  // Env values are optional strings: absent or empty means "use the default".
  if (!value) {
    return fallback;
  }
  const parsed = Number.parseInt(value, 10);
  // Only strictly positive integers are accepted; anything else is a
  // configuration error worth failing loudly on.
  const isValid = !Number.isNaN(parsed) && parsed > 0;
  if (!isValid) {
    throw new Error(`Invalid integer value: ${value}`);
  }
  return parsed;
}
74
function parseFormatter(value) {
  // Retrieval output style: case-insensitive "standard" | "compact";
  // unset falls back to the module default.
  if (!value) {
    return DEFAULT_RETRIEVAL_FORMATTER;
  }
  const normalized = value.toLowerCase();
  if (normalized !== "standard" && normalized !== "compact") {
    throw new Error(
      `Invalid CODERULE_RETRIEVAL_FORMATTER: ${value}. Expected "standard" or "compact"`
    );
  }
  return normalized;
}
84
async function resolveConfig({
  token
}) {
  // Resolve the effective runtime configuration from params + environment.
  // Throws when no auth token is available or when a numeric env override
  // is not a positive integer.
  const resolvedToken = token ?? process.env.CODERULE_TOKEN;
  if (!resolvedToken) {
    throw new Error(
      "Missing token: provide params.token or CODERULE_TOKEN env"
    );
  }
  // The watched root defaults to cwd; its normalized form is hashed into a
  // stable id that names the per-root SQLite database.
  const rootCandidate = process.env.CODERULE_ROOT || process.cwd();
  const rootPath = path__default.default.resolve(rootCandidate);
  const rootId = sha256(normalizeRoot(rootPath));
  const dataDir = process.env.CODERULE_DATA_DIR || envPaths__default.default("coderule").data;
  const watchDir = path__default.default.join(dataDir, "watch");
  await fs4__default.default.mkdir(watchDir, { recursive: true });
  const dbPath = path__default.default.join(watchDir, `${rootId}.sqlite`);
  const baseConfig = {
    token: resolvedToken,
    rootPath,
    rootId,
    dbPath,
    dataDir,
    authBaseUrl: process.env.CODERULE_AUTH_URL,
    astBaseUrl: process.env.CODERULE_AST_URL,
    syncBaseUrl: process.env.CODERULE_SYNC_URL,
    retrievalBaseUrl: process.env.CODERULE_RETRIEVAL_URL,
    httpTimeout: void 0,
    snapshotDebounceMs: DEFAULTS.snapshotDebounceMs,
    heartbeatIntervalMs: DEFAULTS.heartbeatIntervalMs,
    heartbeatCheckIntervalMs: DEFAULTS.heartbeatCheckIntervalMs,
    queuePollIntervalMs: DEFAULTS.queuePollIntervalMs,
    hashBatchSize: DEFAULTS.hashBatchSize,
    maxSnapshotAttempts: DEFAULTS.maxSnapshotAttempts,
    retrievalFormatter: parseFormatter(
      process.env.CODERULE_RETRIEVAL_FORMATTER
    )
  };
  // Numeric env overrides: each applies only when its variable is set and
  // non-empty; parseInteger throws on non-positive or non-numeric input.
  const envOverrides = [
    ["CODERULE_SNAPSHOT_DEBOUNCE_MS", "snapshotDebounceMs"],
    ["CODERULE_HEARTBEAT_INTERVAL_MS", "heartbeatIntervalMs"],
    ["CODERULE_HEARTBEAT_CHECK_INTERVAL_MS", "heartbeatCheckIntervalMs"],
    ["CODERULE_QUEUE_POLL_INTERVAL_MS", "queuePollIntervalMs"],
    ["CODERULE_HASH_BATCH_SIZE", "hashBatchSize"],
    ["CODERULE_MAX_SNAPSHOT_ATTEMPTS", "maxSnapshotAttempts"]
  ];
  for (const [envName, key] of envOverrides) {
    if (process.env[envName]) {
      baseConfig[key] = parseInteger(process.env[envName], baseConfig[key]);
    }
  }
  // The HTTP timeout is always resolved (env value or built-in default).
  baseConfig.httpTimeout = parseInteger(
    process.env.CODERULE_HTTP_TIMEOUT,
    DEFAULT_HTTP_TIMEOUT_MS
  );
  logger.debug(
    {
      rootPath,
      dbPath,
      dataDir,
      authBaseUrl: baseConfig.authBaseUrl,
      astBaseUrl: baseConfig.astBaseUrl,
      syncBaseUrl: baseConfig.syncBaseUrl
    },
    "Resolved configuration"
  );
  return baseConfig;
}
175
+
176
// src/db/Schema.ts
// DDL for the per-root scanner database. `files` is keyed by rel_path and
// carries stat metadata, the two content hashes, and the hash_state machine
// columns (hash_owner / hash_lease_expires_at / hash_started_at track the
// current hashing lease).
var FILES_SCHEMA = `
CREATE TABLE IF NOT EXISTS files (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  rel_path TEXT NOT NULL,
  display_path TEXT NOT NULL,
  size INTEGER NOT NULL,
  mtime_ns INTEGER NOT NULL,
  mode INTEGER,
  ino TEXT,
  dev TEXT,
  is_symlink INTEGER NOT NULL DEFAULT 0,
  target TEXT,
  content_sha256 TEXT,
  service_file_hash TEXT,
  last_seen_ts INTEGER NOT NULL,
  hash_state TEXT NOT NULL,
  hash_owner TEXT,
  hash_lease_expires_at INTEGER,
  hash_started_at INTEGER,
  UNIQUE(rel_path)
);
CREATE INDEX IF NOT EXISTS idx_files_hash_state ON files(hash_state);
CREATE INDEX IF NOT EXISTS idx_files_content_sha ON files(content_sha256);
CREATE INDEX IF NOT EXISTS idx_files_service_hash ON files(service_file_hash);
`;
// Append-only record of snapshots that have been published.
var SNAPSHOTS_SCHEMA = `
CREATE TABLE IF NOT EXISTS snapshots (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  snapshot_hash TEXT NOT NULL,
  files_count INTEGER NOT NULL,
  total_size INTEGER NOT NULL,
  created_at INTEGER NOT NULL
);
`;
211
+
212
// src/db/Database.ts
function safeAlter(db, sql) {
  // Execute an ALTER TABLE, treating "duplicate column name" as success so
  // additive migrations are idempotent across restarts.
  try {
    db.exec(sql);
  } catch (error) {
    const message = error?.message;
    const isDuplicateColumn = typeof message === "string" && message.includes("duplicate column name");
    if (!isDuplicateColumn) {
      throw error;
    }
  }
}
223
function applyMigrations(db, logger2) {
  // Additive migrations for databases created before the lease columns
  // existed; safeAlter makes each ALTER idempotent.
  const columnAdditions = [
    "ALTER TABLE files ADD COLUMN hash_owner TEXT",
    "ALTER TABLE files ADD COLUMN hash_lease_expires_at INTEGER",
    "ALTER TABLE files ADD COLUMN hash_started_at INTEGER"
  ];
  for (const sql of columnAdditions) {
    try {
      safeAlter(db, sql);
    } catch (error) {
      // Migration failures are fatal: log with the offending SQL and abort.
      logger2.error({ err: error, sql }, "Database migration failed");
      throw error;
    }
  }
  // Supporting index for lease-expiry scans; safe to re-run.
  db.exec(
    "CREATE INDEX IF NOT EXISTS idx_files_hash_lease ON files(hash_state, hash_lease_expires_at)"
  );
}
241
function openDatabase(dbPath, logger2) {
  // Open (or create) the per-root SQLite database, apply the base schema
  // inside a transaction, then run additive migrations. The handle is
  // closed on every failure path so a bad open never leaks the file
  // descriptor or a WAL lock.
  const db = new Database__default.default(dbPath, { verbose: void 0 });
  logger2.info({ dbPath }, "Opened SQLite database");
  // WAL + NORMAL sync: concurrent-reader friendly with bounded durability
  // cost; busy_timeout avoids immediate SQLITE_BUSY under contention.
  db.pragma("journal_mode = WAL");
  db.pragma("synchronous = NORMAL");
  db.pragma("busy_timeout = 5000");
  db.pragma("foreign_keys = ON");
  db.exec("BEGIN");
  try {
    db.exec(FILES_SCHEMA);
    db.exec(SNAPSHOTS_SCHEMA);
    db.exec("COMMIT");
  } catch (error) {
    db.exec("ROLLBACK");
    db.close();
    throw error;
  }
  try {
    applyMigrations(db, logger2);
  } catch (error) {
    // Fix: previously a failed migration propagated with the handle still
    // open, leaking the connection; close it like the schema path does.
    db.close();
    throw error;
  }
  return db;
}
261
+
262
// src/db/FilesRepo.ts
var FilesRepo = class {
  // Repository over the `files` table. Each row moves through a hash-state
  // machine: 'dirty' (needs hashing) -> 'hashing' (leased to one worker
  // with an expiry) -> 'clean' (both hashes stored), plus 'missing' for
  // deleted paths. All SQL is prepared once here.
  constructor(db) {
    this.db = db;
    this.selectByRelPath = this.db.prepare(
      "SELECT * FROM files WHERE rel_path = ?"
    );
    // New rows start with NULL hashes and cleared lease columns; the
    // hash_state value is supplied by the caller (always 'dirty' today).
    this.insertStmt = this.db.prepare(
      `INSERT INTO files (
        rel_path,
        display_path,
        size,
        mtime_ns,
        mode,
        ino,
        dev,
        is_symlink,
        target,
        content_sha256,
        service_file_hash,
        last_seen_ts,
        hash_state,
        hash_owner,
        hash_lease_expires_at,
        hash_started_at
      ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, NULL, NULL, ?, ?, NULL, NULL, NULL)`
    );
    // The CASE WHEN trick: a row that remains in 'hashing' keeps its lease
    // columns; any other resulting state clears them.
    this.updateStmt = this.db.prepare(
      `UPDATE files SET
        display_path = ?,
        size = ?,
        mtime_ns = ?,
        mode = ?,
        ino = ?,
        dev = ?,
        is_symlink = ?,
        target = ?,
        content_sha256 = ?,
        service_file_hash = ?,
        last_seen_ts = ?,
        hash_state = ?,
        hash_owner = CASE WHEN ? = 'hashing' THEN hash_owner ELSE NULL END,
        hash_lease_expires_at = CASE WHEN ? = 'hashing' THEN hash_lease_expires_at ELSE NULL END,
        hash_started_at = CASE WHEN ? = 'hashing' THEN hash_started_at ELSE NULL END
      WHERE id = ?`
    );
    // Marking missing also drops stored hashes so a reappearing file is
    // re-hashed from scratch.
    this.markMissingStmt = this.db.prepare(
      `UPDATE files
      SET hash_state = 'missing', content_sha256 = NULL, service_file_hash = NULL, last_seen_ts = ?,
        hash_owner = NULL, hash_lease_expires_at = NULL, hash_started_at = NULL
      WHERE rel_path = ?`
    );
    // Directory removal: the path itself plus everything beneath `<prefix>/`.
    this.markMissingPrefixStmt = this.db.prepare(
      `UPDATE files
      SET hash_state = 'missing', content_sha256 = NULL, service_file_hash = NULL,
        hash_owner = NULL, hash_lease_expires_at = NULL, hash_started_at = NULL
      WHERE (rel_path = ? OR rel_path LIKE (? || '/%')) AND hash_state != 'missing'`
    );
    this.markDirtyStmt = this.db.prepare(
      `UPDATE files
      SET hash_state = 'dirty', last_seen_ts = ?,
        hash_owner = NULL, hash_lease_expires_at = NULL, hash_started_at = NULL
      WHERE rel_path = ?`
    );
    // Atomically lease the oldest dirty rows: flip to 'hashing', stamp the
    // owner/lease, and return the claimed rows via RETURNING.
    this.claimDirtyStmt = this.db.prepare(
      `WITH candidates AS (
        SELECT id
        FROM files
        WHERE hash_state = 'dirty'
        ORDER BY last_seen_ts ASC, id ASC
        LIMIT @limit
      )
      UPDATE files
      SET hash_state = 'hashing',
        hash_owner = @owner,
        hash_lease_expires_at = @lease_expires_at,
        hash_started_at = @now
      WHERE id IN candidates
      RETURNING *`
    );
    this.markDirtyByIdStmt = this.db.prepare(
      `UPDATE files
      SET hash_state = 'dirty', hash_owner = NULL, hash_lease_expires_at = NULL, hash_started_at = NULL
      WHERE id = ?`
    );
    // Successful hashing: store both digests, go 'clean', release the lease.
    this.applyHashesStmt = this.db.prepare(
      `UPDATE files
      SET content_sha256 = ?, service_file_hash = ?, hash_state = 'clean',
        hash_owner = NULL, hash_lease_expires_at = NULL, hash_started_at = NULL
      WHERE id = ?`
    );
    // Rows whose lease-holder died: lease expired, return them to 'dirty'.
    this.requeueExpiredHashingStmt = this.db.prepare(
      `UPDATE files
      SET hash_state = 'dirty', hash_owner = NULL, hash_lease_expires_at = NULL, hash_started_at = NULL
      WHERE hash_state = 'hashing'
        AND hash_lease_expires_at IS NOT NULL
        AND hash_lease_expires_at <= ?`
    );
    // Startup recovery: unconditionally return every 'hashing' row to 'dirty'.
    this.resetHashingStmt = this.db.prepare(
      `UPDATE files
      SET hash_state = 'dirty', hash_owner = NULL, hash_lease_expires_at = NULL, hash_started_at = NULL
      WHERE hash_state = 'hashing'`
    );
    // Snapshot contents: deterministic ordering by rel_path.
    this.selectCleanSnapshotStmt = this.db.prepare(
      `SELECT rel_path, service_file_hash, size
      FROM files
      WHERE hash_state = 'clean' AND service_file_hash IS NOT NULL
      ORDER BY rel_path ASC`
    );
    this.totalsStmt = this.db.prepare(
      `SELECT COUNT(*) AS files_count, COALESCE(SUM(size), 0) AS total_size
      FROM files
      WHERE hash_state = 'clean' AND service_file_hash IS NOT NULL`
    );
    // Full-walk reconciliation: anything not touched since `?` is gone.
    this.markMissingBeforeStmt = this.db.prepare(
      `UPDATE files
      SET hash_state = 'missing', content_sha256 = NULL, service_file_hash = NULL,
        hash_owner = NULL, hash_lease_expires_at = NULL, hash_started_at = NULL
      WHERE last_seen_ts < ? AND hash_state != 'missing'`
    );
    this.countByStateStmt = this.db.prepare(
      "SELECT COUNT(*) as count FROM files WHERE hash_state = ?"
    );
  }
  // Returns the row for relPath, or undefined when unknown.
  getByRelPath(relPath) {
    return this.selectByRelPath.get(relPath);
  }
  // Insert/refresh a row from an lstat result. Returns the row's resulting
  // hash_state ('dirty' when new/changed/previously-missing, otherwise the
  // existing state).
  upsertFromStat(params) {
    const now = Date.now();
    const { relPath, displayPath, stats, isSymlink, symlinkTarget } = params;
    const existing = this.getByRelPath(relPath);
    // mtime stored in integer nanoseconds for change detection.
    const mtimeNs = Math.trunc(stats.mtimeMs * 1e6);
    const ino = typeof stats.ino === "number" ? String(stats.ino) : null;
    const dev = typeof stats.dev === "number" ? String(stats.dev) : null;
    const mode = typeof stats.mode === "number" ? stats.mode : null;
    const isSymlinkInt = isSymlink ? 1 : 0;
    if (!existing) {
      // NOTE(review): symlinkTarget is bound directly — presumably callers
      // always pass a string or null (better-sqlite3 rejects undefined);
      // confirm against the walker.
      this.insertStmt.run(
        relPath,
        displayPath,
        stats.size,
        mtimeNs,
        mode,
        ino,
        dev,
        isSymlinkInt,
        symlinkTarget,
        now,
        "dirty"
      );
      return "dirty";
    }
    let nextState = existing.hash_state;
    let nextContent = existing.content_sha256;
    let nextServiceHash = existing.service_file_hash;
    // Any stat-visible difference (or a previously-missing row) forces a
    // re-hash: drop stored hashes and go back to 'dirty'.
    const changed = existing.size !== stats.size || existing.mtime_ns !== mtimeNs || existing.mode !== mode || existing.ino !== ino || existing.dev !== dev || existing.is_symlink !== isSymlinkInt || existing.target !== symlinkTarget;
    if (changed || existing.hash_state === "missing") {
      nextState = "dirty";
      nextContent = null;
      nextServiceHash = null;
    }
    // nextState is passed four times: once for the column and three times
    // for the CASE WHEN lease-preservation checks in updateStmt.
    this.updateStmt.run(
      displayPath,
      stats.size,
      mtimeNs,
      mode,
      ino,
      dev,
      isSymlinkInt,
      symlinkTarget,
      nextContent,
      nextServiceHash,
      now,
      nextState,
      nextState,
      nextState,
      nextState,
      existing.id
    );
    return nextState;
  }
  // Mark one path missing; returns the number of rows affected (0 or 1).
  markMissing(relPath) {
    const now = Date.now();
    const result = this.markMissingStmt.run(now, relPath);
    return result.changes ?? 0;
  }
  // Mark a directory subtree missing; returns affected row count.
  markMissingByPrefix(prefix) {
    const result = this.markMissingPrefixStmt.run(prefix, prefix);
    return result.changes ?? 0;
  }
  markDirty(relPath) {
    const now = Date.now();
    this.markDirtyStmt.run(now, relPath);
  }
  // Reconcile after a full walk: rows last seen before `timestamp` are gone.
  markMissingBefore(timestamp) {
    const result = this.markMissingBeforeStmt.run(timestamp);
    return result.changes ?? 0;
  }
  // Lease up to `limit` oldest dirty rows to `owner` for `leaseMs` ms and
  // return them. Empty array when limit <= 0 or nothing is dirty.
  claimDirty(limit, owner, leaseMs) {
    if (limit <= 0) {
      return [];
    }
    const now = Date.now();
    return this.claimDirtyStmt.all({
      limit,
      owner,
      lease_expires_at: now + leaseMs,
      now
    });
  }
  // Release a set of rows back to 'dirty' (used after hash failures).
  markDirtyByIds(ids) {
    if (!ids.length) return;
    const tx = this.db.transaction((batch) => {
      for (const id of batch) {
        this.markDirtyByIdStmt.run(id);
      }
    });
    tx(ids);
  }
  // Persist successful hash results in a single transaction.
  applyHashResults(results) {
    if (!results.length) return;
    const tx = this.db.transaction((batch) => {
      for (const { id, contentSha256, serviceFileHash } of batch) {
        this.applyHashesStmt.run(contentSha256, serviceFileHash, id);
      }
    });
    tx(results);
  }
  getCleanFilesForSnapshot() {
    return this.selectCleanSnapshotStmt.all();
  }
  // Aggregate counts/sizes for snapshot metadata.
  getTotalsForSnapshot() {
    const row = this.totalsStmt.get();
    return {
      filesCount: row?.files_count ?? 0,
      totalSize: row?.total_size ?? 0
    };
  }
  countByState(state) {
    const row = this.countByStateStmt.get(state);
    return row?.count ?? 0;
  }
  // Return rows whose hashing lease expired at or before `now` to 'dirty'.
  requeueExpiredHashing(now) {
    const result = this.requeueExpiredHashingStmt.run(now);
    return result.changes ?? 0;
  }
  // Startup recovery: clear every in-flight hashing lease unconditionally.
  resetHashingStates() {
    const result = this.resetHashingStmt.run();
    return result.changes ?? 0;
  }
};
513
+
514
// src/db/SnapshotsRepo.ts
// Thin repository over the append-only `snapshots` table.
var SnapshotsRepo = class {
  constructor(db) {
    this.db = db;
    this.insertStmt = this.db.prepare(
      `INSERT INTO snapshots (snapshot_hash, files_count, total_size, created_at)
      VALUES (?, ?, ?, ?)`
    );
    this.selectLatestStmt = this.db.prepare(
      `SELECT * FROM snapshots ORDER BY created_at DESC LIMIT 1`
    );
  }
  // Record one published snapshot.
  insert(snapshotHash, filesCount, totalSize, createdAt) {
    this.insertStmt.run(snapshotHash, filesCount, totalSize, createdAt);
  }
  // Most recently created snapshot row, or undefined when none exist.
  getLatest() {
    return this.selectLatestStmt.get();
  }
};
533
var Outbox = class {
  // Durable control-job outbox backed by a qulite queue living in the same
  // SQLite database. Snapshot/heartbeat jobs are deduplicated per root via
  // dedupe keys of the form `<kind>:<rootId>`.
  constructor(db, logger2) {
    this.log = logger2.child({ scope: "outbox" });
    this.queue = new qulite.Qulite(db, {
      logger: this.log,
      defaultLeaseMs: 3e4,
      defaultMaxAttempts: 10
    });
    // Stamps the `kind` column on the job matching a dedupe key.
    // NOTE(review): presumably compensates for enqueueFsEvent not setting
    // `kind` on already-existing deduped rows — confirm against qulite.
    this.markKindStmt = db.prepare(
      `UPDATE qulite_jobs SET kind = @kind WHERE dedupe_key = @dedupe_key`
    );
    // fs_control jobs written by older releases have no kind and cannot be
    // dispatched; they are deleted at startup.
    this.purgeLegacyStmt = db.prepare(
      `DELETE FROM qulite_jobs WHERE type = 'fs_control' AND (kind IS NULL OR kind = '')`
    );
    const purged = this.purgeLegacyStmt.run().changes ?? 0;
    if (purged > 0) {
      this.log.warn({ purged }, "Purged legacy fs_control jobs without kind");
    }
  }
  getQueue() {
    return this.queue;
  }
  markKind(dedupeKey, kind) {
    this.markKindStmt.run({ dedupe_key: dedupeKey, kind });
  }
  // Enqueue (or refresh) the root's single snapshot job; result.changes is
  // 0 when an identical deduped job already exists.
  enqueueSnapshot(rootId, delayMs = 0) {
    const result = qulite.enqueueFsEvent(this.queue, {
      root_id: rootId,
      rel_path: "",
      kind: "snapshot",
      delayMs,
      maxAttempts: 20,
      data: { root_id: rootId, kind: "snapshot" }
    });
    this.markKind(`snapshot:${rootId}`, "snapshot");
    if (result.changes > 0) {
      this.log.debug({ rootId }, "Enqueued snapshot job");
    }
  }
  // Same pattern as enqueueSnapshot, but heartbeats retry fewer times.
  enqueueHeartbeat(rootId, delayMs = 0) {
    const result = qulite.enqueueFsEvent(this.queue, {
      root_id: rootId,
      rel_path: "",
      kind: "heartbeat",
      delayMs,
      maxAttempts: 5,
      data: { root_id: rootId, kind: "heartbeat" }
    });
    this.markKind(`heartbeat:${rootId}`, "heartbeat");
    if (result.changes > 0) {
      this.log.debug({ rootId }, "Enqueued heartbeat job");
    }
  }
  // Lease-based claim of the next fs_control job for this worker.
  claimFsControlJob(leaseOwner, leaseMs = 3e4) {
    return this.queue.claimNext({ type: "fs_control", leaseOwner, leaseMs });
  }
  ack(jobId, leaseOwner) {
    return this.queue.ack(jobId, leaseOwner);
  }
  retry(jobId, leaseOwner, delayMs) {
    return this.queue.retry(jobId, leaseOwner, delayMs);
  }
  fail(jobId, leaseOwner, error) {
    return this.queue.fail(jobId, leaseOwner, error);
  }
  requeueTimedOut() {
    return this.queue.requeueTimedOut();
  }
};
602
function serviceConfig(baseUrl, timeout) {
  // Build a per-service client config containing only the defined fields.
  // Returns undefined when neither field is set so the client library can
  // fall back to its own defaults for that service.
  const config = {};
  if (baseUrl !== undefined) {
    config.baseUrl = baseUrl;
  }
  if (timeout !== undefined) {
    config.timeout = timeout;
  }
  return Object.keys(config).length === 0 ? undefined : config;
}
615
function createClients(config, logger2) {
  // Construct the authenticated HTTP client bundle. Each service section is
  // optional: serviceConfig() yields undefined when neither a base URL nor
  // a timeout was configured, letting the library use its defaults.
  const clientLogger = logger2.child({ scope: "clients" });
  const httpTimeout = config.httpTimeout;
  return new clients.CoderuleClients({
    token: config.token,
    auth: serviceConfig(config.authBaseUrl, httpTimeout),
    ast: serviceConfig(config.astBaseUrl, httpTimeout),
    sync: serviceConfig(config.syncBaseUrl, httpTimeout),
    retrieval: serviceConfig(config.retrievalBaseUrl, httpTimeout),
    jwtFactory: {
      // Debug-level visibility into token refresh cycles.
      onTokenRefreshed: (info) => {
        clientLogger.debug(
          {
            expiresAt: new Date(info.expiresAt).toISOString(),
            serverUrl: info.serverUrl
          },
          "JWT refreshed"
        );
      }
    }
  });
}
638
+
639
// src/rules/RulesFetcher.ts
async function fetchVisitorRules(clients, logger2) {
  // Pull the v2 file-visiting rules (extensions/filenames to include,
  // directory names to exclude) from the AST service.
  const fetchLogger = logger2.child({ scope: "rules" });
  fetchLogger.info("Fetching visitor rules v2 from AST service");
  const rules = await clients.ast.getVisitorRulesV2();
  const summary = {
    include_extensions: rules.include_extensions.length,
    include_filenames: rules.include_filenames.length,
    exclude_dirnames: rules.exclude_dirnames.length
  };
  fetchLogger.info(summary, "Fetched visitor rules");
  return rules;
}
654
function toPosix(input) {
  // Convert a platform-separated path into forward-slash form.
  const segments = input.split(path__default.default.sep);
  return segments.join("/");
}
657
function getLowerBasename(input) {
  // Last '/'-separated segment, lowercased; '' for empty or trailing-slash
  // input (split always yields at least one element).
  const segments = input.split("/");
  const base = segments[segments.length - 1];
  return (base ?? "").toLowerCase();
}
661
function getLowerExt(basename) {
  // Lowercased extension including the dot; '' when no dot is present.
  // A leading dot counts (idx 0 is accepted), so '.gitignore' yields
  // '.gitignore' — mirroring the include-rules matching semantics.
  const idx = basename.lastIndexOf(".");
  return idx < 0 ? "" : basename.slice(idx).toLowerCase();
}
666
function compileRulesBundle(rules) {
  // Compile raw visitor rules into fast matchers plus an ignore predicate
  // suitable for the file watcher. The predicate tolerates a missing stats
  // argument by lstat-ing the path itself (watcher callbacks sometimes omit
  // stats); unreadable paths are treated as not-ignored (returns false).
  const compiled = clients.ASTHttpClient.compileRulesV2(rules);
  const basePredicate = clients.ASTHttpClient.buildIgnoredPredicate(compiled);
  const predicate = (fullPath, stats) => {
    if (stats) {
      return basePredicate(fullPath, stats);
    }
    logger.debug({ path: fullPath }, "Predicate fallback lstat");
    let info;
    try {
      info = fs2__default.default.lstatSync(fullPath);
    } catch (error) {
      logger.warn(
        { err: error, path: fullPath },
        "Failed to lstat path for rules predicate"
      );
      return false;
    }
    return basePredicate(fullPath, info);
  };
  return { rules, compiled, predicate };
}
691
function shouldIncludeFile(relPath, stats, bundle) {
  // A file is tracked when it survives the directory-exclusion regex and
  // either its basename or its extension is on the include lists.
  if (stats.isDirectory()) {
    return false;
  }
  const posixRel = toPosix(relPath);
  if (bundle.compiled.dirRe.test(posixRel)) {
    return false;
  }
  const basename = getLowerBasename(posixRel);
  if (bundle.compiled.names.has(basename)) {
    return true;
  }
  return bundle.compiled.exts.has(getLowerExt(basename));
}
704
function shouldPruneDirectory(relPath, bundle) {
  // Directories matching the exclusion regex are skipped without descent.
  return bundle.compiled.dirRe.test(toPosix(relPath));
}
708
function buildWatcherIgnored(bundle) {
  // Adapter for chokidar's `ignored` option, which calls (path, stats?).
  return function ignored(fullPath, stats) {
    return bundle.predicate(fullPath, stats);
  };
}
711
var HashWorker = class {
  // Off-main-thread hashing: forwards {absPath, relPath} tasks to the
  // ./hash/WorkerThread.js worker and correlates replies by taskId.
  // A worker crash or nonzero exit rejects every in-flight task.
  constructor(logger2) {
    // taskId -> { resolve, reject } for in-flight compute() calls.
    this.pending = /* @__PURE__ */ new Map();
    this.nextTaskId = 1;
    // Set during terminate() so the 'exit' handler doesn't treat our own
    // shutdown as a crash.
    this.terminating = false;
    this.log = logger2.child({ scope: "hash-worker" });
    // Resolve the worker script relative to this bundle's own URL.
    const workerUrl = new URL("./hash/WorkerThread.js", importMetaUrl);
    // Drop --input-type flags inherited from the parent, which would
    // conflict with loading a file-based worker.
    const execArgv = process.execArgv.filter(
      (arg) => !arg.startsWith("--input-type")
    );
    const workerOptions = {
      name: "coderule-hasher",
      execArgv
    };
    if (workerUrl.pathname.endsWith(".js")) {
      // NOTE(review): `type` is not a documented worker_threads option in
      // Node (it belongs to web Workers) — presumably kept for bundler
      // compatibility; confirm it is intentionally a no-op here.
      workerOptions.type = "module";
    }
    this.worker = new worker_threads.Worker(workerUrl, workerOptions);
    this.worker.on(
      "message",
      (message) => this.onMessage(message)
    );
    this.worker.on("error", (error) => this.handleWorkerError(error));
    this.worker.on("exit", (code) => {
      if (code !== 0 && !this.terminating) {
        this.handleWorkerError(
          new Error(`Hasher worker exited with code ${code}`)
        );
      }
    });
  }
  // Reject all in-flight tasks, then stop the worker thread.
  async terminate() {
    this.terminating = true;
    for (const [, pending] of this.pending) {
      pending.reject(new Error("Hasher worker terminated"));
    }
    this.pending.clear();
    await this.worker.terminate();
  }
  // Route a worker reply to its waiting promise; unknown taskIds are logged
  // and dropped (can happen after a crash rejected the batch).
  onMessage(message) {
    const pending = this.pending.get(message.taskId);
    if (!pending) {
      this.log.warn(
        { taskId: message.taskId },
        "Received message for unknown task"
      );
      return;
    }
    this.pending.delete(message.taskId);
    if (message.type === "hash-result") {
      pending.resolve({
        contentSha256: message.contentSha256,
        serviceFileHash: message.serviceFileHash
      });
    } else {
      // Error reply: surface the worker-reported code (e.g. ENOENT) so
      // callers can distinguish vanished files from real failures.
      const error = new Error(message.error);
      error.code = message.code;
      pending.reject(error);
    }
  }
  // Worker-level failure: every in-flight task is rejected with the cause.
  handleWorkerError(error) {
    this.log.error({ err: error }, "Hasher worker error");
    for (const [, pending] of this.pending) {
      pending.reject(error);
    }
    this.pending.clear();
  }
  // Submit one hashing task; resolves with { contentSha256, serviceFileHash }.
  compute(absPath, relPath) {
    const taskId = this.nextTaskId++;
    const payload = {
      type: "hash",
      taskId,
      absPath,
      relPath
    };
    return new Promise((resolve, reject) => {
      this.pending.set(taskId, { resolve, reject });
      this.worker.postMessage(payload);
    });
  }
};
792
var Hasher = class {
  // Drains 'dirty' rows from FilesRepo in batches: claims rows under a
  // lease, hashes file contents (worker thread when available, inline
  // streaming fallback), then writes results back.
  // options: { rootPath, filesRepo, logger }.
  constructor(options) {
    this.options = options;
    this.worker = null;
    this.log = options.logger.child({ scope: "hasher" });
    // CODERULE_HASHER_INLINE=1 forces main-thread hashing (no worker).
    this.inlineMode = process.env.CODERULE_HASHER_INLINE === "1";
    // Unique lease owner so concurrent processes cannot steal each other's
    // claimed rows.
    this.ownerId = `hasher-${process.pid}-${Date.now()}`;
    const leaseFromEnv = process.env.CODERULE_HASH_LEASE_MS ? Number.parseInt(process.env.CODERULE_HASH_LEASE_MS, 10) : Number.NaN;
    // Lease duration: positive env override, otherwise 30s.
    this.leaseDurationMs = Number.isFinite(leaseFromEnv) && leaseFromEnv > 0 ? leaseFromEnv : 3e4;
    if (!this.inlineMode) {
      try {
        this.worker = new HashWorker(this.log);
      } catch (error) {
        // Worker startup failure is non-fatal: degrade to inline hashing.
        this.log.warn(
          { err: error },
          "Failed to start hasher worker, falling back to inline hashing"
        );
        this.worker = null;
        this.inlineMode = true;
      }
    }
  }
  // Stop the worker thread (rejects any in-flight hash tasks).
  async close() {
    if (this.worker) {
      await this.worker.terminate();
    }
  }
  // display_path may already be absolute; otherwise join rel_path onto root.
  resolveAbsolutePath(record) {
    if (path__default.default.isAbsolute(record.display_path)) {
      return record.display_path;
    }
    return path__default.default.join(this.options.rootPath, record.rel_path);
  }
  // Check the file still exists; on failure mark the row 'missing' and
  // return false so the batch skips it.
  async ensureExists(absPath, record) {
    try {
      await fs4__default.default.access(absPath);
      return true;
    } catch (error) {
      this.log.warn(
        { err: error, relPath: record.rel_path },
        "File missing before hashing"
      );
      this.options.filesRepo.markMissing(record.rel_path);
      return false;
    }
  }
  async computeHash(absPath, relPath) {
    if (this.inlineMode || !this.worker) {
      return this.hashInline(absPath, relPath);
    }
    return this.worker.compute(absPath, relPath);
  }
  // Stream the file once, producing two digests:
  //   contentSha256    — sha256 of the raw bytes
  //   serviceFileHash  — sha256 of `${relPath}\n` followed by the raw bytes
  // (path-qualified so identical content at different paths hashes apart).
  async hashInline(absPath, relPath) {
    return new Promise((resolve, reject) => {
      const content = crypto.createHash("sha256");
      const service = crypto.createHash("sha256");
      service.update(relPath);
      service.update("\n");
      const stream = fs2__default.default.createReadStream(absPath);
      stream.on("data", (chunk) => {
        content.update(chunk);
        service.update(chunk);
      });
      stream.on("error", (error) => {
        reject(error);
      });
      stream.on("end", () => {
        resolve({
          contentSha256: content.digest("hex"),
          serviceFileHash: service.digest("hex")
        });
      });
    });
  }
  // One scheduling round. Returns true when any rows were claimed (callers
  // may poll again immediately), false when the dirty queue was empty.
  async processBatch(limit) {
    const now = Date.now();
    // Reclaim rows whose lease-holder died mid-hash.
    const requeued = this.options.filesRepo.requeueExpiredHashing(now);
    if (requeued > 0) {
      this.log.debug({ requeued }, "Requeued expired hashing leases");
    }
    const dirty = this.options.filesRepo.claimDirty(
      limit,
      this.ownerId,
      this.leaseDurationMs
    );
    if (dirty.length > 0) {
      this.log.debug({ count: dirty.length }, "Hashing claimed files");
    }
    if (dirty.length === 0) {
      return false;
    }
    const successes = [];
    const failures = [];
    for (const record of dirty) {
      const absPath = this.resolveAbsolutePath(record);
      const exists = await this.ensureExists(absPath, record);
      if (!exists) {
        continue;
      }
      try {
        const result = await this.computeHash(absPath, record.rel_path);
        successes.push({
          id: record.id,
          contentSha256: result.contentSha256,
          serviceFileHash: result.serviceFileHash
        });
      } catch (error) {
        if (error?.code === "ENOENT") {
          // Deleted between stat and read: not an error, just mark missing.
          this.log.debug(
            { relPath: record.rel_path },
            "File disappeared during hashing"
          );
          this.options.filesRepo.markMissing(record.rel_path);
        } else {
          // Transient failure: release the lease so the row gets retried.
          this.log.warn(
            { err: error, relPath: record.rel_path },
            "Failed to hash file"
          );
          failures.push(record.id);
        }
      }
    }
    if (successes.length) {
      this.log.debug({ count: successes.length }, "Hashing succeeded");
      this.options.filesRepo.applyHashResults(successes);
    }
    if (failures.length) {
      this.log.warn({ count: failures.length }, "Hashing failed for files");
      this.options.filesRepo.markDirtyByIds(failures);
    }
    return true;
  }
};
925
+
926
// src/service/Bootstrap.ts
// Child of the module-level root logger, scoped for service wiring.
function createServiceLogger() {
  return logger.child({ scope: "service" });
}
930
async function bootstrap(params) {
  // Wire up the full service runtime: config, database + repositories,
  // outbox queue, HTTP clients, visitor rules, and the hashing pipeline.
  const config = await resolveConfig(params);
  const serviceLogger = createServiceLogger();
  const db = openDatabase(config.dbPath, serviceLogger.child({ scope: "db" }));
  const filesRepo = new FilesRepo(db);
  // A previous process may have died mid-hash; return leased rows to 'dirty'.
  const recovered = filesRepo.resetHashingStates();
  if (recovered > 0) {
    serviceLogger.info({ recovered }, "Recovered lingering hashing leases");
  }
  const snapshotsRepo = new SnapshotsRepo(db);
  const outbox = new Outbox(db, serviceLogger);
  const apiClients = createClients(config, serviceLogger);
  const visitorRules = await fetchVisitorRules(apiClients, serviceLogger);
  const rulesBundle = compileRulesBundle(visitorRules);
  const hasher = new Hasher({
    rootPath: config.rootPath,
    filesRepo,
    logger: serviceLogger
  });
  return {
    config,
    logger: serviceLogger,
    db,
    outbox,
    clients: apiClients,
    rules: rulesBundle,
    filesRepo,
    snapshotsRepo,
    hasher
  };
}
958
function toPosixRelative(root, target) {
  // Relative path from root to target in forward-slash form; '' when equal.
  const rel = path__default.default.relative(root, target);
  if (!rel) {
    return "";
  }
  return rel.split(path__default.default.sep).join("/");
}
963
// True when `target` is `root` itself or located beneath it. A relative
// path that starts with ".." (escapes upward) or resolves to an absolute
// path (e.g. another drive on Windows) is outside the root.
function isInsideRoot(root, target) {
  const relative = path__default.default.relative(root, target);
  if (relative === "") {
    return true;
  }
  if (relative.startsWith("..")) {
    return false;
  }
  return !path__default.default.isAbsolute(relative);
}
967
+
968
+ // src/fs/Walker.ts
969
// Zeroed counter record used to seed each inventory run.
var EMPTY_STATS = {
  processed: 0,
  skipped: 0,
  dirtied: 0,
  missing: 0
};
// Shallow-copy a stats record so callers can mutate counters freely
// without touching the shared template.
function cloneStats(stats) {
  return Object.assign({}, stats);
}
978
// Best-effort readlink: returns the link target, or null (with a warning)
// when the link is unreadable or has vanished.
async function readSymlinkTarget(absPath, log) {
  let target = null;
  try {
    target = await fs4__default.default.readlink(absPath);
  } catch (error) {
    log.warn({ err: error, path: absPath }, "Failed to read symlink target");
  }
  return target;
}
986
// Recursively walk `current`, upserting every includable file into the
// files repo and accumulating counters into `stats` (mutated in place).
// Directories pruned by the rules bundle are counted as skipped and not
// descended into; unreadable dirs/files are warned about and skipped.
async function walkDirectory(current, opts, stats) {
  const log = opts.logger;
  let entries;
  try {
    entries = await fs4__default.default.readdir(current, { withFileTypes: true });
  } catch (error) {
    log.warn({ err: error, path: current }, "Failed to read directory");
    return;
  }
  for (const entry of entries) {
    const absPath = path__default.default.join(current, entry.name);
    const relPath = toPosixRelative(opts.rootPath, absPath);
    if (entry.isDirectory()) {
      if (shouldPruneDirectory(relPath, opts.bundle)) {
        stats.skipped += 1;
      } else {
        await walkDirectory(absPath, opts, stats);
      }
      continue;
    }
    const isLink = entry.isSymbolicLink();
    if (!isLink && !entry.isFile()) {
      // Sockets, FIFOs, devices, etc. are never inventoried.
      stats.skipped += 1;
      continue;
    }
    let stat;
    try {
      stat = await fs4__default.default.lstat(absPath);
    } catch (error) {
      log.warn({ err: error, path: absPath }, "Failed to stat file");
      continue;
    }
    stats.processed += 1;
    if (!shouldIncludeFile(relPath, stat, opts.bundle)) {
      stats.skipped += 1;
      continue;
    }
    const symlinkTarget = isLink ? await readSymlinkTarget(absPath, log) : null;
    const state = opts.filesRepo.upsertFromStat({
      relPath,
      displayPath: absPath,
      stats: stat,
      isSymlink: isLink,
      symlinkTarget
    });
    if (state === "dirty") {
      stats.dirtied += 1;
    }
  }
}
1035
// Run a full inventory pass over the root: walk the tree, then mark as
// missing every record not touched since the walk started. Returns the
// accumulated counters.
async function runInventory(opts) {
  const stats = cloneStats(EMPTY_STATS);
  const startedAt = Date.now();
  await walkDirectory(opts.rootPath, opts, stats);
  // Anything the walk did not touch has disappeared since the last run.
  stats.missing = opts.filesRepo.markMissingBefore(startedAt);
  opts.logger.info({ ...stats }, "Completed initial inventory");
  return stats;
}
1044
// Resolve after roughly `ms` milliseconds.
async function sleep(ms) {
  await new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}
1047
// Build the snapshot payload from the repo's clean files: a deterministic
// snapshot hash over the sorted per-file hashes, the file list, and totals.
function computeSnapshot(filesRepo) {
  const files = filesRepo.getCleanFilesForSnapshot();
  // Sort a dedicated hash list so the snapshot hash is order-independent.
  const hashes = [];
  for (const file of files) {
    if (typeof file.service_file_hash === "string") {
      hashes.push(file.service_file_hash);
    }
  }
  hashes.sort();
  const snapshotHash = clients.SyncHttpClient.calculateSnapshotHash(hashes);
  const totals = filesRepo.getTotalsForSnapshot();
  return {
    snapshotHash,
    files: files.map((file) => ({
      file_path: file.rel_path,
      file_hash: file.service_file_hash
    })),
    filesCount: totals.filesCount,
    totalSize: totals.totalSize
  };
}
1062
// Read each missing file from disk and upload its content keyed by hash.
// Unreadable files are warned about and omitted; nothing is uploaded when
// the list is empty or no file could be read.
async function uploadMissing(rootPath, missing, syncClient, logger2) {
  if (!missing || missing.length === 0) return;
  const contentByHash = new Map();
  for (const entry of missing) {
    const absPath = path__default.default.join(rootPath, entry.file_path);
    try {
      const content = await fs4__default.default.readFile(absPath);
      contentByHash.set(entry.file_hash, {
        path: entry.file_path,
        content
      });
    } catch (error) {
      logger2.warn(
        { err: error, relPath: entry.file_path },
        "Failed to read missing file content"
      );
    }
  }
  if (contentByHash.size === 0) return;
  await syncClient.uploadFileContent(contentByHash);
}
1083
// Drive the remote snapshot to READY: check its status, create it if the
// server has never seen it, upload any file content the server reports
// missing, then poll (with capped exponential backoff) until READY or
// FAILED. Throws on FAILED.
// NOTE(review): the poll loop has no attempt cap — it spins forever if the
// server never leaves a pending state; confirm this is intentional.
async function ensureSnapshotCreated(rootPath, computation, syncClient, logger2) {
  const { snapshotHash, files } = computation;
  let current = await syncClient.checkSnapshotStatus(snapshotHash);
  if (current.status === "READY") {
    logger2.info({ snapshotHash }, "Snapshot already READY");
    return;
  }
  if (current.status === "NOT_FOUND" || current.status === "MISSING_CONTENT") {
    current = await syncClient.createSnapshot(snapshotHash, files);
  }
  if (current.status === "MISSING_CONTENT" && current.missing_files?.length) {
    logger2.info(
      { missing: current.missing_files.length },
      "Uploading missing file content"
    );
    await uploadMissing(rootPath, current.missing_files, syncClient, logger2);
    current = await syncClient.createSnapshot(snapshotHash, files);
  }
  // Poll until READY; back off 1s, 2s, 4s, ... capped at 5s per wait.
  for (let attempt = 0; current.status !== "READY"; attempt += 1) {
    if (current.status === "FAILED") {
      throw new Error(`Snapshot failed processing: ${JSON.stringify(current)}`);
    }
    await sleep(Math.min(5e3, 1e3 * Math.max(1, 2 ** attempt)));
    current = await syncClient.checkSnapshotStatus(snapshotHash);
  }
  logger2.info({ snapshotHash }, "Snapshot READY");
}
1113
// Compute the current snapshot, ensure the server has it READY, then
// record it locally. Returns the published snapshot's summary.
async function publishSnapshot(rootPath, filesRepo, snapshotsRepo, syncClient, logger2) {
  const computation = computeSnapshot(filesRepo);
  await ensureSnapshotCreated(rootPath, computation, syncClient, logger2);
  const createdAt = Date.now();
  const { snapshotHash, filesCount, totalSize } = computation;
  snapshotsRepo.insert(snapshotHash, filesCount, totalSize, createdAt);
  return {
    snapshotHash,
    filesCount,
    totalSize,
    status: "READY",
    createdAt
  };
}
1131
+
1132
+ // src/service/InitialSync.ts
1133
// Full initial sync: inventory the tree, hash until the dirty queue is
// drained, then publish a snapshot. Returns the published snapshot summary.
async function runInitialSyncPipeline(runtime) {
  // Phase 1: walk the tree and record every candidate file.
  await runInventory({
    rootPath: runtime.config.rootPath,
    bundle: runtime.rules,
    filesRepo: runtime.filesRepo,
    logger: runtime.logger.child({ scope: "inventory" })
  });
  // Phase 2: hash dirty files batch by batch until there is no more work.
  const hashLogger = runtime.logger.child({ scope: "hash" });
  for (;;) {
    const hadWork = await runtime.hasher.processBatch(runtime.config.hashBatchSize);
    if (!hadWork) {
      break;
    }
    hashLogger.debug("Hasher processed batch");
  }
  // Phase 3: publish a snapshot of the now-clean file set.
  return publishSnapshot(
    runtime.config.rootPath,
    runtime.filesRepo,
    runtime.snapshotsRepo,
    runtime.clients.sync,
    runtime.logger.child({ scope: "snapshot" })
  );
}
1159
// Start a chokidar watcher over the root in either native or polling mode
// and wait until it reports "ready". Rejects if a startup error fires
// before readiness.
async function createChokidarWatcher(options, usePolling) {
  const mode = usePolling ? "polling" : "native";
  const log = options.logger.child({ scope: "watcher", mode });
  const watcher = chokidar__default.default.watch(options.rootPath, {
    ignored: options.ignored,
    ignoreInitial: true,
    persistent: true,
    // Wait for writes to settle so half-written files are not reported.
    awaitWriteFinish: {
      stabilityThreshold: 1500,
      pollInterval: 100
    },
    atomic: true,
    usePolling,
    interval: usePolling ? 200 : void 0,
    binaryInterval: usePolling ? 200 : void 0,
    alwaysStat: true,
    cwd: void 0,
    depth: void 0
  });
  // Race "ready" against "error", detaching the losing listener.
  await new Promise((resolve, reject) => {
    const handleReady = () => {
      watcher.off("error", handleError);
      log.info("Watcher ready");
      resolve();
    };
    const handleError = (err) => {
      watcher.off("ready", handleReady);
      reject(err);
    };
    watcher.once("ready", handleReady);
    watcher.once("error", handleError);
  });
  return { watcher, mode };
}
1195
// Wire the watcher's file events to the caller's async-safe event handler,
// then invoke the optional onReady callback. Handler rejections are logged
// rather than allowed to become unhandled.
function attachHandlers(watcher, options) {
  const { handlers, logger: log } = options;
  const dispatch = (event, filePath, stats) => {
    log.debug({ event, filePath }, "Watcher raw event");
    Promise.resolve(handlers.onEvent(event, filePath, stats)).catch((error) => {
      log.error(
        { err: error, event, path: filePath },
        "Watcher handler failed"
      );
    });
  };
  watcher.on("add", (filePath, stats) => dispatch("add", filePath, stats));
  watcher.on("change", (filePath, stats) => dispatch("change", filePath, stats));
  watcher.on("unlink", (filePath) => dispatch("unlink", filePath));
  watcher.on("addDir", (dirPath) => dispatch("addDir", dirPath));
  watcher.on("unlinkDir", (dirPath) => dispatch("unlinkDir", dirPath));
  watcher.on("error", (error) => {
    log.error({ err: error }, "Watcher error");
  });
  if (handlers.onReady) {
    handlers.onReady();
    log.debug("Watcher ready callback executed");
  }
}
1219
// Start the watcher in native mode; if that fails (e.g. inotify limits),
// fall back to polling. Handlers are attached once the watcher is ready.
async function startWatcher(options) {
  let result;
  try {
    result = await createChokidarWatcher(options, false);
  } catch (error) {
    options.logger.warn(
      { err: error },
      "Native watcher failed, falling back to polling"
    );
    result = await createChokidarWatcher(options, true);
  }
  attachHandlers(result.watcher, options);
  return result;
}
1234
+
1235
+ // src/sync/HeartbeatProtocol.ts
1236
// Send a liveness heartbeat by re-checking the latest local snapshot's
// status on the server (a no-op request when no snapshot exists yet).
async function sendHeartbeat(rootId, snapshotsRepo, syncClient, logger2) {
  const latestSnapshot = snapshotsRepo.getLatest();
  if (latestSnapshot) {
    await syncClient.checkSnapshotStatus(latestSnapshot.snapshot_hash);
  }
  logger2.debug({ rootId }, "Heartbeat sent");
}
1243
+
1244
+ // src/service/State.ts
1245
// Tracks the three timestamps that drive snapshot debouncing and the
// heartbeat loop.
var ServiceState = class {
  constructor() {
    // Seed change/snapshot markers with "now"; no heartbeat enqueued yet.
    this.lastChangeAt = Date.now();
    this.lastSnapshotReadyAt = Date.now();
    this.lastHeartbeatEnqueuedAt = 0;
  }
  // Record that a heartbeat job was enqueued.
  updateHeartbeat(timestamp = Date.now()) {
    this.lastHeartbeatEnqueuedAt = timestamp;
  }
  // Record that a snapshot reached READY.
  updateSnapshotReady(timestamp = Date.now()) {
    this.lastSnapshotReadyAt = timestamp;
  }
  // Record that the watched tree changed.
  updateChange(timestamp = Date.now()) {
    this.lastChangeAt = timestamp;
  }
};
1261
+
1262
+ // src/service/ServiceLoops.ts
1263
// Parse JSON, returning undefined instead of throwing on malformed input.
function safeParse(input) {
  let parsed;
  try {
    parsed = JSON.parse(input);
  } catch {
    parsed = void 0;
  }
  return parsed;
}
1270
// Resolve after roughly `ms` milliseconds.
async function sleep2(ms) {
  await new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}
1273
// Exponential retry backoff: 1s doubling per attempt, capped at 60s.
// A nullish attempt count is treated as the first attempt.
function computeBackoff(attempts) {
  const exponent = attempts ?? 0;
  return Math.min(1e3 * 2 ** exponent, 6e4);
}
1278
// Best-effort readlink without logging: null when unreadable or vanished.
async function readSymlinkTarget2(absPath) {
  let target = null;
  try {
    target = await fs4__default.default.readlink(absPath);
  } catch {
    // Intentionally swallowed: a missing/unreadable link yields null.
  }
  return target;
}
1285
// Debounces change notifications into a single snapshot job: each trigger
// records the change and re-arms the timer; only the last one enqueues.
var SnapshotScheduler = class {
  constructor(rootId, debounceMs, outbox, state) {
    this.rootId = rootId;
    this.debounceMs = debounceMs;
    this.outbox = outbox;
    this.state = state;
    this.timer = null;
  }
  // Note the change and (re)arm the debounce timer.
  trigger() {
    this.state.updateChange();
    this.cancel();
    this.timer = setTimeout(() => {
      this.outbox.enqueueSnapshot(this.rootId);
      this.timer = null;
    }, this.debounceMs);
  }
  // Disarm a pending snapshot, if any.
  cancel() {
    if (this.timer !== null) {
      clearTimeout(this.timer);
      this.timer = null;
    }
  }
};
1310
// Continuous-mode service engine: owns the file watcher, the snapshot
// debounce scheduler, and the background loops (hashing, fs_control queue
// draining, heartbeats, lease requeue). Lifecycle: prepareWatcher ->
// startLoops -> enableWatcherProcessing -> stop.
var ServiceRunner = class {
  constructor(runtime) {
    this.runtime = runtime;
    this.state = new ServiceState();
    this.watcher = null;
    this.running = false;
    // Unique lease owner id so queue claims made by this process are
    // distinguishable from other/old processes.
    this.fsControlLeaseOwner = `fs-control-${process.pid}-${Date.now()}`;
    // Promises of in-flight background loops, awaited during stop().
    this.tasks = /* @__PURE__ */ new Set();
    // While true, watcher events are queued in bufferedEvents instead of
    // being handled (used while the initial sync runs).
    this.buffering = false;
    this.bufferedEvents = [];
    this.scheduler = new SnapshotScheduler(
      runtime.config.rootId,
      runtime.config.snapshotDebounceMs,
      runtime.outbox,
      this.state
    );
    this.ignoredPredicate = buildWatcherIgnored(runtime.rules);
  }
  // Seed state timestamps from the initial snapshot so the heartbeat loop
  // does not fire immediately after startup.
  recordInitialSnapshot(timestamp) {
    this.state.updateSnapshotReady(timestamp);
    this.state.updateChange(timestamp);
    this.state.updateHeartbeat(timestamp);
  }
  // Start the watcher (non-buffering) and the background loops. Idempotent.
  async start() {
    if (this.running) return;
    await this.prepareWatcher(false);
    await this.startLoops();
  }
  // Stop loops, close the watcher, wait for in-flight tasks, then release
  // the hasher, API clients and database. Idempotent.
  async stop() {
    if (!this.running) return;
    this.running = false;
    this.scheduler.cancel();
    if (this.watcher) {
      await this.watcher.close();
      this.watcher = null;
    }
    await Promise.all([...this.tasks]);
    await this.runtime.hasher.close();
    this.runtime.clients.close();
    this.runtime.db.close();
  }
  // Snapshot of the runner's observable state (for status reporting).
  getServiceStateSnapshot() {
    return {
      lastChangeAt: this.state.lastChangeAt,
      lastSnapshotReadyAt: this.state.lastSnapshotReadyAt,
      lastHeartbeatEnqueuedAt: this.state.lastHeartbeatEnqueuedAt,
      watcherReady: this.watcher !== null,
      buffering: this.buffering
    };
  }
  // Track a background loop promise; errors are only logged while running
  // (errors during shutdown are expected and suppressed).
  runBackground(fn) {
    const task = fn();
    this.tasks.add(task);
    task.catch((error) => {
      if (this.running) {
        this.runtime.logger.error({ err: error }, "Background task failed");
      }
    }).finally(() => {
      this.tasks.delete(task);
    });
  }
  // Start the chokidar watcher. With bufferOnly=true, events are queued
  // until enableWatcherProcessing() replays them.
  async prepareWatcher(bufferOnly) {
    const { rootPath } = this.runtime.config;
    const logger2 = this.runtime.logger.child({ scope: "watcher" });
    this.buffering = bufferOnly;
    const { watcher, mode } = await startWatcher({
      rootPath,
      ignored: this.ignoredPredicate,
      logger: logger2,
      handlers: {
        onEvent: (event, absPath, stats) => {
          logger2.debug({ event, absPath }, "Watcher event received");
          if (this.buffering) {
            this.bufferedEvents.push({ event, absPath, stats });
            return Promise.resolve();
          }
          return this.handleEvent(event, absPath, stats);
        }
      }
    });
    this.watcher = watcher;
    logger2.debug({ watched: watcher.getWatched() }, "Watcher targets");
    logger2.info({ mode, buffering: bufferOnly }, "File watcher started");
  }
  // Turn off buffering and replay, in order, every event queued while the
  // initial sync was running. No-op when not buffering.
  async enableWatcherProcessing() {
    if (!this.buffering) {
      return;
    }
    this.buffering = false;
    this.runtime.logger.debug(
      { buffered: this.bufferedEvents.length },
      "Watcher buffering disabled"
    );
    if (this.bufferedEvents.length === 0) return;
    for (const buffered of this.bufferedEvents) {
      await this.handleEvent(buffered.event, buffered.absPath, buffered.stats);
    }
    this.bufferedEvents = [];
  }
  // Launch the four background loops. Idempotent.
  async startLoops() {
    if (this.running) return;
    this.running = true;
    this.runBackground(() => this.hashLoop());
    this.runBackground(() => this.fsControlLoop());
    this.runBackground(() => this.heartbeatLoop());
    this.runBackground(() => this.requeueLoop());
    this.runtime.logger.debug("Background loops started");
  }
  // Route a watcher event. Paths outside the root are ignored, as are
  // "addDir" events (directories are discovered via their files).
  async handleEvent(event, absPath, stats) {
    if (!this.running) return;
    const root = this.runtime.config.rootPath;
    const absolute = path__default.default.isAbsolute(absPath) ? absPath : path__default.default.join(root, absPath);
    if (!isInsideRoot(root, absolute)) {
      return;
    }
    switch (event) {
      case "add":
      case "change":
        await this.handleAddChange(absolute, stats);
        break;
      case "unlink":
        await this.handleUnlink(absolute);
        break;
      case "unlinkDir":
        await this.handleUnlinkDir(absolute);
        break;
    }
  }
  // Re-stat the path (the watcher's stats are ignored), apply the include
  // rules, upsert the record, and debounce a snapshot if it became dirty.
  async handleAddChange(absPath, _stats) {
    let fileStats;
    try {
      fileStats = await fs4__default.default.lstat(absPath);
    } catch (error) {
      this.runtime.logger.warn(
        { err: error, path: absPath },
        "Failed to lstat path"
      );
      return;
    }
    const relPath = toPosixRelative(this.runtime.config.rootPath, absPath);
    if (!shouldIncludeFile(relPath, fileStats, this.runtime.rules)) {
      this.runtime.logger.debug({ relPath }, "Watcher event ignored by rules");
      return;
    }
    const isSymlink = fileStats.isSymbolicLink();
    const target = isSymlink ? await readSymlinkTarget2(absPath) : null;
    const state = this.runtime.filesRepo.upsertFromStat({
      relPath,
      displayPath: absPath,
      stats: fileStats,
      isSymlink,
      symlinkTarget: target
    });
    if (state === "dirty") {
      this.scheduler.trigger();
    }
  }
  // Mark a deleted file missing; debounce a snapshot if anything changed.
  async handleUnlink(absPath) {
    const relPath = toPosixRelative(this.runtime.config.rootPath, absPath);
    const changed = this.runtime.filesRepo.markMissing(relPath);
    if (changed > 0) {
      this.scheduler.trigger();
    }
  }
  // Mark every file under a deleted directory missing; debounce a snapshot
  // if anything changed.
  async handleUnlinkDir(absPath) {
    const relPath = toPosixRelative(this.runtime.config.rootPath, absPath);
    const changed = this.runtime.filesRepo.markMissingByPrefix(relPath);
    if (changed > 0) {
      this.scheduler.trigger();
    }
  }
  // Continuously hash dirty files in batches; idle-sleep when no work.
  async hashLoop() {
    while (this.running) {
      const processed = await this.runtime.hasher.processBatch(
        this.runtime.config.hashBatchSize
      );
      if (!processed) {
        await sleep2(500);
      }
    }
  }
  // Drain the fs_control job queue: claim a job under our lease, dispatch
  // by kind ("snapshot" / "heartbeat"), ack jobs we cannot interpret.
  async fsControlLoop() {
    const log = this.runtime.logger.child({ scope: "fs-control-worker" });
    while (this.running) {
      const job = this.runtime.outbox.claimFsControlJob(
        this.fsControlLeaseOwner
      );
      if (!job) {
        await sleep2(this.runtime.config.queuePollIntervalMs);
        continue;
      }
      // Kind may live on the job row or inside its JSON payload.
      const payload = job.data ? safeParse(job.data) ?? {} : {};
      const jobKind = job.kind ?? (typeof payload.kind === "string" ? payload.kind : void 0);
      if (!jobKind) {
        log.warn(
          { jobId: job.id },
          "fs_control job missing kind, acknowledging"
        );
        this.runtime.outbox.ack(job.id, this.fsControlLeaseOwner);
        continue;
      }
      if (jobKind === "snapshot") {
        await this.handleSnapshotJob(job, log);
      } else if (jobKind === "heartbeat") {
        await this.handleHeartbeatJob(job, log);
      } else {
        log.warn({ jobId: job.id, kind: jobKind }, "Unknown fs_control job");
        this.runtime.outbox.ack(job.id, this.fsControlLeaseOwner);
      }
    }
  }
  // Enqueue a heartbeat when both the last change and the last heartbeat
  // are older than the heartbeat interval (i.e. the tree has been quiet).
  async heartbeatLoop() {
    const log = this.runtime.logger.child({ scope: "heartbeat-loop" });
    while (this.running) {
      const now = Date.now();
      const sinceChange = now - this.state.lastChangeAt;
      const sinceHeartbeat = now - this.state.lastHeartbeatEnqueuedAt;
      if (sinceChange >= this.runtime.config.heartbeatIntervalMs && sinceHeartbeat >= this.runtime.config.heartbeatIntervalMs) {
        this.runtime.outbox.enqueueHeartbeat(this.runtime.config.rootId);
        this.state.updateHeartbeat(now);
        log.debug("Heartbeat enqueued");
      }
      await sleep2(this.runtime.config.heartbeatCheckIntervalMs);
    }
    log.info("Heartbeat loop stopped");
  }
  // Periodically return timed-out leased jobs to the queue.
  async requeueLoop() {
    while (this.running) {
      const count = this.runtime.outbox.requeueTimedOut();
      if (count > 0) {
        this.runtime.logger.info({ count }, "Requeued timed-out jobs");
      }
      await sleep2(this.runtime.config.heartbeatCheckIntervalMs);
    }
  }
  // Publish a snapshot for a queued job. If files are still dirty or being
  // hashed, push the job back with backoff instead of snapshotting a
  // half-hashed tree.
  async handleSnapshotJob(job, log) {
    if (this.runtime.filesRepo.countByState("dirty") > 0 || this.runtime.filesRepo.countByState("hashing") > 0) {
      const delay = computeBackoff(job.attempts);
      this.runtime.outbox.retry(job.id, this.fsControlLeaseOwner, delay);
      await sleep2(200);
      return;
    }
    try {
      const result = await publishSnapshot(
        this.runtime.config.rootPath,
        this.runtime.filesRepo,
        this.runtime.snapshotsRepo,
        this.runtime.clients.sync,
        log
      );
      this.runtime.outbox.ack(job.id, this.fsControlLeaseOwner);
      this.state.updateSnapshotReady(result.createdAt);
      log.info({ snapshotHash: result.snapshotHash }, "Snapshot job completed");
    } catch (error) {
      log.warn({ err: error }, "Snapshot job failed");
      const delay = computeBackoff(job.attempts);
      this.runtime.outbox.retry(job.id, this.fsControlLeaseOwner, delay);
      await sleep2(delay);
    }
  }
  // Send a heartbeat for a queued job; on failure, retry with backoff.
  async handleHeartbeatJob(job, log) {
    try {
      await sendHeartbeat(
        this.runtime.config.rootId,
        this.runtime.snapshotsRepo,
        this.runtime.clients.sync,
        log
      );
      this.runtime.outbox.ack(job.id, this.fsControlLeaseOwner);
      this.state.updateHeartbeat(Date.now());
    } catch (error) {
      const delay = computeBackoff(job.attempts);
      this.runtime.outbox.retry(job.id, this.fsControlLeaseOwner, delay);
      log.warn({ err: error }, "Heartbeat failed; retry scheduled");
    }
  }
};
1587
+
1588
+ // src/index.ts
1589
// Release resources on shutdown. A runner owns the runtime's resources, so
// stopping it is sufficient; otherwise close the runtime pieces directly.
async function cleanupRuntime(runner, runtime) {
  if (runner) {
    await runner.stop();
    return;
  }
  if (!runtime) {
    return;
  }
  await runtime.hasher.close();
  runtime.clients.close();
  runtime.db.close();
}
1600
// Resolve with the first SIGINT/SIGTERM received; one-shot — both
// listeners are detached before resolving.
function awaitShutdownSignals() {
  const signals = ["SIGINT", "SIGTERM"];
  return new Promise((resolve) => {
    const onSignal = (signal) => {
      signals.forEach((sig) => process.off(sig, onSignal));
      resolve(signal);
    };
    signals.forEach((sig) => process.on(sig, onSignal));
  });
}
1614
// One-shot mode: bootstrap, run the initial sync pipeline once, then tear
// everything down. Returns the published snapshot summary.
async function runInitialSync(params) {
  const runtime = await bootstrap(params);
  try {
    const result = await runInitialSyncPipeline(runtime);
    const { snapshotHash, filesCount, totalSize } = result;
    runtime.logger.info(
      { snapshotHash, filesCount, totalSize },
      "Initial sync completed"
    );
    return result;
  } finally {
    // Always release worker threads, HTTP clients and the database.
    await runtime.hasher.close();
    runtime.clients.close();
    runtime.db.close();
  }
}
1633
// Continuous mode: bootstrap, run an initial sync while buffering watcher
// events, then replay them and run until SIGINT/SIGTERM.
async function runService(params) {
  const runtime = await bootstrap(params);
  let runner;
  try {
    runner = new ServiceRunner(runtime);
    // Buffer watcher events during the initial sync so nothing is missed.
    await runner.prepareWatcher(true);
    const initial = await runInitialSyncPipeline(runtime);
    runtime.logger.info(
      {
        snapshotHash: initial.snapshotHash,
        filesCount: initial.filesCount
      },
      "Initial sync completed; entering continuous mode"
    );
    runner.recordInitialSnapshot(initial.createdAt);
    await runner.startLoops();
    await runner.enableWatcherProcessing();
    runtime.logger.info("Coderule scanner service is running");
    const signal = await awaitShutdownSignals();
    runtime.logger.info({ signal }, "Shutdown signal received");
  } finally {
    await cleanupRuntime(runner, runtime);
  }
}
1657
+
1658
// Public API of the bundle.
// Fix: the sourceMappingURL directive was emitted twice; tools honor only
// the last occurrence, so the duplicate is dropped.
exports.runInitialSync = runInitialSync;
exports.runService = runService;
//# sourceMappingURL=index.cjs.map