@coderule/mcp 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -0
- package/dist/cli.cjs +1809 -0
- package/dist/cli.cjs.map +1 -0
- package/dist/cli.d.cts +2 -0
- package/dist/cli.d.ts +2 -0
- package/dist/cli.js +1795 -0
- package/dist/cli.js.map +1 -0
- package/dist/hash/WorkerThread.cjs +59 -0
- package/dist/hash/WorkerThread.cjs.map +1 -0
- package/dist/hash/WorkerThread.d.cts +2 -0
- package/dist/hash/WorkerThread.d.ts +2 -0
- package/dist/hash/WorkerThread.js +53 -0
- package/dist/hash/WorkerThread.js.map +1 -0
- package/dist/index.cjs +1661 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +16 -0
- package/dist/index.d.ts +16 -0
- package/dist/index.js +1646 -0
- package/dist/index.js.map +1 -0
- package/dist/mcp-cli.cjs +1919 -0
- package/dist/mcp-cli.cjs.map +1 -0
- package/dist/mcp-cli.d.cts +2 -0
- package/dist/mcp-cli.d.ts +2 -0
- package/dist/mcp-cli.js +1905 -0
- package/dist/mcp-cli.js.map +1 -0
- package/package.json +91 -0
package/dist/mcp-cli.cjs
ADDED
|
@@ -0,0 +1,1919 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
var fs4 = require('fs/promises');
|
|
4
|
+
var path = require('path');
|
|
5
|
+
var stdio_js = require('@modelcontextprotocol/sdk/server/stdio.js');
|
|
6
|
+
var crypto = require('crypto');
|
|
7
|
+
var envPaths = require('env-paths');
|
|
8
|
+
var pino = require('pino');
|
|
9
|
+
var Database = require('better-sqlite3');
|
|
10
|
+
var qulite = require('@coderule/qulite');
|
|
11
|
+
var clients = require('@coderule/clients');
|
|
12
|
+
var fs2 = require('fs');
|
|
13
|
+
var worker_threads = require('worker_threads');
|
|
14
|
+
var chokidar = require('chokidar');
|
|
15
|
+
var mcp_js = require('@modelcontextprotocol/sdk/server/mcp.js');
|
|
16
|
+
var zod = require('zod');
|
|
17
|
+
|
|
18
|
+
function _interopDefault (e) { return e && e.__esModule ? e : { default: e }; }
|
|
19
|
+
|
|
20
|
+
var fs4__default = /*#__PURE__*/_interopDefault(fs4);
|
|
21
|
+
var path__default = /*#__PURE__*/_interopDefault(path);
|
|
22
|
+
var envPaths__default = /*#__PURE__*/_interopDefault(envPaths);
|
|
23
|
+
var pino__default = /*#__PURE__*/_interopDefault(pino);
|
|
24
|
+
var Database__default = /*#__PURE__*/_interopDefault(Database);
|
|
25
|
+
var fs2__default = /*#__PURE__*/_interopDefault(fs2);
|
|
26
|
+
var chokidar__default = /*#__PURE__*/_interopDefault(chokidar);
|
|
27
|
+
|
|
28
|
+
// node_modules/tsup/assets/cjs_shims.js
// CJS shim replacing `import.meta.url`: in Node (no `document`) derive it
// from __filename; in a browser-like host fall back to the current script.
var getImportMetaUrl = () => typeof document === "undefined" ? new URL(`file:${__filename}`).href : document.currentScript && document.currentScript.src || new URL("main.js", document.baseURI).href;
var importMetaUrl = /* @__PURE__ */ getImportMetaUrl();
// Logger setup: level comes from CODERULE_LOG_LEVEL (default "info") and
// output goes to stderr so stdout stays free for the MCP stdio transport.
var level = process.env.CODERULE_LOG_LEVEL ?? "info";
var baseLogger = pino__default.default(
  {
    level,
    name: "coderule-scanner",
    timestamp: pino__default.default.stdTimeFunctions.isoTime
  },
  process.stderr
);
var logger = baseLogger;

// src/config/Defaults.ts
// Timing/size defaults (milliseconds unless noted); each can be overridden
// through an environment variable in resolveConfig below.
var DEFAULT_SNAPSHOT_DEBOUNCE_MS = 1e3;
var DEFAULT_HEARTBEAT_INTERVAL_MS = 6e4;
var DEFAULT_HEARTBEAT_CHECK_INTERVAL_MS = 5e3;
var DEFAULT_QUEUE_POLL_INTERVAL_MS = 500;
var DEFAULT_HASH_BATCH_SIZE = 32;
var DEFAULT_MAX_SNAPSHOT_ATTEMPTS = 5;
var DEFAULT_HTTP_TIMEOUT_MS = 3e4;

// src/config/Configurator.ts
var DEFAULT_RETRIEVAL_FORMATTER = "standard";
// Bundle of the tunable defaults consumed by resolveConfig.
var DEFAULTS = {
  snapshotDebounceMs: DEFAULT_SNAPSHOT_DEBOUNCE_MS,
  heartbeatIntervalMs: DEFAULT_HEARTBEAT_INTERVAL_MS,
  heartbeatCheckIntervalMs: DEFAULT_HEARTBEAT_CHECK_INTERVAL_MS,
  queuePollIntervalMs: DEFAULT_QUEUE_POLL_INTERVAL_MS,
  hashBatchSize: DEFAULT_HASH_BATCH_SIZE,
  maxSnapshotAttempts: DEFAULT_MAX_SNAPSHOT_ATTEMPTS
};
|
|
61
|
+
// Canonicalize a root directory: absolute, normalized, and with forward
// slashes regardless of platform (so the derived rootId is stable).
function normalizeRoot(root) {
  const pathMod = path__default.default;
  const absolute = pathMod.normalize(pathMod.resolve(root));
  return absolute.split(pathMod.sep).join("/");
}
|
|
66
|
+
// Hex-encoded SHA-256 digest of the given input.
function sha256(input) {
  const hasher = crypto.createHash("sha256");
  hasher.update(input);
  return hasher.digest("hex");
}
|
|
69
|
+
// Parse a positive integer from an (environment) string value.
// Returns `fallback` when the value is absent/empty; throws on anything
// that is not a strictly positive whole number.
//
// Fix: Number.parseInt silently accepts trailing garbage, so inputs like
// "500ms" used to be treated as 500. Validate the whole string first.
function parseInteger(value, fallback) {
  if (!value) return fallback;
  if (!/^\s*\d+\s*$/.test(value)) {
    throw new Error(`Invalid integer value: ${value}`);
  }
  const parsed = Number.parseInt(value, 10);
  if (Number.isNaN(parsed) || parsed <= 0) {
    throw new Error(`Invalid integer value: ${value}`);
  }
  return parsed;
}
|
|
77
|
+
// Validate the retrieval formatter name ("standard" | "compact"),
// case-insensitively; absent values fall back to the default.
function parseFormatter(value) {
  if (!value) return DEFAULT_RETRIEVAL_FORMATTER;
  const lowered = value.toLowerCase();
  switch (lowered) {
    case "standard":
    case "compact":
      return lowered;
    default:
      throw new Error(
        `Invalid CODERULE_RETRIEVAL_FORMATTER: ${value}. Expected "standard" or "compact"`
      );
  }
}
|
|
87
|
+
// Build the runtime configuration for one watched root.
// Token comes from params or CODERULE_TOKEN (required). The root path is
// CODERULE_ROOT or the CWD; its sha256 (over the slash-normalized path)
// becomes rootId, which also names the per-root SQLite file under the
// data directory's `watch/` folder (created here if needed).
// Throws on a missing token or on any invalid numeric/env override.
async function resolveConfig({
  token
}) {
  const resolvedToken = token ?? process.env.CODERULE_TOKEN;
  if (!resolvedToken) {
    throw new Error(
      "Missing token: provide params.token or CODERULE_TOKEN env"
    );
  }
  const rootCandidate = process.env.CODERULE_ROOT || process.cwd();
  const rootPath = path__default.default.resolve(rootCandidate);
  // rootId is derived from the normalized (POSIX-separated) path so the
  // same directory maps to the same database on every platform.
  const normalized = normalizeRoot(rootPath);
  const rootId = sha256(normalized);
  const dataDir = process.env.CODERULE_DATA_DIR || envPaths__default.default("coderule").data;
  const watchDir = path__default.default.join(dataDir, "watch");
  await fs4__default.default.mkdir(watchDir, { recursive: true });
  const dbPath = path__default.default.join(watchDir, `${rootId}.sqlite`);
  // Start from compiled defaults; env overrides are applied below.
  const baseConfig = {
    token: resolvedToken,
    rootPath,
    rootId,
    dbPath,
    dataDir,
    authBaseUrl: process.env.CODERULE_AUTH_URL,
    astBaseUrl: process.env.CODERULE_AST_URL,
    syncBaseUrl: process.env.CODERULE_SYNC_URL,
    retrievalBaseUrl: process.env.CODERULE_RETRIEVAL_URL,
    httpTimeout: void 0,
    snapshotDebounceMs: DEFAULTS.snapshotDebounceMs,
    heartbeatIntervalMs: DEFAULTS.heartbeatIntervalMs,
    heartbeatCheckIntervalMs: DEFAULTS.heartbeatCheckIntervalMs,
    queuePollIntervalMs: DEFAULTS.queuePollIntervalMs,
    hashBatchSize: DEFAULTS.hashBatchSize,
    maxSnapshotAttempts: DEFAULTS.maxSnapshotAttempts,
    retrievalFormatter: parseFormatter(
      process.env.CODERULE_RETRIEVAL_FORMATTER
    )
  };
  // Each numeric override is only parsed when the env var is set, so an
  // unset variable keeps the default while a malformed one throws.
  if (process.env.CODERULE_SNAPSHOT_DEBOUNCE_MS) {
    baseConfig.snapshotDebounceMs = parseInteger(
      process.env.CODERULE_SNAPSHOT_DEBOUNCE_MS,
      baseConfig.snapshotDebounceMs
    );
  }
  if (process.env.CODERULE_HEARTBEAT_INTERVAL_MS) {
    baseConfig.heartbeatIntervalMs = parseInteger(
      process.env.CODERULE_HEARTBEAT_INTERVAL_MS,
      baseConfig.heartbeatIntervalMs
    );
  }
  if (process.env.CODERULE_HEARTBEAT_CHECK_INTERVAL_MS) {
    baseConfig.heartbeatCheckIntervalMs = parseInteger(
      process.env.CODERULE_HEARTBEAT_CHECK_INTERVAL_MS,
      baseConfig.heartbeatCheckIntervalMs
    );
  }
  if (process.env.CODERULE_QUEUE_POLL_INTERVAL_MS) {
    baseConfig.queuePollIntervalMs = parseInteger(
      process.env.CODERULE_QUEUE_POLL_INTERVAL_MS,
      baseConfig.queuePollIntervalMs
    );
  }
  if (process.env.CODERULE_HASH_BATCH_SIZE) {
    baseConfig.hashBatchSize = parseInteger(
      process.env.CODERULE_HASH_BATCH_SIZE,
      baseConfig.hashBatchSize
    );
  }
  if (process.env.CODERULE_MAX_SNAPSHOT_ATTEMPTS) {
    baseConfig.maxSnapshotAttempts = parseInteger(
      process.env.CODERULE_MAX_SNAPSHOT_ATTEMPTS,
      baseConfig.maxSnapshotAttempts
    );
  }
  // httpTimeout is always resolved (unset env var -> built-in default).
  baseConfig.httpTimeout = parseInteger(
    process.env.CODERULE_HTTP_TIMEOUT,
    DEFAULT_HTTP_TIMEOUT_MS
  );
  logger.debug(
    {
      rootPath,
      dbPath,
      dataDir,
      authBaseUrl: baseConfig.authBaseUrl,
      astBaseUrl: baseConfig.astBaseUrl,
      syncBaseUrl: baseConfig.syncBaseUrl
    },
    "Resolved configuration"
  );
  return baseConfig;
}
|
|
178
|
+
|
|
179
|
+
// src/db/Schema.ts
// DDL for the per-root scan database. `files` is keyed by the normalized
// relative path (UNIQUE) and carries stat fingerprint columns plus the
// hashing state machine (hash_state / hash_owner / lease columns);
// `snapshots` records each published snapshot summary.
var FILES_SCHEMA = `
CREATE TABLE IF NOT EXISTS files (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  rel_path TEXT NOT NULL,
  display_path TEXT NOT NULL,
  size INTEGER NOT NULL,
  mtime_ns INTEGER NOT NULL,
  mode INTEGER,
  ino TEXT,
  dev TEXT,
  is_symlink INTEGER NOT NULL DEFAULT 0,
  target TEXT,
  content_sha256 TEXT,
  service_file_hash TEXT,
  last_seen_ts INTEGER NOT NULL,
  hash_state TEXT NOT NULL,
  hash_owner TEXT,
  hash_lease_expires_at INTEGER,
  hash_started_at INTEGER,
  UNIQUE(rel_path)
);
CREATE INDEX IF NOT EXISTS idx_files_hash_state ON files(hash_state);
CREATE INDEX IF NOT EXISTS idx_files_content_sha ON files(content_sha256);
CREATE INDEX IF NOT EXISTS idx_files_service_hash ON files(service_file_hash);
`;
var SNAPSHOTS_SCHEMA = `
CREATE TABLE IF NOT EXISTS snapshots (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  snapshot_hash TEXT NOT NULL,
  files_count INTEGER NOT NULL,
  total_size INTEGER NOT NULL,
  created_at INTEGER NOT NULL
);
`;
|
|
214
|
+
|
|
215
|
+
// src/db/Database.ts
|
|
216
|
+
// Execute a DDL statement, swallowing "duplicate column name" errors so
// repeated ALTER TABLE migrations stay idempotent; any other failure is
// rethrown untouched.
function safeAlter(db, sql) {
  try {
    db.exec(sql);
  } catch (error) {
    const message = error?.message;
    const isDuplicateColumn =
      typeof message === "string" && message.includes("duplicate column name");
    if (!isDuplicateColumn) {
      throw error;
    }
  }
}
|
|
226
|
+
// Apply idempotent column additions for databases created before the
// hashing-lease columns existed, then ensure the lease index is present.
// Duplicate-column failures are absorbed by safeAlter; any other
// migration error is logged with its SQL and rethrown.
function applyMigrations(db, logger2) {
  const columnAdditions = [
    "ALTER TABLE files ADD COLUMN hash_owner TEXT",
    "ALTER TABLE files ADD COLUMN hash_lease_expires_at INTEGER",
    "ALTER TABLE files ADD COLUMN hash_started_at INTEGER"
  ];
  columnAdditions.forEach((sql) => {
    try {
      safeAlter(db, sql);
    } catch (error) {
      logger2.error({ err: error, sql }, "Database migration failed");
      throw error;
    }
  });
  db.exec(
    "CREATE INDEX IF NOT EXISTS idx_files_hash_lease ON files(hash_state, hash_lease_expires_at)"
  );
}
|
|
244
|
+
// Open (or create) the per-root SQLite database, configure pragmas,
// create the base schema inside a transaction, and apply migrations.
// On schema failure the transaction is rolled back and the handle is
// closed before rethrowing, so no half-initialized database leaks.
function openDatabase(dbPath, logger2) {
  const db = new Database__default.default(dbPath, { verbose: void 0 });
  logger2.info({ dbPath }, "Opened SQLite database");
  // WAL + NORMAL sync: concurrent readers with reasonable durability;
  // busy_timeout avoids immediate SQLITE_BUSY under contention.
  db.pragma("journal_mode = WAL");
  db.pragma("synchronous = NORMAL");
  db.pragma("busy_timeout = 5000");
  db.pragma("foreign_keys = ON");
  db.exec("BEGIN");
  try {
    db.exec(FILES_SCHEMA);
    db.exec(SNAPSHOTS_SCHEMA);
    db.exec("COMMIT");
  } catch (error) {
    db.exec("ROLLBACK");
    db.close();
    throw error;
  }
  // Migrations run outside the transaction; safeAlter makes them idempotent.
  applyMigrations(db, logger2);
  return db;
}
|
|
264
|
+
|
|
265
|
+
// src/db/FilesRepo.ts
|
|
266
|
+
// Repository of per-file scan state. Each `files` row stores a stat
// fingerprint plus a hash-state machine (dirty -> hashing -> clean, with
// 'missing' for deleted paths) and a lease (owner/expiry) while hashing.
// All statements are prepared once in the constructor.
var FilesRepo = class {
  constructor(db) {
    this.db = db;
    // Point lookup by normalized relative path (rel_path is UNIQUE).
    this.selectByRelPath = this.db.prepare(
      "SELECT * FROM files WHERE rel_path = ?"
    );
    // Insert a newly discovered file: hashes and lease columns start NULL.
    this.insertStmt = this.db.prepare(
      `INSERT INTO files (
        rel_path,
        display_path,
        size,
        mtime_ns,
        mode,
        ino,
        dev,
        is_symlink,
        target,
        content_sha256,
        service_file_hash,
        last_seen_ts,
        hash_state,
        hash_owner,
        hash_lease_expires_at,
        hash_started_at
      ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, NULL, NULL, ?, ?, NULL, NULL, NULL)`
    );
    // Refresh stat fields and state. The CASE WHEN guards keep the lease
    // columns only while the row stays 'hashing' (the same state value is
    // bound four times by upsertFromStat: once for hash_state, once per guard).
    this.updateStmt = this.db.prepare(
      `UPDATE files SET
        display_path = ?,
        size = ?,
        mtime_ns = ?,
        mode = ?,
        ino = ?,
        dev = ?,
        is_symlink = ?,
        target = ?,
        content_sha256 = ?,
        service_file_hash = ?,
        last_seen_ts = ?,
        hash_state = ?,
        hash_owner = CASE WHEN ? = 'hashing' THEN hash_owner ELSE NULL END,
        hash_lease_expires_at = CASE WHEN ? = 'hashing' THEN hash_lease_expires_at ELSE NULL END,
        hash_started_at = CASE WHEN ? = 'hashing' THEN hash_started_at ELSE NULL END
      WHERE id = ?`
    );
    // Mark one path missing: clears hashes and lease, bumps last_seen_ts.
    this.markMissingStmt = this.db.prepare(
      `UPDATE files
       SET hash_state = 'missing', content_sha256 = NULL, service_file_hash = NULL, last_seen_ts = ?,
           hash_owner = NULL, hash_lease_expires_at = NULL, hash_started_at = NULL
       WHERE rel_path = ?`
    );
    // Mark a path and its whole subtree missing (exact match OR 'prefix/%').
    this.markMissingPrefixStmt = this.db.prepare(
      `UPDATE files
       SET hash_state = 'missing', content_sha256 = NULL, service_file_hash = NULL,
           hash_owner = NULL, hash_lease_expires_at = NULL, hash_started_at = NULL
       WHERE (rel_path = ? OR rel_path LIKE (? || '/%')) AND hash_state != 'missing'`
    );
    // Re-queue one path for hashing, dropping any existing lease.
    this.markDirtyStmt = this.db.prepare(
      `UPDATE files
       SET hash_state = 'dirty', last_seen_ts = ?,
           hash_owner = NULL, hash_lease_expires_at = NULL, hash_started_at = NULL
       WHERE rel_path = ?`
    );
    // Atomically claim up to @limit oldest dirty rows for @owner, moving
    // them to 'hashing' with a lease, and return the claimed rows.
    this.claimDirtyStmt = this.db.prepare(
      `WITH candidates AS (
        SELECT id
        FROM files
        WHERE hash_state = 'dirty'
        ORDER BY last_seen_ts ASC, id ASC
        LIMIT @limit
      )
      UPDATE files
      SET hash_state = 'hashing',
          hash_owner = @owner,
          hash_lease_expires_at = @lease_expires_at,
          hash_started_at = @now
      WHERE id IN candidates
      RETURNING *`
    );
    // Release a claimed row back to 'dirty' by id (e.g. after a hash failure).
    this.markDirtyByIdStmt = this.db.prepare(
      `UPDATE files
       SET hash_state = 'dirty', hash_owner = NULL, hash_lease_expires_at = NULL, hash_started_at = NULL
       WHERE id = ?`
    );
    // Record computed hashes and transition the row to 'clean'.
    this.applyHashesStmt = this.db.prepare(
      `UPDATE files
       SET content_sha256 = ?, service_file_hash = ?, hash_state = 'clean',
           hash_owner = NULL, hash_lease_expires_at = NULL, hash_started_at = NULL
       WHERE id = ?`
    );
    // Reclaim rows whose hashing lease has expired (crashed/stalled worker).
    this.requeueExpiredHashingStmt = this.db.prepare(
      `UPDATE files
       SET hash_state = 'dirty', hash_owner = NULL, hash_lease_expires_at = NULL, hash_started_at = NULL
       WHERE hash_state = 'hashing'
         AND hash_lease_expires_at IS NOT NULL
         AND hash_lease_expires_at <= ?`
    );
    // Unconditionally reset every 'hashing' row (startup recovery).
    this.resetHashingStmt = this.db.prepare(
      `UPDATE files
       SET hash_state = 'dirty', hash_owner = NULL, hash_lease_expires_at = NULL, hash_started_at = NULL
       WHERE hash_state = 'hashing'`
    );
    // Snapshot contents: every clean, hashed file in deterministic order.
    this.selectCleanSnapshotStmt = this.db.prepare(
      `SELECT rel_path, service_file_hash, size
       FROM files
       WHERE hash_state = 'clean' AND service_file_hash IS NOT NULL
       ORDER BY rel_path ASC`
    );
    // Aggregate count/size over the same snapshot-eligible rows.
    this.totalsStmt = this.db.prepare(
      `SELECT COUNT(*) AS files_count, COALESCE(SUM(size), 0) AS total_size
       FROM files
       WHERE hash_state = 'clean' AND service_file_hash IS NOT NULL`
    );
    // Sweep: anything not seen since the given timestamp becomes 'missing'.
    this.markMissingBeforeStmt = this.db.prepare(
      `UPDATE files
       SET hash_state = 'missing', content_sha256 = NULL, service_file_hash = NULL,
           hash_owner = NULL, hash_lease_expires_at = NULL, hash_started_at = NULL
       WHERE last_seen_ts < ? AND hash_state != 'missing'`
    );
    this.countByStateStmt = this.db.prepare(
      "SELECT COUNT(*) as count FROM files WHERE hash_state = ?"
    );
  }
  // Fetch one row by normalized relative path, or undefined.
  getByRelPath(relPath) {
    return this.selectByRelPath.get(relPath);
  }
  // Insert or refresh a row from fs.Stats; returns the resulting
  // hash_state. Any fingerprint change (size/mtime/mode/ino/dev/symlink
  // target) — or a previously 'missing' row — resets the row to 'dirty'
  // and clears its hashes.
  upsertFromStat(params) {
    const now = Date.now();
    const { relPath, displayPath, stats, isSymlink, symlinkTarget } = params;
    const existing = this.getByRelPath(relPath);
    // mtime stored as integer nanoseconds for precision-stable comparison.
    const mtimeNs = Math.trunc(stats.mtimeMs * 1e6);
    const ino = typeof stats.ino === "number" ? String(stats.ino) : null;
    const dev = typeof stats.dev === "number" ? String(stats.dev) : null;
    const mode = typeof stats.mode === "number" ? stats.mode : null;
    const isSymlinkInt = isSymlink ? 1 : 0;
    if (!existing) {
      this.insertStmt.run(
        relPath,
        displayPath,
        stats.size,
        mtimeNs,
        mode,
        ino,
        dev,
        isSymlinkInt,
        symlinkTarget,
        now,
        "dirty"
      );
      return "dirty";
    }
    let nextState = existing.hash_state;
    let nextContent = existing.content_sha256;
    let nextServiceHash = existing.service_file_hash;
    const changed = existing.size !== stats.size || existing.mtime_ns !== mtimeNs || existing.mode !== mode || existing.ino !== ino || existing.dev !== dev || existing.is_symlink !== isSymlinkInt || existing.target !== symlinkTarget;
    if (changed || existing.hash_state === "missing") {
      nextState = "dirty";
      nextContent = null;
      nextServiceHash = null;
    }
    // nextState is bound four times: hash_state plus the three CASE WHEN
    // guards in updateStmt that preserve the lease only while 'hashing'.
    this.updateStmt.run(
      displayPath,
      stats.size,
      mtimeNs,
      mode,
      ino,
      dev,
      isSymlinkInt,
      symlinkTarget,
      nextContent,
      nextServiceHash,
      now,
      nextState,
      nextState,
      nextState,
      nextState,
      existing.id
    );
    return nextState;
  }
  // Mark a single path missing; returns the number of affected rows.
  markMissing(relPath) {
    const now = Date.now();
    const result = this.markMissingStmt.run(now, relPath);
    return result.changes ?? 0;
  }
  // Mark a directory subtree missing; returns the number of affected rows.
  markMissingByPrefix(prefix) {
    const result = this.markMissingPrefixStmt.run(prefix, prefix);
    return result.changes ?? 0;
  }
  // Re-queue a path for hashing.
  markDirty(relPath) {
    const now = Date.now();
    this.markDirtyStmt.run(now, relPath);
  }
  // Sweep rows not seen since `timestamp`; returns affected-row count.
  markMissingBefore(timestamp) {
    const result = this.markMissingBeforeStmt.run(timestamp);
    return result.changes ?? 0;
  }
  // Claim up to `limit` dirty rows for `owner` with a lease of `leaseMs`
  // milliseconds; returns the claimed rows (possibly empty).
  claimDirty(limit, owner, leaseMs) {
    if (limit <= 0) {
      return [];
    }
    const now = Date.now();
    return this.claimDirtyStmt.all({
      limit,
      owner,
      lease_expires_at: now + leaseMs,
      now
    });
  }
  // Release a batch of claimed rows back to 'dirty' in one transaction.
  markDirtyByIds(ids) {
    if (!ids.length) return;
    const tx = this.db.transaction((batch) => {
      for (const id of batch) {
        this.markDirtyByIdStmt.run(id);
      }
    });
    tx(ids);
  }
  // Persist a batch of computed hashes ('hashing' -> 'clean') atomically.
  applyHashResults(results) {
    if (!results.length) return;
    const tx = this.db.transaction((batch) => {
      for (const { id, contentSha256, serviceFileHash } of batch) {
        this.applyHashesStmt.run(contentSha256, serviceFileHash, id);
      }
    });
    tx(results);
  }
  // Ordered list of clean, hashed files for snapshot construction.
  getCleanFilesForSnapshot() {
    return this.selectCleanSnapshotStmt.all();
  }
  // Count and total byte size of snapshot-eligible files.
  getTotalsForSnapshot() {
    const row = this.totalsStmt.get();
    return {
      filesCount: row?.files_count ?? 0,
      totalSize: row?.total_size ?? 0
    };
  }
  // Number of rows currently in the given hash_state.
  countByState(state) {
    const row = this.countByStateStmt.get(state);
    return row?.count ?? 0;
  }
  // Requeue rows whose hashing lease expired at or before `now`.
  requeueExpiredHashing(now) {
    const result = this.requeueExpiredHashingStmt.run(now);
    return result.changes ?? 0;
  }
  // Startup recovery: requeue every row stuck in 'hashing'.
  resetHashingStates() {
    const result = this.resetHashingStmt.run();
    return result.changes ?? 0;
  }
};
|
|
516
|
+
|
|
517
|
+
// src/db/SnapshotsRepo.ts
|
|
518
|
+
// Minimal repository over the `snapshots` table: append a snapshot
// summary row and read back the most recent one.
var SnapshotsRepo = class {
  constructor(db) {
    this.db = db;
    const insertSql = `INSERT INTO snapshots (snapshot_hash, files_count, total_size, created_at)
       VALUES (?, ?, ?, ?)`;
    const latestSql = `SELECT * FROM snapshots ORDER BY created_at DESC LIMIT 1`;
    this.insertStmt = this.db.prepare(insertSql);
    this.selectLatestStmt = this.db.prepare(latestSql);
  }
  // Record one snapshot summary row.
  insert(snapshotHash, filesCount, totalSize, createdAt) {
    this.insertStmt.run(snapshotHash, filesCount, totalSize, createdAt);
  }
  // Most recently created snapshot row, or undefined when none exist.
  getLatest() {
    return this.selectLatestStmt.get();
  }
};
|
|
536
|
+
// Durable control-job outbox on top of the @coderule/qulite queue.
// Snapshot and heartbeat jobs are stored in the shared SQLite database
// and tagged with a `kind` column so deduplicated jobs can be updated.
var Outbox = class {
  constructor(db, logger2) {
    this.log = logger2.child({ scope: "outbox" });
    this.queue = new qulite.Qulite(db, {
      logger: this.log,
      defaultLeaseMs: 3e4,
      defaultMaxAttempts: 10
    });
    // Stamp the `kind` column on a job identified by its dedupe key.
    this.markKindStmt = db.prepare(
      `UPDATE qulite_jobs SET kind = @kind WHERE dedupe_key = @dedupe_key`
    );
    // Older releases enqueued fs_control jobs without a kind; drop them
    // at startup so unprocessable legacy rows don't linger.
    this.purgeLegacyStmt = db.prepare(
      `DELETE FROM qulite_jobs WHERE type = 'fs_control' AND (kind IS NULL OR kind = '')`
    );
    const purged = this.purgeLegacyStmt.run().changes ?? 0;
    if (purged > 0) {
      this.log.warn({ purged }, "Purged legacy fs_control jobs without kind");
    }
  }
  // Expose the underlying queue for direct polling by consumers.
  getQueue() {
    return this.queue;
  }
  markKind(dedupeKey, kind) {
    this.markKindStmt.run({ dedupe_key: dedupeKey, kind });
  }
  // Enqueue (or refresh) the deduplicated snapshot job for a root.
  // NOTE(review): the dedupe key is assumed to be `snapshot:<rootId>` as
  // produced by enqueueFsEvent — confirm against @coderule/qulite.
  enqueueSnapshot(rootId, delayMs = 0) {
    const result = qulite.enqueueFsEvent(this.queue, {
      root_id: rootId,
      rel_path: "",
      kind: "snapshot",
      delayMs,
      maxAttempts: 20,
      data: { root_id: rootId, kind: "snapshot" }
    });
    this.markKind(`snapshot:${rootId}`, "snapshot");
    if (result.changes > 0) {
      this.log.debug({ rootId }, "Enqueued snapshot job");
    }
  }
  // Enqueue (or refresh) the deduplicated heartbeat job for a root.
  enqueueHeartbeat(rootId, delayMs = 0) {
    const result = qulite.enqueueFsEvent(this.queue, {
      root_id: rootId,
      rel_path: "",
      kind: "heartbeat",
      delayMs,
      maxAttempts: 5,
      data: { root_id: rootId, kind: "heartbeat" }
    });
    this.markKind(`heartbeat:${rootId}`, "heartbeat");
    if (result.changes > 0) {
      this.log.debug({ rootId }, "Enqueued heartbeat job");
    }
  }
  // Claim the next fs_control job under a lease (default 30s).
  claimFsControlJob(leaseOwner, leaseMs = 3e4) {
    return this.queue.claimNext({ type: "fs_control", leaseOwner, leaseMs });
  }
  // Thin delegations to the underlying queue's job lifecycle API.
  ack(jobId, leaseOwner) {
    return this.queue.ack(jobId, leaseOwner);
  }
  retry(jobId, leaseOwner, delayMs) {
    return this.queue.retry(jobId, leaseOwner, delayMs);
  }
  fail(jobId, leaseOwner, error) {
    return this.queue.fail(jobId, leaseOwner, error);
  }
  requeueTimedOut() {
    return this.queue.requeueTimedOut();
  }
};
|
|
605
|
+
// Build a per-service client config object. Returns undefined when both
// inputs are undefined (so the client library falls back to its own
// defaults); otherwise includes only the fields that were provided.
function serviceConfig(baseUrl, timeout) {
  if (baseUrl === undefined && timeout === undefined) {
    return undefined;
  }
  const config = {};
  if (baseUrl !== undefined) {
    config.baseUrl = baseUrl;
  }
  if (timeout !== undefined) {
    config.timeout = timeout;
  }
  return config;
}
|
|
618
|
+
// Construct the CoderuleClients facade from resolved config: one shared
// token, per-service base URL / timeout overrides, and a JWT factory
// callback that logs each token refresh at debug level.
function createClients(config, logger2) {
  const clientLogger = logger2.child({ scope: "clients" });
  const httpTimeout = config.httpTimeout;
  const clients$1 = new clients.CoderuleClients({
    token: config.token,
    // serviceConfig returns undefined when neither URL nor timeout is
    // set, letting the client library use its own defaults.
    auth: serviceConfig(config.authBaseUrl, httpTimeout),
    ast: serviceConfig(config.astBaseUrl, httpTimeout),
    sync: serviceConfig(config.syncBaseUrl, httpTimeout),
    retrieval: serviceConfig(config.retrievalBaseUrl, httpTimeout),
    jwtFactory: {
      onTokenRefreshed: (info) => {
        clientLogger.debug(
          {
            expiresAt: new Date(info.expiresAt).toISOString(),
            serverUrl: info.serverUrl
          },
          "JWT refreshed"
        );
      }
    }
  });
  return clients$1;
}
|
|
641
|
+
|
|
642
|
+
// src/rules/RulesFetcher.ts
|
|
643
|
+
// Fetch visitor rules (v2) from the AST service and log summary counts.
async function fetchVisitorRules(clients, logger2) {
  const log = logger2.child({ scope: "rules" });
  log.info("Fetching visitor rules v2 from AST service");
  const rules = await clients.ast.getVisitorRulesV2();
  const summary = {
    include_extensions: rules.include_extensions.length,
    include_filenames: rules.include_filenames.length,
    exclude_dirnames: rules.exclude_dirnames.length
  };
  log.info(summary, "Fetched visitor rules");
  return rules;
}
|
|
657
|
+
// Convert a platform-separated path to POSIX forward slashes.
function toPosix(input) {
  const separator = path__default.default.sep;
  return input.split(separator).join("/");
}
|
|
660
|
+
// Lower-cased final path segment of a POSIX-style path ("" for empty input).
function getLowerBasename(input) {
  const segments = input.split("/");
  const last = segments[segments.length - 1] ?? "";
  return last.toLowerCase();
}
|
|
664
|
+
// Lower-cased extension including the dot ("" when no dot is present).
// A leading-dot name like ".bashrc" is returned whole, matching
// String.prototype.lastIndexOf semantics.
function getLowerExt(basename) {
  const dotIndex = basename.lastIndexOf(".");
  return dotIndex < 0 ? "" : basename.slice(dotIndex).toLowerCase();
}
|
|
669
|
+
// Compile raw visitor rules into a reusable bundle: the compiled form
// (regex/sets), plus a predicate suitable for the file watcher. When the
// watcher supplies no stats the predicate falls back to a synchronous
// lstat; unreadable paths are treated as not-ignored (return false).
function compileRulesBundle(rules) {
  const compiled = clients.ASTHttpClient.compileRulesV2(rules);
  const basePredicate = clients.ASTHttpClient.buildIgnoredPredicate(compiled);
  const predicate = (fullPath, stats) => {
    let info = stats;
    if (!info) {
      logger.debug({ path: fullPath }, "Predicate fallback lstat");
      try {
        // Synchronous lstat keeps the predicate usable from chokidar's
        // synchronous `ignored` callback.
        info = fs2__default.default.lstatSync(fullPath);
      } catch (error) {
        logger.warn(
          { err: error, path: fullPath },
          "Failed to lstat path for rules predicate"
        );
        return false;
      }
    }
    return basePredicate(fullPath, info);
  };
  return {
    rules,
    compiled,
    predicate
  };
}
|
|
694
|
+
// Decide whether a non-directory path should be scanned: it must not sit
// under an excluded directory, and its basename must either be an
// explicitly included filename or carry an included extension.
function shouldIncludeFile(relPath, stats, bundle) {
  if (stats.isDirectory()) return false;
  const posixRel = toPosix(relPath);
  const { dirRe, names, exts } = bundle.compiled;
  if (dirRe.test(posixRel)) {
    return false;
  }
  const basename = getLowerBasename(posixRel);
  return names.has(basename) || exts.has(getLowerExt(basename));
}
|
|
707
|
+
// True when the directory's POSIX relative path matches the compiled
// exclusion regex, i.e. its subtree should not be walked.
function shouldPruneDirectory(relPath, bundle) {
  return bundle.compiled.dirRe.test(toPosix(relPath));
}
|
|
711
|
+
// Adapt the bundle's predicate into the (path, stats) callback shape the
// file watcher expects for its `ignored` option.
function buildWatcherIgnored(bundle) {
  return function ignored(fullPath, stats) {
    return bundle.predicate(fullPath, stats);
  };
}
|
|
714
|
+
// Bridge to a dedicated worker thread that computes file hashes.
// Requests are matched to responses by a monotonically increasing taskId;
// each in-flight request holds its promise resolvers in `pending`.
var HashWorker = class {
  constructor(logger2) {
    // taskId -> { resolve, reject } for requests awaiting a reply.
    this.pending = /* @__PURE__ */ new Map();
    this.nextTaskId = 1;
    // Set during terminate() so the 'exit' handler doesn't treat a
    // deliberate shutdown as a failure.
    this.terminating = false;
    this.log = logger2.child({ scope: "hash-worker" });
    // Worker script is resolved relative to this bundle's own URL
    // (importMetaUrl comes from the tsup CJS shim above).
    const workerUrl = new URL("./hash/WorkerThread.js", importMetaUrl);
    // Drop --input-type flags, which would conflict with loading the
    // worker from a file.
    const execArgv = process.execArgv.filter(
      (arg) => !arg.startsWith("--input-type")
    );
    const workerOptions = {
      name: "coderule-hasher",
      execArgv
    };
    if (workerUrl.pathname.endsWith(".js")) {
      workerOptions.type = "module";
    }
    this.worker = new worker_threads.Worker(workerUrl, workerOptions);
    this.worker.on(
      "message",
      (message) => this.onMessage(message)
    );
    this.worker.on("error", (error) => this.handleWorkerError(error));
    this.worker.on("exit", (code) => {
      if (code !== 0 && !this.terminating) {
        this.handleWorkerError(
          new Error(`Hasher worker exited with code ${code}`)
        );
      }
    });
  }
  // Reject all in-flight requests, then stop the worker thread.
  async terminate() {
    this.terminating = true;
    for (const [, pending] of this.pending) {
      pending.reject(new Error("Hasher worker terminated"));
    }
    this.pending.clear();
    await this.worker.terminate();
  }
  // Route a worker reply to its pending promise. Replies carry either a
  // hash result or an error payload; unknown taskIds are logged and dropped.
  onMessage(message) {
    const pending = this.pending.get(message.taskId);
    if (!pending) {
      this.log.warn(
        { taskId: message.taskId },
        "Received message for unknown task"
      );
      return;
    }
    this.pending.delete(message.taskId);
    if (message.type === "hash-result") {
      pending.resolve({
        contentSha256: message.contentSha256,
        serviceFileHash: message.serviceFileHash
      });
    } else {
      const error = new Error(message.error);
      // Propagate the worker-reported code (e.g. an fs errno) to callers.
      error.code = message.code;
      pending.reject(error);
    }
  }
  // A worker-level failure poisons every outstanding request.
  handleWorkerError(error) {
    this.log.error({ err: error }, "Hasher worker error");
    for (const [, pending] of this.pending) {
      pending.reject(error);
    }
    this.pending.clear();
  }
  // Ask the worker to hash one file; resolves with both hash variants.
  compute(absPath, relPath) {
    const taskId = this.nextTaskId++;
    const payload = {
      type: "hash",
      taskId,
      absPath,
      relPath
    };
    return new Promise((resolve, reject) => {
      this.pending.set(taskId, { resolve, reject });
      this.worker.postMessage(payload);
    });
  }
};
|
|
795
|
+
// Coordinates content hashing of files claimed from the FilesRepo queue.
// Hashing normally runs in a dedicated worker thread (HashWorker); it falls
// back to inline (main-thread) hashing when CODERULE_HASHER_INLINE=1 or the
// worker fails to start.
var Hasher = class {
  constructor(options) {
    this.options = options;
    this.worker = null;
    this.log = options.logger.child({ scope: "hasher" });
    // Env switch forces main-thread hashing (debugging / constrained envs).
    this.inlineMode = process.env.CODERULE_HASHER_INLINE === "1";
    // Unique lease owner so concurrent hasher processes cannot steal claims.
    this.ownerId = `hasher-${process.pid}-${Date.now()}`;
    // Lease duration from env; falls back to 30s when unset or invalid.
    const leaseFromEnv = process.env.CODERULE_HASH_LEASE_MS ? Number.parseInt(process.env.CODERULE_HASH_LEASE_MS, 10) : Number.NaN;
    this.leaseDurationMs = Number.isFinite(leaseFromEnv) && leaseFromEnv > 0 ? leaseFromEnv : 3e4;
    if (!this.inlineMode) {
      try {
        this.worker = new HashWorker(this.log);
      } catch (error) {
        // Worker startup failure is non-fatal: degrade to inline hashing.
        this.log.warn(
          { err: error },
          "Failed to start hasher worker, falling back to inline hashing"
        );
        this.worker = null;
        this.inlineMode = true;
      }
    }
  }
  // Release the worker thread (if any); inline mode has nothing to close.
  async close() {
    if (this.worker) {
      await this.worker.terminate();
    }
  }
  // display_path may already be absolute (recorded at inventory time);
  // otherwise re-anchor the repo-relative path under the configured root.
  resolveAbsolutePath(record) {
    if (path__default.default.isAbsolute(record.display_path)) {
      return record.display_path;
    }
    return path__default.default.join(this.options.rootPath, record.rel_path);
  }
  // Verify the file still exists before hashing; if gone, mark it missing in
  // the repo and report false so the caller skips it.
  async ensureExists(absPath, record) {
    try {
      await fs4__default.default.access(absPath);
      return true;
    } catch (error) {
      this.log.warn(
        { err: error, relPath: record.rel_path },
        "File missing before hashing"
      );
      this.options.filesRepo.markMissing(record.rel_path);
      return false;
    }
  }
  // Route a single hash request to the worker thread or inline fallback.
  async computeHash(absPath, relPath) {
    if (this.inlineMode || !this.worker) {
      return this.hashInline(absPath, relPath);
    }
    return this.worker.compute(absPath, relPath);
  }
  // Main-thread streaming hash. Produces two SHA-256 digests: the raw content
  // hash, and a "service" hash that also covers the relative path plus a
  // newline separator (so an identical file at a different path hashes
  // differently).
  async hashInline(absPath, relPath) {
    return new Promise((resolve, reject) => {
      const content = crypto.createHash("sha256");
      const service = crypto.createHash("sha256");
      service.update(relPath);
      service.update("\n");
      const stream = fs2__default.default.createReadStream(absPath);
      stream.on("data", (chunk) => {
        content.update(chunk);
        service.update(chunk);
      });
      stream.on("error", (error) => {
        reject(error);
      });
      stream.on("end", () => {
        resolve({
          contentSha256: content.digest("hex"),
          serviceFileHash: service.digest("hex")
        });
      });
    });
  }
  // One unit of hashing work: requeue expired leases, claim up to `limit`
  // dirty files under this owner's lease, hash each, then persist successes
  // and re-mark failures dirty. Returns true when files were claimed (i.e.
  // there may be more work immediately available).
  async processBatch(limit) {
    const now = Date.now();
    const requeued = this.options.filesRepo.requeueExpiredHashing(now);
    if (requeued > 0) {
      this.log.debug({ requeued }, "Requeued expired hashing leases");
    }
    const dirty = this.options.filesRepo.claimDirty(
      limit,
      this.ownerId,
      this.leaseDurationMs
    );
    if (dirty.length > 0) {
      this.log.debug({ count: dirty.length }, "Hashing claimed files");
    }
    if (dirty.length === 0) {
      return false;
    }
    const successes = [];
    const failures = [];
    for (const record of dirty) {
      const absPath = this.resolveAbsolutePath(record);
      const exists = await this.ensureExists(absPath, record);
      if (!exists) {
        continue;
      }
      try {
        const result = await this.computeHash(absPath, record.rel_path);
        successes.push({
          id: record.id,
          contentSha256: result.contentSha256,
          serviceFileHash: result.serviceFileHash
        });
      } catch (error) {
        if (error?.code === "ENOENT") {
          // Raced with a delete between ensureExists and the read.
          this.log.debug(
            { relPath: record.rel_path },
            "File disappeared during hashing"
          );
          this.options.filesRepo.markMissing(record.rel_path);
        } else {
          this.log.warn(
            { err: error, relPath: record.rel_path },
            "Failed to hash file"
          );
          failures.push(record.id);
        }
      }
    }
    if (successes.length) {
      this.log.debug({ count: successes.length }, "Hashing succeeded");
      this.options.filesRepo.applyHashResults(successes);
    }
    if (failures.length) {
      this.log.warn({ count: failures.length }, "Hashing failed for files");
      this.options.filesRepo.markDirtyByIds(failures);
    }
    return true;
  }
};
|
|
928
|
+
|
|
929
|
+
// src/service/Bootstrap.ts
|
|
930
|
+
// Build the child logger shared by the long-running service components.
function createServiceLogger() {
  const serviceLog = logger.child({ scope: "service" });
  return serviceLog;
}
|
|
933
|
+
// Build the full service runtime: config, logger, database, repos, outbox,
// HTTP clients, fetched+compiled visitor rules, and the hasher. Order is
// significant: the database must be open before the repos, and rules are
// fetched remotely before being compiled.
async function bootstrap(params) {
  const config = await resolveConfig(params);
  const logger2 = createServiceLogger();
  const db = openDatabase(config.dbPath, logger2.child({ scope: "db" }));
  const filesRepo = new FilesRepo(db);
  // Clear "hashing" leases left behind by a previously crashed process.
  const recovered = filesRepo.resetHashingStates();
  if (recovered > 0) {
    logger2.info({ recovered }, "Recovered lingering hashing leases");
  }
  const snapshotsRepo = new SnapshotsRepo(db);
  const outbox = new Outbox(db, logger2);
  const clients = createClients(config, logger2);
  const rules = await fetchVisitorRules(clients, logger2);
  const compiled = compileRulesBundle(rules);
  const hasher = new Hasher({ rootPath: config.rootPath, filesRepo, logger: logger2 });
  // Runtime bag threaded through every service loop and MCP handler.
  const runtime = {
    config,
    logger: logger2,
    db,
    outbox,
    clients,
    rules: compiled,
    filesRepo,
    snapshotsRepo,
    hasher
  };
  return runtime;
}
|
|
961
|
+
// Return `target` relative to `root` with POSIX separators ("" when equal).
// path.relative always returns a string, so a single emptiness check is
// sufficient (the original `!rel || rel === ""` double-tested the same case).
function toPosixRelative(root, target) {
  const rel = path__default.default.relative(root, target);
  if (rel === "") return "";
  return rel.split(path__default.default.sep).join("/");
}
|
|
966
|
+
// True when `target` is `root` itself or nested underneath it. A relative
// path that climbs out ("..") or resolves absolute (different drive on
// Windows) means the target escapes the root.
function isInsideRoot(root, target) {
  const rel = path__default.default.relative(root, target);
  if (rel === "") {
    return true;
  }
  const escapesRoot = rel.startsWith("..") || path__default.default.isAbsolute(rel);
  return !escapesRoot;
}
|
|
970
|
+
|
|
971
|
+
// src/fs/Walker.ts
|
|
972
|
+
// Baseline counters for one inventory walk; cloned per run (see cloneStats)
// so concurrent/successive walks never share mutable state.
var EMPTY_STATS = {
  processed: 0,
  skipped: 0,
  dirtied: 0,
  missing: 0
};
|
|
978
|
+
// Shallow copy so each inventory run mutates its own counter object.
function cloneStats(stats) {
  return Object.assign({}, stats);
}
|
|
981
|
+
// Best-effort readlink: returns the link target string, or null after
// logging when the link is broken or unreadable.
async function readSymlinkTarget(absPath, log) {
  let target = null;
  try {
    target = await fs4__default.default.readlink(absPath);
  } catch (error) {
    log.warn({ err: error, path: absPath }, "Failed to read symlink target");
  }
  return target;
}
|
|
989
|
+
// Recursively inventory `current`, upserting FilesRepo rows and updating the
// shared `stats` counters in place. Directories matching prune rules are
// skipped wholesale; regular files and symlinks are lstat'ed and upserted;
// any other entry kind (socket, FIFO, ...) only bumps `skipped`.
async function walkDirectory(current, opts, stats) {
  const dirLogger = opts.logger;
  let dirents;
  try {
    dirents = await fs4__default.default.readdir(current, { withFileTypes: true });
  } catch (error) {
    // Unreadable directory: log and abandon this subtree, keep walking others.
    dirLogger.warn({ err: error, path: current }, "Failed to read directory");
    return;
  }
  for (const dirent of dirents) {
    const absPath = path__default.default.join(current, dirent.name);
    const relPath = toPosixRelative(opts.rootPath, absPath);
    if (dirent.isDirectory()) {
      if (shouldPruneDirectory(relPath, opts.bundle)) {
        stats.skipped += 1;
        continue;
      }
      await walkDirectory(absPath, opts, stats);
      continue;
    }
    if (dirent.isSymbolicLink() || dirent.isFile()) {
      let stat;
      try {
        // lstat (not stat) so symlinks are recorded as links, not as their targets.
        stat = await fs4__default.default.lstat(absPath);
      } catch (error) {
        dirLogger.warn({ err: error, path: absPath }, "Failed to stat file");
        continue;
      }
      stats.processed += 1;
      if (!shouldIncludeFile(relPath, stat, opts.bundle)) {
        stats.skipped += 1;
        continue;
      }
      const target = dirent.isSymbolicLink() ? await readSymlinkTarget(absPath, dirLogger) : null;
      const state = opts.filesRepo.upsertFromStat({
        relPath,
        displayPath: absPath,
        stats: stat,
        isSymlink: dirent.isSymbolicLink(),
        symlinkTarget: target
      });
      // "dirty" = the repo saw a new or changed file that needs re-hashing.
      if (state === "dirty") {
        stats.dirtied += 1;
      }
      continue;
    }
    stats.skipped += 1;
  }
}
|
|
1038
|
+
// Full inventory pass: walk the root, then mark any row not touched during
// this walk as missing. Returns the per-run counters.
async function runInventory(opts) {
  const startedAt = Date.now();
  const stats = cloneStats(EMPTY_STATS);
  await walkDirectory(opts.rootPath, opts, stats);
  // Rows last seen before the walk began were not re-encountered: missing.
  stats.missing = opts.filesRepo.markMissingBefore(startedAt);
  opts.logger.info({ ...stats }, "Completed initial inventory");
  return stats;
}
|
|
1047
|
+
// Resolve after roughly `ms` milliseconds (subject to timer granularity).
async function sleep(ms) {
  await new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}
|
|
1050
|
+
// Assemble the snapshot payload from all clean files: a deterministic hash
// over the sorted per-file service hashes, the file manifest, and repo totals.
function computeSnapshot(filesRepo) {
  const cleanFiles = filesRepo.getCleanFilesForSnapshot();
  const serviceHashes = [];
  for (const file of cleanFiles) {
    if (typeof file.service_file_hash === "string") {
      serviceHashes.push(file.service_file_hash);
    }
  }
  // Sort so the snapshot hash is independent of row ordering.
  serviceHashes.sort();
  const snapshotHash = clients.SyncHttpClient.calculateSnapshotHash(serviceHashes);
  const totals = filesRepo.getTotalsForSnapshot();
  return {
    snapshotHash,
    files: cleanFiles.map((file) => ({
      file_path: file.rel_path,
      file_hash: file.service_file_hash
    })),
    filesCount: totals.filesCount,
    totalSize: totals.totalSize
  };
}
|
|
1065
|
+
// Read the content of each file the server reported missing and upload them
// keyed by file hash. Unreadable files are logged and skipped; nothing is
// uploaded when no content could be read.
async function uploadMissing(rootPath, missing, syncClient, logger2) {
  if (!missing || missing.length === 0) return;
  const contentByHash = new Map();
  for (const entry of missing) {
    const absPath = path__default.default.join(rootPath, entry.file_path);
    try {
      const content = await fs4__default.default.readFile(absPath);
      contentByHash.set(entry.file_hash, {
        path: entry.file_path,
        content
      });
    } catch (error) {
      logger2.warn(
        { err: error, relPath: entry.file_path },
        "Failed to read missing file content"
      );
    }
  }
  if (contentByHash.size === 0) return;
  await syncClient.uploadFileContent(contentByHash);
}
|
|
1086
|
+
// Drive the remote snapshot to READY: check status, create it if unknown,
// upload any content the server reports missing, then poll with capped
// exponential backoff (1s doubling, capped at 5s per poll).
// NOTE(review): the poll loop has no maximum attempt count — it waits
// indefinitely unless the server reports FAILED; confirm that is intended.
async function ensureSnapshotCreated(rootPath, computation, syncClient, logger2) {
  const { snapshotHash, files } = computation;
  let status = await syncClient.checkSnapshotStatus(snapshotHash);
  if (status.status === "READY") {
    logger2.info({ snapshotHash }, "Snapshot already READY");
    return;
  }
  if (status.status === "NOT_FOUND" || status.status === "MISSING_CONTENT") {
    status = await syncClient.createSnapshot(snapshotHash, files);
  }
  if (status.status === "MISSING_CONTENT" && status.missing_files?.length) {
    logger2.info(
      { missing: status.missing_files.length },
      "Uploading missing file content"
    );
    await uploadMissing(rootPath, status.missing_files, syncClient, logger2);
    // Re-create after upload so the server re-validates content availability.
    status = await syncClient.createSnapshot(snapshotHash, files);
  }
  let attempt = 0;
  while (status.status !== "READY") {
    if (status.status === "FAILED") {
      throw new Error(`Snapshot failed processing: ${JSON.stringify(status)}`);
    }
    // 1s, 2s, 4s, then 5s per poll.
    const delay = Math.min(5e3, 1e3 * Math.max(1, 2 ** attempt));
    await sleep(delay);
    attempt += 1;
    status = await syncClient.checkSnapshotStatus(snapshotHash);
  }
  logger2.info({ snapshotHash }, "Snapshot READY");
}
|
|
1116
|
+
// Compute the current snapshot, push it to the server until READY, then
// record it locally. Returns the published snapshot's summary.
async function publishSnapshot(rootPath, filesRepo, snapshotsRepo, syncClient, logger2) {
  const computation = computeSnapshot(filesRepo);
  await ensureSnapshotCreated(rootPath, computation, syncClient, logger2);
  const createdAt = Date.now();
  const { snapshotHash, filesCount, totalSize } = computation;
  snapshotsRepo.insert(snapshotHash, filesCount, totalSize, createdAt);
  return {
    snapshotHash,
    filesCount,
    totalSize,
    status: "READY",
    createdAt
  };
}
|
|
1134
|
+
|
|
1135
|
+
// src/service/InitialSync.ts
|
|
1136
|
+
// First-run pipeline: full inventory walk, drain the hashing queue until no
// batch finds work, then publish the initial snapshot.
async function runInitialSyncPipeline(runtime) {
  const { config, filesRepo, logger: baseLog } = runtime;
  await runInventory({
    rootPath: config.rootPath,
    bundle: runtime.rules,
    filesRepo,
    logger: baseLog.child({ scope: "inventory" })
  });
  const hashLogger = baseLog.child({ scope: "hash" });
  for (;;) {
    const hadWork = await runtime.hasher.processBatch(config.hashBatchSize);
    if (!hadWork) {
      break;
    }
    hashLogger.debug("Hasher processed batch");
  }
  const result = await publishSnapshot(
    config.rootPath,
    filesRepo,
    runtime.snapshotsRepo,
    runtime.clients.sync,
    baseLog.child({ scope: "snapshot" })
  );
  return result;
}
|
|
1162
|
+
// Create a chokidar watcher over the root and resolve only once it reports
// "ready" (or reject on a startup error). `usePolling` selects the
// stat-polling backend used as a fallback when native FS events fail.
async function createChokidarWatcher(options, usePolling) {
  const log = options.logger.child({
    scope: "watcher",
    mode: usePolling ? "polling" : "native"
  });
  const watcher = chokidar__default.default.watch(options.rootPath, {
    ignored: options.ignored,
    // The initial inventory is done by the walker; skip startup add events.
    ignoreInitial: true,
    persistent: true,
    // Wait for writes to settle so half-written files are not processed.
    awaitWriteFinish: {
      stabilityThreshold: 1500,
      pollInterval: 100
    },
    // Collapse editor rename-replace sequences into a single change event.
    atomic: true,
    usePolling,
    interval: usePolling ? 200 : void 0,
    binaryInterval: usePolling ? 200 : void 0,
    alwaysStat: true,
    cwd: void 0,
    depth: void 0
  });
  // Convert the first ready/error event into promise settlement, detaching
  // the losing listener so it cannot fire afterwards.
  await new Promise((resolve, reject) => {
    const onReady = () => {
      watcher.off("error", onError);
      log.info("Watcher ready");
      resolve();
    };
    const onError = (err) => {
      watcher.off("ready", onReady);
      reject(err);
    };
    watcher.once("ready", onReady);
    watcher.once("error", onError);
  });
  return { watcher, mode: usePolling ? "polling" : "native" };
}
|
|
1198
|
+
// Wire watcher events to the caller's async handler. Handler rejections are
// logged rather than crashing the watcher; the optional onReady callback is
// invoked synchronously once wiring is complete.
function attachHandlers(watcher, options) {
  const { handlers, logger: logger2 } = options;
  const dispatch = (event, filePath, stats) => {
    logger2.debug({ event, filePath }, "Watcher raw event");
    Promise.resolve(handlers.onEvent(event, filePath, stats)).catch((error) => {
      logger2.error(
        { err: error, event, path: filePath },
        "Watcher handler failed"
      );
    });
  };
  // add/change carry stats; the remaining events only carry a path.
  for (const event of ["add", "change"]) {
    watcher.on(event, (filePath, stats) => dispatch(event, filePath, stats));
  }
  for (const event of ["unlink", "addDir", "unlinkDir"]) {
    watcher.on(event, (targetPath) => dispatch(event, targetPath));
  }
  watcher.on("error", (error) => {
    logger2.error({ err: error }, "Watcher error");
  });
  if (handlers.onReady) {
    handlers.onReady();
    logger2.debug("Watcher ready callback executed");
  }
}
|
|
1222
|
+
// Start a native watcher, falling back to the polling backend if native
// startup fails. Handlers are attached to whichever watcher succeeded.
async function startWatcher(options) {
  let result;
  try {
    result = await createChokidarWatcher(options, false);
  } catch (error) {
    options.logger.warn(
      { err: error },
      "Native watcher failed, falling back to polling"
    );
    result = await createChokidarWatcher(options, true);
  }
  attachHandlers(result.watcher, options);
  return result;
}
|
|
1237
|
+
|
|
1238
|
+
// src/sync/HeartbeatProtocol.ts
|
|
1239
|
+
// Heartbeat: re-check the latest local snapshot's remote status (keeps the
// server-side lease alive). No-op on the wire when no snapshot exists yet.
async function sendHeartbeat(rootId, snapshotsRepo, syncClient, logger2) {
  const latest = snapshotsRepo.getLatest();
  if (latest) {
    await syncClient.checkSnapshotStatus(latest.snapshot_hash);
  }
  logger2.debug({ rootId }, "Heartbeat sent");
}
|
|
1246
|
+
|
|
1247
|
+
// src/service/State.ts
|
|
1248
|
+
// Mutable activity timestamps (epoch ms) for the running service. Written by
// the watcher/snapshot/heartbeat paths; read by the heartbeat loop and the
// status snapshot in ServiceRunner.getServiceStateSnapshot.
var ServiceState = class {
  constructor() {
    this.lastChangeAt = Date.now();
    this.lastSnapshotReadyAt = Date.now();
    // 0 = no heartbeat enqueued yet.
    this.lastHeartbeatEnqueuedAt = 0;
  }
  // Record that a filesystem change was observed.
  updateChange(timestamp = Date.now()) {
    this.lastChangeAt = timestamp;
  }
  // Record that a snapshot reached READY.
  updateSnapshotReady(timestamp = Date.now()) {
    this.lastSnapshotReadyAt = timestamp;
  }
  // Record that a heartbeat was enqueued (or seeded at startup).
  updateHeartbeat(timestamp = Date.now()) {
    this.lastHeartbeatEnqueuedAt = timestamp;
  }
};
|
|
1264
|
+
|
|
1265
|
+
// src/service/ServiceLoops.ts
|
|
1266
|
+
// JSON.parse that yields undefined instead of throwing on malformed input.
function safeParse(input) {
  let parsed;
  try {
    parsed = JSON.parse(input);
  } catch {
    parsed = void 0;
  }
  return parsed;
}
|
|
1273
|
+
// Promise-based delay used by the service loops.
async function sleep2(ms) {
  await new Promise((wake) => {
    setTimeout(wake, ms);
  });
}
|
|
1276
|
+
// Exponential retry delay: 1s doubled per attempt, capped at 60s.
// A nullish attempt count counts as the first attempt.
function computeBackoff(attempts) {
  const exponent = attempts ?? 0;
  const rawDelayMs = 1e3 * 2 ** exponent;
  return Math.min(rawDelayMs, 6e4);
}
|
|
1281
|
+
// Silent variant of readSymlinkTarget: null on any readlink failure.
async function readSymlinkTarget2(absPath) {
  let target = null;
  try {
    target = await fs4__default.default.readlink(absPath);
  } catch {
    // Broken or unreadable link: fall through with null.
  }
  return target;
}
|
|
1288
|
+
// Debounces snapshot enqueues: every change notification restarts a
// `debounceMs` timer, so the snapshot job is queued only after a quiet period.
var SnapshotScheduler = class {
  constructor(rootId, debounceMs, outbox, state) {
    this.rootId = rootId;
    this.debounceMs = debounceMs;
    this.outbox = outbox;
    this.state = state;
    this.timer = null;
  }
  // Note a change and (re)arm the debounce timer.
  trigger() {
    this.state.updateChange();
    this.cancel();
    this.timer = setTimeout(() => {
      this.outbox.enqueueSnapshot(this.rootId);
      this.timer = null;
    }, this.debounceMs);
  }
  // Drop any pending snapshot enqueue.
  cancel() {
    if (this.timer !== null) {
      clearTimeout(this.timer);
      this.timer = null;
    }
  }
};
|
|
1313
|
+
// Orchestrates the long-running service: file watcher, hashing loop,
// fs_control job consumer (snapshot/heartbeat jobs from the outbox),
// heartbeat scheduling, and lease requeueing. start()/stop() bracket the
// lifecycle; getServiceStateSnapshot() feeds status reporting.
var ServiceRunner = class {
  constructor(runtime) {
    this.runtime = runtime;
    this.state = new ServiceState();
    this.watcher = null;
    this.running = false;
    // Unique lease owner for fs_control jobs claimed by this process.
    this.fsControlLeaseOwner = `fs-control-${process.pid}-${Date.now()}`;
    // In-flight background loop promises (awaited on stop()).
    this.tasks = /* @__PURE__ */ new Set();
    // While true, watcher events are buffered instead of processed
    // (used during initial sync; see enableWatcherProcessing).
    this.buffering = false;
    this.bufferedEvents = [];
    this.scheduler = new SnapshotScheduler(
      runtime.config.rootId,
      runtime.config.snapshotDebounceMs,
      runtime.outbox,
      this.state
    );
    this.ignoredPredicate = buildWatcherIgnored(runtime.rules);
  }
  // Seed all state timestamps from the initial snapshot's creation time.
  recordInitialSnapshot(timestamp) {
    this.state.updateSnapshotReady(timestamp);
    this.state.updateChange(timestamp);
    this.state.updateHeartbeat(timestamp);
  }
  async start() {
    if (this.running) return;
    await this.prepareWatcher(false);
    await this.startLoops();
  }
  // Orderly shutdown: stop loops, close watcher, wait for in-flight tasks,
  // then release hasher/clients/db.
  async stop() {
    if (!this.running) return;
    this.running = false;
    this.scheduler.cancel();
    if (this.watcher) {
      await this.watcher.close();
      this.watcher = null;
    }
    await Promise.all([...this.tasks]);
    await this.runtime.hasher.close();
    this.runtime.clients.close();
    this.runtime.db.close();
  }
  // Plain-object view of current service state for status reporting.
  getServiceStateSnapshot() {
    return {
      lastChangeAt: this.state.lastChangeAt,
      lastSnapshotReadyAt: this.state.lastSnapshotReadyAt,
      lastHeartbeatEnqueuedAt: this.state.lastHeartbeatEnqueuedAt,
      watcherReady: this.watcher !== null,
      buffering: this.buffering
    };
  }
  // Launch a loop promise, track it for shutdown, and log (but do not
  // propagate) failures that happen while the service is still running.
  runBackground(fn) {
    const task = fn();
    this.tasks.add(task);
    task.catch((error) => {
      if (this.running) {
        this.runtime.logger.error({ err: error }, "Background task failed");
      }
    }).finally(() => {
      this.tasks.delete(task);
    });
  }
  // Start the chokidar watcher. With bufferOnly=true, events are queued in
  // bufferedEvents until enableWatcherProcessing() is called.
  async prepareWatcher(bufferOnly) {
    const { rootPath } = this.runtime.config;
    const logger2 = this.runtime.logger.child({ scope: "watcher" });
    this.buffering = bufferOnly;
    const { watcher, mode } = await startWatcher({
      rootPath,
      ignored: this.ignoredPredicate,
      logger: logger2,
      handlers: {
        onEvent: (event, absPath, stats) => {
          logger2.debug({ event, absPath }, "Watcher event received");
          if (this.buffering) {
            this.bufferedEvents.push({ event, absPath, stats });
            return Promise.resolve();
          }
          return this.handleEvent(event, absPath, stats);
        }
      }
    });
    this.watcher = watcher;
    logger2.debug({ watched: watcher.getWatched() }, "Watcher targets");
    logger2.info({ mode, buffering: bufferOnly }, "File watcher started");
  }
  // Flush buffered events in order, then switch to live processing.
  async enableWatcherProcessing() {
    if (!this.buffering) {
      return;
    }
    this.buffering = false;
    this.runtime.logger.debug(
      { buffered: this.bufferedEvents.length },
      "Watcher buffering disabled"
    );
    if (this.bufferedEvents.length === 0) return;
    for (const buffered of this.bufferedEvents) {
      await this.handleEvent(buffered.event, buffered.absPath, buffered.stats);
    }
    this.bufferedEvents = [];
  }
  async startLoops() {
    if (this.running) return;
    this.running = true;
    this.runBackground(() => this.hashLoop());
    this.runBackground(() => this.fsControlLoop());
    this.runBackground(() => this.heartbeatLoop());
    this.runBackground(() => this.requeueLoop());
    this.runtime.logger.debug("Background loops started");
  }
  // Normalize a watcher path to absolute, reject anything outside the root,
  // and dispatch by event kind (addDir is intentionally a no-op).
  async handleEvent(event, absPath, stats) {
    if (!this.running) return;
    const root = this.runtime.config.rootPath;
    const absolute = path__default.default.isAbsolute(absPath) ? absPath : path__default.default.join(root, absPath);
    if (!isInsideRoot(root, absolute)) {
      return;
    }
    switch (event) {
      case "add":
      case "change":
        await this.handleAddChange(absolute, stats);
        break;
      case "unlink":
        await this.handleUnlink(absolute);
        break;
      case "unlinkDir":
        await this.handleUnlinkDir(absolute);
        break;
    }
  }
  // Re-lstat the path (watcher stats are ignored), apply include rules, and
  // upsert; a "dirty" result schedules a debounced snapshot.
  async handleAddChange(absPath, _stats) {
    let fileStats;
    try {
      fileStats = await fs4__default.default.lstat(absPath);
    } catch (error) {
      this.runtime.logger.warn(
        { err: error, path: absPath },
        "Failed to lstat path"
      );
      return;
    }
    const relPath = toPosixRelative(this.runtime.config.rootPath, absPath);
    if (!shouldIncludeFile(relPath, fileStats, this.runtime.rules)) {
      this.runtime.logger.debug({ relPath }, "Watcher event ignored by rules");
      return;
    }
    const isSymlink = fileStats.isSymbolicLink();
    const target = isSymlink ? await readSymlinkTarget2(absPath) : null;
    const state = this.runtime.filesRepo.upsertFromStat({
      relPath,
      displayPath: absPath,
      stats: fileStats,
      isSymlink,
      symlinkTarget: target
    });
    if (state === "dirty") {
      this.scheduler.trigger();
    }
  }
  async handleUnlink(absPath) {
    const relPath = toPosixRelative(this.runtime.config.rootPath, absPath);
    const changed = this.runtime.filesRepo.markMissing(relPath);
    if (changed > 0) {
      this.scheduler.trigger();
    }
  }
  // Directory removal: mark every tracked file under the prefix missing.
  async handleUnlinkDir(absPath) {
    const relPath = toPosixRelative(this.runtime.config.rootPath, absPath);
    const changed = this.runtime.filesRepo.markMissingByPrefix(relPath);
    if (changed > 0) {
      this.scheduler.trigger();
    }
  }
  // Continuously drain the hashing queue; idle-sleep 500ms when empty.
  async hashLoop() {
    while (this.running) {
      const processed = await this.runtime.hasher.processBatch(
        this.runtime.config.hashBatchSize
      );
      if (!processed) {
        await sleep2(500);
      }
    }
  }
  // Consume fs_control jobs (snapshot/heartbeat) from the outbox under this
  // process's lease. Jobs with no recognizable kind are acked and dropped.
  async fsControlLoop() {
    const log = this.runtime.logger.child({ scope: "fs-control-worker" });
    while (this.running) {
      const job = this.runtime.outbox.claimFsControlJob(
        this.fsControlLeaseOwner
      );
      if (!job) {
        await sleep2(this.runtime.config.queuePollIntervalMs);
        continue;
      }
      const payload = job.data ? safeParse(job.data) ?? {} : {};
      // Kind can live on the job row or inside its JSON payload.
      const jobKind = job.kind ?? (typeof payload.kind === "string" ? payload.kind : void 0);
      if (!jobKind) {
        log.warn(
          { jobId: job.id },
          "fs_control job missing kind, acknowledging"
        );
        this.runtime.outbox.ack(job.id, this.fsControlLeaseOwner);
        continue;
      }
      if (jobKind === "snapshot") {
        await this.handleSnapshotJob(job, log);
      } else if (jobKind === "heartbeat") {
        await this.handleHeartbeatJob(job, log);
      } else {
        log.warn({ jobId: job.id, kind: jobKind }, "Unknown fs_control job");
        this.runtime.outbox.ack(job.id, this.fsControlLeaseOwner);
      }
    }
  }
  // Enqueue a heartbeat only after a quiet interval with no changes and no
  // recent heartbeat (both gated by heartbeatIntervalMs).
  async heartbeatLoop() {
    const log = this.runtime.logger.child({ scope: "heartbeat-loop" });
    while (this.running) {
      const now = Date.now();
      const sinceChange = now - this.state.lastChangeAt;
      const sinceHeartbeat = now - this.state.lastHeartbeatEnqueuedAt;
      if (sinceChange >= this.runtime.config.heartbeatIntervalMs && sinceHeartbeat >= this.runtime.config.heartbeatIntervalMs) {
        this.runtime.outbox.enqueueHeartbeat(this.runtime.config.rootId);
        this.state.updateHeartbeat(now);
        log.debug("Heartbeat enqueued");
      }
      await sleep2(this.runtime.config.heartbeatCheckIntervalMs);
    }
    log.info("Heartbeat loop stopped");
  }
  // Periodically return timed-out (lease-expired) jobs to the queue.
  async requeueLoop() {
    while (this.running) {
      const count = this.runtime.outbox.requeueTimedOut();
      if (count > 0) {
        this.runtime.logger.info({ count }, "Requeued timed-out jobs");
      }
      await sleep2(this.runtime.config.heartbeatCheckIntervalMs);
    }
  }
  // Publish a snapshot for this job; defer (retry with backoff) while any
  // files are still dirty or being hashed so snapshots only cover clean state.
  async handleSnapshotJob(job, log) {
    if (this.runtime.filesRepo.countByState("dirty") > 0 || this.runtime.filesRepo.countByState("hashing") > 0) {
      const delay = computeBackoff(job.attempts);
      this.runtime.outbox.retry(job.id, this.fsControlLeaseOwner, delay);
      await sleep2(200);
      return;
    }
    try {
      const result = await publishSnapshot(
        this.runtime.config.rootPath,
        this.runtime.filesRepo,
        this.runtime.snapshotsRepo,
        this.runtime.clients.sync,
        log
      );
      this.runtime.outbox.ack(job.id, this.fsControlLeaseOwner);
      this.state.updateSnapshotReady(result.createdAt);
      log.info({ snapshotHash: result.snapshotHash }, "Snapshot job completed");
    } catch (error) {
      log.warn({ err: error }, "Snapshot job failed");
      const delay = computeBackoff(job.attempts);
      this.runtime.outbox.retry(job.id, this.fsControlLeaseOwner, delay);
      await sleep2(delay);
    }
  }
  // Send a heartbeat for this job; on failure schedule a backoff retry.
  async handleHeartbeatJob(job, log) {
    try {
      await sendHeartbeat(
        this.runtime.config.rootId,
        this.runtime.snapshotsRepo,
        this.runtime.clients.sync,
        log
      );
      this.runtime.outbox.ack(job.id, this.fsControlLeaseOwner);
      this.state.updateHeartbeat(Date.now());
    } catch (error) {
      const delay = computeBackoff(job.attempts);
      this.runtime.outbox.retry(job.id, this.fsControlLeaseOwner, delay);
      log.warn({ err: error }, "Heartbeat failed; retry scheduled");
    }
  }
};
|
|
1590
|
+
// File-hash lifecycle states tracked by the files repository.
var HASH_STATES = ["clean", "dirty", "hashing", "missing"];
// Builds a point-in-time status report for the MCP "check" tool:
// per-state file counts, queue depth by job status, the latest snapshot
// record (null when none exists), and the runner's service-state snapshot.
function collectIndexingStatus(runtime, runner) {
  let total = 0;
  const byState = {};
  // Single pass: record each state's count and accumulate the total.
  HASH_STATES.forEach((state) => {
    const n = runtime.filesRepo.countByState(state);
    byState[state] = n;
    total += n;
  });
  const queue = runtime.outbox.getQueue();
  return {
    timestamp: Date.now(),
    root: {
      id: runtime.config.rootId,
      path: runtime.config.rootPath
    },
    files: {
      total,
      byState
    },
    latestSnapshot: runtime.snapshotsRepo.getLatest() ?? null,
    queue: {
      pending: queue.countByStatus(qulite.JobStatus.Pending),
      processing: queue.countByStatus(qulite.JobStatus.Processing),
      done: queue.countByStatus(qulite.JobStatus.Done),
      failed: queue.countByStatus(qulite.JobStatus.Failed)
    },
    service: runner.getServiceStateSnapshot()
  };
}
|
|
1619
|
+
|
|
1620
|
+
// src/mcp/server.ts
// Identity advertised to MCP clients during the initialize handshake.
var SERVER_NAME = "coderule-scanner-mcp";
// npm injects npm_package_version only when launched via a package script;
// fall back to a placeholder version otherwise.
var SERVER_VERSION = process.env.npm_package_version ?? "0.0.0";
|
|
1623
|
+
// Constructs the MCP server and registers its two tools:
//   "check" — reports indexing/queue/service status as pretty-printed JSON;
//   "query" — runs a retrieval query against the most recent snapshot.
// Returns the configured server (not yet connected to a transport).
function createMcpServer({
  runtime,
  runner
}) {
  const server = new mcp_js.McpServer({
    name: SERVER_NAME,
    version: SERVER_VERSION,
    description: "Coderule file indexing MCP server"
  });

  // "check": no input; dump the collected status as formatted JSON.
  const handleCheck = async () => {
    const status = collectIndexingStatus(runtime, runner);
    return {
      content: [{ type: "text", text: JSON.stringify(status, null, 2) }]
    };
  };
  server.registerTool(
    "check",
    {
      title: "Indexer status",
      description: "Inspect the current indexing state, snapshot, and queue metrics",
      inputSchema: {}
    },
    handleCheck
  );

  // "query": requires at least one snapshot; budget is clamped to >= 100
  // tokens (defaulting to 3000). Errors are reported via isError results.
  const handleQuery = async ({
    query,
    budgetTokens
  }) => {
    const latest = runtime.snapshotsRepo.getLatest();
    if (!latest) {
      const message = "No snapshots available yet. Run indexing first.";
      return {
        content: [{ type: "text", text: message }],
        isError: true
      };
    }
    const effectiveBudget = Math.max(100, budgetTokens ?? 3e3);
    try {
      const result = await runtime.clients.retrieval.query(
        latest.snapshot_hash,
        query,
        effectiveBudget,
        { formatter: runtime.config.retrievalFormatter }
      );
      const summary = {
        snapshotHash: latest.snapshot_hash,
        budgetTokens: effectiveBudget,
        formatter: runtime.config.retrievalFormatter
      };
      return {
        content: [
          { type: "text", text: result.formatted_output ?? "(no formatted output)" },
          { type: "text", text: JSON.stringify({ summary, result }, null, 2) }
        ]
      };
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown retrieval error";
      runtime.logger.error({ err: error }, "Retrieval query failed");
      return {
        content: [{ type: "text", text: `Retrieval error: ${message}` }],
        isError: true
      };
    }
  };
  server.registerTool(
    "query",
    {
      title: "Snapshot retrieval query",
      description: "Execute a retrieval query against the most recent indexed snapshot",
      inputSchema: {
        query: zod.z.string().min(1, "Query text is required"),
        budgetTokens: zod.z.number().int().positive().optional().describe("Token budget for retrieval (default 3000)")
      }
    },
    handleQuery
  );
  return server;
}
|
|
1709
|
+
|
|
1710
|
+
// src/mcp-cli.ts
// Maps CLI flag names (without the leading "--") to the environment
// variables they override. parseArgs() consults this table to translate
// "--flag value" pairs into process.env assignments applied by main().
var ENV_FLAG_MAP = {
  root: "CODERULE_ROOT",
  "data-dir": "CODERULE_DATA_DIR",
  "auth-url": "CODERULE_AUTH_URL",
  "sync-url": "CODERULE_SYNC_URL",
  "ast-url": "CODERULE_AST_URL",
  "retrieval-url": "CODERULE_RETRIEVAL_URL",
  "retrieval-formatter": "CODERULE_RETRIEVAL_FORMATTER",
  "http-timeout": "CODERULE_HTTP_TIMEOUT",
  "snapshot-debounce": "CODERULE_SNAPSHOT_DEBOUNCE_MS",
  "heartbeat-interval": "CODERULE_HEARTBEAT_INTERVAL_MS",
  "heartbeat-check": "CODERULE_HEARTBEAT_CHECK_INTERVAL_MS",
  "queue-poll": "CODERULE_QUEUE_POLL_INTERVAL_MS",
  "hash-batch": "CODERULE_HASH_BATCH_SIZE",
  "hash-lease": "CODERULE_HASH_LEASE_MS",
  "max-snapshot-attempts": "CODERULE_MAX_SNAPSHOT_ATTEMPTS"
};
|
|
1728
|
+
// Prints CLI usage for coderule-mcp-server to stdout, one console.log
// call per output line (the usage banner carries its own trailing blank
// line via an embedded newline).
function printUsage() {
  const lines = [
    "Usage: coderule-mcp-server [token] [options]\n",
    "Options:",
    " --token <token> Override CODERULE_TOKEN",
    " --clean, --reindex Remove existing local state before running",
    " --inline-hasher Force inline hashing (debug only)",
    " --root <path> Override CODERULE_ROOT",
    " --data-dir <path> Override CODERULE_DATA_DIR",
    " --auth-url <url> Override CODERULE_AUTH_URL",
    " --sync-url <url> Override CODERULE_SYNC_URL",
    " --ast-url <url> Override CODERULE_AST_URL",
    " --retrieval-url <url> Override CODERULE_RETRIEVAL_URL",
    " --retrieval-formatter <val> Override CODERULE_RETRIEVAL_FORMATTER (standard | compact)",
    " --http-timeout <ms> Override CODERULE_HTTP_TIMEOUT",
    " --snapshot-debounce <ms> Override CODERULE_SNAPSHOT_DEBOUNCE_MS",
    " --heartbeat-interval <ms> Override CODERULE_HEARTBEAT_INTERVAL_MS",
    " --heartbeat-check <ms> Override CODERULE_HEARTBEAT_CHECK_INTERVAL_MS",
    " --queue-poll <ms> Override CODERULE_QUEUE_POLL_INTERVAL_MS",
    " --hash-batch <n> Override CODERULE_HASH_BATCH_SIZE",
    " --hash-lease <ms> Override CODERULE_HASH_LEASE_MS",
    " --max-snapshot-attempts <n> Override CODERULE_MAX_SNAPSHOT_ATTEMPTS",
    " KEY=value Set arbitrary environment variable",
    " -h, --help Show this help message"
  ];
  for (const line of lines) {
    console.log(line);
  }
}
|
|
1773
|
+
// Parses CLI argv into { token, clean, inlineHasher, env }.
// Returns null when --help/-h was requested (usage already printed).
// Accepted forms:
//   --token <t> / --token=<t>     authentication token
//   --clean, --reindex            wipe local state before running
//   --inline-hasher               force inline hashing (debug)
//   --<flag> <v> / --<flag>=<v>   any flag listed in ENV_FLAG_MAP
//   KEY=value                     arbitrary environment override
//   <token>                       positional token (first bare argument)
// Throws on unknown options, missing values, or a missing token.
// Fix: mapped flags previously only accepted "--flag value"; the
// "--flag=value" form (already supported for --token) now works too.
function parseArgs(argv) {
  let token = process.env.CODERULE_TOKEN;
  let clean = false;
  let inlineHasher = false;
  const env = {};
  const args = [...argv];
  while (args.length > 0) {
    const arg = args.shift();
    if (arg === "--help" || arg === "-h") {
      printUsage();
      return null;
    }
    if (arg === "--clean" || arg === "--reindex") {
      clean = true;
      continue;
    }
    if (arg === "--inline-hasher") {
      inlineHasher = true;
      continue;
    }
    if (arg === "--token") {
      const value = args.shift();
      if (!value) {
        throw new Error("Missing value for --token");
      }
      token = value;
      continue;
    }
    if (arg.startsWith("--token=")) {
      token = arg.slice("--token=".length);
      continue;
    }
    if (arg.startsWith("--")) {
      // Accept both "--flag value" and "--flag=value" for mapped options.
      const body = arg.slice(2);
      const eqIndex = body.indexOf("=");
      const flag = eqIndex === -1 ? body : body.slice(0, eqIndex);
      const envKey = ENV_FLAG_MAP[flag];
      if (!envKey) {
        throw new Error(`Unknown option: ${arg}`);
      }
      const value = eqIndex === -1 ? args.shift() : body.slice(eqIndex + 1);
      if (!value) {
        // For the space-separated form, `--${flag}` equals the original arg,
        // so this message matches the previous behavior exactly.
        throw new Error(`Option --${flag} requires a value`);
      }
      env[envKey] = value;
      continue;
    }
    if (arg.includes("=")) {
      const [key, value] = arg.split("=", 2);
      if (!key || value === void 0) {
        throw new Error(`Invalid KEY=value argument: ${arg}`);
      }
      env[key] = value;
      continue;
    }
    if (!token) {
      token = arg;
      continue;
    }
    throw new Error(`Unexpected argument: ${arg}`);
  }
  if (!token) {
    throw new Error(
      "Missing token. Provide via argument or CODERULE_TOKEN environment variable."
    );
  }
  return { token, clean, inlineHasher, env };
}
|
|
1839
|
+
// Deletes the scanner's SQLite database plus its -shm/-wal sidecars and
// the watcher's stale -shm file, then recreates the parent directory so a
// fresh run can reinitialize state. Deletions are best-effort: missing
// files are ignored (rm force + swallowed catch on the watch file).
async function ensureClean(configToken) {
  const config = await resolveConfig({ token: configToken });
  const removeQuietly = (target) => fs4__default.default.rm(target, { force: true });
  const dbFiles = ["", "-shm", "-wal"].map((suffix) => `${config.dbPath}${suffix}`);
  await Promise.all(dbFiles.map(removeQuietly));
  const watchShm = path__default.default.join(
    config.dataDir,
    "watch",
    `${config.rootId}.sqlite-shm`
  );
  await removeQuietly(watchShm).catch(() => {
  });
  await fs4__default.default.mkdir(path__default.default.dirname(config.dbPath), {
    recursive: true
  });
  console.log(`Removed scanner database at ${config.dbPath}`);
}
|
|
1855
|
+
// Resolves with the name of the first SIGINT/SIGTERM received, detaching
// both listeners once fired so repeated signals are not intercepted again.
function awaitShutdownSignals() {
  const watched = ["SIGINT", "SIGTERM"];
  return new Promise((resolve) => {
    const onSignal = (received) => {
      watched.forEach((name) => process.off(name, onSignal));
      resolve(received);
    };
    watched.forEach((name) => process.on(name, onSignal));
  });
}
|
|
1869
|
+
// Process entry point: parse CLI args, apply env overrides, optionally
// wipe local state (--clean), run the initial sync pipeline, then serve
// MCP over stdio until a shutdown signal arrives. The runner is always
// stopped via finally; any failure logs to stderr and sets exitCode = 1.
async function main() {
  try {
    const parsed = parseArgs(process.argv.slice(2));
    if (parsed === null) {
      // --help was handled inside parseArgs.
      return;
    }
    process.env.CODERULE_TOKEN = parsed.token;
    if (parsed.inlineHasher) {
      process.env.CODERULE_HASHER_INLINE = "1";
    }
    Object.entries(parsed.env).forEach(([key, value]) => {
      process.env[key] = value;
    });
    if (parsed.clean) {
      await ensureClean(parsed.token);
    }
    const runtime = await bootstrap({ token: parsed.token });
    const runner = new ServiceRunner(runtime);
    try {
      await runner.prepareWatcher(true);
      const initial = await runInitialSyncPipeline(runtime);
      runtime.logger.info(
        {
          snapshotHash: initial.snapshotHash,
          filesCount: initial.filesCount
        },
        "Initial sync completed; starting MCP server"
      );
      runner.recordInitialSnapshot(initial.createdAt);
      await runner.startLoops();
      await runner.enableWatcherProcessing();
      const server = createMcpServer({ runtime, runner });
      const transport = new stdio_js.StdioServerTransport();
      await server.connect(transport);
      runtime.logger.info("MCP server connected via stdio");
      const signal = await awaitShutdownSignals();
      runtime.logger.info({ signal }, "Shutdown signal received");
      if (typeof transport.close === "function") {
        await transport.close();
      }
    } finally {
      await runner.stop();
    }
  } catch (error) {
    console.error("MCP server failed:", error);
    process.exitCode = 1;
  }
}
|
|
1917
|
+
// Fire-and-forget entry: main() handles its own errors and sets exitCode.
void main();
|
|
1918
|
+
//# sourceMappingURL=mcp-cli.cjs.map
|
|
1919
|
+
//# sourceMappingURL=mcp-cli.cjs.map
|