@coderule/mcp 1.3.0 → 1.5.0
This diff compares publicly available package versions as released to their public registry. It is provided for informational purposes only and reflects the changes between versions as they appear in that registry.
- package/dist/cli.cjs +135 -35
- package/dist/cli.cjs.map +1 -1
- package/dist/cli.js +136 -36
- package/dist/cli.js.map +1 -1
- package/dist/index.cjs +135 -35
- package/dist/index.cjs.map +1 -1
- package/dist/index.js +136 -36
- package/dist/index.js.map +1 -1
- package/dist/mcp-cli.cjs +193 -45
- package/dist/mcp-cli.cjs.map +1 -1
- package/dist/mcp-cli.js +194 -46
- package/dist/mcp-cli.js.map +1 -1
- package/package.json +1 -1
package/dist/mcp-cli.cjs (CHANGED)
```diff
@@ -47,7 +47,7 @@ var DEFAULT_HEARTBEAT_CHECK_INTERVAL_MS = 5e3;
 var DEFAULT_QUEUE_POLL_INTERVAL_MS = 500;
 var DEFAULT_HASH_BATCH_SIZE = 32;
 var DEFAULT_MAX_SNAPSHOT_ATTEMPTS = 5;
-var DEFAULT_HTTP_TIMEOUT_MS =
+var DEFAULT_HTTP_TIMEOUT_MS = 12e4;
 
 // src/config/Configurator.ts
 var DEFAULT_RETRIEVAL_FORMATTER = "standard";
```
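`12e4` is JavaScript exponential notation for 120 000, so the new default HTTP timeout is two minutes (the previous value is truncated in this view). A minimal sketch of how such a constant is commonly applied to an HTTP call; the package's actual HTTP client is not visible in this hunk, so `fetchWithDefaultTimeout` and its use of `fetch` are assumptions for illustration:

```ts
// Sketch only: applying a 12e4 ms (120 s) timeout to an HTTP request.
// The real client code in @coderule/mcp is not shown in this hunk;
// this helper is hypothetical.
const DEFAULT_HTTP_TIMEOUT_MS = 12e4; // 120,000 ms = 2 minutes

async function fetchWithDefaultTimeout(url: string): Promise<unknown> {
  const res = await fetch(url, {
    // AbortSignal.timeout (Node 17.3+) aborts the request after the delay.
    signal: AbortSignal.timeout(DEFAULT_HTTP_TIMEOUT_MS),
  });
  if (!res.ok) throw new Error(`HTTP ${res.status}`);
  return res.json();
}
```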
```diff
@@ -552,6 +552,16 @@ var Outbox = class {
     if (purged > 0) {
       this.log.warn({ purged }, "Purged legacy fs_control jobs without kind");
     }
+    try {
+      const counts = {
+        pending: this.queue.countByStatus(qulite.JobStatus.Pending),
+        processing: this.queue.countByStatus(qulite.JobStatus.Processing),
+        done: this.queue.countByStatus(qulite.JobStatus.Done),
+        failed: this.queue.countByStatus(qulite.JobStatus.Failed)
+      };
+      this.log.debug({ counts }, "Outbox initialized");
+    } catch {
+    }
   }
   getQueue() {
     return this.queue;
```
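The new block logs queue depth per status when the Outbox starts, and the empty `catch` makes this best-effort: a counting failure can never block initialization. A self-contained sketch of the pattern; the `Queue` interface and `JobStatus` enum below are illustrative stand-ins for qulite's actual API, which is not shown in this diff:

```ts
// Best-effort startup diagnostics: count jobs per status and log at debug.
// Queue and JobStatus are assumed shapes, mirroring the calls in the diff.
enum JobStatus { Pending, Processing, Done, Failed }

interface Queue {
  countByStatus(status: JobStatus): number;
}

interface DebugLogger {
  debug(obj: object, msg: string): void;
}

function logStartupCounts(queue: Queue, log: DebugLogger): void {
  try {
    const counts = {
      pending: queue.countByStatus(JobStatus.Pending),
      processing: queue.countByStatus(JobStatus.Processing),
      done: queue.countByStatus(JobStatus.Done),
      failed: queue.countByStatus(JobStatus.Failed),
    };
    log.debug({ counts }, "Outbox initialized");
  } catch {
    // Counting is purely diagnostic; never let it fail initialization.
  }
}
```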
```diff
@@ -1063,44 +1073,109 @@ function computeSnapshot(filesRepo) {
     totalSize
   };
 }
-async function
-
-
-  for (const missingFile of missing) {
-    const absPath = path__default.default.join(rootPath, missingFile.file_path);
+async function withRetries(op, logger2, context, maxAttempts) {
+  let attempt = 0;
+  while (true) {
    try {
-
-
-
-
-
-
+      return await op();
+    } catch (err) {
+      attempt += 1;
+      if (attempt >= maxAttempts) {
+        logger2.error(
+          { err, ...context, attempt },
+          "Operation failed after retries"
+        );
+        throw err;
+      }
+      const delay = Math.min(15e3, 1e3 * 2 ** (attempt - 1));
      logger2.warn(
-        { err
-        "
+        { err, ...context, attempt, delay },
+        "Operation failed; retrying"
      );
+      await sleep(delay);
+    }
+  }
+}
+async function uploadMissing(rootPath, missing, syncClient, logger2, maxAttempts, chunkSize = 64) {
+  if (!missing || missing.length === 0) return;
+  const total = missing.length;
+  const chunks = [];
+  for (let i = 0; i < total; i += chunkSize) {
+    chunks.push(missing.slice(i, i + chunkSize));
+  }
+  for (let idx = 0; idx < chunks.length; idx++) {
+    const list = chunks[idx];
+    const map = /* @__PURE__ */ new Map();
+    for (const missingFile of list) {
+      const absPath = path__default.default.join(rootPath, missingFile.file_path);
+      try {
+        const buffer = await fs4__default.default.readFile(absPath);
+        map.set(missingFile.file_hash, {
+          path: missingFile.file_path,
+          content: buffer
+        });
+      } catch (error) {
+        logger2.warn(
+          { err: error, relPath: missingFile.file_path },
+          "Failed to read missing file content"
+        );
+      }
     }
+    if (map.size === 0) continue;
+    await withRetries(
+      () => syncClient.uploadFileContent(map),
+      logger2,
+      {
+        op: "uploadFileContent",
+        chunkIndex: idx + 1,
+        chunks: chunks.length,
+        files: map.size
+      },
+      maxAttempts
+    );
   }
-  if (map.size === 0) return;
-  await syncClient.uploadFileContent(map);
 }
-async function ensureSnapshotCreated(rootPath, computation, syncClient, logger2) {
+async function ensureSnapshotCreated(rootPath, computation, syncClient, logger2, options) {
   const { snapshotHash, files } = computation;
-
+  const maxAttempts = options?.maxAttempts ?? 5;
+  const uploadChunkSize = options?.uploadChunkSize ?? 64;
+  let status = await withRetries(
+    () => syncClient.checkSnapshotStatus(snapshotHash),
+    logger2,
+    { op: "checkSnapshotStatus", snapshotHash },
+    maxAttempts
+  );
   if (status.status === "READY") {
     logger2.info({ snapshotHash }, "Snapshot already READY");
     return;
   }
   if (status.status === "NOT_FOUND" || status.status === "MISSING_CONTENT") {
-    status = await
+    status = await withRetries(
+      () => syncClient.createSnapshot(snapshotHash, files),
+      logger2,
+      { op: "createSnapshot", snapshotHash },
+      maxAttempts
+    );
   }
   if (status.status === "MISSING_CONTENT" && status.missing_files?.length) {
     logger2.info(
       { missing: status.missing_files.length },
       "Uploading missing file content"
     );
-    await uploadMissing(
-
+    await uploadMissing(
+      rootPath,
+      status.missing_files,
+      syncClient,
+      logger2,
+      maxAttempts,
+      uploadChunkSize
+    );
+    status = await withRetries(
+      () => syncClient.createSnapshot(snapshotHash, files),
+      logger2,
+      { op: "createSnapshot", snapshotHash },
+      maxAttempts
+    );
   }
   let attempt = 0;
   while (status.status !== "READY") {
```
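This hunk introduces `withRetries`, and `uploadMissing` now splits missing files into chunks (default 64) and wraps each chunk upload in it, so a single flaky request no longer fails the whole upload. The helper retries with exponential backoff capped at 15 s: attempt 1 waits 1 s, then 2 s, 4 s, 8 s, and 15 s from the fifth retry onward. A typed sketch matching the compiled output above; the `RetryLogger` interface is an assumed minimal shape:

```ts
// Typed sketch of the withRetries helper from the 1.5.0 bundle.
// Delay schedule: 1e3 * 2^(attempt-1) ms, capped at 15e3 ms.
const sleep = (ms: number) => new Promise<void>((resolve) => setTimeout(resolve, ms));

interface RetryLogger {
  warn(obj: object, msg: string): void;
  error(obj: object, msg: string): void;
}

async function withRetries<T>(
  op: () => Promise<T>,
  logger: RetryLogger,
  context: Record<string, unknown>,
  maxAttempts: number
): Promise<T> {
  let attempt = 0;
  while (true) {
    try {
      return await op();
    } catch (err) {
      attempt += 1;
      if (attempt >= maxAttempts) {
        // Retry budget exhausted: log and rethrow to the caller.
        logger.error({ err, ...context, attempt }, "Operation failed after retries");
        throw err;
      }
      // Exponential backoff with a 15-second ceiling.
      const delay = Math.min(15e3, 1e3 * 2 ** (attempt - 1));
      logger.warn({ err, ...context, attempt, delay }, "Operation failed; retrying");
      await sleep(delay);
    }
  }
}
```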
```diff
@@ -1110,13 +1185,24 @@ async function ensureSnapshotCreated(rootPath, computation, syncClient, logger2)
     const delay = Math.min(5e3, 1e3 * Math.max(1, 2 ** attempt));
     await sleep(delay);
     attempt += 1;
-    status = await
+    status = await withRetries(
+      () => syncClient.checkSnapshotStatus(snapshotHash),
+      logger2,
+      { op: "checkSnapshotStatus", snapshotHash },
+      maxAttempts
+    );
   }
   logger2.info({ snapshotHash }, "Snapshot READY");
 }
-async function publishSnapshot(rootPath, filesRepo, snapshotsRepo, syncClient, logger2) {
+async function publishSnapshot(rootPath, filesRepo, snapshotsRepo, syncClient, logger2, options) {
   const computation = computeSnapshot(filesRepo);
-  await ensureSnapshotCreated(
+  await ensureSnapshotCreated(
+    rootPath,
+    computation,
+    syncClient,
+    logger2,
+    options
+  );
   const createdAt = Date.now();
   snapshotsRepo.insert(
     computation.snapshotHash,
```
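`publishSnapshot` and `ensureSnapshotCreated` now accept an optional options bag that is threaded down to `uploadMissing`. A sketch of its shape and the defaulting logic, as read from the compiled output; the interface name is illustrative, since the bundle uses an inferred, unexported type:

```ts
// Assumed shape of the options bag accepted by publishSnapshot and
// ensureSnapshotCreated in 1.5.0. Field names come from the diff.
interface SnapshotPublishOptions {
  maxAttempts?: number;     // per-operation retry budget; defaults to 5
  uploadChunkSize?: number; // files per uploadFileContent batch; defaults to 64
}

// Defaults are applied with nullish coalescing, as in the compiled output.
function resolveOptions(options?: SnapshotPublishOptions) {
  return {
    maxAttempts: options?.maxAttempts ?? 5,
    uploadChunkSize: options?.uploadChunkSize ?? 64,
  };
}
```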
```diff
@@ -1156,7 +1242,8 @@ async function runInitialSyncPipeline(runtime) {
     runtime.filesRepo,
     runtime.snapshotsRepo,
     runtime.clients.sync,
-    syncLogger
+    syncLogger,
+    { maxAttempts: runtime.config.maxSnapshotAttempts }
   );
   return result;
 }
```
```diff
@@ -1559,7 +1646,8 @@ var ServiceRunner = class {
         this.runtime.filesRepo,
         this.runtime.snapshotsRepo,
         this.runtime.clients.sync,
-        log
+        log,
+        { maxAttempts: this.runtime.config.maxSnapshotAttempts }
       );
       this.runtime.outbox.ack(job.id, this.fsControlLeaseOwner);
       this.state.updateSnapshotReady(result.createdAt);
```
```diff
@@ -1617,6 +1705,63 @@ function collectIndexingStatus(runtime, runner) {
     service: runner.getServiceStateSnapshot()
   };
 }
+function formatStatus(status) {
+  const lines = [];
+  lines.push("=== Coderule Indexing Status ===");
+  lines.push("");
+  lines.push(`Timestamp: ${new Date(status.timestamp).toISOString()}`);
+  lines.push("");
+  lines.push("Repository:");
+  lines.push(`  ID: ${status.root.id}`);
+  lines.push(`  Path: ${status.root.path}`);
+  lines.push("");
+  lines.push("Files:");
+  lines.push(`  Total: ${status.files.total}`);
+  lines.push("  States:");
+  lines.push(`    Clean: ${status.files.byState.clean}`);
+  lines.push(`    Dirty: ${status.files.byState.dirty}`);
+  lines.push(`    Hashing: ${status.files.byState.hashing}`);
+  lines.push(`    Missing: ${status.files.byState.missing}`);
+  lines.push("");
+  lines.push("Queue:");
+  lines.push(`  Pending: ${status.queue.pending}`);
+  lines.push(`  Processing: ${status.queue.processing}`);
+  lines.push(`  Done: ${status.queue.done}`);
+  lines.push(`  Failed: ${status.queue.failed}`);
+  lines.push("");
+  if (status.latestSnapshot) {
+    lines.push("Latest Snapshot:");
+    lines.push(`  Hash: ${status.latestSnapshot.snapshot_hash}`);
+    lines.push(`  Files: ${status.latestSnapshot.files_count}`);
+    lines.push(`  Size: ${formatBytes(status.latestSnapshot.total_size)}`);
+    lines.push(
+      `  Created: ${new Date(status.latestSnapshot.created_at).toISOString()}`
+    );
+  } else {
+    lines.push("Latest Snapshot: None");
+  }
+  lines.push("");
+  lines.push("Service State:");
+  lines.push(
+    `  Last Change: ${new Date(status.service.lastChangeAt).toISOString()}`
+  );
+  lines.push(
+    `  Last Snapshot Ready: ${new Date(status.service.lastSnapshotReadyAt).toISOString()}`
+  );
+  lines.push(
+    `  Last Heartbeat: ${status.service.lastHeartbeatEnqueuedAt > 0 ? new Date(status.service.lastHeartbeatEnqueuedAt).toISOString() : "Never"}`
+  );
+  lines.push(`  Watcher Ready: ${status.service.watcherReady ? "Yes" : "No"}`);
+  lines.push(`  Buffering: ${status.service.buffering ? "Yes" : "No"}`);
+  return lines.join("\n");
+}
+function formatBytes(bytes) {
+  if (bytes === 0) return "0 B";
+  const units = ["B", "KB", "MB", "GB", "TB"];
+  const k = 1024;
+  const i = Math.floor(Math.log(bytes) / Math.log(k));
+  return `${(bytes / Math.pow(k, i)).toFixed(2)} ${units[i]}`;
+}
 
 // src/mcp/server.ts
 var SERVER_NAME = "coderule-scanner-mcp";
```
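`formatBytes` picks its unit by the base-1024 logarithm of the byte count: `i = floor(log(bytes) / log(1024))` selects the unit, and the value is scaled by `1024^i`. A standalone copy with sample outputs worked through the algorithm:

```ts
// Standalone copy of formatBytes from the diff above.
// Unit index i = floor(log_1024(bytes)); value = bytes / 1024^i.
function formatBytes(bytes: number): string {
  if (bytes === 0) return "0 B";
  const units = ["B", "KB", "MB", "GB", "TB"];
  const k = 1024;
  const i = Math.floor(Math.log(bytes) / Math.log(k));
  return `${(bytes / Math.pow(k, i)).toFixed(2)} ${units[i]}`;
}

console.log(formatBytes(512));     // "512.00 B"   (log_1024(512) < 1, so i = 0)
console.log(formatBytes(1536));    // "1.50 KB"    (i = 1; 1536 / 1024 = 1.50)
console.log(formatBytes(10 ** 9)); // "953.67 MB"  (i = 2; 1e9 / 1024^2 ≈ 953.67)
```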
```diff
@@ -1639,7 +1784,7 @@ function createMcpServer({
       },
       async () => {
         const status = collectIndexingStatus(runtime, runner);
-        const text =
+        const text = formatStatus(status);
         return {
           content: [{ type: "text", text }]
         };
```
```diff
@@ -1678,20 +1823,11 @@ function createMcpServer({
             formatter: runtime.config.retrievalFormatter
           }
         );
-        const summary = {
-          snapshotHash: latest.snapshot_hash,
-          budgetTokens: effectiveBudget,
-          formatter: runtime.config.retrievalFormatter
-        };
         return {
           content: [
             {
               type: "text",
               text: result.formatted_output ?? "(no formatted output)"
-            },
-            {
-              type: "text",
-              text: JSON.stringify({ summary, result }, null, 2)
             }
           ]
         };
```
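The retrieval tool now returns a single text item (the formatted output) instead of the former pair of text plus a JSON `summary`/`result` dump. The remaining shape follows the standard MCP tool-result structure; a minimal sketch of that contract:

```ts
// Minimal MCP tool-result shape, as used by the handler above.
// Only the "text" content variant is modeled here; placeholder text shown.
type ToolResult = {
  content: Array<{ type: "text"; text: string }>;
};

const result: ToolResult = {
  content: [{ type: "text", text: "(formatted retrieval output)" }],
};
```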
```diff
@@ -1887,15 +2023,27 @@ async function main() {
   const runner = new ServiceRunner(runtime);
   try {
     await runner.prepareWatcher(true);
-
-
-
-
-
-
-
-
-
+    let initialCreatedAt;
+    try {
+      const initial = await runInitialSyncPipeline(runtime);
+      runtime.logger.info(
+        {
+          snapshotHash: initial.snapshotHash,
+          filesCount: initial.filesCount
+        },
+        "Initial sync completed; starting MCP server"
+      );
+      initialCreatedAt = initial.createdAt;
+    } catch (error) {
+      runtime.logger.warn(
+        { err: error },
+        "Initial sync failed; enqueuing snapshot job and continuing"
+      );
+      runtime.outbox.enqueueSnapshot(runtime.config.rootId, 0);
+    }
+    if (initialCreatedAt) {
+      runner.recordInitialSnapshot(initialCreatedAt);
+    }
     await runner.startLoops();
     await runner.enableWatcherProcessing();
     const server = createMcpServer({ runtime, runner });
```
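The startup sequence in `main()` now runs the initial sync inline and degrades gracefully: on failure it enqueues a snapshot job for the background loop instead of aborting. A condensed sketch of that fallback pattern, with the runtime pieces abstracted into placeholder parameters (`runInitialSync` and `enqueueSnapshotJob` are hypothetical stand-ins for the calls shown in the diff):

```ts
// Startup fallback: attempt the expensive operation once; on failure,
// queue background work and keep the server starting up.
interface StartupLogger {
  info(obj: object, msg: string): void;
  warn(obj: object, msg: string): void;
}

async function startWithFallback(
  runInitialSync: () => Promise<{ createdAt: number }>,
  enqueueSnapshotJob: () => void,
  log: StartupLogger
): Promise<number | undefined> {
  try {
    const initial = await runInitialSync();
    log.info({ createdAt: initial.createdAt }, "Initial sync completed");
    return initial.createdAt; // caller can record the snapshot-ready time
  } catch (err) {
    log.warn({ err }, "Initial sync failed; enqueuing snapshot job and continuing");
    enqueueSnapshotJob(); // the background loop will retry the snapshot later
    return undefined;
  }
}
```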