@coderule/mcp 1.5.0 → 1.6.1
This diff compares publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
- package/dist/cli.cjs +71 -17
- package/dist/cli.cjs.map +1 -1
- package/dist/cli.js +71 -17
- package/dist/cli.js.map +1 -1
- package/dist/index.cjs +66 -16
- package/dist/index.cjs.map +1 -1
- package/dist/index.js +66 -16
- package/dist/index.js.map +1 -1
- package/dist/mcp-cli.cjs +111 -31
- package/dist/mcp-cli.cjs.map +1 -1
- package/dist/mcp-cli.js +111 -31
- package/dist/mcp-cli.js.map +1 -1
- package/package.json +1 -1
package/dist/mcp-cli.cjs
CHANGED
@@ -48,6 +48,8 @@ var DEFAULT_QUEUE_POLL_INTERVAL_MS = 500;
 var DEFAULT_HASH_BATCH_SIZE = 32;
 var DEFAULT_MAX_SNAPSHOT_ATTEMPTS = 5;
 var DEFAULT_HTTP_TIMEOUT_MS = 12e4;
+var DEFAULT_UPLOAD_CHUNK_SIZE = 1;
+var DEFAULT_MAX_QUERY_WAIT_MS = 5e4;
 
 // src/config/Configurator.ts
 var DEFAULT_RETRIEVAL_FORMATTER = "standard";
@@ -75,6 +77,14 @@ function parseInteger(value, fallback) {
   }
   return parsed;
 }
+function parseSecondsToMs(value, fallbackMs) {
+  if (!value) return fallbackMs;
+  const seconds = Number.parseInt(value, 10);
+  if (Number.isNaN(seconds) || seconds <= 0) {
+    throw new Error(`Invalid seconds value: ${value}`);
+  }
+  return seconds * 1e3;
+}
 function parseFormatter(value) {
   if (!value) return DEFAULT_RETRIEVAL_FORMATTER;
   const normalized = value.toLowerCase();
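The new parseSecondsToMs helper takes a seconds string (as used for CODERULE_MAX_WAIT_TIME further down) and converts it to milliseconds, throwing on non-numeric or non-positive input. A minimal sketch of the resulting behaviour, reusing the function body from the hunk above; the example calls are illustrative only:

// Illustrative only: mirrors the parseSecondsToMs shown in the hunk above.
function parseSecondsToMs(value, fallbackMs) {
  if (!value) return fallbackMs;
  const seconds = Number.parseInt(value, 10);
  if (Number.isNaN(seconds) || seconds <= 0) {
    throw new Error(`Invalid seconds value: ${value}`);
  }
  return seconds * 1e3;
}

parseSecondsToMs(undefined, 5e4); // 50000 -- falls back to DEFAULT_MAX_QUERY_WAIT_MS
parseSecondsToMs("90", 5e4);      // 90000 -- seconds are multiplied by 1e3
// parseSecondsToMs("0", 5e4);    // would throw Error: Invalid seconds value: 0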
@@ -121,7 +131,9 @@ async function resolveConfig({
     maxSnapshotAttempts: DEFAULTS.maxSnapshotAttempts,
     retrievalFormatter: parseFormatter(
       process.env.CODERULE_RETRIEVAL_FORMATTER
-    )
+    ),
+    uploadChunkSize: DEFAULT_UPLOAD_CHUNK_SIZE,
+    maxQueryWaitMs: DEFAULT_MAX_QUERY_WAIT_MS
   };
   if (process.env.CODERULE_SNAPSHOT_DEBOUNCE_MS) {
     baseConfig.snapshotDebounceMs = parseInteger(
@@ -163,6 +175,16 @@ async function resolveConfig({
       process.env.CODERULE_HTTP_TIMEOUT,
       DEFAULT_HTTP_TIMEOUT_MS
     );
+  if (process.env.CODERULE_UPLOAD_CHUNK_SIZE) {
+    baseConfig.uploadChunkSize = parseInteger(
+      process.env.CODERULE_UPLOAD_CHUNK_SIZE,
+      baseConfig.uploadChunkSize
+    );
+  }
+  baseConfig.maxQueryWaitMs = parseSecondsToMs(
+    process.env.CODERULE_MAX_WAIT_TIME,
+    baseConfig.maxQueryWaitMs
+  );
   logger.debug(
     {
       rootPath,
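With both fields added to the base config and then overridden from the environment, the effective values for a given launch can be read off directly. A small sketch that replays the override order shown above for one hypothetical environment (the env object and its values are made up for illustration):

// Sketch, not package code: replays the override order shown above.
const env = { CODERULE_UPLOAD_CHUNK_SIZE: "8", CODERULE_MAX_WAIT_TIME: "90" };
const DEFAULT_UPLOAD_CHUNK_SIZE = 1;
const DEFAULT_MAX_QUERY_WAIT_MS = 5e4;
const uploadChunkSize = env.CODERULE_UPLOAD_CHUNK_SIZE
  ? Number.parseInt(env.CODERULE_UPLOAD_CHUNK_SIZE, 10)   // parseInteger in the real code
  : DEFAULT_UPLOAD_CHUNK_SIZE;
const maxQueryWaitMs = env.CODERULE_MAX_WAIT_TIME
  ? Number.parseInt(env.CODERULE_MAX_WAIT_TIME, 10) * 1e3 // parseSecondsToMs in the real code
  : DEFAULT_MAX_QUERY_WAIT_MS;
console.log({ uploadChunkSize, maxQueryWaitMs }); // { uploadChunkSize: 8, maxQueryWaitMs: 90000 }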
@@ -1096,7 +1118,7 @@ async function withRetries(op, logger2, context, maxAttempts) {
     }
   }
 }
-async function uploadMissing(rootPath, missing, syncClient, logger2, maxAttempts, chunkSize =
+async function uploadMissing(rootPath, missing, syncClient, logger2, maxAttempts, chunkSize = 1) {
   if (!missing || missing.length === 0) return;
   const total = missing.length;
   const chunks = [];
@@ -1138,7 +1160,7 @@ async function uploadMissing(rootPath, missing, syncClient, logger2, maxAttempts
 async function ensureSnapshotCreated(rootPath, computation, syncClient, logger2, options) {
   const { snapshotHash, files } = computation;
   const maxAttempts = options?.maxAttempts ?? 5;
-  const uploadChunkSize = options?.uploadChunkSize ??
+  const uploadChunkSize = options?.uploadChunkSize ?? 1;
   let status = await withRetries(
     () => syncClient.checkSnapshotStatus(snapshotHash),
     logger2,
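Both uploadMissing and ensureSnapshotCreated now default chunkSize to 1, so each missing file goes up in its own request unless CODERULE_UPLOAD_CHUNK_SIZE raises it. The splitting loop itself is not part of these hunks; the sketch below is an assumed equivalent of how a chunk size partitions the missing list:

// Assumed equivalent of the chunking step (the actual loop in uploadMissing is not shown here).
function toChunks(items, chunkSize) {
  const chunks = [];
  for (let i = 0; i < items.length; i += chunkSize) {
    chunks.push(items.slice(i, i + chunkSize));
  }
  return chunks;
}

toChunks(["a.ts", "b.ts", "c.ts"], 1); // [["a.ts"], ["b.ts"], ["c.ts"]] -- the new default
toChunks(["a.ts", "b.ts", "c.ts"], 2); // [["a.ts", "b.ts"], ["c.ts"]]   -- e.g. CODERULE_UPLOAD_CHUNK_SIZE=2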
@@ -1220,7 +1242,7 @@ async function publishSnapshot(rootPath, filesRepo, snapshotsRepo, syncClient, l
 }
 
 // src/service/InitialSync.ts
-async function runInitialSyncPipeline(runtime) {
+async function runInitialSyncPipeline(runtime, options) {
   const inventoryLogger = runtime.logger.child({ scope: "inventory" });
   await runInventory({
     rootPath: runtime.config.rootPath,
@@ -1236,16 +1258,22 @@ async function runInitialSyncPipeline(runtime) {
       hashLogger.debug("Hasher processed batch");
     }
   }
-  const
-  const
-
-
-
-
-
-    { maxAttempts: runtime.config.maxSnapshotAttempts }
+  const computation = computeSnapshot(runtime.filesRepo);
+  const createdAt = Date.now();
+  runtime.snapshotsRepo.insert(
+    computation.snapshotHash,
+    computation.filesCount,
+    computation.totalSize,
+    createdAt
   );
-
+  runtime.outbox.enqueueSnapshot(runtime.config.rootId, 0);
+  return {
+    snapshotHash: computation.snapshotHash,
+    filesCount: computation.filesCount,
+    totalSize: computation.totalSize,
+    status: "READY",
+    createdAt
+  };
 }
 async function createChokidarWatcher(options, usePolling) {
   const log = options.logger.child({
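runInitialSyncPipeline no longer publishes the snapshot inline: it records the snapshot locally, enqueues an outbox job, and returns a summary immediately. A sketch of the shape a caller receives; the field values are invented, and the blockUntilReady option shown here appears in the main() hunk further down:

// Usage sketch (depends on the runtime assembled elsewhere in mcp-cli; values invented).
const initial = await runInitialSyncPipeline(runtime, { blockUntilReady: false });
// initial === {
//   snapshotHash: "abc123...",   // hash of the locally computed snapshot
//   filesCount: 1240,
//   totalSize: 5816320,
//   status: "READY",             // local status; server publication happens via the outbox job
//   createdAt: 1730000000000
// }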
@@ -1647,7 +1675,10 @@ var ServiceRunner = class {
         this.runtime.snapshotsRepo,
         this.runtime.clients.sync,
         log,
-        {
+        {
+          maxAttempts: this.runtime.config.maxSnapshotAttempts,
+          uploadChunkSize: this.runtime.config.uploadChunkSize
+        }
       );
       this.runtime.outbox.ack(job.id, this.fsControlLeaseOwner);
       this.state.updateSnapshotReady(result.createdAt);
@@ -1770,6 +1801,37 @@ function createMcpServer({
   runtime,
   runner
 }) {
+  async function sleep3(ms) {
+    return new Promise((resolve) => setTimeout(resolve, ms));
+  }
+  async function waitForLocalSnapshot(deadlineMs) {
+    let latest = runtime.snapshotsRepo.getLatest();
+    while (!latest && Date.now() < deadlineMs) {
+      await sleep3(250);
+      latest = runtime.snapshotsRepo.getLatest();
+    }
+    return latest;
+  }
+  async function waitForServerReady(initialHash, deadlineMs) {
+    let currentHash = initialHash;
+    while (Date.now() < deadlineMs) {
+      try {
+        const status = await runtime.clients.sync.checkSnapshotStatus(currentHash);
+        if (status.status === "READY") {
+          return currentHash;
+        }
+        if (status.status === "FAILED") {
+        }
+      } catch {
+      }
+      await sleep3(500);
+      const latest = runtime.snapshotsRepo.getLatest();
+      if (latest && latest.snapshot_hash !== currentHash) {
+        currentHash = latest.snapshot_hash;
+      }
+    }
+    return void 0;
+  }
   const server = new mcp_js.McpServer({
     name: SERVER_NAME,
     version: SERVER_VERSION,
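waitForLocalSnapshot and waitForServerReady follow the same poll-until-deadline shape: check a condition, sleep a fixed interval, and give up once Date.now() passes the deadline derived from maxQueryWaitMs. A distilled form of that pattern (not code from the package):

// Distilled polling pattern used by both helpers above (not package code).
async function pollUntil(check, deadlineMs, intervalMs) {
  while (Date.now() < deadlineMs) {
    const value = await check(); // returns undefined while the condition is not yet met
    if (value !== undefined) return value;
    await new Promise((resolve) => setTimeout(resolve, intervalMs));
  }
  return undefined; // deadline passed without the condition holding
}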
@@ -1805,23 +1867,31 @@ function createMcpServer({
       query,
       budgetTokens
     }) => {
-      const
+      const deadline = Date.now() + runtime.config.maxQueryWaitMs;
+      const latest = await waitForLocalSnapshot(deadline);
       if (!latest) {
-        const
-
-
-
-
+        const statusText = formatStatus(collectIndexingStatus(runtime, runner));
+        const text = `We are not ready....
+${statusText}`;
+        return { content: [{ type: "text", text }] };
+      }
+      const readyHash = await waitForServerReady(
+        latest.snapshot_hash,
+        deadline
+      );
+      if (!readyHash) {
+        const statusText = formatStatus(collectIndexingStatus(runtime, runner));
+        const text = `We are not ready....
+${statusText}`;
+        return { content: [{ type: "text", text }] };
       }
       const effectiveBudget = Math.max(100, budgetTokens ?? 3e3);
       try {
         const result = await runtime.clients.retrieval.query(
-
+          readyHash,
           query,
           effectiveBudget,
-          {
-            formatter: runtime.config.retrievalFormatter
-          }
+          { formatter: runtime.config.retrievalFormatter }
         );
         return {
           content: [
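When the deadline passes before a READY snapshot is available, the tool answers with plain-text status instead of failing. A sketch of the returned MCP content; the status line is a placeholder for whatever formatStatus produces:

// Sketch of the not-ready result produced above (status text is a placeholder).
const notReadyResult = {
  content: [
    {
      type: "text",
      text: "We are not ready....\n<output of formatStatus(collectIndexingStatus(runtime, runner))>"
    }
  ]
};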
@@ -1860,7 +1930,9 @@ var ENV_FLAG_MAP = {
   "queue-poll": "CODERULE_QUEUE_POLL_INTERVAL_MS",
   "hash-batch": "CODERULE_HASH_BATCH_SIZE",
   "hash-lease": "CODERULE_HASH_LEASE_MS",
-  "max-snapshot-attempts": "CODERULE_MAX_SNAPSHOT_ATTEMPTS"
+  "max-snapshot-attempts": "CODERULE_MAX_SNAPSHOT_ATTEMPTS",
+  "upload-chunk-size": "CODERULE_UPLOAD_CHUNK_SIZE",
+  "max-wait-time": "CODERULE_MAX_WAIT_TIME"
 };
 function printUsage() {
   console.log(`Usage: coderule-mcp-server [token] [options]
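The two new CLI flags are thin aliases for environment variables via ENV_FLAG_MAP, so --upload-chunk-size 8 --max-wait-time 90 has the same effect as exporting CODERULE_UPLOAD_CHUNK_SIZE=8 and CODERULE_MAX_WAIT_TIME=90 before launch. A sketch of that mapping; the surrounding argument-parsing loop is not part of this hunk:

// Sketch: flag-to-env translation implied by ENV_FLAG_MAP (argument-parsing loop not shown here).
const ENV_FLAG_MAP = {
  "upload-chunk-size": "CODERULE_UPLOAD_CHUNK_SIZE",
  "max-wait-time": "CODERULE_MAX_WAIT_TIME"
};
// `--upload-chunk-size 8` effectively does:
process.env[ENV_FLAG_MAP["upload-chunk-size"]] = "8";
// `--max-wait-time 90` effectively does (seconds; later converted to 90000 ms by parseSecondsToMs):
process.env[ENV_FLAG_MAP["max-wait-time"]] = "90";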
@@ -1902,6 +1974,12 @@ function printUsage() {
   console.log(
     " --max-snapshot-attempts <n> Override CODERULE_MAX_SNAPSHOT_ATTEMPTS"
   );
+  console.log(
+    " --upload-chunk-size <n> Override CODERULE_UPLOAD_CHUNK_SIZE (default 1)"
+  );
+  console.log(
+    " --max-wait-time <sec> Override CODERULE_MAX_WAIT_TIME (default 50s)"
+  );
   console.log(
     " KEY=value Set arbitrary environment variable"
   );
@@ -2023,15 +2101,21 @@ async function main() {
   const runner = new ServiceRunner(runtime);
   try {
     await runner.prepareWatcher(true);
+    const server = createMcpServer({ runtime, runner });
+    const transport = new stdio_js.StdioServerTransport();
+    await server.connect(transport);
+    runtime.logger.info("MCP server connected via stdio");
     let initialCreatedAt;
     try {
-      const initial = await runInitialSyncPipeline(runtime
+      const initial = await runInitialSyncPipeline(runtime, {
+        blockUntilReady: false
+      });
       runtime.logger.info(
         {
           snapshotHash: initial.snapshotHash,
           filesCount: initial.filesCount
         },
-        "Initial sync completed;
+        "Initial sync completed; entering continuous mode"
       );
       initialCreatedAt = initial.createdAt;
     } catch (error) {
@@ -2046,10 +2130,6 @@ async function main() {
     }
     await runner.startLoops();
     await runner.enableWatcherProcessing();
-    const server = createMcpServer({ runtime, runner });
-    const transport = new stdio_js.StdioServerTransport();
-    await server.connect(transport);
-    runtime.logger.info("MCP server connected via stdio");
     const signal = await awaitShutdownSignals();
     runtime.logger.info({ signal }, "Shutdown signal received");
     if (typeof transport.close === "function") {