@coderule/mcp 1.5.0 → 1.6.1

This diff shows the contents of publicly released package versions as they appear in their respective public registries and is provided for informational purposes only.
package/dist/index.cjs CHANGED
@@ -44,6 +44,8 @@ var DEFAULT_QUEUE_POLL_INTERVAL_MS = 500;
  var DEFAULT_HASH_BATCH_SIZE = 32;
  var DEFAULT_MAX_SNAPSHOT_ATTEMPTS = 5;
  var DEFAULT_HTTP_TIMEOUT_MS = 12e4;
+ var DEFAULT_UPLOAD_CHUNK_SIZE = 1;
+ var DEFAULT_MAX_QUERY_WAIT_MS = 5e4;

  // src/config/Configurator.ts
  var DEFAULT_RETRIEVAL_FORMATTER = "standard";
@@ -71,6 +73,14 @@ function parseInteger(value, fallback) {
  }
  return parsed;
  }
+ function parseSecondsToMs(value, fallbackMs) {
+ if (!value) return fallbackMs;
+ const seconds = Number.parseInt(value, 10);
+ if (Number.isNaN(seconds) || seconds <= 0) {
+ throw new Error(`Invalid seconds value: ${value}`);
+ }
+ return seconds * 1e3;
+ }
  function parseFormatter(value) {
  if (!value) return DEFAULT_RETRIEVAL_FORMATTER;
  const normalized = value.toLowerCase();
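A minimal usage sketch of the parseSecondsToMs helper added above (illustrative, not part of the package): the value is read as a whole number of seconds and converted to milliseconds, a missing value falls back to the supplied default, and non-numeric or non-positive input throws.

    // Illustrative calls mirroring the helper added in the hunk above.
    parseSecondsToMs(void 0, 5e4); // 50000 (missing value, falls back to the default)
    parseSecondsToMs("90", 5e4);   // 90000 (90 seconds converted to milliseconds)
    parseSecondsToMs("0", 5e4);    // throws Error: Invalid seconds value: 0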
@@ -117,7 +127,9 @@ async function resolveConfig({
  maxSnapshotAttempts: DEFAULTS.maxSnapshotAttempts,
  retrievalFormatter: parseFormatter(
  process.env.CODERULE_RETRIEVAL_FORMATTER
- )
+ ),
+ uploadChunkSize: DEFAULT_UPLOAD_CHUNK_SIZE,
+ maxQueryWaitMs: DEFAULT_MAX_QUERY_WAIT_MS
  };
  if (process.env.CODERULE_SNAPSHOT_DEBOUNCE_MS) {
  baseConfig.snapshotDebounceMs = parseInteger(
@@ -159,6 +171,16 @@ async function resolveConfig({
  process.env.CODERULE_HTTP_TIMEOUT,
  DEFAULT_HTTP_TIMEOUT_MS
  );
+ if (process.env.CODERULE_UPLOAD_CHUNK_SIZE) {
+ baseConfig.uploadChunkSize = parseInteger(
+ process.env.CODERULE_UPLOAD_CHUNK_SIZE,
+ baseConfig.uploadChunkSize
+ );
+ }
+ baseConfig.maxQueryWaitMs = parseSecondsToMs(
+ process.env.CODERULE_MAX_WAIT_TIME,
+ baseConfig.maxQueryWaitMs
+ );
  logger.debug(
  {
  rootPath,
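For illustration only, a sketch of how the two new environment overrides resolve under the logic above; the variable names and defaults come from this diff, while the concrete values are hypothetical.

    // Hypothetical environment, resolved by the code in the hunk above.
    process.env.CODERULE_UPLOAD_CHUNK_SIZE = "8"; // integer, parsed with parseInteger
    process.env.CODERULE_MAX_WAIT_TIME = "120";   // seconds, parsed with parseSecondsToMs
    // After resolveConfig runs:
    //   baseConfig.uploadChunkSize === 8      (default DEFAULT_UPLOAD_CHUNK_SIZE = 1)
    //   baseConfig.maxQueryWaitMs === 120000  (default DEFAULT_MAX_QUERY_WAIT_MS = 5e4)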
@@ -1092,7 +1114,7 @@ async function withRetries(op, logger2, context, maxAttempts) {
  }
  }
  }
- async function uploadMissing(rootPath, missing, syncClient, logger2, maxAttempts, chunkSize = 64) {
+ async function uploadMissing(rootPath, missing, syncClient, logger2, maxAttempts, chunkSize = 1) {
  if (!missing || missing.length === 0) return;
  const total = missing.length;
  const chunks = [];
@@ -1134,7 +1156,7 @@ async function uploadMissing(rootPath, missing, syncClient, logger2, maxAttempts
  async function ensureSnapshotCreated(rootPath, computation, syncClient, logger2, options) {
  const { snapshotHash, files } = computation;
  const maxAttempts = options?.maxAttempts ?? 5;
- const uploadChunkSize = options?.uploadChunkSize ?? 64;
+ const uploadChunkSize = options?.uploadChunkSize ?? 1;
  let status = await withRetries(
  () => syncClient.checkSnapshotStatus(snapshotHash),
  logger2,
@@ -1216,7 +1238,7 @@ async function publishSnapshot(rootPath, filesRepo, snapshotsRepo, syncClient, l
  }

  // src/service/InitialSync.ts
- async function runInitialSyncPipeline(runtime) {
+ async function runInitialSyncPipeline(runtime, options) {
  const inventoryLogger = runtime.logger.child({ scope: "inventory" });
  await runInventory({
  rootPath: runtime.config.rootPath,
@@ -1232,16 +1254,37 @@ async function runInitialSyncPipeline(runtime) {
  hashLogger.debug("Hasher processed batch");
  }
  }
- const syncLogger = runtime.logger.child({ scope: "snapshot" });
- const result = await publishSnapshot(
- runtime.config.rootPath,
- runtime.filesRepo,
- runtime.snapshotsRepo,
- runtime.clients.sync,
- syncLogger,
- { maxAttempts: runtime.config.maxSnapshotAttempts }
+ if (options?.blockUntilReady !== false) {
+ const syncLogger = runtime.logger.child({ scope: "snapshot" });
+ const result = await publishSnapshot(
+ runtime.config.rootPath,
+ runtime.filesRepo,
+ runtime.snapshotsRepo,
+ runtime.clients.sync,
+ syncLogger,
+ {
+ maxAttempts: runtime.config.maxSnapshotAttempts,
+ uploadChunkSize: runtime.config.uploadChunkSize
+ }
+ );
+ return result;
+ }
+ const computation = computeSnapshot(runtime.filesRepo);
+ const createdAt = Date.now();
+ runtime.snapshotsRepo.insert(
+ computation.snapshotHash,
+ computation.filesCount,
+ computation.totalSize,
+ createdAt
  );
- return result;
+ runtime.outbox.enqueueSnapshot(runtime.config.rootId, 0);
+ return {
+ snapshotHash: computation.snapshotHash,
+ filesCount: computation.filesCount,
+ totalSize: computation.totalSize,
+ status: "READY",
+ createdAt
+ };
  }
  async function createChokidarWatcher(options, usePolling) {
  const log = options.logger.child({
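A rough sketch of the two call patterns the new options argument supports (the runtime object is assumed to have the shape used throughout this file; both calls appear verbatim further down in this diff).

    // Illustrative only.
    const blocking = await runInitialSyncPipeline(runtime, { blockUntilReady: true });
    // Waits for publishSnapshot before returning its result.

    const deferred = await runInitialSyncPipeline(runtime, { blockUntilReady: false });
    // Records the snapshot locally, enqueues it on the outbox, and returns
    // a { status: "READY", ... } result without waiting for the upload.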
@@ -1643,7 +1686,10 @@ var ServiceRunner = class {
  this.runtime.snapshotsRepo,
  this.runtime.clients.sync,
  log,
- { maxAttempts: this.runtime.config.maxSnapshotAttempts }
+ {
+ maxAttempts: this.runtime.config.maxSnapshotAttempts,
+ uploadChunkSize: this.runtime.config.uploadChunkSize
+ }
  );
  this.runtime.outbox.ack(job.id, this.fsControlLeaseOwner);
  this.state.updateSnapshotReady(result.createdAt);
@@ -1702,7 +1748,9 @@ function awaitShutdownSignals() {
  async function runInitialSync(params) {
  const runtime = await bootstrap(params);
  try {
- const result = await runInitialSyncPipeline(runtime);
+ const result = await runInitialSyncPipeline(runtime, {
+ blockUntilReady: true
+ });
  runtime.logger.info(
  {
  snapshotHash: result.snapshotHash,
@@ -1726,7 +1774,9 @@ async function runService(params) {
  await runner.prepareWatcher(true);
  let initialCreatedAt;
  try {
- const initial = await runInitialSyncPipeline(runtime);
+ const initial = await runInitialSyncPipeline(runtime, {
+ blockUntilReady: false
+ });
  runtime.logger.info(
  {
  snapshotHash: initial.snapshotHash,