midnight-mcp 0.1.41 → 0.2.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +32 -1
- package/dist/bin.d.ts +1 -0
- package/dist/bin.js +60 -0
- package/dist/chunk-HOWO4K5A.js +2197 -0
- package/dist/chunk-S7G4OHA4.js +8306 -0
- package/dist/db-YDGUWI5K.js +7 -0
- package/dist/index.d.ts +205 -3
- package/dist/index.js +28 -16
- package/package.json +16 -6
- package/dist/config/compact-version.d.ts +0 -183
- package/dist/config/compact-version.js +0 -423
- package/dist/db/index.d.ts +0 -3
- package/dist/db/index.js +0 -2
- package/dist/db/vectorStore.d.ts +0 -69
- package/dist/db/vectorStore.js +0 -196
- package/dist/pipeline/embeddings.d.ts +0 -25
- package/dist/pipeline/embeddings.js +0 -103
- package/dist/pipeline/github.d.ts +0 -84
- package/dist/pipeline/github.js +0 -399
- package/dist/pipeline/index.d.ts +0 -11
- package/dist/pipeline/index.js +0 -6
- package/dist/pipeline/indexer.d.ts +0 -41
- package/dist/pipeline/indexer.js +0 -254
- package/dist/pipeline/parser.d.ts +0 -46
- package/dist/pipeline/parser.js +0 -436
- package/dist/pipeline/releases.d.ts +0 -112
- package/dist/pipeline/releases.js +0 -298
- package/dist/pipeline/repository.d.ts +0 -372
- package/dist/pipeline/repository.js +0 -520
- package/dist/prompts/index.d.ts +0 -3
- package/dist/prompts/index.js +0 -2
- package/dist/prompts/templates.d.ts +0 -26
- package/dist/prompts/templates.js +0 -443
- package/dist/resources/code.d.ts +0 -15
- package/dist/resources/code.js +0 -122
- package/dist/resources/content/code-content.d.ts +0 -6
- package/dist/resources/content/code-content.js +0 -802
- package/dist/resources/content/docs-content.d.ts +0 -14
- package/dist/resources/content/docs-content.js +0 -1202
- package/dist/resources/content/index.d.ts +0 -6
- package/dist/resources/content/index.js +0 -6
- package/dist/resources/docs.d.ts +0 -15
- package/dist/resources/docs.js +0 -98
- package/dist/resources/index.d.ts +0 -6
- package/dist/resources/index.js +0 -13
- package/dist/resources/schemas.d.ts +0 -16
- package/dist/resources/schemas.js +0 -407
- package/dist/scripts/index-repos.d.ts +0 -12
- package/dist/scripts/index-repos.js +0 -53
- package/dist/server.d.ts +0 -43
- package/dist/server.js +0 -696
- package/dist/services/index.d.ts +0 -6
- package/dist/services/index.js +0 -6
- package/dist/services/sampling.d.ts +0 -62
- package/dist/services/sampling.js +0 -277
- package/dist/tools/analyze.d.ts +0 -106
- package/dist/tools/analyze.js +0 -431
- package/dist/tools/generation.d.ts +0 -9
- package/dist/tools/generation.js +0 -285
- package/dist/tools/health.d.ts +0 -120
- package/dist/tools/health.js +0 -365
- package/dist/tools/index.d.ts +0 -14
- package/dist/tools/index.js +0 -22
- package/dist/tools/meta.d.ts +0 -61
- package/dist/tools/meta.js +0 -282
- package/dist/tools/repository/constants.d.ts +0 -19
- package/dist/tools/repository/constants.js +0 -324
- package/dist/tools/repository/handlers.d.ts +0 -373
- package/dist/tools/repository/handlers.js +0 -724
- package/dist/tools/repository/index.d.ts +0 -9
- package/dist/tools/repository/index.js +0 -13
- package/dist/tools/repository/schemas.d.ts +0 -153
- package/dist/tools/repository/schemas.js +0 -106
- package/dist/tools/repository/tools.d.ts +0 -7
- package/dist/tools/repository/tools.js +0 -484
- package/dist/tools/repository/validation.d.ts +0 -106
- package/dist/tools/repository/validation.js +0 -820
- package/dist/tools/repository.d.ts +0 -6
- package/dist/tools/repository.js +0 -7
- package/dist/tools/search.d.ts +0 -76
- package/dist/tools/search.js +0 -423
- package/dist/types/index.d.ts +0 -2
- package/dist/types/index.js +0 -2
- package/dist/types/mcp.d.ts +0 -187
- package/dist/types/mcp.js +0 -6
- package/dist/utils/cache.d.ts +0 -77
- package/dist/utils/cache.js +0 -172
- package/dist/utils/config.d.ts +0 -70
- package/dist/utils/config.js +0 -294
- package/dist/utils/errors.d.ts +0 -111
- package/dist/utils/errors.js +0 -165
- package/dist/utils/health.d.ts +0 -29
- package/dist/utils/health.js +0 -132
- package/dist/utils/hosted-api.d.ts +0 -67
- package/dist/utils/hosted-api.js +0 -119
- package/dist/utils/index.d.ts +0 -16
- package/dist/utils/index.js +0 -15
- package/dist/utils/logger.d.ts +0 -48
- package/dist/utils/logger.js +0 -124
- package/dist/utils/rate-limit.d.ts +0 -61
- package/dist/utils/rate-limit.js +0 -148
- package/dist/utils/validation.d.ts +0 -52
- package/dist/utils/validation.js +0 -255
|
@@ -0,0 +1,2197 @@
|
|
|
1
|
+
// src/db/vectorStore.ts
|
|
2
|
+
import { ChromaClient } from "chromadb";
|
|
3
|
+
|
|
4
|
+
// src/utils/config.ts
|
|
5
|
+
import { z } from "zod";
|
|
6
|
+
import dotenv from "dotenv";
|
|
7
|
+
// Load environment variables from a local .env file before any schema
// defaults are applied.
dotenv.config();

// Runtime configuration schema. loadConfig() parses the (cleaned)
// environment against this; every field is optional or defaulted, so
// parsing succeeds even with no variables set.
var ConfigSchema = z.object({
  // Mode: 'hosted' (default) or 'local'
  mode: z.enum(["hosted", "local"]).default("hosted"),
  // Hosted API URL (used when mode is 'hosted')
  hostedApiUrl: z.string().default("https://midnight-mcp-api.midnightmcp.workers.dev"),
  // GitHub
  githubToken: z.string().optional(),
  // Vector Database (only needed for local mode)
  chromaUrl: z.string().default("http://localhost:8000"),
  // Embeddings (only needed for local mode)
  openaiApiKey: z.string().optional(),
  embeddingModel: z.string().default("text-embedding-3-small"),
  // Server
  logLevel: z.enum(["debug", "info", "warn", "error"]).default("info"),
  syncInterval: z.number().default(9e5),
  // 15 minutes
  port: z.number().default(3e3),
  // Data directories
  dataDir: z.string().default("./data"),
  cacheDir: z.string().default("./cache")
});
|
|
29
|
+
/**
 * Build the runtime configuration from environment variables.
 *
 * Local mode is selected when MIDNIGHT_LOCAL === "true", or when BOTH
 * OPENAI_API_KEY and CHROMA_URL are present; otherwise hosted mode.
 * Unset variables are dropped before parsing so ConfigSchema defaults
 * take effect for them.
 *
 * @returns the validated config object
 * @throws ZodError when a set variable fails schema validation
 */
function loadConfig() {
  // Boolean() makes the flag an actual boolean instead of a truthy
  // env-var string leaking out of the && chain.
  const isLocalMode2 = process.env.MIDNIGHT_LOCAL === "true" || Boolean(process.env.OPENAI_API_KEY && process.env.CHROMA_URL);
  const rawConfig = {
    mode: isLocalMode2 ? "local" : "hosted",
    hostedApiUrl: process.env.MIDNIGHT_API_URL,
    githubToken: process.env.GITHUB_TOKEN,
    chromaUrl: process.env.CHROMA_URL,
    openaiApiKey: process.env.OPENAI_API_KEY,
    embeddingModel: process.env.EMBEDDING_MODEL,
    logLevel: process.env.LOG_LEVEL,
    // Explicit radix 10: parseInt without it can misparse leading-zero
    // or "0x"-prefixed values.
    syncInterval: process.env.SYNC_INTERVAL ? parseInt(process.env.SYNC_INTERVAL, 10) : void 0,
    port: process.env.PORT ? parseInt(process.env.PORT, 10) : void 0,
    dataDir: process.env.DATA_DIR,
    cacheDir: process.env.CACHE_DIR
  };
  // Remove undefined entries so zod applies schema defaults for them.
  const cleanConfig = Object.fromEntries(
    Object.entries(rawConfig).filter(([_, v]) => v !== void 0)
  );
  return ConfigSchema.parse(cleanConfig);
}
|
|
49
|
+
// Parsed configuration, evaluated once at module load.
var config = loadConfig();

/**
 * Whether the server is running against the hosted API
 * (as opposed to a local ChromaDB/OpenAI setup).
 */
function isHostedMode() {
  const { mode } = config;
  return mode === "hosted";
}
|
|
53
|
+
// Repositories indexed by default. Built with a small factory inside an
// IIFE (no extra module-level names); every entry tracks the "main"
// branch, and pattern/exclude helpers return FRESH arrays per entry so
// no entries share array instances. The resulting array of plain
// objects is identical to a hand-written literal.
var DEFAULT_REPOSITORIES = (() => {
  const entry = (owner, repo, patterns, exclude) => ({
    owner,
    repo,
    branch: "main",
    patterns,
    exclude
  });
  const ndDist = () => ["node_modules/**", "dist/**"];
  const compactTsMd = () => ["**/*.compact", "**/*.ts", "**/*.md"];
  const compactTsxMd = () => ["**/*.compact", "**/*.ts", "**/*.tsx", "**/*.md"];
  const rsTsMd = () => ["**/*.rs", "**/*.ts", "**/*.md"];
  return [
    // Core Language & SDK
    entry("midnightntwrk", "compact", compactTsMd(), ndDist()),
    entry("midnightntwrk", "midnight-js", ["**/*.ts", "**/*.md"], ndDist()),
    // Documentation
    entry("midnightntwrk", "midnight-docs", ["**/*.md", "**/*.mdx"], ["node_modules/**"]),
    // Example DApps
    entry("midnightntwrk", "example-counter", compactTsMd(), ndDist()),
    entry("midnightntwrk", "example-bboard", compactTsxMd(), ndDist()),
    entry("midnightntwrk", "example-dex", compactTsxMd(), ndDist()),
    // Developer Tools
    entry("midnightntwrk", "create-mn-app", ["**/*.ts", "**/*.md", "**/*.json"], ndDist()),
    entry("midnightntwrk", "midnight-wallet", ["**/*.ts", "**/*.tsx", "**/*.md"], ndDist()),
    // Infrastructure
    entry("midnightntwrk", "midnight-indexer", ["**/*.ts", "**/*.md", "**/*.rs"], ["node_modules/**", "dist/**", "target/**"]),
    entry("midnightntwrk", "midnight-node-docker", ["**/*.md", "**/Dockerfile", "**/*.yml", "**/*.yaml"], []),
    // APIs & Connectors
    entry("midnightntwrk", "midnight-dapp-connector-api", ["**/*.ts", "**/*.md"], ndDist()),
    // Tooling
    entry("midnightntwrk", "compact-tree-sitter", ["**/*.js", "**/*.md", "**/*.scm"], ["node_modules/**"]),
    entry("midnightntwrk", "setup-compact-action", ["**/*.ts", "**/*.js", "**/*.md", "**/*.yml", "**/*.yaml"], ndDist()),
    // Community
    entry("midnightntwrk", "midnight-awesome-dapps", ["**/*.md"], []),
    entry("midnightntwrk", "contributor-hub", ["**/*.md"], []),
    // Partner Libraries (OpenZeppelin)
    entry("OpenZeppelin", "compact-contracts", compactTsMd(), ndDist()),
    entry("OpenZeppelin", "midnight-apps", compactTsMd(), ndDist()),
    // Official Partners (from awesome-dapps)
    entry("bricktowers", "midnight-seabattle", compactTsMd(), ndDist()),
    entry("bricktowers", "midnight-identity", compactTsMd(), ndDist()),
    entry("bricktowers", "midnight-rwa", compactTsMd(), ndDist()),
    entry("MeshJS", "midnight-starter-template", compactTsMd(), ndDist()),
    entry("midnames", "core", compactTsMd(), ndDist()),
    // Sea Battle Hackathon Winners (Feb 2025)
    entry("ErickRomeroDev", "naval-battle-game_v2", compactTsxMd(), ndDist()),
    entry("eddex", "midnight-sea-battle-hackathon", compactTsMd(), ndDist()),
    // Mini DApp Hackathon Winners (Sep 2025)
    entry("statera-protocol", "statera-protocol-midnight", compactTsMd(), ndDist()),
    entry("nel349", "midnight-bank", compactTsMd(), ndDist()),
    entry("Imdavyking", "zkbadge", compactTsMd(), ndDist()),
    // Core Partner - PaimaStudios (Gaming Infrastructure)
    entry("PaimaStudios", "midnight-game-2", ["**/*.compact", "**/*.ts", "**/*.tsx", "**/*.js", "**/*.md"], ["node_modules/**", "dist/**", "phaser/node_modules/**"]),
    entry("PaimaStudios", "midnight-wasm-prover", rsTsMd(), ["target/**", "node_modules/**", "pkg/**"]),
    entry("PaimaStudios", "midnight-batcher", rsTsMd(), ["target/**", "node_modules/**"]),
    entry("PaimaStudios", "midnight-impact-rps-example", rsTsMd(), ["target/**", "node_modules/**", "www/pkg/**"])
  ];
})();
|
|
285
|
+
|
|
286
|
+
// src/utils/logger.ts
|
|
287
|
+
// Optional sink that forwards log records to an MCP client;
// stays null until a server registers one via setMCPLogCallback().
var mcpLogCallback = null;

/** Register (or replace) the MCP log-forwarding callback. */
function setMCPLogCallback(callback) {
  mcpLogCallback = callback;
}
|
|
291
|
+
// Numeric severity ranking used by Logger.shouldLog(); higher = more severe.
var LOG_LEVELS = {
  debug: 0,
  info: 1,
  warn: 2,
  error: 3
};
|
|
297
|
+
/**
 * Minimal leveled logger with text/JSON output.
 *
 * All console output goes through console.error -- i.e. stderr
 * (presumably to keep stdout free for MCP stdio traffic; confirm with
 * the server transport). The optional MCP callback registered via
 * setMCPLogCallback is invoked for EVERY record, regardless of the
 * configured level threshold; only console output is filtered.
 */
var Logger = class {
  level;
  format;
  service;
  constructor(level = "info", format = "text", service = "midnight-mcp") {
    this.level = level;
    this.format = format;
    this.service = service;
  }
  /**
   * Set log format at runtime
   */
  setFormat(format) {
    this.format = format;
  }
  /**
   * Set log level at runtime
   */
  setLevel(level) {
    this.level = level;
  }
  // True when `level` is at or above the configured threshold.
  shouldLog(level) {
    return LOG_LEVELS[level] >= LOG_LEVELS[this.level];
  }
  // "[ISO timestamp] [LEVEL] message {meta-json}"
  formatTextMessage(level, message, meta) {
    const timestamp = (/* @__PURE__ */ new Date()).toISOString();
    const metaStr = meta ? ` ${JSON.stringify(meta)}` : "";
    return `[${timestamp}] [${level.toUpperCase()}] ${message}${metaStr}`;
  }
  // One-line JSON record; meta is attached only when provided.
  formatJsonMessage(level, message, meta) {
    const entry = {
      timestamp: (/* @__PURE__ */ new Date()).toISOString(),
      level,
      message,
      service: this.service
    };
    if (meta) {
      entry.meta = meta;
    }
    return JSON.stringify(entry);
  }
  formatMessage(level, message, meta) {
    if (this.format === "json") {
      return this.formatJsonMessage(level, message, meta);
    }
    return this.formatTextMessage(level, message, meta);
  }
  // Shared emit path for the four level methods (previously duplicated
  // in each). `mcpLevel` exists because the MCP severity name for warn
  // is "warning".
  emit(level, mcpLevel, message, meta) {
    if (this.shouldLog(level)) {
      console.error(this.formatMessage(level, message, meta));
    }
    mcpLogCallback?.(mcpLevel, this.service, { message, ...meta });
  }
  debug(message, meta) {
    this.emit("debug", "debug", message, meta);
  }
  info(message, meta) {
    this.emit("info", "info", message, meta);
  }
  warn(message, meta) {
    this.emit("warn", "warning", message, meta);
  }
  error(message, meta) {
    this.emit("error", "error", message, meta);
  }
  /**
   * Create a child logger with additional context
   */
  child(context) {
    return new ChildLogger(this, context);
  }
};
|
|
375
|
+
/**
 * Logger wrapper that merges a fixed context object into every record's
 * metadata before delegating to the parent logger. Per-call meta keys
 * win over context keys on collision.
 */
var ChildLogger = class {
  parent;
  context;
  constructor(parent, context) {
    this.parent = parent;
    this.context = context;
  }
  // Context first, then per-call meta (meta overrides context).
  #withContext(meta) {
    return { ...this.context, ...meta };
  }
  debug(message, meta) {
    this.parent.debug(message, this.#withContext(meta));
  }
  info(message, meta) {
    this.parent.info(message, this.#withContext(meta));
  }
  warn(message, meta) {
    this.parent.warn(message, this.#withContext(meta));
  }
  error(message, meta) {
    this.parent.error(message, this.#withContext(meta));
  }
};
|
|
395
|
+
// LOG_FORMAT=json selects structured output; any other value means text.
var logFormat = process.env.LOG_FORMAT === "json" ? "json" : "text";
// Shared module-level logger, configured from the parsed config.
var logger = new Logger(config.logLevel, logFormat);
|
|
397
|
+
|
|
398
|
+
// src/utils/errors.ts
|
|
399
|
+
/**
 * Error enriched with a machine-readable code, a human-facing
 * suggestion, and optional structured details. toJSON() produces the
 * payload shape returned to clients; `details` is included only when
 * truthy.
 */
var MCPError = class extends Error {
  code;
  suggestion;
  details;
  constructor(message, code, suggestion, details) {
    super(message);
    this.name = "MCPError";
    this.code = code;
    this.suggestion = suggestion;
    this.details = details;
  }
  toJSON() {
    const payload = {
      error: this.message,
      code: this.code,
      suggestion: this.suggestion
    };
    if (this.details) {
      payload.details = this.details;
    }
    return payload;
  }
};
|
|
419
|
+
// Stable machine-readable error codes carried in MCPError payloads.
var ErrorCodes = {
  RATE_LIMIT: "RATE_LIMIT_EXCEEDED",
  NOT_FOUND: "RESOURCE_NOT_FOUND",
  NETWORK: "NETWORK_ERROR",
  INVALID_INPUT: "INVALID_INPUT",
  UNKNOWN_REPO: "UNKNOWN_REPOSITORY",
  PARSE_ERROR: "PARSE_ERROR",
  CHROMADB_UNAVAILABLE: "CHROMADB_UNAVAILABLE",
  OPENAI_UNAVAILABLE: "OPENAI_UNAVAILABLE",
  MISSING_PARAM: "MISSING_PARAMETER",
  INVALID_VERSION: "INVALID_VERSION",
  SAMPLING_UNAVAILABLE: "SAMPLING_UNAVAILABLE"
};
|
|
432
|
+
/**
 * Factories for structured "self-correction" error payloads. Each
 * returns an object with an error message, a machine-readable code, a
 * human-facing suggestion, and enough structured hints for a client to
 * retry with corrected parameters.
 */
var SelfCorrectionHints = {
  UNKNOWN_REPO(repo, validRepos) {
    return {
      error: `Unknown repository: '${repo}'`,
      code: ErrorCodes.UNKNOWN_REPO,
      suggestion: `Try one of these instead: ${validRepos.slice(0, 8).join(", ")}`,
      correction: {
        invalidValue: repo,
        validValues: validRepos,
        parameterName: "repo"
      }
    };
  },
  INVALID_VERSION(version, example) {
    return {
      error: `Invalid version format: '${version}'`,
      code: ErrorCodes.INVALID_VERSION,
      suggestion: `Version should be like '${example}'. Check available versions with midnight-get-version-info first.`,
      correction: {
        invalidValue: version,
        expectedFormat: "v1.0.0 or 0.14.0",
        example
      }
    };
  },
  MISSING_REQUIRED_PARAM(paramName, toolName) {
    return {
      error: `Missing required parameter: '${paramName}'`,
      code: ErrorCodes.MISSING_PARAM,
      suggestion: `The '${paramName}' parameter is required for ${toolName}. Please provide it.`,
      correction: {
        missingParameter: paramName,
        tool: toolName
      }
    };
  },
  FILE_NOT_FOUND(path, repo, similarPaths) {
    return {
      error: `File not found: '${path}' in ${repo}`,
      code: ErrorCodes.NOT_FOUND,
      suggestion: similarPaths?.length ? `Did you mean: ${similarPaths.join(", ")}?` : `Check the file path. Use midnight-get-file with a different path or list directory contents first.`,
      correction: {
        invalidPath: path,
        // `suggestions` is attached only when similarPaths was supplied.
        ...similarPaths && { suggestions: similarPaths }
      }
    };
  },
  SAMPLING_NOT_AVAILABLE(toolName) {
    return {
      error: `Sampling capability not available`,
      code: ErrorCodes.SAMPLING_UNAVAILABLE,
      suggestion: `${toolName} requires a client that supports sampling (e.g., Claude Desktop). Use a non-AI alternative or switch clients.`,
      alternatives: {
        "midnight-generate-contract": "Use midnight-search-compact to find similar contracts as templates",
        "midnight-review-contract": "Use midnight-analyze-contract for static analysis",
        "midnight-document-contract": "Manual documentation or inline comments"
      }
    };
  },
  RATE_LIMIT(retryAfter) {
    return {
      error: "GitHub API rate limit exceeded",
      code: ErrorCodes.RATE_LIMIT,
      suggestion: retryAfter ? `Wait ${retryAfter} seconds before retrying. Or add GITHUB_TOKEN for higher limits.` : "Add GITHUB_TOKEN to increase from 60 to 5000 requests/hour.",
      correction: {
        action: "wait_and_retry",
        // `retryAfterSeconds` appears only when a retry-after is known.
        ...retryAfter && { retryAfterSeconds: retryAfter }
      }
    };
  }
};
|
|
491
|
+
/**
 * Translate an arbitrary thrown value into a user-facing MCPError by
 * matching well-known failure signatures in the message. Matching is
 * case-sensitive and checked in priority order; unmatched errors fall
 * through to a generic UNKNOWN_ERROR.
 *
 * @param error   the caught value (Error or anything else)
 * @param context optional description appended as " while <context>"
 */
function createUserError(error, context) {
  const message = error instanceof Error ? error.message : String(error);
  const ctx = context ? ` while ${context}` : "";
  const rules = [
    {
      needles: ["rate limit", "403", "API rate limit"],
      build: () => new MCPError(
        `GitHub API rate limit exceeded${ctx}`,
        ErrorCodes.RATE_LIMIT,
        "Add GITHUB_TOKEN to your config to increase limits from 60 to 5000 requests/hour. Get a token at https://github.com/settings/tokens"
      )
    },
    {
      needles: ["404", "Not Found"],
      build: () => new MCPError(
        `Resource not found${ctx}`,
        ErrorCodes.NOT_FOUND,
        "Check that the repository, file, or version exists and is publicly accessible."
      )
    },
    {
      needles: ["network", "ECONNREFUSED", "ETIMEDOUT", "timeout"],
      build: () => new MCPError(
        `Network error${ctx}`,
        ErrorCodes.NETWORK,
        "Check your internet connection and try again. If the problem persists, the service may be temporarily unavailable."
      )
    },
    {
      needles: ["chroma", "8000"],
      build: () => new MCPError(
        `ChromaDB is not available${ctx}`,
        ErrorCodes.CHROMADB_UNAVAILABLE,
        "ChromaDB is optional. Without it, search uses keyword matching instead of semantic search. To enable semantic search, run: docker run -d -p 8000:8000 chromadb/chroma"
      )
    },
    {
      needles: ["openai", "embedding"],
      build: () => new MCPError(
        `OpenAI API error${ctx}`,
        ErrorCodes.OPENAI_UNAVAILABLE,
        "OpenAI is optional. Without it, search uses keyword matching. To enable semantic search, add OPENAI_API_KEY to your config."
      )
    }
  ];
  for (const { needles, build } of rules) {
    if (needles.some((needle) => message.includes(needle))) {
      return build();
    }
  }
  return new MCPError(
    `An error occurred${ctx}`,
    "UNKNOWN_ERROR",
    "If this problem persists, please report it at https://github.com/Olanetsoft/midnight-mcp/issues"
  );
}
|
|
535
|
+
/**
 * Normalize any thrown value into the JSON error payload returned to
 * clients; non-MCPError values are first wrapped via createUserError().
 */
function formatErrorResponse(error, context) {
  if (error instanceof MCPError) {
    return error.toJSON();
  }
  return createUserError(error, context).toJSON();
}
|
|
539
|
+
|
|
540
|
+
// src/utils/validation.ts
|
|
541
|
+
// Per-field character caps applied by sanitizeString(); over-long input
// is truncated, not rejected.
var MAX_LENGTHS = {
  query: 1e3,
  path: 500,
  repository: 100,
  ref: 100,
  generic: 500
};
|
|
548
|
+
// Regexes stripped from user input by sanitizeString(). Each uses the
// /g flag so a single String.replace pass removes every occurrence.
var DANGEROUS_PATTERNS = [
  /[<>]/g,
  // HTML/XML injection
  /javascript:/gi,
  // JS protocol
  /data:/gi,
  // Data URLs
  /\0/g,
  // Null bytes
  /[\x00-\x08\x0B\x0C\x0E-\x1F]/g
  // Control characters (except newline, tab)
];
|
|
560
|
+
/**
 * Strip dangerous character patterns from a string, trim surrounding
 * whitespace, and truncate to `maxLength`. Falsy or non-string input
 * yields "".
 */
function sanitizeString(input, maxLength = MAX_LENGTHS.generic) {
  if (!input || typeof input !== "string") {
    return "";
  }
  const stripped = DANGEROUS_PATTERNS.reduce(
    (acc, pattern) => acc.replace(pattern, ""),
    input
  ).trim();
  return stripped.length > maxLength ? stripped.slice(0, maxLength) : stripped;
}
|
|
574
|
+
/**
 * Validate and sanitize a free-text search query.
 *
 * @returns { isValid, sanitized, warnings, errors } -- isValid is false
 * exactly when `errors` is non-empty; warnings never fail validation.
 */
function validateQuery(query) {
  const warnings = [];
  const errors = [];
  const rejected = (reason) => ({
    isValid: false,
    sanitized: "",
    warnings,
    errors: [reason]
  });
  if (query === null || query === void 0) {
    return rejected("Query is required");
  }
  if (typeof query !== "string") {
    return rejected("Query must be a string");
  }
  const sanitized = sanitizeString(query, MAX_LENGTHS.query);
  if (sanitized.length === 0) {
    errors.push("Query cannot be empty after sanitization");
  }
  if (sanitized.length < 2) {
    warnings.push("Query is very short, results may be limited");
  }
  // A length change means sanitization removed or trimmed something.
  if (query.length !== sanitized.length) {
    warnings.push(
      "Query was sanitized to remove potentially dangerous characters"
    );
  }
  return {
    isValid: errors.length === 0,
    sanitized,
    warnings,
    errors
  };
}
|
|
612
|
+
/**
 * Coerce and clamp a numeric option.
 *
 * - null/undefined  -> { isValid: true, value: defaultValue }
 * - unparseable     -> { isValid: false, value: defaultValue, error }
 * - out of range    -> silently clamped to min or max (still valid)
 */
function validateNumber(value, options) {
  const { min = 1, max = 100, defaultValue } = options;
  if (value === null || value === void 0) {
    return { isValid: true, value: defaultValue };
  }
  const parsed = typeof value === "string" ? Number.parseInt(value, 10) : value;
  if (typeof parsed !== "number" || Number.isNaN(parsed)) {
    return {
      isValid: false,
      value: defaultValue,
      error: "Must be a valid number"
    };
  }
  if (parsed < min) {
    return { isValid: true, value: min };
  }
  if (parsed > max) {
    return { isValid: true, value: max };
  }
  return { isValid: true, value: parsed };
}
|
|
633
|
+
|
|
634
|
+
// src/pipeline/github.ts
|
|
635
|
+
import { Octokit } from "octokit";
|
|
636
|
+
// Retry policy for GitHub API calls: up to 3 attempts with exponential
// backoff starting at 1s, capped at 10s between attempts.
var RETRY_CONFIG = {
  maxRetries: 3,
  baseDelayMs: 1e3,
  maxDelayMs: 1e4
};
|
|
641
|
+
/**
 * Run `operation` with retries per RETRY_CONFIG.
 *
 * Non-retryable errors (see isRetryableError) and final-attempt
 * failures are logged, wrapped via enhanceError, and thrown; transient
 * failures wait with capped exponential backoff before the next try.
 */
async function withRetry(operation, operationName) {
  let lastError = null;
  let attempt = 0;
  while (attempt < RETRY_CONFIG.maxRetries) {
    attempt += 1;
    try {
      return await operation();
    } catch (error) {
      lastError = error;
      if (!isRetryableError(error) || attempt === RETRY_CONFIG.maxRetries) {
        logger.error(`${operationName} failed after ${attempt} attempt(s)`, {
          error: String(error),
          attempt
        });
        throw enhanceError(error, operationName);
      }
      // 1s, 2s, 4s, ... capped at maxDelayMs.
      const delay = Math.min(
        RETRY_CONFIG.baseDelayMs * 2 ** (attempt - 1),
        RETRY_CONFIG.maxDelayMs
      );
      logger.warn(`${operationName} failed, retrying in ${delay}ms...`, {
        attempt,
        error: String(error)
      });
      await sleep(delay);
    }
  }
  // Unreachable in practice (the last attempt throws above); kept as a
  // defensive fallback.
  throw lastError;
}
|
|
669
|
+
/**
 * Whether a failure looks transient (network trouble, rate limiting,
 * or a 5xx) based on case-insensitive substrings of its message.
 * Non-Error values are never considered retryable.
 */
function isRetryableError(error) {
  if (!(error instanceof Error)) {
    return false;
  }
  const transientMarkers = [
    "network",
    "timeout",
    "econnreset",
    "rate limit",
    "403",
    "500",
    "502",
    "503",
    "504"
  ];
  const message = error.message.toLowerCase();
  return transientMarkers.some((marker) => message.includes(marker));
}
|
|
676
|
+
/**
 * Wrap a low-level failure in an Error whose message names the failed
 * operation and, for known failure classes (rate limit/403, 404,
 * timeout/network), adds actionable advice. Matching is case-sensitive
 * on the original message.
 */
function enhanceError(error, operation) {
  const originalMessage = error instanceof Error ? error.message : String(error);
  const mentions = (...needles) => needles.some((n) => originalMessage.includes(n));
  if (mentions("rate limit", "403")) {
    return new Error(
      `GitHub API rate limit exceeded during ${operation}. Add a GITHUB_TOKEN to your config to increase limits from 60 to 5000 requests/hour.`
    );
  }
  if (mentions("404")) {
    return new Error(
      `Resource not found during ${operation}. Check that the repository/file exists and is accessible.`
    );
  }
  if (mentions("timeout", "network")) {
    return new Error(
      `Network error during ${operation}. Check your internet connection and try again.`
    );
  }
  return new Error(`${operation} failed: ${originalMessage}`);
}
|
|
695
|
+
/** Resolve after `ms` milliseconds (promisified setTimeout). */
function sleep(ms) {
  return new Promise((done) => {
    setTimeout(done, ms);
  });
}
|
|
698
|
+
// Minimal in-memory TTL cache keyed by string. Entries expire lazily:
// a stale entry is evicted on the first get() after its TTL has elapsed.
var SimpleCache = class {
  cache = /* @__PURE__ */ new Map();
  ttlMs;
  // TTL is given in minutes for caller convenience.
  constructor(ttlMinutes = 10) {
    this.ttlMs = ttlMinutes * 60 * 1e3;
  }
  // Return the cached value, or null when absent or expired.
  get(key) {
    const entry = this.cache.get(key);
    if (entry === undefined) {
      return null;
    }
    const age = Date.now() - entry.timestamp;
    if (age > this.ttlMs) {
      this.cache.delete(key);
      return null;
    }
    return entry.data;
  }
  // Store a value, stamping it with the current time.
  set(key, data) {
    this.cache.set(key, { data, timestamp: Date.now() });
  }
  // Drop every entry.
  clear() {
    this.cache.clear();
  }
};
|
|
720
|
+
// GitHub REST client wrapper: adds per-resource TTL caches, retry via
// withRetry, and rate-limit helpers on top of Octokit.
var GitHubClient = class {
  // Underlying Octokit instance; authenticated when a token is available.
  octokit;
  fileCache = new SimpleCache(15);
  // 15 min cache for files
  treeCache = new SimpleCache(10);
  // 10 min cache for trees
  repoInfoCache = new SimpleCache(10);
  // 10 min cache for repo info
  // Token precedence: explicit constructor argument, then config.githubToken.
  constructor(token) {
    this.octokit = new Octokit({
      auth: token || config.githubToken,
      request: {
        timeout: 1e4
        // 10 second timeout for API calls
      }
    });
  }
|
|
737
|
+
  /**
   * Get repository information (default branch plus the most recent
   * commit), cached for 10 minutes per owner/repo.
   * Throws (after logging) when either underlying API call ultimately fails.
   */
  async getRepositoryInfo(owner, repo) {
    const cacheKey = `repo:${owner}/${repo}`;
    const cached = this.repoInfoCache.get(cacheKey);
    if (cached) {
      logger.debug(`Cache hit for repo info: ${cacheKey}`);
      return cached;
    }
    try {
      const { data: repoData } = await withRetry(
        () => this.octokit.rest.repos.get({ owner, repo }),
        `getRepositoryInfo(${owner}/${repo})`
      );
      // Only the latest commit is needed, so request a single item.
      const { data: commits } = await withRetry(
        () => this.octokit.rest.repos.listCommits({ owner, repo, per_page: 1 }),
        `getCommits(${owner}/${repo})`
      );
      const lastCommit = commits[0] ? {
        sha: commits[0].sha,
        message: commits[0].commit.message,
        author: commits[0].commit.author?.name || "Unknown",
        date: commits[0].commit.author?.date || "",
        url: commits[0].html_url
      } : null;
      const result = {
        owner,
        repo,
        branch: repoData.default_branch,
        lastCommit,
        fileCount: 0
        // Will be updated during tree fetch
      };
      this.repoInfoCache.set(cacheKey, result);
      return result;
    } catch (error) {
      logger.error(`Failed to get repository info for ${owner}/${repo}`, {
        error: String(error)
      });
      throw error;
    }
  }
|
|
780
|
+
  /**
   * Get file content from a repository, cached for 15 minutes per
   * owner/repo/path/ref. Returns null when the path is a directory or a
   * non-file entry, or when the fetch fails (failure is logged, not thrown).
   * `ref` defaults to "main" in the cache key only; the API call itself
   * receives `ref` as given (undefined means the repo's default branch).
   */
  async getFileContent(owner, repo, path, ref) {
    const cacheKey = `file:${owner}/${repo}/${path}@${ref || "main"}`;
    const cached = this.fileCache.get(cacheKey);
    if (cached) {
      logger.debug(`Cache hit for file: ${cacheKey}`);
      return cached;
    }
    try {
      const { data } = await withRetry(
        () => this.octokit.rest.repos.getContent({ owner, repo, path, ref }),
        `getFileContent(${owner}/${repo}/${path})`
      );
      // getContent returns an array for directories; only plain files count.
      if (Array.isArray(data) || data.type !== "file") {
        return null;
      }
      // The contents API base64-encodes file bodies; decode when necessary.
      const content = data.encoding === "base64" ? Buffer.from(data.content, "base64").toString("utf-8") : data.content;
      const result = {
        path: data.path,
        content,
        sha: data.sha,
        size: data.size,
        encoding: data.encoding
      };
      this.fileCache.set(cacheKey, result);
      return result;
    } catch (error) {
      logger.warn(`Failed to get file ${path} from ${owner}/${repo}`, {
        error: String(error)
      });
      return null;
    }
  }
|
|
815
|
+
  /**
   * Get repository tree (list of all file paths, blobs only), cached for
   * 10 minutes per owner/repo/ref. Resolves `heads/<ref>` (default "main")
   * to a commit SHA, then fetches the full recursive tree.
   * Throws (after logging) on failure.
   * NOTE(review): repos whose default branch is not "main" must pass `ref`
   * explicitly, since the fallback here is hard-coded — confirm callers do.
   */
  async getRepositoryTree(owner, repo, ref) {
    const cacheKey = `tree:${owner}/${repo}@${ref || "main"}`;
    const cached = this.treeCache.get(cacheKey);
    if (cached) {
      logger.debug(`Cache hit for tree: ${cacheKey}`);
      return cached;
    }
    try {
      const { data: refData } = await withRetry(
        () => this.octokit.rest.git.getRef({
          owner,
          repo,
          ref: `heads/${ref || "main"}`
        }),
        `getRef(${owner}/${repo})`
      );
      const { data: treeData } = await withRetry(
        () => this.octokit.rest.git.getTree({
          owner,
          repo,
          tree_sha: refData.object.sha,
          recursive: "true"
        }),
        `getTree(${owner}/${repo})`
      );
      // Keep only blobs (files); tree entries (directories) are dropped.
      const result = treeData.tree.filter((item) => item.type === "blob" && item.path).map((item) => item.path);
      this.treeCache.set(cacheKey, result);
      return result;
    } catch (error) {
      logger.error(`Failed to get repository tree for ${owner}/${repo}`, {
        error: String(error)
      });
      throw error;
    }
  }
|
|
853
|
+
/**
|
|
854
|
+
* Filter files by patterns
|
|
855
|
+
*/
|
|
856
|
+
filterFilesByPatterns(files, patterns, exclude) {
|
|
857
|
+
const matchPattern = (file, pattern) => {
|
|
858
|
+
const regexPattern = pattern.replace(/\*\*/g, ".*").replace(/\*/g, "[^/]*").replace(/\./g, "\\.");
|
|
859
|
+
return new RegExp(`^${regexPattern}$`).test(file);
|
|
860
|
+
};
|
|
861
|
+
return files.filter((file) => {
|
|
862
|
+
const matchesInclude = patterns.some((p) => matchPattern(file, p));
|
|
863
|
+
const matchesExclude = exclude.some((p) => matchPattern(file, p));
|
|
864
|
+
return matchesInclude && !matchesExclude;
|
|
865
|
+
});
|
|
866
|
+
}
|
|
867
|
+
  /**
   * Fetch all files from a repository matching the config's include
   * patterns (minus excludes). Files that fail to fetch are skipped
   * (getFileContent returns null on error).
   * Files are fetched sequentially — presumably to stay under rate
   * limits; confirm before parallelizing.
   */
  async fetchRepositoryFiles(repoConfig) {
    const { owner, repo, branch, patterns, exclude } = repoConfig;
    logger.info(`Fetching files from ${owner}/${repo}...`);
    const allFiles = await this.getRepositoryTree(owner, repo, branch);
    const filteredFiles = this.filterFilesByPatterns(
      allFiles,
      patterns,
      exclude
    );
    logger.info(
      `Found ${filteredFiles.length} matching files in ${owner}/${repo}`
    );
    const files = [];
    for (const filePath of filteredFiles) {
      const file = await this.getFileContent(owner, repo, filePath, branch);
      if (file) {
        files.push(file);
      }
    }
    return files;
  }
|
|
891
|
+
  /**
   * Get recent commits (up to `perPage`, optionally only those after the
   * ISO timestamp `since`), mapped to a flat {sha, message, author, date,
   * url} shape. Returns [] on failure (logged, not thrown). No caching.
   */
  async getRecentCommits(owner, repo, since, perPage = 30) {
    try {
      const params = {
        owner,
        repo,
        per_page: perPage
      };
      // `since` is optional; only include it when provided.
      if (since) {
        params.since = since;
      }
      const { data } = await this.octokit.rest.repos.listCommits(params);
      return data.map((commit) => ({
        sha: commit.sha,
        message: commit.commit.message,
        author: commit.commit.author?.name || "Unknown",
        date: commit.commit.author?.date || "",
        url: commit.html_url
      }));
    } catch (error) {
      logger.error(`Failed to get commits for ${owner}/${repo}`, {
        error: String(error)
      });
      return [];
    }
  }
|
|
919
|
+
  /**
   * Get the de-duplicated set of file paths touched by recent commits
   * (optionally limited to commits after `since`). Issues one getCommit
   * call per commit, so cost scales with commit count. Returns [] on
   * failure (logged, not thrown).
   * NOTE(review): unlike the other fetchers this does not go through
   * withRetry — confirm whether that is intentional.
   */
  async getChangedFiles(owner, repo, since) {
    try {
      const commits = await this.getRecentCommits(owner, repo, since);
      const changedFiles = /* @__PURE__ */ new Set();
      for (const commit of commits) {
        const { data } = await this.octokit.rest.repos.getCommit({
          owner,
          repo,
          ref: commit.sha
        });
        data.files?.forEach((file) => {
          if (file.filename) {
            changedFiles.add(file.filename);
          }
        });
      }
      return Array.from(changedFiles);
    } catch (error) {
      logger.error(`Failed to get changed files for ${owner}/${repo}`, {
        error: String(error)
      });
      return [];
    }
  }
|
|
946
|
+
  /**
   * Search code via the GitHub search API, optionally scoped to an
   * owner/repo (or just an owner via `user:`) and a language qualifier.
   * Returns up to 30 {path, repository, url} hits, or [] on failure
   * (logged as a warning, not thrown).
   */
  async searchCode(query, owner, repo, language) {
    try {
      let q = query;
      // Scope the query: full repo when both are given, else owner only.
      if (owner && repo) {
        q += ` repo:${owner}/${repo}`;
      } else if (owner) {
        q += ` user:${owner}`;
      }
      if (language) {
        q += ` language:${language}`;
      }
      const { data } = await this.octokit.rest.search.code({
        q,
        per_page: 30
      });
      return data.items.map((item) => ({
        path: item.path,
        repository: item.repository.full_name,
        url: item.html_url
      }));
    } catch (error) {
      logger.warn(`Code search failed for query: ${query}`, {
        error: String(error)
      });
      return [];
    }
  }
|
|
976
|
+
  /**
   * Get current rate limit status from the GitHub API and publish it via
   * updateRateLimit. On failure, returns an optimistic unauthenticated
   * default (60/60, resetting in one hour) without publishing it.
   */
  async getRateLimit() {
    try {
      const { data } = await this.octokit.rest.rateLimit.get();
      const rateLimit = {
        limit: data.rate.limit,
        remaining: data.rate.remaining,
        // API reports the reset time as a Unix timestamp in seconds.
        reset: new Date(data.rate.reset * 1e3),
        used: data.rate.used
      };
      updateRateLimit(rateLimit);
      return rateLimit;
    } catch (error) {
      logger.warn("Failed to get rate limit", { error: String(error) });
      return {
        limit: 60,
        remaining: 60,
        reset: new Date(Date.now() + 36e5),
        used: 0
      };
    }
  }
|
|
1000
|
+
  /**
   * Check if it's safe to make API requests, based on the locally tracked
   * rate-limit state (no network call). Returns {proceed, reason, status};
   * a refusal is also logged as a warning.
   */
  checkRateLimit() {
    const check = shouldProceedWithRequest();
    const status = getRateLimitStatus();
    if (!check.proceed) {
      logger.warn("Rate limit check failed", { reason: check.reason });
    }
    return {
      proceed: check.proceed,
      reason: check.reason,
      status
    };
  }
|
|
1015
|
+
};
|
|
1016
|
+
// Shared singleton used by the rest of the server (health checks, pipeline).
var githubClient = new GitHubClient();
|
|
1017
|
+
|
|
1018
|
+
// src/pipeline/parser.ts
|
|
1019
|
+
// Lightweight regex-based parser for Compact contract sources. Extracts
// include statements, the ledger block (and its fields), circuits,
// witnesses and type aliases into a flat list of code units with 1-based
// line numbers, plus summary metadata.
function parseCompactFile(path, content) {
  const sourceLines = content.split("\n");
  const codeUnits = [];
  const imports = [];
  const exports = [];
  let hasLedger = false;
  let hasCircuits = false;
  let hasWitnesses = false;

  // 1-based line number of a character offset within `content`.
  const lineAt = (offset) => content.substring(0, offset).split("\n").length;

  // Parse a "name: Type, name: Type" parameter list into {name, type} pairs.
  const parseParams = (raw) => raw.split(",").filter((piece) => piece.trim()).map((piece) => {
    const segments = piece.trim().split(":");
    return {
      name: segments[0]?.trim() || "",
      type: segments[1]?.trim() || "unknown"
    };
  });

  // Walk from the first "{" at/after `fromIndex` to its balancing "}" and
  // return the exclusive end offset of the block.
  const findBlockEnd = (fromIndex) => {
    let depth = 1;
    let cursor = content.indexOf("{", fromIndex) + 1;
    while (depth > 0 && cursor < content.length) {
      if (content[cursor] === "{") depth++;
      if (content[cursor] === "}") depth--;
      cursor++;
    }
    return cursor;
  };

  // include "..." statements.
  for (const m of content.matchAll(/^include\s+["']([^"']+)["'];?/gm)) {
    imports.push(m[1]);
  }

  // ledger { ... } blocks (tolerates one level of nested braces).
  for (const m of content.matchAll(/ledger\s*\{([^}]*(?:\{[^}]*\}[^}]*)*)\}/gs)) {
    hasLedger = true;
    const body = m[1];
    const startLine = lineAt(m.index);
    const endLine = startLine + m[0].split("\n").length - 1;
    codeUnits.push({
      type: "ledger",
      name: "ledger",
      code: m[0],
      startLine,
      endLine,
      isPublic: true,
      isPrivate: false
    });
    // Individual ledger fields; each is reported at the ledger's start line.
    for (const field of body.matchAll(/(@private\s+)?(\w+)\s*:\s*([^;]+);/g)) {
      const isPrivate = !!field[1];
      codeUnits.push({
        type: "ledger",
        name: field[2],
        code: field[0].trim(),
        startLine,
        endLine: startLine,
        isPublic: !isPrivate,
        isPrivate,
        returnType: field[3].trim()
      });
    }
  }

  // circuit definitions (optionally exported).
  for (const m of content.matchAll(/(export\s+)?circuit\s+(\w+)\s*\(([^)]*)\)\s*(?::\s*(\w+))?\s*\{/g)) {
    hasCircuits = true;
    const isExport = !!m[1];
    const name = m[2];
    const blockEnd = findBlockEnd(m.index);
    const code = content.substring(m.index, blockEnd);
    const startLine = lineAt(m.index);
    if (isExport) {
      exports.push(name);
    }
    codeUnits.push({
      type: "circuit",
      name,
      code,
      startLine,
      endLine: startLine + code.split("\n").length - 1,
      isPublic: isExport,
      isPrivate: false,
      parameters: parseParams(m[3]),
      returnType: m[4] || "Void"
    });
  }

  // witness definitions (always private).
  for (const m of content.matchAll(/witness\s+(\w+)\s*\(([^)]*)\)\s*(?::\s*([^{]+))?\s*\{/g)) {
    hasWitnesses = true;
    const blockEnd = findBlockEnd(m.index);
    const code = content.substring(m.index, blockEnd);
    const startLine = lineAt(m.index);
    codeUnits.push({
      type: "witness",
      name: m[1],
      code,
      startLine,
      endLine: startLine + code.split("\n").length - 1,
      isPublic: false,
      isPrivate: true,
      parameters: parseParams(m[2]),
      returnType: m[3]?.trim() || "unknown"
    });
  }

  // type aliases (optionally exported).
  for (const m of content.matchAll(/(export\s+)?type\s+(\w+)\s*=\s*([^;]+);/g)) {
    const isExport = !!m[1];
    const name = m[2];
    const startLine = lineAt(m.index);
    if (isExport) {
      exports.push(name);
    }
    codeUnits.push({
      type: "type",
      name,
      code: m[0],
      startLine,
      endLine: startLine,
      isPublic: isExport,
      isPrivate: false,
      returnType: m[3].trim()
    });
  }

  return {
    path,
    language: "compact",
    content,
    codeUnits,
    imports,
    exports,
    metadata: {
      hasLedger,
      hasCircuits,
      hasWitnesses,
      lineCount: sourceLines.length
    }
  };
}
|
|
1177
|
+
|
|
1178
|
+
// src/pipeline/embeddings.ts
|
|
1179
|
+
import OpenAI from "openai";
|
|
1180
|
+
// Wraps OpenAI's embeddings endpoint; degrades to random "dummy"
// embeddings when no API key is configured (semantic search then returns
// meaningless but well-formed vectors).
var EmbeddingGenerator = class {
  // OpenAI client, or null when no API key is configured.
  openai = null;
  // Embedding model name, taken from config.
  model;
  constructor() {
    this.model = config.embeddingModel;
    if (config.openaiApiKey) {
      this.openai = new OpenAI({
        apiKey: config.openaiApiKey
      });
    }
  }
|
|
1191
|
+
  /**
   * Generate an embedding for a single text. Without an API key, returns a
   * random 1536-dim vector tagged model "dummy" (non-deterministic).
   * Throws (after logging) on API failure.
   */
  async generateEmbedding(text) {
    if (!this.openai) {
      logger.warn("No OpenAI API key configured, using dummy embeddings");
      return {
        text,
        // 1536 matches OpenAI's text-embedding dimension.
        embedding: new Array(1536).fill(0).map(() => Math.random() - 0.5),
        model: "dummy"
      };
    }
    try {
      const response = await this.openai.embeddings.create({
        model: this.model,
        input: text
      });
      return {
        text,
        embedding: response.data[0].embedding,
        model: this.model,
        tokenCount: response.usage?.total_tokens
      };
    } catch (error) {
      logger.error("Failed to generate embedding", { error: String(error) });
      throw error;
    }
  }
|
|
1219
|
+
  /**
   * Generate embeddings for multiple texts, batching 100 inputs per API
   * call. Results preserve input order. Without an API key, returns random
   * dummy vectors; throws (after logging) on API failure.
   */
  async generateEmbeddings(texts) {
    if (!this.openai) {
      logger.warn("No OpenAI API key configured, using dummy embeddings");
      return texts.map((text) => ({
        text,
        embedding: new Array(1536).fill(0).map(() => Math.random() - 0.5),
        model: "dummy"
      }));
    }
    try {
      const batchSize = 100;
      const results = [];
      for (let i = 0; i < texts.length; i += batchSize) {
        const batch = texts.slice(i, i + batchSize);
        logger.debug(`Generating embeddings for batch ${i / batchSize + 1}`);
        const response = await this.openai.embeddings.create({
          model: this.model,
          input: batch
        });
        // The API returns one embedding per input, in request order.
        for (let j = 0; j < batch.length; j++) {
          results.push({
            text: batch[j],
            embedding: response.data[j].embedding,
            model: this.model
          });
        }
      }
      return results;
    } catch (error) {
      logger.error("Failed to generate batch embeddings", {
        error: String(error)
      });
      throw error;
    }
  }
|
|
1257
|
+
/**
|
|
1258
|
+
* Calculate cosine similarity between two embeddings
|
|
1259
|
+
*/
|
|
1260
|
+
cosineSimilarity(a, b) {
|
|
1261
|
+
if (a.length !== b.length) {
|
|
1262
|
+
throw new Error("Embeddings must have the same length");
|
|
1263
|
+
}
|
|
1264
|
+
let dotProduct = 0;
|
|
1265
|
+
let normA = 0;
|
|
1266
|
+
let normB = 0;
|
|
1267
|
+
for (let i = 0; i < a.length; i++) {
|
|
1268
|
+
dotProduct += a[i] * b[i];
|
|
1269
|
+
normA += a[i] * a[i];
|
|
1270
|
+
normB += b[i] * b[i];
|
|
1271
|
+
}
|
|
1272
|
+
return dotProduct / (Math.sqrt(normA) * Math.sqrt(normB));
|
|
1273
|
+
}
|
|
1274
|
+
};
|
|
1275
|
+
// Shared singleton embedding generator for the indexing pipeline.
var embeddingGenerator = new EmbeddingGenerator();
|
|
1276
|
+
|
|
1277
|
+
// src/pipeline/releases.ts
|
|
1278
|
+
import { Octokit as Octokit2 } from "octokit";
|
|
1279
|
+
// Tracks GitHub releases per repository: fetches releases, parses their
// bodies into structured changelogs, and answers version questions
// (outdated? breaking changes? migration steps?). Results are cached.
var ReleaseTracker = class {
  // Octokit client (authenticated when a token is available).
  octokit;
  // Per-repo VersionInfo cache, keyed by "owner/repo".
  cache = /* @__PURE__ */ new Map();
  cacheMaxAge = 1e3 * 60 * 15;
  // 15 minutes
  constructor(token) {
    this.octokit = new Octokit2({
      auth: token || config.githubToken
    });
  }
|
|
1289
|
+
  /**
   * Get up to `limit` releases for a repository (newest first, as returned
   * by the API), flattened to {tag, name, body, publishedAt, url,
   * isPrerelease, assets}. Returns [] on failure (logged, not thrown).
   */
  async getReleases(owner, repo, limit = 20) {
    try {
      const { data } = await this.octokit.rest.repos.listReleases({
        owner,
        repo,
        per_page: limit
      });
      return data.map((release) => ({
        tag: release.tag_name,
        name: release.name || release.tag_name,
        body: release.body || "",
        // Drafts have no published_at; fall back to creation time.
        publishedAt: release.published_at || release.created_at,
        url: release.html_url,
        isPrerelease: release.prerelease,
        assets: release.assets.map((asset) => ({
          name: asset.name,
          downloadUrl: asset.browser_download_url
        }))
      }));
    } catch (error) {
      logger.warn(`Failed to fetch releases for ${owner}/${repo}`, {
        error: String(error)
      });
      return [];
    }
  }
|
|
1318
|
+
  /**
   * Parse a release body into structured changelog buckets (breaking /
   * features / fixes / deprecations). Heading-like lines containing known
   * keywords or emoji switch the current section; subsequent bullet or
   * numbered lines are collected into that section. Lines before any
   * recognized heading are ignored.
   */
  parseChangelog(releaseBody, version, date) {
    const changes = {
      breaking: [],
      features: [],
      fixes: [],
      deprecations: []
    };
    const lines = releaseBody.split("\n");
    let currentSection = null;
    for (const line of lines) {
      const trimmed = line.trim().toLowerCase();
      // Section switches: keyword or emoji markers (warning sign, sparkles,
      // rocket, bug, wrench).
      if (trimmed.includes("breaking") || trimmed.includes("\u26A0\uFE0F")) {
        currentSection = "breaking";
        continue;
      }
      if (trimmed.includes("feature") || trimmed.includes("added") || trimmed.includes("\u2728") || trimmed.includes("\u{1F680}")) {
        currentSection = "features";
        continue;
      }
      if (trimmed.includes("fix") || trimmed.includes("bug") || trimmed.includes("\u{1F41B}") || trimmed.includes("\u{1F527}")) {
        currentSection = "fixes";
        continue;
      }
      if (trimmed.includes("deprecat") || trimmed.includes("\u26A0")) {
        currentSection = "deprecations";
        continue;
      }
      // Bullet ("-", "*") or numbered ("1.") items join the current section.
      if (currentSection && (line.startsWith("-") || line.startsWith("*") || line.match(/^\d+\./))) {
        const content = line.replace(/^[-*]\s*|\d+\.\s*/, "").trim();
        if (content) {
          changes[currentSection].push(content);
        }
      }
    }
    return {
      version,
      date,
      changes,
      rawBody: releaseBody
    };
  }
|
|
1362
|
+
  /**
   * Get version info for a repository, cached for cacheMaxAge (15 min).
   * Aggregates latest release, latest stable (non-prerelease) release, the
   * ten most recent releases, and per-release parsed changelogs.
   */
  async getVersionInfo(owner, repo) {
    const repoKey = `${owner}/${repo}`;
    const cached = this.cache.get(repoKey);
    if (cached) {
      // Entries carry their fetch time; serve them while still fresh.
      const age = Date.now() - new Date(cached.lastChecked).getTime();
      if (age < this.cacheMaxAge) {
        return cached;
      }
    }
    const releases = await this.getReleases(owner, repo);
    // getReleases returns newest first, so [0] is the latest overall.
    const latestRelease = releases[0] || null;
    const latestStableRelease = releases.find((r) => !r.isPrerelease) || null;
    const changelog = releases.map(
      (r) => this.parseChangelog(r.body, r.tag, r.publishedAt)
    );
    const info = {
      repository: repoKey,
      latestRelease,
      latestStableRelease,
      recentReleases: releases.slice(0, 10),
      changelog,
      lastChecked: (/* @__PURE__ */ new Date()).toISOString()
    };
    this.cache.set(repoKey, info);
    return info;
  }
|
|
1391
|
+
  /**
   * Get breaking changes from all releases newer than `sinceVersion`
   * (the changelog is newest-first; collection stops at the matching tag).
   * If the tag is not found, every known release's breaking changes are
   * returned.
   */
  async getBreakingChangesSince(owner, repo, sinceVersion) {
    const info = await this.getVersionInfo(owner, repo);
    const breakingChanges = [];
    for (const entry of info.changelog) {
      if (entry.version === sinceVersion) break;
      breakingChanges.push(...entry.changes.breaking);
    }
    return breakingChanges;
  }
|
|
1403
|
+
  /**
   * Check if a version is outdated relative to the latest stable release
   * (falling back to the latest prerelease). `versionsBehind` counts
   * releases between `currentVersion` and the newest; when the version is
   * not among the recent releases, the full recent-release count is used.
   */
  async isOutdated(owner, repo, currentVersion) {
    const info = await this.getVersionInfo(owner, repo);
    const latestVersion = info.latestStableRelease?.tag || info.latestRelease?.tag || null;
    // No releases at all: cannot be outdated.
    if (!latestVersion) {
      return {
        isOutdated: false,
        latestVersion: null,
        versionsBehind: 0,
        hasBreakingChanges: false
      };
    }
    const releaseIndex = info.recentReleases.findIndex(
      (r) => r.tag === currentVersion
    );
    const versionsBehind = releaseIndex === -1 ? info.recentReleases.length : releaseIndex;
    const breakingChanges = await this.getBreakingChangesSince(
      owner,
      repo,
      currentVersion
    );
    return {
      // Tags are compared for exact equality, not semver-ordered.
      isOutdated: currentVersion !== latestVersion,
      latestVersion,
      versionsBehind,
      hasBreakingChanges: breakingChanges.length > 0
    };
  }
|
|
1433
|
+
  /**
   * Get a migration guide between two versions: breaking changes,
   * deprecations and new features accumulated from releases strictly after
   * `fromVersion` up to and including `to` (defaults to the latest
   * stable/latest release). Migration steps are one generic "review"
   * step per breaking change.
   */
  async getMigrationGuide(owner, repo, fromVersion, toVersion) {
    const info = await this.getVersionInfo(owner, repo);
    const to = toVersion || info.latestStableRelease?.tag || info.latestRelease?.tag || fromVersion;
    const result = {
      from: fromVersion,
      to,
      breakingChanges: [],
      deprecations: [],
      newFeatures: [],
      migrationSteps: []
    };
    let inRange = false;
    // Walk the changelog oldest-first; start collecting after fromVersion
    // and stop once `to` has been included.
    for (const entry of [...info.changelog].reverse()) {
      if (entry.version === fromVersion) {
        inRange = true;
        continue;
      }
      if (entry.version === to) {
        result.breakingChanges.push(...entry.changes.breaking);
        result.deprecations.push(...entry.changes.deprecations);
        result.newFeatures.push(...entry.changes.features);
        break;
      }
      if (inRange) {
        result.breakingChanges.push(...entry.changes.breaking);
        result.deprecations.push(...entry.changes.deprecations);
        result.newFeatures.push(...entry.changes.features);
      }
    }
    result.migrationSteps = result.breakingChanges.map(
      (change) => `Review and update: ${change}`
    );
    return result;
  }
|
|
1470
|
+
  /**
   * Get version-specific documentation hints as a human-readable multi-line
   * string: latest (and latest stable, when different) release plus any
   * breaking changes from the three most recent releases.
   */
  getVersionContext(versionInfo) {
    const parts = [];
    if (versionInfo.latestRelease) {
      parts.push(
        `Latest version: ${versionInfo.latestRelease.tag} (${versionInfo.latestRelease.publishedAt.split("T")[0]})`
      );
    }
    // Reference comparison: both fields point at the same release object
    // when the latest release is itself stable.
    if (versionInfo.latestStableRelease && versionInfo.latestStableRelease !== versionInfo.latestRelease) {
      parts.push(`Latest stable: ${versionInfo.latestStableRelease.tag}`);
    }
    const recentBreaking = versionInfo.changelog.slice(0, 3).flatMap((c) => c.changes.breaking);
    if (recentBreaking.length > 0) {
      parts.push(
        `Recent breaking changes:
${recentBreaking.map((b) => ` - ${b}`).join("\n")}`
      );
    }
    return parts.join("\n");
  }
|
|
1492
|
+
  /**
   * Fetch a file at a specific version/tag.
   * This is critical for ensuring code recommendations match the user's version.
   * Returns {content, version} or null when the path is not a plain file
   * or the fetch fails (failure is logged as a warning, not thrown).
   */
  async getFileAtVersion(owner, repo, filePath, version) {
    try {
      const { data } = await this.octokit.rest.repos.getContent({
        owner,
        repo,
        path: filePath,
        ref: version
        // Can be a tag like "v1.0.0" or branch
      });
      // Directories come back as arrays; only plain files are useful here.
      if (Array.isArray(data) || data.type !== "file") {
        return null;
      }
      // The contents API base64-encodes file bodies; decode when necessary.
      const content = data.encoding === "base64" ? Buffer.from(data.content, "base64").toString("utf-8") : data.content;
      return { content, version };
    } catch (error) {
      logger.warn(
        `Failed to fetch ${filePath} at ${version} from ${owner}/${repo}`,
        {
          error: String(error)
        }
      );
      return null;
    }
  }
|
|
1520
|
+
  /**
   * Compare syntax between two versions of a file.
   * Useful for showing what changed in API/syntax.
   * Fetches both versions in parallel; a missing version yields null
   * content (and counts as a difference when the other version exists).
   */
  async compareSyntax(owner, repo, filePath, oldVersion, newVersion) {
    const [oldFile, newFile] = await Promise.all([
      this.getFileAtVersion(owner, repo, filePath, oldVersion),
      this.getFileAtVersion(owner, repo, filePath, newVersion)
    ]);
    return {
      oldVersion,
      newVersion,
      oldContent: oldFile?.content || null,
      newContent: newFile?.content || null,
      hasDifferences: oldFile?.content !== newFile?.content
    };
  }
|
|
1537
|
+
  /**
   * Get the grammar/syntax reference files at the latest version.
   * These are the source of truth for valid Compact syntax.
   * Probes a fixed list of candidate paths at the latest stable (or latest,
   * or "main") ref; returns null when none of them exist.
   */
  async getLatestSyntaxReference(owner, repo) {
    const versionInfo = await this.getVersionInfo(owner, repo);
    const version = versionInfo.latestStableRelease?.tag || versionInfo.latestRelease?.tag || "main";
    // Candidate locations, checked in order; all hits are returned.
    const syntaxFilePaths = [
      "docs/grammar.md",
      "docs/syntax.md",
      "docs/reference.md",
      "src/grammar.ts",
      "grammar/compact.grammar",
      "README.md"
    ];
    const syntaxFiles = [];
    for (const path of syntaxFilePaths) {
      const file = await this.getFileAtVersion(owner, repo, path, version);
      if (file) {
        syntaxFiles.push({ path, content: file.content });
      }
    }
    if (syntaxFiles.length === 0) {
      return null;
    }
    return { version, syntaxFiles };
  }
|
|
1564
|
+
};
|
|
1565
|
+
// Module-level singleton instance of ReleaseTracker (exported below).
var releaseTracker = new ReleaseTracker();
|
|
1566
|
+
|
|
1567
|
+
// src/utils/health.ts
// Process start time, used to compute the uptime reported by health checks.
var startTime = Date.now();
// Server version: prefer the version npm injects at runtime; fall back to the
// bundled release number. The previous fallback ("0.0.3") was stale and
// disagreed with CURRENT_VERSION ("0.2.2") defined later in this chunk.
var VERSION = process.env.npm_package_version || "0.2.2";
|
|
1570
|
+
/**
 * Health probe for the GitHub API: measures latency and inspects the
 * remaining rate-limit quota.
 * Returns { status, message, latency } where status is "pass" | "warn" | "fail".
 */
async function checkGitHubAPI() {
  const startedAt = Date.now();
  try {
    const rateLimit = await githubClient.getRateLimit();
    const latency = Date.now() - startedAt;
    // Fewer than 100 calls remaining is reported as a warning condition.
    if (rateLimit.remaining < 100) {
      return {
        status: "warn",
        message: `Rate limit low: ${rateLimit.remaining}/${rateLimit.limit} remaining`,
        latency
      };
    }
    return {
      status: "pass",
      message: `Rate limit: ${rateLimit.remaining}/${rateLimit.limit}`,
      latency
    };
  } catch (error) {
    const reason = error instanceof Error ? error.message : String(error);
    return {
      status: "fail",
      message: `GitHub API error: ${reason}`,
      latency: Date.now() - startedAt
    };
  }
}
|
|
1595
|
+
/**
 * Health probe for the optional vector store.
 * Never fails hard: a missing or uninitialized store only degrades
 * semantic search, so the worst outcome is a "warn".
 */
async function checkVectorStore() {
  try {
    // Lazy dynamic import so this chunk has no hard dependency on the DB chunk.
    const { vectorStore: store } = await import("./db-YDGUWI5K.js");
    if (store) {
      return { status: "pass", message: "Vector store available" };
    }
    return {
      status: "warn",
      message: "Vector store not initialized (semantic search unavailable)"
    };
  } catch {
    // Module failed to load entirely — treat as "not configured".
    return {
      status: "warn",
      message: "Vector store not configured (semantic search unavailable)"
    };
  }
}
|
|
1615
|
+
/**
 * Health probe for V8 heap usage.
 * >90% of heapTotal used => "fail", >75% => "warn", otherwise "pass".
 */
function checkMemory() {
  const { heapUsed, heapTotal } = process.memoryUsage();
  const heapUsedMB = Math.round(heapUsed / 1024 / 1024);
  const heapTotalMB = Math.round(heapTotal / 1024 / 1024);
  const percentUsed = Math.round(heapUsed / heapTotal * 100);
  const detail = `${heapUsedMB}MB/${heapTotalMB}MB (${percentUsed}%)`;
  if (percentUsed > 90) {
    return { status: "fail", message: `High memory usage: ${detail}` };
  }
  if (percentUsed > 75) {
    return { status: "warn", message: `Elevated memory usage: ${detail}` };
  }
  return { status: "pass", message: `Memory: ${detail}` };
}
|
|
1637
|
+
/**
 * Full health report: runs all probes and aggregates their statuses.
 * Any "fail" makes the server "unhealthy"; otherwise any "warn" makes it
 * "degraded"; otherwise "healthy".
 */
async function getHealthStatus() {
  // The two async probes run concurrently; the memory check is synchronous.
  const [githubCheck, vectorCheck] = await Promise.all([
    checkGitHubAPI(),
    checkVectorStore()
  ]);
  const checks = [
    { name: "github_api", ...githubCheck },
    { name: "vector_store", ...vectorCheck },
    { name: "memory", ...checkMemory() }
  ];
  let status = "healthy";
  if (checks.some((c) => c.status === "fail")) {
    status = "unhealthy";
  } else if (checks.some((c) => c.status === "warn")) {
    status = "degraded";
  }
  return {
    status,
    timestamp: new Date().toISOString(),
    version: VERSION,
    uptime: Math.round((Date.now() - startTime) / 1e3),
    checks
  };
}
|
|
1665
|
+
/**
 * Lightweight liveness response: no external probes, always "healthy".
 * Suitable for frequent polling where the full check would be too costly.
 */
function getQuickHealthStatus() {
  const uptimeSeconds = Math.round((Date.now() - startTime) / 1e3);
  return {
    status: "healthy",
    timestamp: new Date().toISOString(),
    version: VERSION,
    uptime: uptimeSeconds,
    checks: [{ name: "server", status: "pass" }]
  };
}
|
|
1674
|
+
|
|
1675
|
+
// src/utils/rate-limit.ts
// Fraction of the quota that triggers a warning / a hard stop.
var WARNING_THRESHOLD = 0.8;
var CRITICAL_THRESHOLD = 0.95;
// Most recent rate-limit snapshot ({ used, remaining, limit, reset: Date }).
var cachedRateLimit = null;
var lastUpdate = 0;
// NOTE(review): CACHE_TTL and lastUpdate are written but never consulted in
// this section — possibly vestigial; confirm before removing.
var CACHE_TTL = 60 * 1e3;
/** Record the latest rate-limit snapshot reported by the GitHub client. */
function updateRateLimit(info) {
  cachedRateLimit = info;
  lastUpdate = Date.now();
}
/**
 * Summarize the cached rate-limit state.
 * Returns optimistic defaults when no snapshot has been recorded yet.
 */
function getRateLimitStatus() {
  if (cachedRateLimit === null) {
    return {
      isLimited: false,
      isWarning: false,
      remaining: 5e3,
      limit: 5e3,
      resetAt: new Date(),
      percentUsed: 0,
      message: "Rate limit info not yet available"
    };
  }
  const { used, remaining, limit, reset } = cachedRateLimit;
  const usedFraction = used / limit;
  const isWarning = usedFraction >= WARNING_THRESHOLD;
  // Limited when nearly all quota is spent or almost no calls remain.
  const isLimited = usedFraction >= CRITICAL_THRESHOLD || remaining <= 10;
  let message;
  if (isLimited) {
    const minutesUntilReset = Math.ceil((reset.getTime() - Date.now()) / 6e4);
    message = `Rate limited! Resets in ${minutesUntilReset} minutes`;
  } else if (isWarning) {
    message = `Warning: ${remaining} API calls remaining (${Math.round(usedFraction * 100)}% used)`;
  } else {
    message = `${remaining}/${limit} API calls remaining`;
  }
  return {
    isLimited,
    isWarning,
    remaining,
    limit,
    resetAt: reset,
    percentUsed: Math.round(usedFraction * 100),
    message
  };
}
/**
 * Gate for outgoing API calls. Blocks (proceed: false) only when 10 or fewer
 * calls remain, reporting how long to wait for the window to reset.
 */
function shouldProceedWithRequest() {
  if (cachedRateLimit === null || cachedRateLimit.remaining > 10) {
    return { proceed: true };
  }
  const waitMs = Math.max(0, cachedRateLimit.reset.getTime() - Date.now());
  return {
    proceed: false,
    reason: `Rate limit nearly exhausted (${cachedRateLimit.remaining} remaining)`,
    waitMs
  };
}
/** Human-readable one-liner with a status emoji prefix. */
function formatRateLimitStatus() {
  const status = getRateLimitStatus();
  if (status.isLimited) {
    return `\u26D4 ${status.message}`;
  }
  if (status.isWarning) {
    return `\u26A0\uFE0F ${status.message}`;
  }
  return `\u2705 ${status.message}`;
}
|
|
1745
|
+
|
|
1746
|
+
// src/utils/cache.ts
/**
 * In-memory TTL cache with a bounded size and hit/miss statistics.
 *
 * Entries expire lazily (checked on get/has) and eagerly via prune().
 * When the cache is full, the entry with the oldest createdAt is evicted.
 * Caveat: a cached value of `undefined` is indistinguishable from a miss.
 */
var Cache = class {
  cache = /* @__PURE__ */ new Map();
  options;
  stats = { hits: 0, misses: 0 };
  /**
   * @param options.ttl default time-to-live in milliseconds
   * @param options.maxSize maximum number of entries (default 1000)
   * @param options.name label used in debug log messages (default "cache")
   */
  constructor(options) {
    this.options = {
      ttl: options.ttl,
      maxSize: options.maxSize || 1e3,
      name: options.name || "cache"
    };
  }
  /**
   * Get a value from the cache.
   * Returns undefined on a miss or when the entry has expired
   * (expired entries are deleted as a side effect).
   */
  get(key) {
    const entry = this.cache.get(key);
    if (!entry) {
      this.stats.misses++;
      return void 0;
    }
    if (Date.now() > entry.expiresAt) {
      this.cache.delete(key);
      this.stats.misses++;
      return void 0;
    }
    this.stats.hits++;
    return entry.value;
  }
  /**
   * Set a value in the cache.
   * @param ttl optional per-entry TTL (ms); falls back to the cache default
   */
  set(key, value, ttl) {
    if (this.cache.size >= this.options.maxSize) {
      this.evictOldest();
    }
    const now = Date.now();
    this.cache.set(key, {
      value,
      expiresAt: now + (ttl || this.options.ttl),
      createdAt: now
    });
  }
  /**
   * Check if a key exists and is not expired
   * (expired entries are deleted as a side effect).
   */
  has(key) {
    const entry = this.cache.get(key);
    if (!entry) return false;
    if (Date.now() > entry.expiresAt) {
      this.cache.delete(key);
      return false;
    }
    return true;
  }
  /**
   * Delete a key from the cache.
   * @returns true if the key was present
   */
  delete(key) {
    return this.cache.delete(key);
  }
  /**
   * Clear all entries from the cache
   */
  clear() {
    this.cache.clear();
    logger.debug(`Cache cleared: ${this.options.name}`);
  }
  /**
   * Remove expired entries.
   * @returns the number of entries removed
   */
  prune() {
    const now = Date.now();
    let pruned = 0;
    for (const [key, entry] of this.cache.entries()) {
      if (now > entry.expiresAt) {
        this.cache.delete(key);
        pruned++;
      }
    }
    if (pruned > 0) {
      logger.debug(`Cache pruned: ${this.options.name}`, { pruned });
    }
    return pruned;
  }
  /**
   * Evict the oldest entry (smallest createdAt) to make room.
   */
  evictOldest() {
    let oldestKey = null;
    let oldestTime = Infinity;
    for (const [key, entry] of this.cache.entries()) {
      if (entry.createdAt < oldestTime) {
        oldestTime = entry.createdAt;
        oldestKey = key;
      }
    }
    // Compare against null explicitly: a falsy key such as "" must still be
    // evictable. The previous truthiness check skipped empty-string keys,
    // letting the cache grow past maxSize.
    if (oldestKey !== null) {
      this.cache.delete(oldestKey);
    }
  }
  /**
   * Get cache statistics (hits, misses, current size, hit rate in [0, 1]).
   */
  getStats() {
    const total = this.stats.hits + this.stats.misses;
    return {
      hits: this.stats.hits,
      misses: this.stats.misses,
      size: this.cache.size,
      hitRate: total > 0 ? this.stats.hits / total : 0
    };
  }
  /**
   * Get or set with a factory function.
   * On a miss, awaits factory(), stores the result under `key`, and returns it.
   */
  async getOrSet(key, factory, ttl) {
    const cached = this.get(key);
    if (cached !== void 0) {
      return cached;
    }
    const value = await factory();
    this.set(key, value, ttl);
    return value;
  }
};
|
|
1872
|
+
/**
 * Join the defined parts into a ":"-separated cache key.
 * `undefined` parts are dropped; everything else is stringified
 * (so null becomes "null", 0 becomes "0").
 */
function createCacheKey(...parts) {
  const segments = [];
  for (const part of parts) {
    if (part !== void 0) {
      segments.push(String(part));
    }
  }
  return segments.join(":");
}
|
|
1875
|
+
// Shared cache instances, sized/TTL-tuned per data kind.
var searchCache = new Cache({
  ttl: 5 * 60 * 1e3,
  // 5 minutes
  maxSize: 500,
  name: "search"
});
var fileCache = new Cache({
  ttl: 10 * 60 * 1e3,
  // 10 minutes
  maxSize: 200,
  name: "file"
});
var metadataCache = new Cache({
  ttl: 15 * 60 * 1e3,
  // 15 minutes
  maxSize: 100,
  name: "metadata"
});
|
|
1893
|
+
/** Drop expired entries from every shared cache instance. */
function pruneAllCaches() {
  searchCache.prune();
  fileCache.prune();
  metadataCache.prune();
}
// Prune periodically. unref() (optional-chained for non-Node environments)
// prevents this background timer from keeping the process alive on its own,
// so short-lived CLI invocations can exit cleanly.
setInterval(pruneAllCaches, 5 * 60 * 1e3).unref?.();
|
|
1899
|
+
|
|
1900
|
+
// src/utils/hosted-api.ts
// Per-request timeout for the hosted API, in milliseconds.
var API_TIMEOUT = 1e4;
/**
 * Perform a JSON request against the hosted API, aborting after API_TIMEOUT ms.
 * Throws on non-2xx responses (preferring the server-provided error message)
 * and converts aborts into a descriptive timeout error.
 */
async function apiRequest(endpoint, options = {}) {
  const url = `${config.hostedApiUrl}${endpoint}`;
  const controller = new AbortController();
  const timer = setTimeout(() => controller.abort(), API_TIMEOUT);
  try {
    const response = await fetch(url, {
      ...options,
      signal: controller.signal,
      headers: {
        "Content-Type": "application/json",
        "User-Agent": "midnight-mcp",
        ...options.headers
      }
    });
    if (response.ok) {
      return await response.json();
    }
    // Prefer the API's own error payload; fall back to the HTTP status.
    const errorData = await response.json().catch(() => ({ error: "Unknown error" }));
    throw new Error(errorData.error || `API error: ${response.status}`);
  } catch (error) {
    if (error instanceof Error && error.name === "AbortError") {
      throw new Error(
        "API request timed out. The hosted service may be unavailable."
      );
    }
    throw error;
  } finally {
    // Always release the timer, on success and on every error path.
    clearTimeout(timer);
  }
}
|
|
1932
|
+
/** Semantic search over Compact code via the hosted API. */
async function searchCompactHosted(query, limit = 10) {
  logger.debug("Searching Compact code via hosted API", { query });
  const body = JSON.stringify({ query, limit });
  return apiRequest("/v1/search/compact", { method: "POST", body });
}
/** Semantic search over TypeScript code via the hosted API. */
async function searchTypeScriptHosted(query, limit = 10, includeTypes = true) {
  logger.debug("Searching TypeScript code via hosted API", { query });
  const body = JSON.stringify({ query, limit, includeTypes });
  return apiRequest("/v1/search/typescript", { method: "POST", body });
}
/** Search documentation via the hosted API. */
async function searchDocsHosted(query, limit = 10, category = "all") {
  logger.debug("Searching documentation via hosted API", { query });
  const body = JSON.stringify({ query, limit, category });
  return apiRequest("/v1/search/docs", { method: "POST", body });
}
/** Fire-and-forget usage telemetry; delivery failures are deliberately ignored. */
function trackToolCall(tool, success, durationMs, version) {
  apiRequest("/v1/track/tool", {
    method: "POST",
    body: JSON.stringify({ tool, success, durationMs, version })
  }).catch(() => {
  });
}
|
|
1960
|
+
|
|
1961
|
+
// src/utils/serializer.ts
import yaml from "js-yaml";
// When true, serialize() emits JSON instead of YAML.
var useJsonOutput = false;
/** Select the global output format: true = JSON, false = YAML (the default). */
function setOutputFormat(json) {
  useJsonOutput = json;
}
/**
 * Render data as YAML, or as pretty-printed JSON when JSON output is
 * selected or when YAML serialization throws (e.g. unsupported value types).
 */
function serialize(data) {
  if (useJsonOutput) {
    return JSON.stringify(data, null, 2);
  }
  try {
    const dumpOptions = {
      indent: 2,
      lineWidth: 120,
      noRefs: true,
      // Avoid YAML anchors/aliases for cleaner output
      quotingType: '"',
      forceQuotes: false,
      sortKeys: false
      // Preserve object key order
    };
    return yaml.dump(data, dumpOptions);
  } catch {
    return JSON.stringify(data, null, 2);
  }
}
|
|
1986
|
+
|
|
1987
|
+
// src/utils/version.ts
// Bundled package version string — kept in sync with package.json at release.
var CURRENT_VERSION = "0.2.2";
|
|
1989
|
+
|
|
1990
|
+
// src/db/vectorStore.ts
/**
 * Thin wrapper around a ChromaDB collection holding Midnight code/doc
 * embeddings. All read/delete paths degrade gracefully (log + no-op or
 * empty result) when the store was never initialized or the backend is
 * unreachable; only addDocuments() rethrows.
 */
var VectorStore = class {
  // ChromaDB client; stays null until initialize() succeeds.
  client = null;
  // Active collection handle; stays null until initialize() succeeds.
  collection = null;
  collectionName = "midnight-code";
  initialized = false;
  /**
   * Initialize the vector store connection.
   * Idempotent; failures are swallowed so a missing Chroma backend is not
   * fatal — callers simply lose semantic search.
   */
  async initialize() {
    if (this.initialized) return;
    try {
      this.client = new ChromaClient({
        path: config.chromaUrl
      });
      this.collection = await this.client.getOrCreateCollection({
        name: this.collectionName,
        metadata: {
          description: "Midnight blockchain code and documentation"
        }
      });
      this.initialized = true;
      logger.info("Vector store initialized successfully");
    } catch (error) {
      // Deliberate best-effort: log and continue without a collection.
      logger.error("Failed to initialize vector store", {
        error: String(error)
      });
      logger.warn("Vector store unavailable, using in-memory fallback");
    }
  }
  /**
   * Add documents to the vector store.
   * Each document carries { id, embedding, content, metadata }.
   * No-op (with a warning) when the store is uninitialized; rethrows on
   * backend write failures.
   */
  async addDocuments(documents) {
    if (!this.collection) {
      logger.warn("Vector store not initialized, skipping document storage");
      return;
    }
    try {
      // Chroma's add() takes parallel (columnar) arrays, so split the
      // document records into ids / embeddings / metadatas / contents.
      const ids = documents.map((d) => d.id);
      const embeddings = documents.map((d) => d.embedding);
      const metadatas = documents.map((d) => ({
        repository: d.metadata.repository,
        filePath: d.metadata.filePath,
        language: d.metadata.language,
        startLine: d.metadata.startLine,
        endLine: d.metadata.endLine,
        codeType: d.metadata.codeType,
        codeName: d.metadata.codeName,
        isPublic: d.metadata.isPublic
      }));
      const documentContents = documents.map((d) => d.content);
      await this.collection.add({
        ids,
        embeddings,
        metadatas,
        documents: documentContents
      });
      logger.debug(`Added ${documents.length} documents to vector store`);
    } catch (error) {
      // Unlike the read paths, write failures are surfaced to the caller.
      logger.error("Failed to add documents to vector store", {
        error: String(error)
      });
      throw error;
    }
  }
  /**
   * Search for similar documents.
   * Embeds the query text, applies any provided metadata filters, and
   * returns [{ id, content, score, metadata }]. Best-effort: returns []
   * when uninitialized or on any error.
   */
  async search(query, limit = 10, filter) {
    if (!this.collection) {
      logger.warn("Vector store not initialized, returning empty results");
      return [];
    }
    try {
      const queryEmbedding = await embeddingGenerator.generateEmbedding(query);
      // Build the `where` clause only from filter fields the caller set.
      const whereFilter = {};
      if (filter?.repository) {
        whereFilter.repository = filter.repository;
      }
      if (filter?.language) {
        whereFilter.language = filter.language;
      }
      if (filter?.codeType) {
        whereFilter.codeType = filter.codeType;
      }
      if (filter?.isPublic !== void 0) {
        whereFilter.isPublic = filter.isPublic;
      }
      const results = await this.collection.query({
        queryEmbeddings: [queryEmbedding.embedding],
        nResults: limit,
        where: Object.keys(whereFilter).length > 0 ? whereFilter : void 0
      });
      // Chroma returns one result set per query embedding; we sent exactly one,
      // hence the [0] indexing throughout.
      const searchResults = [];
      if (results.ids[0] && results.documents[0] && results.metadatas[0]) {
        for (let i = 0; i < results.ids[0].length; i++) {
          const metadata = results.metadatas[0][i];
          searchResults.push({
            id: results.ids[0][i],
            content: results.documents[0][i] || "",
            // Similarity-style score: 1 - distance (0 when distances absent).
            // NOTE(review): assumes distances are in [0, 1] — confirm the
            // collection's distance metric.
            score: results.distances ? 1 - (results.distances[0][i] || 0) : 0,
            metadata
          });
        }
      }
      return searchResults;
    } catch (error) {
      // Search is best-effort: log and report no matches.
      logger.error("Search failed", { error: String(error) });
      return [];
    }
  }
  /**
   * Delete documents by file path.
   * Silent no-op when uninitialized; errors are logged and swallowed.
   */
  async deleteByPath(repository, filePath) {
    if (!this.collection) return;
    try {
      await this.collection.delete({
        where: {
          repository,
          filePath
        }
      });
      logger.debug(`Deleted documents for ${repository}:${filePath}`);
    } catch (error) {
      logger.error("Failed to delete documents", { error: String(error) });
    }
  }
  /**
   * Delete all documents for a repository.
   * Silent no-op when uninitialized; errors are logged and swallowed.
   */
  async deleteRepository(repository) {
    if (!this.collection) return;
    try {
      await this.collection.delete({
        where: { repository }
      });
      logger.info(`Deleted all documents for repository ${repository}`);
    } catch (error) {
      logger.error("Failed to delete repository documents", {
        error: String(error)
      });
    }
  }
  /**
   * Get collection statistics.
   * @returns { count } — 0 when uninitialized or on error.
   */
  async getStats() {
    if (!this.collection) {
      return { count: 0 };
    }
    try {
      const count = await this.collection.count();
      return { count };
    } catch (error) {
      logger.error("Failed to get stats", { error: String(error) });
      return { count: 0 };
    }
  }
  /**
   * Clear all data from the collection.
   * Implemented as delete-then-recreate, since that is how Chroma truncates.
   */
  async clear() {
    if (!this.client) return;
    try {
      await this.client.deleteCollection({ name: this.collectionName });
      this.collection = await this.client.getOrCreateCollection({
        name: this.collectionName
      });
      logger.info("Vector store cleared");
    } catch (error) {
      logger.error("Failed to clear vector store", { error: String(error) });
    }
  }
};
// Module-level singleton (exported below).
var vectorStore = new VectorStore();
|
|
2167
|
+
|
|
2168
|
+
export {
|
|
2169
|
+
isHostedMode,
|
|
2170
|
+
DEFAULT_REPOSITORIES,
|
|
2171
|
+
setMCPLogCallback,
|
|
2172
|
+
logger,
|
|
2173
|
+
SelfCorrectionHints,
|
|
2174
|
+
formatErrorResponse,
|
|
2175
|
+
validateQuery,
|
|
2176
|
+
validateNumber,
|
|
2177
|
+
githubClient,
|
|
2178
|
+
parseCompactFile,
|
|
2179
|
+
vectorStore,
|
|
2180
|
+
releaseTracker,
|
|
2181
|
+
getHealthStatus,
|
|
2182
|
+
getQuickHealthStatus,
|
|
2183
|
+
getRateLimitStatus,
|
|
2184
|
+
formatRateLimitStatus,
|
|
2185
|
+
createCacheKey,
|
|
2186
|
+
searchCache,
|
|
2187
|
+
fileCache,
|
|
2188
|
+
metadataCache,
|
|
2189
|
+
searchCompactHosted,
|
|
2190
|
+
searchTypeScriptHosted,
|
|
2191
|
+
searchDocsHosted,
|
|
2192
|
+
trackToolCall,
|
|
2193
|
+
setOutputFormat,
|
|
2194
|
+
serialize,
|
|
2195
|
+
CURRENT_VERSION
|
|
2196
|
+
};
|
|
2197
|
+
//# sourceMappingURL=chunk-HOWO4K5A.js.map
|