midnight-mcp 0.1.41 → 0.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +32 -1
- package/dist/bin.d.ts +1 -0
- package/dist/bin.js +10764 -0
- package/dist/index.d.ts +205 -3
- package/dist/index.js +10722 -15
- package/package.json +16 -6
- package/dist/config/compact-version.d.ts +0 -183
- package/dist/config/compact-version.js +0 -423
- package/dist/db/index.d.ts +0 -3
- package/dist/db/index.js +0 -2
- package/dist/db/vectorStore.d.ts +0 -69
- package/dist/db/vectorStore.js +0 -196
- package/dist/pipeline/embeddings.d.ts +0 -25
- package/dist/pipeline/embeddings.js +0 -103
- package/dist/pipeline/github.d.ts +0 -84
- package/dist/pipeline/github.js +0 -399
- package/dist/pipeline/index.d.ts +0 -11
- package/dist/pipeline/index.js +0 -6
- package/dist/pipeline/indexer.d.ts +0 -41
- package/dist/pipeline/indexer.js +0 -254
- package/dist/pipeline/parser.d.ts +0 -46
- package/dist/pipeline/parser.js +0 -436
- package/dist/pipeline/releases.d.ts +0 -112
- package/dist/pipeline/releases.js +0 -298
- package/dist/pipeline/repository.d.ts +0 -372
- package/dist/pipeline/repository.js +0 -520
- package/dist/prompts/index.d.ts +0 -3
- package/dist/prompts/index.js +0 -2
- package/dist/prompts/templates.d.ts +0 -26
- package/dist/prompts/templates.js +0 -443
- package/dist/resources/code.d.ts +0 -15
- package/dist/resources/code.js +0 -122
- package/dist/resources/content/code-content.d.ts +0 -6
- package/dist/resources/content/code-content.js +0 -802
- package/dist/resources/content/docs-content.d.ts +0 -14
- package/dist/resources/content/docs-content.js +0 -1202
- package/dist/resources/content/index.d.ts +0 -6
- package/dist/resources/content/index.js +0 -6
- package/dist/resources/docs.d.ts +0 -15
- package/dist/resources/docs.js +0 -98
- package/dist/resources/index.d.ts +0 -6
- package/dist/resources/index.js +0 -13
- package/dist/resources/schemas.d.ts +0 -16
- package/dist/resources/schemas.js +0 -407
- package/dist/scripts/index-repos.d.ts +0 -12
- package/dist/scripts/index-repos.js +0 -53
- package/dist/server.d.ts +0 -43
- package/dist/server.js +0 -696
- package/dist/services/index.d.ts +0 -6
- package/dist/services/index.js +0 -6
- package/dist/services/sampling.d.ts +0 -62
- package/dist/services/sampling.js +0 -277
- package/dist/tools/analyze.d.ts +0 -106
- package/dist/tools/analyze.js +0 -431
- package/dist/tools/generation.d.ts +0 -9
- package/dist/tools/generation.js +0 -285
- package/dist/tools/health.d.ts +0 -120
- package/dist/tools/health.js +0 -365
- package/dist/tools/index.d.ts +0 -14
- package/dist/tools/index.js +0 -22
- package/dist/tools/meta.d.ts +0 -61
- package/dist/tools/meta.js +0 -282
- package/dist/tools/repository/constants.d.ts +0 -19
- package/dist/tools/repository/constants.js +0 -324
- package/dist/tools/repository/handlers.d.ts +0 -373
- package/dist/tools/repository/handlers.js +0 -724
- package/dist/tools/repository/index.d.ts +0 -9
- package/dist/tools/repository/index.js +0 -13
- package/dist/tools/repository/schemas.d.ts +0 -153
- package/dist/tools/repository/schemas.js +0 -106
- package/dist/tools/repository/tools.d.ts +0 -7
- package/dist/tools/repository/tools.js +0 -484
- package/dist/tools/repository/validation.d.ts +0 -106
- package/dist/tools/repository/validation.js +0 -820
- package/dist/tools/repository.d.ts +0 -6
- package/dist/tools/repository.js +0 -7
- package/dist/tools/search.d.ts +0 -76
- package/dist/tools/search.js +0 -423
- package/dist/types/index.d.ts +0 -2
- package/dist/types/index.js +0 -2
- package/dist/types/mcp.d.ts +0 -187
- package/dist/types/mcp.js +0 -6
- package/dist/utils/cache.d.ts +0 -77
- package/dist/utils/cache.js +0 -172
- package/dist/utils/config.d.ts +0 -70
- package/dist/utils/config.js +0 -294
- package/dist/utils/errors.d.ts +0 -111
- package/dist/utils/errors.js +0 -165
- package/dist/utils/health.d.ts +0 -29
- package/dist/utils/health.js +0 -132
- package/dist/utils/hosted-api.d.ts +0 -67
- package/dist/utils/hosted-api.js +0 -119
- package/dist/utils/index.d.ts +0 -16
- package/dist/utils/index.js +0 -15
- package/dist/utils/logger.d.ts +0 -48
- package/dist/utils/logger.js +0 -124
- package/dist/utils/rate-limit.d.ts +0 -61
- package/dist/utils/rate-limit.js +0 -148
- package/dist/utils/validation.d.ts +0 -52
- package/dist/utils/validation.js +0 -255
package/dist/pipeline/indexer.js
DELETED

@@ -1,254 +0,0 @@
import { githubClient } from "./github.js";
import { parseFile } from "./parser.js";
import { embeddingGenerator } from "./embeddings.js";
import { vectorStore } from "../db/vectorStore.js";
import { logger, DEFAULT_REPOSITORIES, } from "../utils/index.js";
/**
 * Extract pragma language_version from Compact file content
 * Returns the version string or undefined if not found
 */
function extractPragmaVersion(content) {
    // Match patterns like: pragma language_version >= 0.14.0;
    const pragmaMatch = content.match(/pragma\s+language_version\s*[><=]*\s*([\d.]+)/);
    return pragmaMatch?.[1];
}
/**
 * Create chunks from a parsed file
 * Uses intelligent chunking based on code structure
 */
function createChunks(file, repository, repoVersion) {
    const chunks = [];
    const indexedAt = new Date().toISOString();
    // Extract pragma version for Compact files
    const pragmaVersion = file.language === "compact"
        ? extractPragmaVersion(file.content)
        : undefined;
    // Add code units as individual chunks
    for (const unit of file.codeUnits) {
        chunks.push({
            text: unit.code,
            metadata: {
                repository,
                filePath: file.path,
                language: file.language,
                chunkType: "code_unit",
                codeUnitType: unit.type,
                codeUnitName: unit.name,
                startLine: unit.startLine,
                endLine: unit.endLine,
                isPublic: unit.isPublic,
                repoVersion,
                pragmaVersion,
                indexedAt,
            },
        });
    }
    // If no code units were extracted, chunk the entire file
    if (file.codeUnits.length === 0 && file.content.length > 0) {
        const chunkSize = 2000; // characters
        const overlapLines = 5; // lines to overlap
        const lines = file.content.split("\n");
        let currentChunk = "";
        let startLine = 1;
        let currentLine = 1;
        for (const line of lines) {
            if (currentChunk.length + line.length > chunkSize &&
                currentChunk.length > 0) {
                chunks.push({
                    text: currentChunk,
                    metadata: {
                        repository,
                        filePath: file.path,
                        language: file.language,
                        chunkType: "file_chunk",
                        startLine,
                        endLine: currentLine - 1,
                        isPublic: true,
                        repoVersion,
                        pragmaVersion,
                        indexedAt,
                    },
                });
                // Start new chunk with overlap
                const prevLines = currentChunk.split("\n").slice(-overlapLines);
                currentChunk = prevLines.join("\n") + "\n";
                startLine = Math.max(1, currentLine - overlapLines);
            }
            currentChunk += line + "\n";
            currentLine++;
        }
        // Add remaining content
        if (currentChunk.trim().length > 0) {
            chunks.push({
                text: currentChunk,
                metadata: {
                    repository,
                    filePath: file.path,
                    language: file.language,
                    chunkType: "file_chunk",
                    startLine,
                    endLine: currentLine - 1,
                    isPublic: true,
                    repoVersion,
                    pragmaVersion,
                    indexedAt,
                },
            });
        }
    }
    return chunks;
}
/**
 * Index a single repository
 */
export async function indexRepository(repoConfig) {
    const repoName = `${repoConfig.owner}/${repoConfig.repo}`;
    logger.info(`Starting index for ${repoName}...`);
    // Get repo version (branch name or tag)
    const repoVersion = repoConfig.branch || "main";
    try {
        // Fetch all files from the repository
        const files = await githubClient.fetchRepositoryFiles(repoConfig);
        logger.info(`Fetched ${files.length} files from ${repoName}`);
        let chunkCount = 0;
        const documents = [];
        for (const file of files) {
            // Parse the file
            const parsed = parseFile(file.path, file.content);
            // Create chunks with version info
            const chunks = createChunks(parsed, repoName, repoVersion);
            for (const chunk of chunks) {
                documents.push({
                    id: `${repoName}:${file.path}:${chunk.metadata.startLine}`,
                    content: chunk.text,
                    metadata: {
                        repository: chunk.metadata.repository,
                        filePath: chunk.metadata.filePath,
                        language: chunk.metadata.language,
                        startLine: chunk.metadata.startLine,
                        endLine: chunk.metadata.endLine,
                        codeType: chunk.metadata.codeUnitType || "unknown",
                        codeName: chunk.metadata.codeUnitName || "",
                        isPublic: chunk.metadata.isPublic,
                        repoVersion: chunk.metadata.repoVersion,
                        pragmaVersion: chunk.metadata.pragmaVersion,
                        indexedAt: chunk.metadata.indexedAt,
                    },
                });
                chunkCount++;
            }
        }
        // Generate embeddings and store in vector database
        if (documents.length > 0) {
            logger.info(`Generating embeddings for ${documents.length} chunks...`);
            const texts = documents.map((d) => d.content);
            const embeddings = await embeddingGenerator.generateEmbeddings(texts);
            // Add embeddings to documents
            for (let i = 0; i < documents.length; i++) {
                documents[i].embedding = embeddings[i].embedding;
            }
            // Store in vector database
            await vectorStore.addDocuments(documents);
            logger.info(`Stored ${documents.length} documents in vector store`);
        }
        return { fileCount: files.length, chunkCount };
    }
    catch (error) {
        logger.error(`Failed to index repository ${repoName}`, {
            error: String(error),
        });
        throw error;
    }
}
/**
 * Index all configured repositories
 */
export async function indexAllRepositories() {
    logger.info("Starting full index of all repositories...");
    const stats = {
        totalFiles: 0,
        totalChunks: 0,
        totalCodeUnits: 0,
        repositoriesIndexed: [],
        lastIndexed: new Date().toISOString(),
    };
    for (const repoConfig of DEFAULT_REPOSITORIES) {
        try {
            const { fileCount, chunkCount } = await indexRepository(repoConfig);
            stats.totalFiles += fileCount;
            stats.totalChunks += chunkCount;
            stats.repositoriesIndexed.push(`${repoConfig.owner}/${repoConfig.repo}`);
        }
        catch (error) {
            logger.error(`Failed to index ${repoConfig.owner}/${repoConfig.repo}`, {
                error: String(error),
            });
        }
    }
    logger.info("Indexing complete", stats);
    return stats;
}
/**
 * Incremental update - only index changed files
 */
export async function incrementalUpdate(repoConfig, since) {
    const repoName = `${repoConfig.owner}/${repoConfig.repo}`;
    logger.info(`Starting incremental update for ${repoName} since ${since}...`);
    try {
        // Get changed files
        const changedPaths = await githubClient.getChangedFiles(repoConfig.owner, repoConfig.repo, since);
        // Filter by patterns
        const filteredPaths = githubClient.filterFilesByPatterns(changedPaths, repoConfig.patterns, repoConfig.exclude);
        logger.info(`Found ${filteredPaths.length} changed files matching patterns`);
        let chunkCount = 0;
        const documents = [];
        for (const filePath of filteredPaths) {
            // Delete existing documents for this file
            await vectorStore.deleteByPath(repoName, filePath);
            // Fetch new content
            const file = await githubClient.getFileContent(repoConfig.owner, repoConfig.repo, filePath, repoConfig.branch);
            if (file) {
                const parsed = parseFile(file.path, file.content);
                const repoVersion = repoConfig.branch || "main";
                const chunks = createChunks(parsed, repoName, repoVersion);
                for (const chunk of chunks) {
                    documents.push({
                        id: `${repoName}:${filePath}:${chunk.metadata.startLine}`,
                        content: chunk.text,
                        metadata: {
                            repository: chunk.metadata.repository,
                            filePath: chunk.metadata.filePath,
                            language: chunk.metadata.language,
                            startLine: chunk.metadata.startLine,
                            endLine: chunk.metadata.endLine,
                            codeType: chunk.metadata.codeUnitType || "unknown",
                            codeName: chunk.metadata.codeUnitName || "",
                            isPublic: chunk.metadata.isPublic,
                            repoVersion: chunk.metadata.repoVersion,
                            pragmaVersion: chunk.metadata.pragmaVersion,
                            indexedAt: chunk.metadata.indexedAt,
                        },
                    });
                    chunkCount++;
                }
            }
        }
        // Generate embeddings and store
        if (documents.length > 0) {
            const texts = documents.map((d) => d.content);
            const embeddings = await embeddingGenerator.generateEmbeddings(texts);
            for (let i = 0; i < documents.length; i++) {
                documents[i].embedding = embeddings[i].embedding;
            }
            await vectorStore.addDocuments(documents);
        }
        return { fileCount: filteredPaths.length, chunkCount };
    }
    catch (error) {
        logger.error(`Failed incremental update for ${repoName}`, {
            error: String(error),
        });
        throw error;
    }
}
//# sourceMappingURL=indexer.js.map
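A note on the pragma regex in the deleted file: the operator class `[><=]*` matches zero or more characters, so the pattern accepts `>=`, `<=`, `=`, or no operator at all, and captures only the first version number it finds. Illustrative calls (hypothetical inputs, using `extractPragmaVersion` exactly as defined above):

```ts
extractPragmaVersion("pragma language_version >= 0.14.0;"); // "0.14.0"
extractPragmaVersion("pragma language_version 0.15.2;");    // "0.15.2" (no operator also matches)
extractPragmaVersion("export circuit foo() {}");            // undefined (no pragma present)
```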
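The shape of `repoConfig` is implied by its usage above: `owner`, `repo`, an optional `branch` (defaulting to "main"), plus `patterns` and `exclude` globs consumed by `filterFilesByPatterns`. A hypothetical invocation with placeholder values, not the package's real defaults (those live in `DEFAULT_REPOSITORIES`):

```ts
// Field names inferred from usage in indexer.js; all values are placeholders.
const repoConfig = {
  owner: "example-org",
  repo: "example-contracts",
  branch: "main",
  patterns: ["**/*.compact", "**/*.ts", "**/*.md"],
  exclude: ["**/node_modules/**"],
};

const { fileCount, chunkCount } = await indexRepository(repoConfig);
console.log(`Indexed ${fileCount} files into ${chunkCount} chunks`);
```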
package/dist/pipeline/parser.d.ts
DELETED

@@ -1,46 +0,0 @@
export interface CodeUnit {
    type: "ledger" | "circuit" | "witness" | "function" | "type" | "import" | "export" | "class" | "interface";
    name: string;
    code: string;
    startLine: number;
    endLine: number;
    isPublic: boolean;
    isPrivate: boolean;
    documentation?: string;
    parameters?: Array<{
        name: string;
        type: string;
    }>;
    returnType?: string;
}
export interface ParsedFile {
    path: string;
    language: "compact" | "typescript" | "markdown";
    content: string;
    codeUnits: CodeUnit[];
    imports: string[];
    exports: string[];
    metadata: {
        hasLedger: boolean;
        hasCircuits: boolean;
        hasWitnesses: boolean;
        lineCount: number;
    };
}
/**
 * Parse Compact smart contract files
 */
export declare function parseCompactFile(path: string, content: string): ParsedFile;
/**
 * Parse TypeScript files
 */
export declare function parseTypeScriptFile(path: string, content: string): ParsedFile;
/**
 * Parse Markdown documentation files
 */
export declare function parseMarkdownFile(path: string, content: string): ParsedFile;
/**
 * Parse a file based on its extension
 */
export declare function parseFile(path: string, content: string): ParsedFile;
//# sourceMappingURL=parser.d.ts.map
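This declaration file exposes only signatures; the implementations live in package/dist/pipeline/parser.js (removed in the same release, -436 lines, not shown in this section). Since `ParsedFile.language` is limited to `"compact" | "typescript" | "markdown"`, `parseFile` presumably routes on file extension. A minimal sketch of that dispatch, offered as an assumption rather than the package's actual logic:

```ts
// Hypothetical dispatch, consistent with the declared signatures above.
export function parseFile(path: string, content: string): ParsedFile {
  if (path.endsWith(".compact")) return parseCompactFile(path, content);
  if (path.endsWith(".ts") || path.endsWith(".tsx")) return parseTypeScriptFile(path, content);
  return parseMarkdownFile(path, content); // remaining indexed files are docs
}
```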