@vespermcp/mcp-server 1.2.21 ā 1.2.24
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +49 -0
- package/build/cache/service.js +7 -0
- package/build/cloud/adapters/supabase.js +49 -0
- package/build/cloud/storage-manager.js +6 -0
- package/build/export/exporter.js +22 -9
- package/build/gateway/unified-dataset-gateway.js +441 -0
- package/build/index.js +1815 -839
- package/build/ingestion/ingestor.js +7 -4
- package/build/install/install-service.js +11 -6
- package/build/lib/supabase.js +3 -0
- package/build/metadata/arxiv-source.js +229 -0
- package/build/metadata/circuit-breaker.js +62 -0
- package/build/metadata/github-source.js +203 -0
- package/build/metadata/hackernews-source.js +123 -0
- package/build/metadata/quality.js +27 -0
- package/build/metadata/scraper.js +85 -14
- package/build/metadata/semantic-scholar-source.js +138 -0
- package/build/python/asset_downloader_engine.py +2 -0
- package/build/python/convert_engine.py +92 -0
- package/build/python/export_engine.py +45 -0
- package/build/python/kaggle_engine.py +77 -5
- package/build/python/normalize_engine.py +83 -0
- package/build/python/vesper/core/asset_downloader.py +5 -1
- package/build/scripts/test-phase1-webcore-quality.js +104 -0
- package/build/search/engine.js +45 -6
- package/build/search/jit-orchestrator.js +18 -14
- package/build/search/query-intent.js +509 -0
- package/build/tools/formatter.js +6 -3
- package/build/utils/python-runtime.js +130 -0
- package/build/web/extract-web.js +297 -0
- package/build/web/fusion-engine.js +457 -0
- package/build/web/types.js +1 -0
- package/build/web/web-core.js +242 -0
- package/package.json +12 -5
- package/scripts/postinstall.cjs +87 -31
- package/scripts/wizard.cjs +652 -0
- package/scripts/wizard.js +338 -12
- package/src/python/__pycache__/config.cpython-312.pyc +0 -0
- package/src/python/__pycache__/kaggle_engine.cpython-312.pyc +0 -0
- package/src/python/asset_downloader_engine.py +2 -0
- package/src/python/convert_engine.py +92 -0
- package/src/python/export_engine.py +45 -0
- package/src/python/kaggle_engine.py +77 -5
- package/src/python/normalize_engine.py +83 -0
- package/src/python/requirements.txt +12 -0
- package/src/python/vesper/core/asset_downloader.py +5 -1
- package/wizard.cjs +3 -0
|
@@ -0,0 +1,242 @@
|
|
|
1
|
+
/** Returns the current time as an ISO-8601 UTC timestamp string. */
function nowIso() {
    const now = new Date();
    return now.toISOString();
}
|
|
4
|
+
/**
 * Clamps a number into the inclusive range [0, 1].
 * Non-finite input (NaN, ±Infinity) maps to 0.
 */
function clamp01(x) {
    if (!Number.isFinite(x)) {
        return 0;
    }
    return Math.min(1, Math.max(0, x));
}
|
|
9
|
+
/**
 * Converts a 0–100 quality score into the clamped 0–1 range.
 * Nullish/falsy input counts as 0.
 */
function scoreTo01(score0to100) {
    const raw = Number(score0to100 || 0);
    return clamp01(raw / 100);
}
|
|
12
|
+
/**
 * Aggregates documents from multiple web sources (arXiv, GitHub,
 * Semantic Scholar, Hacker News) into a unified result set with
 * per-source telemetry. Each source adapter is injected via `deps`
 * and must expose `discoverWithTelemetry(query, limit, opts?)`.
 */
export class WebCoreEngine {
    // Injected adapters: { arxivSource, githubSource, semanticScholarSource, hackerNewsSource }.
    deps;
    constructor(deps) {
        this.deps = deps;
    }
    /**
     * Discover documents matching `input.query` across the requested sources.
     *
     * @param {object} input
     * @param {string} input.query - search text; required, non-empty after trim.
     * @param {number} [input.limit=10] - total result cap, clamped to 1..50;
     *   non-numeric values fall back to 10.
     * @param {string[]} [input.sources] - any of "arxiv", "github",
     *   "semantic_scholar", "hackernews"; defaults to ["arxiv", "github"].
     *   Unknown names are silently dropped.
     * @param {boolean} [input.arxiv_full_text] - request PDF full text for arXiv hits.
     * @param {boolean} [input.github_include_readme] - request READMEs for GitHub hits.
     * @returns {Promise<{query: string, sources: string[], results: object[],
     *   collected_at: string, telemetry: {per_source: object[]}}>}
     *   Per-source failures are captured in telemetry, never thrown.
     * @throws {Error} when query is empty.
     */
    async find(input) {
        const query = String(input.query || "").trim();
        if (!query)
            throw new Error("query is required");
        // Clamp limit into [1, 50]; guard against NaN from non-numeric input,
        // which would otherwise make results.slice(0, NaN) return nothing.
        const rawLimit = Number(input.limit || 10);
        const limit = Math.max(1, Math.min(50, Number.isFinite(rawLimit) ? rawLimit : 10));
        const requested = (input.sources && input.sources.length > 0 ? input.sources : ["arxiv", "github"])
            .filter((s) => s === "arxiv" || s === "github" || s === "semantic_scholar" || s === "hackernews");
        const arxivFullText = input.arxiv_full_text === true;
        const githubIncludeReadme = input.github_include_readme === true;
        const collectedAt = nowIso();
        const results = [];
        const telemetry = { per_source: [] };
        // Split the result budget evenly across sources (at least 1 each).
        const perSource = Math.max(1, Math.ceil(limit / Math.max(1, requested.length)));
        for (const source of requested) {
            if (source === "arxiv") {
                await this.#collect(source, results, telemetry, () => this.deps.arxivSource.discoverWithTelemetry(query, perSource, { full_text: arxivFullText }), (m) => this.fromArxiv(m, collectedAt), (out) => ({ pdf_extract_ms_total: out.pdf_extract_ms_total }));
            }
            else if (source === "github") {
                await this.#collect(source, results, telemetry, () => this.deps.githubSource.discoverWithTelemetry(query, perSource, { include_readme: githubIncludeReadme }), (m) => this.fromGithub(m, collectedAt), (out) => ({ readme_fetch_ms_total: out.readme_fetch_ms_total }));
            }
            else if (source === "semantic_scholar") {
                await this.#collect(source, results, telemetry, () => this.deps.semanticScholarSource.discoverWithTelemetry(query, perSource), (m) => this.fromSemanticScholar(m, collectedAt));
            }
            else if (source === "hackernews") {
                await this.#collect(source, results, telemetry, () => this.deps.hackerNewsSource.discoverWithTelemetry(query, perSource), (m) => this.fromHackerNews(m, collectedAt));
            }
        }
        return {
            query,
            sources: requested,
            results: results.slice(0, limit),
            collected_at: collectedAt,
            telemetry,
        };
    }
    /**
     * Shared fetch → map → telemetry flow for one source (this logic was
     * previously duplicated four times inside find()). Adapter errors are
     * recorded in telemetry instead of propagating to the caller.
     *
     * @param {string} source - source name recorded in telemetry.
     * @param {object[]} results - shared accumulator for mapped documents.
     * @param {{per_source: object[]}} telemetry - shared telemetry accumulator.
     * @param {() => Promise<object>} fetchResults - calls the source adapter.
     * @param {(meta: object) => object|null} toDoc - maps raw metadata to a
     *   document; null results are filtered out.
     * @param {(out: object) => object} [extraTelemetry] - optional extra
     *   telemetry fields derived from the adapter response.
     */
    async #collect(source, results, telemetry, fetchResults, toDoc, extraTelemetry) {
        const t0 = Date.now();
        try {
            const out = await fetchResults();
            const docs = out.results.map(toDoc).filter((d) => d !== null);
            results.push(...docs);
            telemetry.per_source.push({
                source,
                cache_hit: out.cacheHit,
                // Prefer the adapter-reported latency; fall back to wall clock.
                latency_ms: out.latencyMs || (Date.now() - t0),
                result_count: docs.length,
                ...(extraTelemetry ? extraTelemetry(out) : {}),
            });
        }
        catch (e) {
            telemetry.per_source.push({
                source,
                cache_hit: false,
                latency_ms: Date.now() - t0,
                result_count: 0,
                error: e?.message || String(e),
            });
        }
    }
    /**
     * Maps arXiv source metadata to a unified document.
     * Returns null when no abstract URL or content is available.
     * Content is PDF full text when present, otherwise the abstract.
     */
    fromArxiv(meta, collectedAt) {
        const arxivId = meta.id;
        const absUrl = meta.metadata_url || `https://arxiv.org/abs/${arxivId}`;
        const pdfUrl = meta.download_url || `https://arxiv.org/pdf/${arxivId}`;
        const title = meta.name;
        const abstract = meta.description;
        const authors = Array.isArray(meta.authors) ? meta.authors : undefined;
        const content = meta.webcore_content || abstract;
        if (!absUrl || !content)
            return null;
        return {
            source_type: "arxiv",
            source_url: absUrl,
            content,
            metadata_json: {
                arxiv_id: arxivId,
                title,
                authors,
                categories: meta.tags,
                updated_at: meta.last_updated,
                pdf_url: pdfUrl,
                abstract,
                pdf_text_present: !!meta.webcore_content,
                pdf_text_preview_length: meta.webcore_content ? meta.webcore_content.length : 0,
            },
            quality_score: scoreTo01(meta.quality_score),
            collected_at: collectedAt,
            content_type: meta.webcore_content ? "paper_fulltext" : "paper_abstract",
        };
    }
    /**
     * Maps GitHub repo metadata to a unified document.
     * Content priority: README text, then description, then repo name.
     * Returns null when no URL or content can be derived.
     */
    fromGithub(meta, collectedAt) {
        const fullName = meta.id;
        const url = meta.metadata_url || meta.download_url || `https://github.com/${fullName}`;
        const content = meta.webcore_content || meta.description || meta.name;
        if (!url || !content)
            return null;
        return {
            source_type: "github",
            source_url: url,
            content,
            metadata_json: {
                full_name: fullName,
                name: meta.name,
                description: meta.description,
                stars: meta.stars,
                // NOTE(review): `downloads` appears to proxy the fork count here — confirm with the GitHub adapter.
                forks_proxy_downloads: meta.downloads,
                topics: meta.tags,
                license_id: meta.license?.id,
                license_name: meta.license?.name,
                updated_at: meta.last_updated,
                language: (meta.languages || [])[0],
                readme_present: !!meta.webcore_content,
                readme_preview_length: meta.webcore_content ? meta.webcore_content.length : 0,
            },
            quality_score: scoreTo01(meta.quality_score),
            collected_at: collectedAt,
            content_type: meta.webcore_content ? "text" : "repo",
        };
    }
    /**
     * Maps Semantic Scholar paper metadata to a unified document.
     * Returns null when no URL or abstract is available.
     */
    fromSemanticScholar(meta, collectedAt) {
        const paperId = meta.id;
        const url = meta.metadata_url || meta.download_url || (paperId ? `https://www.semanticscholar.org/paper/${paperId}` : "");
        const content = meta.description;
        if (!url || !content)
            return null;
        return {
            source_type: "semantic_scholar",
            source_url: url,
            content,
            metadata_json: {
                paper_id: paperId,
                title: meta.name,
                authors: meta.authors,
                abstract: meta.description,
                tags: meta.tags,
                // NOTE(review): `stars` appears to carry the citation count for this source — confirm with the adapter.
                citation_count: meta.stars,
                updated_at: meta.last_updated,
            },
            quality_score: scoreTo01(meta.quality_score),
            collected_at: collectedAt,
            content_type: "paper_abstract",
        };
    }
    /**
     * Maps Hacker News item metadata to a unified document.
     * Returns null when no URL or content is available.
     */
    fromHackerNews(meta, collectedAt) {
        const itemId = meta.id;
        const url = meta.metadata_url || meta.download_url || (itemId ? `https://news.ycombinator.com/item?id=${itemId}` : "");
        const content = meta.description;
        const authors = meta.authors;
        if (!url || !content)
            return null;
        return {
            source_type: "hackernews",
            source_url: url,
            content,
            metadata_json: {
                item_id: itemId,
                title: meta.name,
                authors,
                // NOTE(review): `stars` appears to carry HN points and `downloads` the comment count — confirm with the adapter.
                points: meta.stars,
                comments_proxy: meta.downloads,
                tags: meta.tags,
                updated_at: meta.last_updated,
            },
            quality_score: scoreTo01(meta.quality_score),
            collected_at: collectedAt,
            content_type: "thread",
        };
    }
}
|
package/package.json
CHANGED
|
@@ -1,17 +1,17 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@vespermcp/mcp-server",
|
|
3
|
-
"version": "1.2.
|
|
3
|
+
"version": "1.2.24",
|
|
4
4
|
"description": "AI-powered dataset discovery, quality analysis, and preparation MCP server with multimodal support (text, image, audio, video)",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"main": "build/index.js",
|
|
7
7
|
"bin": {
|
|
8
|
-
"
|
|
9
|
-
"
|
|
10
|
-
"vesper-wizard": "scripts/wizard.js"
|
|
8
|
+
"vespermcp": "build/index.js",
|
|
9
|
+
"vesper-wizard": "wizard.cjs"
|
|
11
10
|
},
|
|
12
11
|
"files": [
|
|
13
12
|
"build/**/*",
|
|
14
13
|
"src/python/**/*",
|
|
14
|
+
"wizard.cjs",
|
|
15
15
|
"scripts/**/*",
|
|
16
16
|
"README.md",
|
|
17
17
|
"LICENSE",
|
|
@@ -31,6 +31,7 @@
|
|
|
31
31
|
"fuse": "node build/index.js fuse",
|
|
32
32
|
"discover": "node build/index.js discover",
|
|
33
33
|
"download": "node build/index.js download",
|
|
34
|
+
"export": "node build/index.js export",
|
|
34
35
|
"config": "node build/index.js config",
|
|
35
36
|
"test-fusion-engine": "py src/python/test_fusion_engine.py",
|
|
36
37
|
"setup": "node build/index.js --setup",
|
|
@@ -59,15 +60,20 @@
|
|
|
59
60
|
"license": "MIT",
|
|
60
61
|
"repository": {
|
|
61
62
|
"type": "git",
|
|
62
|
-
"url": "https://github.com/vesper/mcp-server"
|
|
63
|
+
"url": "git+https://github.com/vesper/mcp-server.git"
|
|
63
64
|
},
|
|
64
65
|
"engines": {
|
|
65
66
|
"node": ">=18.0.0",
|
|
66
67
|
"npm": ">=8.0.0"
|
|
67
68
|
},
|
|
68
69
|
"dependencies": {
|
|
70
|
+
"@aws-sdk/client-s3": "^3.1017.0",
|
|
71
|
+
"@aws-sdk/credential-providers": "^3.1017.0",
|
|
69
72
|
"@huggingface/hub": "^2.7.1",
|
|
70
73
|
"@modelcontextprotocol/sdk": "^1.25.2",
|
|
74
|
+
"@polar-sh/nextjs": "^0.9.4",
|
|
75
|
+
"@supabase/supabase-js": "^2.98.0",
|
|
76
|
+
"@vercel/analytics": "^2.0.0",
|
|
71
77
|
"@xenova/transformers": "^2.17.2",
|
|
72
78
|
"adm-zip": "^0.5.16",
|
|
73
79
|
"ajv": "^8.17.1",
|
|
@@ -75,6 +81,7 @@
|
|
|
75
81
|
"better-sqlite3": "^12.6.0",
|
|
76
82
|
"inquirer": "^13.3.0",
|
|
77
83
|
"lodash": "^4.17.21",
|
|
84
|
+
"pdf-parse": "^2.4.5",
|
|
78
85
|
"uuid": "^13.0.0",
|
|
79
86
|
"zod": "^4.3.5",
|
|
80
87
|
"zod-to-json-schema": "^3.25.1"
|
package/scripts/postinstall.cjs
CHANGED
|
@@ -2,13 +2,34 @@
|
|
|
2
2
|
|
|
3
3
|
const { execSync } = require('child_process');
|
|
4
4
|
const fs = require('fs');
|
|
5
|
+
const os = require('os');
|
|
5
6
|
const path = require('path');
|
|
6
7
|
|
|
7
8
|
console.log('\nš Setting up Vesper MCP Server...\n');
|
|
8
9
|
|
|
10
|
+
function getPythonBootstrapCommand() {
|
|
11
|
+
const attempts = process.platform === 'win32'
|
|
12
|
+
? ['py -3', 'python']
|
|
13
|
+
: ['python3', 'python'];
|
|
14
|
+
|
|
15
|
+
for (const command of attempts) {
|
|
16
|
+
try {
|
|
17
|
+
execSync(`${command} --version`, { stdio: 'pipe' });
|
|
18
|
+
return command;
|
|
19
|
+
} catch {
|
|
20
|
+
// try next command
|
|
21
|
+
}
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
return null;
|
|
25
|
+
}
|
|
26
|
+
|
|
9
27
|
// 1. Check for Python
|
|
28
|
+
const pythonBootstrap = getPythonBootstrapCommand();
|
|
10
29
|
try {
|
|
11
|
-
|
|
30
|
+
if (!pythonBootstrap) {
|
|
31
|
+
throw new Error('Python not found');
|
|
32
|
+
}
|
|
12
33
|
console.log('ā
Python found');
|
|
13
34
|
} catch (e) {
|
|
14
35
|
console.warn('ā ļø Python not found. Please install Python 3.8+ for full functionality.');
|
|
@@ -16,36 +37,15 @@ try {
|
|
|
16
37
|
process.exit(0); // Don't fail installation
|
|
17
38
|
}
|
|
18
39
|
|
|
19
|
-
|
|
20
|
-
console.log('\nš¦ Installing Python dependencies...');
|
|
21
|
-
const pythonPackages = [
|
|
22
|
-
'opencv-python',
|
|
23
|
-
'pillow',
|
|
24
|
-
'numpy',
|
|
25
|
-
'librosa',
|
|
26
|
-
'soundfile',
|
|
27
|
-
'aiohttp',
|
|
28
|
-
'aiofiles',
|
|
29
|
-
'datasets',
|
|
30
|
-
'webdataset',
|
|
31
|
-
'kaggle'
|
|
32
|
-
];
|
|
33
|
-
|
|
34
|
-
try {
|
|
35
|
-
execSync(`python -m pip install ${pythonPackages.join(' ')}`, {
|
|
36
|
-
stdio: 'inherit',
|
|
37
|
-
timeout: 120000 // 2 minutes timeout
|
|
38
|
-
});
|
|
39
|
-
console.log('ā
Python dependencies installed');
|
|
40
|
-
} catch (e) {
|
|
41
|
-
console.warn('ā ļø Failed to install some Python dependencies.');
|
|
42
|
-
console.warn(' You may need to install them manually:');
|
|
43
|
-
console.warn(` pip install ${pythonPackages.join(' ')}\n`);
|
|
44
|
-
}
|
|
45
|
-
|
|
46
|
-
// 3. Create data directories
|
|
47
|
-
const homeDir = process.env.HOME || process.env.USERPROFILE;
|
|
40
|
+
const homeDir = os.homedir() || process.env.HOME || process.env.USERPROFILE;
|
|
48
41
|
const vesperDataDir = path.join(homeDir, '.vesper');
|
|
42
|
+
const managedVenvDir = path.join(vesperDataDir, '.venv');
|
|
43
|
+
const managedPython = process.platform === 'win32'
|
|
44
|
+
? path.join(managedVenvDir, 'Scripts', 'python.exe')
|
|
45
|
+
: path.join(managedVenvDir, 'bin', 'python');
|
|
46
|
+
const requirementsPath = path.resolve(__dirname, '..', 'src', 'python', 'requirements.txt');
|
|
47
|
+
|
|
48
|
+
// 2. Create data directories
|
|
49
49
|
const dirs = [
|
|
50
50
|
vesperDataDir,
|
|
51
51
|
path.join(vesperDataDir, 'data'),
|
|
@@ -62,7 +62,63 @@ dirs.forEach(dir => {
|
|
|
62
62
|
|
|
63
63
|
console.log(`ā
Data directories created at ${vesperDataDir}`);
|
|
64
64
|
|
|
65
|
-
//
|
|
65
|
+
// 3. Create a managed Vesper Python environment
|
|
66
|
+
console.log('\nš Preparing managed Python environment...');
|
|
67
|
+
try {
|
|
68
|
+
if (!fs.existsSync(managedPython)) {
|
|
69
|
+
execSync(`${pythonBootstrap} -m venv "${managedVenvDir}"`, {
|
|
70
|
+
stdio: 'inherit',
|
|
71
|
+
timeout: 180000,
|
|
72
|
+
});
|
|
73
|
+
}
|
|
74
|
+
console.log(`ā
Managed Python ready at ${managedVenvDir}`);
|
|
75
|
+
} catch (e) {
|
|
76
|
+
console.warn('ā ļø Failed to create the managed Vesper Python environment.');
|
|
77
|
+
console.warn(` Vesper will fall back to PATH Python and may need to self-heal at runtime. ${(e && e.message) || ''}`.trim());
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
// 4. Install Python dependencies into the managed environment
|
|
81
|
+
console.log('\nš¦ Installing Python dependencies...');
|
|
82
|
+
const pythonPackages = [
|
|
83
|
+
'opencv-python',
|
|
84
|
+
'pillow',
|
|
85
|
+
'librosa',
|
|
86
|
+
'soundfile',
|
|
87
|
+
'pyarrow'
|
|
88
|
+
];
|
|
89
|
+
|
|
90
|
+
try {
|
|
91
|
+
const targetPython = fs.existsSync(managedPython) ? `"${managedPython}"` : pythonBootstrap;
|
|
92
|
+
execSync(`${targetPython} -m pip install --disable-pip-version-check --upgrade pip`, {
|
|
93
|
+
stdio: 'inherit',
|
|
94
|
+
timeout: 180000,
|
|
95
|
+
});
|
|
96
|
+
execSync(`${targetPython} -m pip install --disable-pip-version-check -r "${requirementsPath}" ${pythonPackages.join(' ')}`, {
|
|
97
|
+
stdio: 'inherit',
|
|
98
|
+
timeout: 600000,
|
|
99
|
+
});
|
|
100
|
+
console.log('ā
Python dependencies installed');
|
|
101
|
+
} catch (e) {
|
|
102
|
+
console.warn('ā ļø Failed to install some Python dependencies.');
|
|
103
|
+
console.warn(' You may need to install them manually into the Vesper runtime:');
|
|
104
|
+
console.warn(` ${fs.existsSync(managedPython) ? managedPython : pythonBootstrap} -m pip install -r "${requirementsPath}" ${pythonPackages.join(' ')}\n`);
|
|
105
|
+
}
|
|
106
|
+
|
|
107
|
+
// 5. Rebuild better-sqlite3 for current Node.js version
|
|
108
|
+
console.log('\nš§ Rebuilding native modules for current Node.js...');
|
|
109
|
+
try {
|
|
110
|
+
execSync('npm rebuild better-sqlite3', {
|
|
111
|
+
stdio: 'pipe',
|
|
112
|
+
timeout: 60000,
|
|
113
|
+
cwd: path.resolve(__dirname, '..')
|
|
114
|
+
});
|
|
115
|
+
console.log('ā
Native modules rebuilt successfully');
|
|
116
|
+
} catch (e) {
|
|
117
|
+
console.warn('ā ļø Could not rebuild better-sqlite3: ' + (e.message || e));
|
|
118
|
+
console.warn(' If you see ERR_DLOPEN_FAILED, run: npm rebuild better-sqlite3');
|
|
119
|
+
}
|
|
120
|
+
|
|
121
|
+
// 6. Auto-configure Claude Desktop (Best Effort)
|
|
66
122
|
console.log('\nāļø Attempting to auto-configure Claude Desktop...');
|
|
67
123
|
|
|
68
124
|
function getClaudeConfigPath() {
|