preflight-mcp 0.1.1 → 0.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +47 -67
- package/README.zh-CN.md +141 -124
- package/dist/ast/treeSitter.js +588 -0
- package/dist/bundle/analysis.js +47 -0
- package/dist/bundle/cleanup.js +155 -0
- package/dist/bundle/context7.js +65 -36
- package/dist/bundle/facts.js +829 -0
- package/dist/bundle/githubArchive.js +49 -28
- package/dist/bundle/overview.js +226 -48
- package/dist/bundle/service.js +124 -156
- package/dist/config.js +29 -3
- package/dist/context7/client.js +5 -2
- package/dist/evidence/dependencyGraph.js +826 -0
- package/dist/http/server.js +109 -0
- package/dist/search/sqliteFts.js +150 -10
- package/dist/server.js +125 -293
- package/dist/trace/service.js +108 -0
- package/dist/trace/store.js +170 -0
- package/package.json +4 -2
- package/dist/bundle/deepwiki.js +0 -206
package/dist/bundle/service.js
CHANGED
@@ -11,7 +11,6 @@ import { writeAgentsMd, writeStartHereMd } from './guides.js';
 import { generateOverviewMarkdown, writeOverviewFile } from './overview.js';
 import { rebuildIndex } from '../search/sqliteFts.js';
 import { ingestContext7Libraries } from './context7.js';
-import { ingestDeepWikiRepo } from './deepwiki.js';
 import { analyzeBundleStatic } from './analysis.js';
 import { autoDetectTags, generateDisplayName, generateDescription } from './tagging.js';
 import { bundleCreationLimiter } from '../core/concurrency-limiter.js';
@@ -26,48 +25,20 @@ function normalizeList(values) {
         .map((s) => s.toLowerCase())
         .sort();
 }
-function normalizeDeepWikiUrl(raw) {
-    const trimmed = raw.trim();
-    try {
-        const u = new URL(trimmed);
-        u.hash = '';
-        // Normalize host and strip trailing slash.
-        u.host = u.host.toLowerCase();
-        u.pathname = u.pathname.replace(/\/+$/g, '');
-        return u.toString();
-    }
-    catch {
-        return trimmed;
-    }
-}
 function canonicalizeCreateInput(input) {
     const repos = input.repos
         .map((r) => {
-
-
-            return {
-                kind: 'github',
-                repo: `${owner.toLowerCase()}/${repo.toLowerCase()}`,
-                ref: (r.ref ?? '').trim() || undefined,
-            };
-        }
-        if (r.kind === 'local') {
-            // For de-duplication, treat local imports as equivalent to github imports of the same logical repo/ref.
-            const { owner, repo } = parseOwnerRepo(r.repo);
-            return {
-                kind: 'github',
-                repo: `${owner.toLowerCase()}/${repo.toLowerCase()}`,
-                ref: (r.ref ?? '').trim() || undefined,
-            };
-        }
+        // For de-duplication, treat local imports as equivalent to github imports of the same logical repo/ref.
+        const { owner, repo } = parseOwnerRepo(r.repo);
         return {
-            kind: '
-
+            kind: 'github',
+            repo: `${owner.toLowerCase()}/${repo.toLowerCase()}`,
+            ref: (r.ref ?? '').trim() || undefined,
         };
     })
         .sort((a, b) => {
-        const ka =
-        const kb =
+        const ka = `github:${a.repo}:${a.ref ?? ''}`;
+        const kb = `github:${b.repo}:${b.ref ?? ''}`;
         return ka.localeCompare(kb);
     });
     return {
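
The rewritten canonicalizeCreateInput treats local imports the same as github imports of the identical owner/repo/ref, lower-cases the repo id, and sorts entries before the result feeds the de-duplication fingerprint. A standalone sketch of that idea; the hashing step and helper names are assumed for illustration and are not the package's actual exports:

```js
import { createHash } from 'node:crypto';

function canonicalizeRepos(repos) {
  return repos
    .map((r) => {
      const [owner, repo] = r.repo.split('/');
      return {
        kind: 'github', // local imports collapse to github entries for de-dup purposes
        repo: `${owner.toLowerCase()}/${repo.toLowerCase()}`,
        ref: (r.ref ?? '').trim() || undefined,
      };
    })
    .sort((a, b) => `github:${a.repo}:${a.ref ?? ''}`.localeCompare(`github:${b.repo}:${b.ref ?? ''}`));
}

function fingerprintCreateInput(input) {
  // Hashing the canonical JSON is an assumed detail; the package derives its own fingerprint.
  const canonical = { repos: canonicalizeRepos(input.repos) };
  return createHash('sha256').update(JSON.stringify(canonical)).digest('hex');
}

// Same fingerprint regardless of import kind, casing, or ordering:
const a = fingerprintCreateInput({ repos: [{ kind: 'local', repo: 'Acme/Widgets' }, { kind: 'github', repo: 'foo/bar', ref: 'main' }] });
const b = fingerprintCreateInput({ repos: [{ kind: 'github', repo: 'foo/bar', ref: 'main' }, { kind: 'github', repo: 'acme/widgets' }] });
console.log(a === b); // true
```
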
@@ -113,8 +84,8 @@ async function writeDedupIndex(storageDir, idx) {
         try {
             await fs.unlink(tmpPath);
         }
-        catch {
-
+        catch (cleanupErr) {
+            logger.debug('Failed to cleanup temp dedup index file (non-critical)', cleanupErr instanceof Error ? cleanupErr : undefined);
         }
         throw err;
     }
@@ -131,8 +102,8 @@ async function updateDedupIndexBestEffort(cfg, fingerprint, bundleId, bundleUpda
         idx.updatedAt = nowIso();
         await writeDedupIndex(storageDir, idx);
     }
-    catch {
-
+    catch (err) {
+        logger.debug(`Failed to update dedup index in ${storageDir} (best-effort)`, err instanceof Error ? err : undefined);
     }
     }
 }
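
Both dedup-index hunks replace silent `catch {}` blocks with debug-level logging around the same write-temp-file-then-rename pattern. A minimal sketch of that pattern; the file path and logger here are placeholders, not the package's own storage layout:

```js
import fs from 'node:fs/promises';

async function writeJsonAtomic(filePath, value, logger = console) {
  const tmpPath = `${filePath}.tmp.${process.pid}`;
  try {
    await fs.writeFile(tmpPath, JSON.stringify(value, null, 2), 'utf8');
    await fs.rename(tmpPath, filePath); // atomic when tmp and target share a filesystem
  }
  catch (err) {
    try {
      await fs.unlink(tmpPath);
    }
    catch (cleanupErr) {
      // Non-critical: the temp file may never have been created.
      logger.debug?.('Failed to cleanup temp file (non-critical)', cleanupErr);
    }
    throw err;
  }
}

await writeJsonAtomic('dedup-index.json', { updatedAt: new Date().toISOString() });
```
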
@@ -768,21 +739,23 @@ async function createBundleInternal(cfg, input, options) {
     const createdAt = nowIso();
     // Use effective storage dir (falls back if primary unavailable)
     const effectiveStorageDir = await getEffectiveStorageDirForWrite(cfg);
-
-    const
-    await ensureDir(
-
+    // Create bundle in temporary directory for atomic creation
+    const tmpBundlesDir = path.join(cfg.tmpDir, 'bundles-wip');
+    await ensureDir(tmpBundlesDir);
+    const tmpPaths = getBundlePaths(tmpBundlesDir, bundleId);
+    await ensureDir(tmpPaths.rootDir);
+    const finalPaths = getBundlePaths(effectiveStorageDir, bundleId);
     const allIngestedFiles = [];
     const reposSummary = [];
     try {
-
+        // All operations happen in tmpPaths (temporary directory)
        for (const repoInput of input.repos) {
            if (repoInput.kind === 'github') {
                const { owner, repo } = parseOwnerRepo(repoInput.repo);
                const { headSha, files, skipped, notes, source } = await cloneAndIngestGitHubRepo({
                    cfg,
                    bundleId,
-                    storageDir:
+                    storageDir: tmpBundlesDir,
                    owner,
                    repo,
                    ref: repoInput.ref,
@@ -796,12 +769,13 @@ async function createBundleInternal(cfg, input, options) {
                     notes: [...notes, ...skipped].slice(0, 50),
                 });
             }
-            else
+            else {
+                // Local repository
                 const { owner, repo } = parseOwnerRepo(repoInput.repo);
                 const { files, skipped } = await ingestLocalRepo({
                     cfg,
                     bundleId,
-                    storageDir:
+                    storageDir: tmpBundlesDir,
                     owner,
                     repo,
                     localPath: repoInput.path,
@@ -810,31 +784,16 @@ async function createBundleInternal(cfg, input, options) {
                 allIngestedFiles.push(...files);
                 reposSummary.push({ kind: 'local', id: `${owner}/${repo}`, source: 'local', notes: skipped.slice(0, 50) });
             }
-            else {
-                // DeepWiki integration: fetch and convert to Markdown.
-                const deepwikiResult = await ingestDeepWikiRepo({
-                    cfg,
-                    bundlePaths: paths,
-                    url: repoInput.url,
-                });
-                allIngestedFiles.push(...deepwikiResult.files);
-                reposSummary.push({
-                    kind: 'deepwiki',
-                    id: deepwikiResult.summary.repoId,
-                    source: 'deepwiki',
-                    notes: deepwikiResult.summary.notes,
-                });
-            }
         }
         // Context7 libraries (best-effort).
         let librariesSummary;
         if (input.libraries?.length) {
             // Clean libraries dir in case something wrote here earlier.
-            await rmIfExists(
-            await ensureDir(
+            await rmIfExists(tmpPaths.librariesDir);
+            await ensureDir(tmpPaths.librariesDir);
             const libIngest = await ingestContext7Libraries({
                 cfg,
-                bundlePaths:
+                bundlePaths: tmpPaths,
                 libraries: input.libraries,
                 topics: input.topics,
             });
@@ -842,7 +801,7 @@ async function createBundleInternal(cfg, input, options) {
             librariesSummary = libIngest.libraries;
         }
         // Build index.
-        await rebuildIndex(
+        await rebuildIndex(tmpPaths.searchDbPath, allIngestedFiles, {
             includeDocs: true,
             includeCode: true,
         });
@@ -890,16 +849,23 @@ async function createBundleInternal(cfg, input, options) {
                 includeCode: true,
             },
         };
-        await writeManifest(
+        await writeManifest(tmpPaths.manifestPath, manifest);
         // Guides.
-        await writeAgentsMd(
+        await writeAgentsMd(tmpPaths.agentsPath);
         await writeStartHereMd({
-            targetPath:
+            targetPath: tmpPaths.startHerePath,
             bundleId,
             repos: reposSummary.map((r) => ({ id: r.id, headSha: r.headSha })),
             libraries: librariesSummary,
         });
-        //
+        // Generate static facts (FACTS.json) FIRST. This is intentionally non-LLM and safe to keep inside bundles.
+        await generateFactsBestEffort({
+            bundleId,
+            bundleRoot: tmpPaths.rootDir,
+            files: allIngestedFiles,
+            mode: cfg.analysisMode,
+        });
+        // Overview (S2: factual-only with evidence pointers) - generated AFTER FACTS.json
         const perRepoOverviews = reposSummary
             .filter((r) => r.kind === 'github' || r.kind === 'local')
             .map((r) => {
@@ -909,30 +875,46 @@ async function createBundleInternal(cfg, input, options) {
             });
         const overviewMd = await generateOverviewMarkdown({
             bundleId,
-            bundleRootDir:
+            bundleRootDir: tmpPaths.rootDir,
             repos: perRepoOverviews,
             libraries: librariesSummary,
         });
-        await writeOverviewFile(
-        //
-        await
-            bundleId,
-            bundleRoot: paths.rootDir,
-            files: allIngestedFiles,
-            mode: cfg.analysisMode,
-        });
-        // Mirror to backup storage directories (non-blocking on failures)
-        if (cfg.storageDirs.length > 1) {
-            await mirrorBundleToBackups(effectiveStorageDir, cfg.storageDirs, bundleId);
-        }
-        // CRITICAL: Validate bundle completeness before finalizing
-        const validation = await validateBundleCompleteness(paths.rootDir);
+        await writeOverviewFile(tmpPaths.overviewPath, overviewMd);
+        // CRITICAL: Validate bundle completeness BEFORE atomic move
+        const validation = await validateBundleCompleteness(tmpPaths.rootDir);
         if (!validation.isValid) {
             const errorMsg = `Bundle creation incomplete. Missing: ${validation.missingComponents.join(', ')}`;
             logger.error(errorMsg);
             throw new Error(errorMsg);
         }
-        //
+        // ATOMIC OPERATION: Move from temp to final location
+        // This is atomic on most filesystems - bundle becomes visible only when complete
+        logger.info(`Moving bundle ${bundleId} from temp to final location (atomic)`);
+        await ensureDir(effectiveStorageDir);
+        try {
+            // Try rename first (atomic, but only works on same filesystem)
+            await fs.rename(tmpPaths.rootDir, finalPaths.rootDir);
+            logger.info(`Bundle ${bundleId} moved atomically to ${finalPaths.rootDir}`);
+        }
+        catch (renameErr) {
+            // Rename failed - likely cross-filesystem. Fall back to copy+delete
+            const errCode = renameErr.code;
+            if (errCode === 'EXDEV') {
+                logger.warn(`Cross-filesystem move detected for ${bundleId}, falling back to copy`);
+                await copyDir(tmpPaths.rootDir, finalPaths.rootDir);
+                await rmIfExists(tmpPaths.rootDir);
+                logger.info(`Bundle ${bundleId} copied to ${finalPaths.rootDir}`);
+            }
+            else {
+                // Some other error, rethrow
+                throw renameErr;
+            }
+        }
+        // Mirror to backup storage directories (non-blocking on failures)
+        if (cfg.storageDirs.length > 1) {
+            await mirrorBundleToBackups(effectiveStorageDir, cfg.storageDirs, bundleId);
+        }
+        // Update de-duplication index (best-effort). This is intentionally after atomic move.
         await updateDedupIndexBestEffort(cfg, fingerprint, bundleId, createdAt);
         const summary = {
             bundleId,
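
The bundle is now assembled under `cfg.tmpDir/bundles-wip`, validated, and only then moved into the storage directory, so a partially written bundle is never visible. A condensed sketch of the publish step, using Node's `fs.cp` as a stand-in for the package's `copyDir` helper:

```js
import fs from 'node:fs/promises';
import path from 'node:path';

async function publishBundleAtomically(tmpRoot, finalRoot, logger = console) {
  await fs.mkdir(path.dirname(finalRoot), { recursive: true });
  try {
    // rename() is atomic when the temp dir and the storage dir are on the same filesystem.
    await fs.rename(tmpRoot, finalRoot);
  }
  catch (err) {
    if (err.code === 'EXDEV') {
      // Cross-device move: fall back to copy + delete. Not atomic, but validation has
      // already passed, so the worst case after a crash is a re-creatable partial copy.
      logger.warn('Cross-filesystem move detected, falling back to copy');
      await fs.cp(tmpRoot, finalRoot, { recursive: true });
      await fs.rm(tmpRoot, { recursive: true, force: true });
    }
    else {
      throw err;
    }
  }
}
```
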
@@ -944,15 +926,19 @@ async function createBundleInternal(cfg, input, options) {
         return summary;
     }
     catch (err) {
-        //
-
-
-        await cleanupFailedBundle(cfg, bundleId);
-        }
+        // Clean up temp directory on failure
+        logger.error(`Bundle creation failed, cleaning up temp: ${bundleId}`, err instanceof Error ? err : undefined);
+        await rmIfExists(tmpPaths.rootDir);
         // Enhance error message
         const errorMsg = err instanceof Error ? err.message : String(err);
         throw new Error(`Failed to create bundle: ${errorMsg}`);
     }
+    finally {
+        // Ensure temp directory is cleaned up (double safety)
+        await rmIfExists(tmpPaths.rootDir).catch((err) => {
+            logger.debug('Failed to cleanup temp bundle directory in finally block (non-critical)', err instanceof Error ? err : undefined);
+        });
+    }
 }
 /** Check if a bundle has upstream changes without applying updates. */
 export async function checkForUpdates(cfg, bundleId) {
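
On failure the temp directory is removed and the error re-thrown with added context, and a `finally` block removes the temp directory again as a second safety net (a no-op once the rename has already moved it away). A compact sketch of that flow; the `build` callback is assumed for illustration:

```js
import fs from 'node:fs/promises';

async function createWithTempCleanup(tmpRoot, build, logger = console) {
  try {
    return await build(tmpRoot); // assemble, validate, and publish the bundle
  }
  catch (err) {
    logger.error(`Bundle creation failed, cleaning up temp: ${tmpRoot}`, err);
    const msg = err instanceof Error ? err.message : String(err);
    throw new Error(`Failed to create bundle: ${msg}`);
  }
  finally {
    // Safe either way: the directory is gone after a successful rename, and rm with
    // force: true ignores a missing path.
    await fs.rm(tmpRoot, { recursive: true, force: true }).catch((cleanupErr) => {
      logger.debug?.('Temp cleanup in finally failed (non-critical)', cleanupErr);
    });
  }
}
```
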
@@ -979,19 +965,14 @@ export async function checkForUpdates(cfg, bundleId) {
             hasUpdates = true;
             details.push({ repoId, currentSha: prev?.headSha, remoteSha, changed });
         }
-        else
+        else {
+            // Local: can't reliably detect whether local files changed without scanning; assume possible update.
             const { owner, repo } = parseOwnerRepo(repoInput.repo);
             const repoId = `${owner}/${repo}`;
-            // We can't reliably detect whether local files changed without scanning; assume possible update.
             const prev = manifest.repos.find((r) => r.id === repoId);
             details.push({ repoId, currentSha: prev?.headSha, changed: true });
             hasUpdates = true;
         }
-        else {
-            // DeepWiki: can't easily detect changes, assume possible update
-            details.push({ repoId: repoInput.url, changed: true });
-            hasUpdates = true;
-        }
     }
     return { hasUpdates, details };
 }
@@ -1093,41 +1074,6 @@ async function scanBundleIndexableFiles(params) {
             });
         }
     }
-    // 3) deepwiki/<owner>/<repo>/norm/** (docs-only)
-    const deepwikiDir = path.join(params.bundleRootDir, 'deepwiki');
-    const dwSt = await statOrNull(deepwikiDir);
-    if (dwSt?.isDirectory()) {
-        // Only walk the norm subtrees.
-        const owners = await fs.readdir(deepwikiDir, { withFileTypes: true });
-        for (const ownerEnt of owners) {
-            if (!ownerEnt.isDirectory())
-                continue;
-            const owner = ownerEnt.name;
-            const ownerDir = path.join(deepwikiDir, owner);
-            const repos = await fs.readdir(ownerDir, { withFileTypes: true });
-            for (const repoEnt of repos) {
-                if (!repoEnt.isDirectory())
-                    continue;
-                const repo = repoEnt.name;
-                const normDir = path.join(ownerDir, repo, 'norm');
-                const normSt = await statOrNull(normDir);
-                if (!normSt?.isDirectory())
-                    continue;
-                for await (const wf of walkFilesNoIgnore(normDir)) {
-                    if (!wf.relPosix.toLowerCase().endsWith('.md'))
-                        continue;
-                    const bundleRel = `deepwiki/${owner}/${repo}/norm/${wf.relPosix}`;
-                    await pushFile({
-                        repoId: `deepwiki:${owner}/${repo}`,
-                        kind: 'doc',
-                        repoRelativePath: wf.relPosix,
-                        bundleRelPosix: bundleRel,
-                        absPath: wf.absPath,
-                    });
-                }
-            }
-        }
-    }
     return { files, totalBytes, skipped };
 }
 export async function repairBundle(cfg, bundleId, options) {
@@ -1290,7 +1236,8 @@ export async function updateBundle(cfg, bundleId, options) {
                 allIngestedFiles.push(...files);
                 reposSummary.push({ kind: 'github', id: repoId, source, headSha, notes: [...notes, ...skipped].slice(0, 50) });
             }
-            else
+            else {
+                // Local repository
                 const { owner, repo } = parseOwnerRepo(repoInput.repo);
                 const repoId = `${owner}/${repo}`;
                 const { files, skipped } = await ingestLocalRepo({
@@ -1306,23 +1253,6 @@ export async function updateBundle(cfg, bundleId, options) {
                 reposSummary.push({ kind: 'local', id: repoId, source: 'local', notes: skipped.slice(0, 50) });
                 changed = true;
             }
-            else {
-                // DeepWiki integration: fetch and convert to Markdown.
-                const deepwikiResult = await ingestDeepWikiRepo({
-                    cfg,
-                    bundlePaths: paths,
-                    url: repoInput.url,
-                });
-                allIngestedFiles.push(...deepwikiResult.files);
-                reposSummary.push({
-                    kind: 'deepwiki',
-                    id: deepwikiResult.summary.repoId,
-                    source: 'deepwiki',
-                    notes: deepwikiResult.summary.notes,
-                });
-                // Always mark as changed for DeepWiki since we can't easily detect content changes.
-                changed = true;
-            }
         }
         // Context7 libraries (best-effort).
         let librariesSummary;
@@ -1407,11 +1337,22 @@ export async function updateBundle(cfg, bundleId, options) {
     };
     return { summary, changed };
 }
+/**
+ * Check if a string is a valid UUID (v4 format).
+ * Bundle IDs should be UUIDs with dashes.
+ */
+function isValidBundleId(id) {
+    // UUID v4 format: xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx
+    const uuidRegex = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i;
+    return uuidRegex.test(id);
+}
 /** List bundles from a single storage directory. */
 export async function listBundles(storageDir) {
     try {
         const entries = await fs.readdir(storageDir, { withFileTypes: true });
-        return entries
+        return entries
+            .filter((e) => e.isDirectory() && isValidBundleId(e.name))
+            .map((e) => e.name);
     }
     catch {
         return [];
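
`listBundles` now returns only directory names that parse as UUIDs, which keeps work-in-progress and `.deleting.*` directories out of listings. A sketch equivalent to the new behavior:

```js
import fs from 'node:fs/promises';

const UUID_RE = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i;

async function listBundleIds(storageDir) {
  try {
    const entries = await fs.readdir(storageDir, { withFileTypes: true });
    return entries
      .filter((e) => e.isDirectory() && UUID_RE.test(e.name))
      .map((e) => e.name);
  }
  catch {
    return []; // storage dir missing or unreadable
  }
}
```
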
@@ -1457,18 +1398,45 @@ export async function clearBundle(storageDir, bundleId) {
     const p = getBundlePaths(storageDir, bundleId);
     await rmIfExists(p.rootDir);
 }
-/**
+/**
+ * Clear bundle from ALL storage directories (mirror delete).
+ * Uses fast rename + background deletion to avoid blocking.
+ */
 export async function clearBundleMulti(storageDirs, bundleId) {
     let deleted = false;
     for (const dir of storageDirs) {
         try {
-
+            const paths = getBundlePaths(dir, bundleId);
+            // Check if the bundle directory exists
+            try {
+                await fs.stat(paths.rootDir);
+            }
+            catch {
+                // Directory doesn't exist, skip
+                continue;
+            }
+            // Fast deletion strategy: rename first (instant), then delete in background
+            const deletingPath = `${paths.rootDir}.deleting.${Date.now()}`;
+            try {
+                // Rename is atomic and instant on most filesystems
+                await fs.rename(paths.rootDir, deletingPath);
+                deleted = true;
+                // Background deletion (fire-and-forget)
+                // The renamed directory is invisible to listBundles (not a valid UUID)
+                rmIfExists(deletingPath).catch((err) => {
+                    logger.warn(`Background deletion failed for ${bundleId}: ${err instanceof Error ? err.message : String(err)}`);
+                });
+            }
+            catch (err) {
+                // Rename failed (maybe concurrent deletion), try direct delete as fallback
+                logger.warn(`Rename failed for ${bundleId}, falling back to direct delete`);
                 await clearBundle(dir, bundleId);
                 deleted = true;
             }
         }
-        catch {
+        catch (err) {
             // Skip unavailable paths
+            logger.debug(`Failed to delete bundle from ${dir}: ${err instanceof Error ? err.message : String(err)}`);
         }
     }
     return deleted;
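
`clearBundleMulti` now hides a bundle immediately by renaming its directory to a non-UUID name and deletes it in the background, falling back to a direct delete only if the rename fails. A sketch of the fast path:

```js
import fs from 'node:fs/promises';

async function deleteBundleFast(bundleRoot, logger = console) {
  // Renaming is near-instant and removes the bundle from UUID-filtered listings immediately.
  const deletingPath = `${bundleRoot}.deleting.${Date.now()}`;
  await fs.rename(bundleRoot, deletingPath);
  // Fire-and-forget background removal; failures are logged, not thrown.
  fs.rm(deletingPath, { recursive: true, force: true }).catch((err) => {
    logger.warn(`Background deletion failed: ${err instanceof Error ? err.message : String(err)}`);
  });
}
```
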
package/dist/config.js
CHANGED
@@ -7,16 +7,35 @@ function envNumber(name, fallback) {
     const n = Number(raw);
     return Number.isFinite(n) && n > 0 ? n : fallback;
 }
+function envBoolean(name, fallback) {
+    const raw = process.env[name];
+    if (!raw)
+        return fallback;
+    const v = raw.trim().toLowerCase();
+    if (v === '1' || v === 'true' || v === 'yes' || v === 'y' || v === 'on')
+        return true;
+    if (v === '0' || v === 'false' || v === 'no' || v === 'n' || v === 'off')
+        return false;
+    return fallback;
+}
+function parseAstEngine(raw) {
+    const v = (raw ?? '').trim().toLowerCase();
+    if (v === 'native')
+        return 'native';
+    return 'wasm';
+}
 function parseAnalysisMode(raw) {
     const v = (raw ?? '').trim().toLowerCase();
     if (v === 'none')
         return 'none';
     if (v === 'quick')
         return 'quick';
-
+    if (v === 'full')
+        return 'full'; // Phase 2 module analysis
+    // Back-compat: deep used to exist; treat it as full (for better analysis).
     if (v === 'deep')
-        return '
-        return '
+        return 'full';
+    return 'full'; // Default to full for better analysis
 }
 /**
  * Parse storage directories from environment.
@@ -43,6 +62,9 @@ export function getConfig() {
     const storageDir = storageDirs[0]; // Primary for new bundles (always at least one from default)
     const tmpDir = process.env.PREFLIGHT_TMP_DIR ?? path.join(os.tmpdir(), 'preflight-mcp');
     const analysisMode = parseAnalysisMode(process.env.PREFLIGHT_ANALYSIS_MODE);
+    const httpEnabled = envBoolean('PREFLIGHT_HTTP_ENABLED', true);
+    const httpHost = (process.env.PREFLIGHT_HTTP_HOST ?? '127.0.0.1').trim() || '127.0.0.1';
+    const httpPort = envNumber('PREFLIGHT_HTTP_PORT', 37123);
     return {
         storageDir,
         storageDirs,
@@ -54,6 +76,10 @@ export function getConfig() {
         maxFileBytes: envNumber('PREFLIGHT_MAX_FILE_BYTES', 512 * 1024),
         maxTotalBytes: envNumber('PREFLIGHT_MAX_TOTAL_BYTES', 50 * 1024 * 1024),
         analysisMode,
+        astEngine: parseAstEngine(process.env.PREFLIGHT_AST_ENGINE),
+        httpEnabled,
+        httpHost,
+        httpPort,
         // Tuning parameters with defaults (can be overridden via env vars)
         maxContext7Libraries: envNumber('PREFLIGHT_MAX_CONTEXT7_LIBRARIES', 20),
         maxContext7Topics: envNumber('PREFLIGHT_MAX_CONTEXT7_TOPICS', 10),
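
The config additions introduce tolerant boolean parsing plus new `astEngine` and HTTP settings (enabled by default on 127.0.0.1:37123), and `PREFLIGHT_ANALYSIS_MODE` now defaults to `full`, with the legacy `deep` value mapped to `full`. A small illustration of the accepted spellings; this is a standalone restatement of the parsing rules, not an import of the package:

```js
const envBoolean = (raw, fallback) => {
  if (!raw) return fallback;
  const v = raw.trim().toLowerCase();
  if (['1', 'true', 'yes', 'y', 'on'].includes(v)) return true;
  if (['0', 'false', 'no', 'n', 'off'].includes(v)) return false;
  return fallback;
};

console.log(envBoolean('YES', false));  // true
console.log(envBoolean('off', true));   // false
console.log(envBoolean('maybe', true)); // true (unrecognized values keep the fallback)
// PREFLIGHT_ANALYSIS_MODE: 'none' | 'quick' | 'full'; 'deep' (legacy) and anything else map to 'full'.
// PREFLIGHT_AST_ENGINE: 'native' selects the native engine, anything else falls back to 'wasm'.
// PREFLIGHT_HTTP_ENABLED / PREFLIGHT_HTTP_HOST / PREFLIGHT_HTTP_PORT default to true / 127.0.0.1 / 37123.
```
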
|
package/dist/context7/client.js
CHANGED
|
@@ -1,5 +1,6 @@
|
|
|
1
1
|
import { Client } from '@modelcontextprotocol/sdk/client/index.js';
|
|
2
2
|
import { StreamableHTTPClientTransport } from '@modelcontextprotocol/sdk/client/streamableHttp.js';
|
|
3
|
+
import { logger } from '../logging/logger.js';
|
|
3
4
|
export async function connectContext7(cfg) {
|
|
4
5
|
const url = new URL(cfg.context7McpUrl);
|
|
5
6
|
const headers = {};
|
|
@@ -19,12 +20,14 @@ export async function connectContext7(cfg) {
             maxRetries: 1,
         },
     });
-    const client = new Client({ name: 'preflight-context7', version: '0.1.
+    const client = new Client({ name: 'preflight-context7', version: '0.1.3' });
     await client.connect(transport);
     return {
         client,
         close: async () => {
-            await client.close().catch(() =>
+            await client.close().catch((err) => {
+                logger.debug('Context7 client close failed (non-critical)', err instanceof Error ? err : undefined);
+            });
         },
     };
 }