@fanduzi/deltascope-mcp 0.8.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +30 -0
- package/bin/deltascope-mcp.js +91 -0
- package/lib/cache.js +13 -0
- package/lib/download.js +96 -0
- package/lib/launcher.js +150 -0
- package/lib/releases.js +56 -0
- package/package.json +34 -0
package/README.md
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
# DeltaScope MCP Launcher
|
|
2
|
+
|
|
3
|
+
Bootstrap package for launching the native `deltascope-mcp` stdio server from npm-based MCP clients.
|
|
4
|
+
|
|
5
|
+
## What This Package Does
|
|
6
|
+
|
|
7
|
+
This package does not implement DeltaScope MCP tools itself. It downloads the matching native `deltascope-mcp` release binary, caches it locally, and then starts the real stdio server.
|
|
8
|
+
|
|
9
|
+
The launcher verifies the downloaded archive against the official DeltaScope release checksums before it updates the local cache.
|
|
10
|
+
|
|
11
|
+
## Version Contract
|
|
12
|
+
|
|
13
|
+
- npm package version should track the DeltaScope release version it boots
|
|
14
|
+
- by default the launcher resolves the native binary from its own package version
|
|
15
|
+
- `DELTASCOPE_MCP_VERSION` overrides the target DeltaScope version
|
|
16
|
+
- `DELTASCOPE_MCP_BASE_URL` overrides only the base URL used for archive downloads; the release version and archive name are appended to it
|
|
17
|
+
- checksum verification still uses the official GitHub release checksums file
|
|
18
|
+
|
|
19
|
+
Default download base:
|
|
20
|
+
|
|
21
|
+
```text
|
|
22
|
+
https://github.com/Fanduzi/DeltaScope/releases/download
|
|
23
|
+
```
|
|
24
|
+
|
|
25
|
+
Example override:
|
|
26
|
+
|
|
27
|
+
```bash
|
|
28
|
+
DELTASCOPE_MCP_BASE_URL=https://mirror.example.com/deltascope/releases/download \
|
|
29
|
+
npx -y @fanduzi/deltascope-mcp
|
|
30
|
+
```
|
|
@@ -0,0 +1,91 @@
|
|
|
1
|
+
#!/usr/bin/env node
// Bootstrap entry point: resolves the matching native `deltascope-mcp`
// release, ensures it is cached locally (downloading and checksum-verifying
// it on a cache miss), then runs it as a child with stdio inherited so the
// MCP stdio protocol flows straight through to the native server.

import fs from "node:fs/promises";
import process from "node:process";

import { downloadAndExtractBinary } from "../lib/download.js";
import { ensureExecutable, formatBootstrapContext, spawnBinary } from "../lib/launcher.js";
import {
  resolveArchiveName,
  resolveArchiveURL,
  resolveChecksumsURL,
  resolveDeltaScopeVersion,
  resolvePlatform
} from "../lib/releases.js";

// The launcher's own package version doubles as the default DeltaScope
// release tag (see the README's "Version Contract" section).
const packageJson = JSON.parse(
  await fs.readFile(new URL("../package.json", import.meta.url), "utf8")
);
// DELTASCOPE_MCP_VERSION (when non-empty) overrides the packaged version.
const version = resolveDeltaScopeVersion({
  packageVersion: packageJson.version,
  envVersion: process.env.DELTASCOPE_MCP_VERSION ?? ""
});
const platform = resolvePlatform();
// DELTASCOPE_MCP_BASE_URL redirects only the archive download; the
// checksums URL below always points at the official GitHub release.
const archiveURL = resolveArchiveURL({
  baseURL: process.env.DELTASCOPE_MCP_BASE_URL ?? "",
  version,
  os: platform.os,
  arch: platform.arch
});
const archiveName = resolveArchiveName({
  version,
  os: platform.os,
  arch: platform.arch
});
const checksumsURL = resolveChecksumsURL({
  version
});

// All launcher diagnostics go to stderr: stdout belongs to the MCP stdio
// protocol once the native server starts.
function log(message) {
  process.stderr.write(`[deltascope-mcp-launcher] ${message}\n`);
}

log(`resolved DeltaScope version ${version}`);
log(`detected platform ${platform.os}-${platform.arch}`);

// Resolve the cached binary, downloading on a cache miss. The downloadBinary
// callback stages the verified binary at a temp path next to the cache
// target and returns that path plus the archive checksum; ensureExecutable
// renames it into place.
const binaryPath = await ensureExecutable({
  version,
  platform,
  archiveURL,
  checksumsURL,
  downloadBinary: (destinationPath) =>
    (async () => {
      log(`cache miss; downloading ${archiveURL}`);
      log(`cache target ${destinationPath}`);
      log(`verifying archive against ${checksumsURL}`);
      try {
        const result = await downloadAndExtractBinary({
          archiveURL,
          checksumsURL,
          archiveName,
          destinationPath
        });
        log(`downloaded archive and staged native binary for ${destinationPath}`);
        return result;
      } catch (error) {
        // Emit the full bootstrap context before rethrowing so users can
        // diagnose download/verification failures (mirror, proxy, etc.).
        process.stderr.write(`${formatBootstrapContext({
          version,
          platform,
          archiveURL,
          destinationPath
        })}\n`);
        throw error;
      }
    })()
});

log(`launching native binary ${binaryPath}`);

// Hand every CLI argument and the full environment to the native server;
// inherited stdio keeps the MCP stdin/stdout channel untouched.
const child = spawnBinary(binaryPath, process.argv.slice(2), {
  stdio: "inherit",
  env: process.env
});

child.on("error", (error) => {
  process.stderr.write(`deltascope-mcp launcher: ${error.message}\n`);
  process.exit(1);
});

// Mirror the native server's exit code; a null code (killed by signal)
// is reported as failure.
child.on("close", (code) => {
  process.exit(code ?? 1);
});
|
package/lib/cache.js
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
import path from "node:path";
|
|
2
|
+
|
|
3
|
+
// Root directory of the launcher's on-disk binary cache
// (<homeDir>/.cache/deltascope-mcp).
export function resolveCacheRoot({ homeDir }) {
  const segments = [homeDir, ".cache", "deltascope-mcp"];
  return path.join(...segments);
}
|
|
6
|
+
|
|
7
|
+
// Location of the cached native binary for one (version, os, arch) combo:
// <cacheRoot>/<version>/<os>-<arch>/deltascope-mcp
export function resolveCacheBinaryPath({ homeDir, version, os, arch }) {
  const root = resolveCacheRoot({ homeDir });
  const platformDir = `${os}-${arch}`;
  return path.join(root, version, platformDir, "deltascope-mcp");
}
|
|
10
|
+
|
|
11
|
+
// Location of the JSON metadata sidecar that records what the cached
// binary was built from; lives next to the binary itself.
export function resolveCacheMetadataPath({ homeDir, version, os, arch }) {
  const root = resolveCacheRoot({ homeDir });
  const platformDir = `${os}-${arch}`;
  return path.join(root, version, platformDir, "deltascope-mcp.json");
}
|
package/lib/download.js
ADDED
|
@@ -0,0 +1,96 @@
|
|
|
1
|
+
import fs from "node:fs/promises";
|
|
2
|
+
import os from "node:os";
|
|
3
|
+
import path from "node:path";
|
|
4
|
+
import { spawn } from "node:child_process";
|
|
5
|
+
import crypto from "node:crypto";
|
|
6
|
+
|
|
7
|
+
/**
 * Run the system `tar` with the given arguments.
 *
 * stdout is discarded (tar -x produces none we care about, and leaving an
 * unread pipe risks backpressure stalls); stderr is collected so a non-zero
 * exit can be reported with tar's own diagnostics.
 *
 * @param {string[]} args - arguments passed verbatim to `tar`.
 * @throws {Error} when tar cannot be spawned or exits with a non-zero code.
 */
async function runTar(args) {
  const child = spawn("tar", args, { stdio: ["ignore", "ignore", "pipe"] });

  // Attach all listeners synchronously after spawn() so neither "error"
  // nor "close" can fire before we are watching for it. (The previous
  // version awaited the stderr stream first, which raced against both
  // events and could miss a spawn failure entirely.)
  let stderr = "";
  child.stderr.on("data", (chunk) => {
    stderr += chunk.toString();
  });

  const exitCode = await new Promise((resolve, reject) => {
    child.on("error", reject);
    child.on("close", resolve);
  });

  if (exitCode !== 0) {
    throw new Error(`tar failed: ${stderr.trim()}`);
  }
}
|
|
23
|
+
|
|
24
|
+
// Hex-encoded SHA-256 digest of the given buffer.
function sha256(buffer) {
  const hash = crypto.createHash("sha256");
  hash.update(buffer);
  return hash.digest("hex");
}
|
|
27
|
+
|
|
28
|
+
// Parse a sha256sum-style checksums file into a Map of
// archive name -> lowercase hex digest. Lines that are blank or do not
// look like "<64-hex-digest> <name>" are skipped; a leading "*" on the
// name (sha256sum's binary-mode marker) is dropped.
function parseChecksums(text) {
  const entryPattern = /^([a-fA-F0-9]{64})\s+\*?(.+)$/;
  const entries = text
    .split(/\r?\n/)
    .map((rawLine) => rawLine.trim())
    .filter((line) => line.length > 0)
    .map((line) => line.match(entryPattern))
    .filter((match) => match !== null)
    .map((match) => [match[2].trim(), match[1].toLowerCase()]);
  return new Map(entries);
}
|
|
43
|
+
|
|
44
|
+
/**
 * Download a release archive, verify it against the release checksums file,
 * extract it, and stage the `deltascope-mcp` binary for the caller.
 *
 * The staged binary is written to `<destinationPath>.tmp-<pid>` — NOT the
 * final path — so the caller can rename it into place and keep the cache
 * update atomic. Verification happens before anything is written near the
 * destination; a bad checksum leaves the cache untouched.
 *
 * @param {object} opts
 * @param {string} opts.archiveURL - URL of the .tar.gz release archive.
 * @param {string} opts.checksumsURL - URL of the checksums file.
 * @param {string} opts.archiveName - archive filename; checksum lookup key.
 * @param {string} opts.destinationPath - final cache path the caller renames onto.
 * @param {Function} [opts.fetchImpl=globalThis.fetch] - injectable fetch for tests.
 * @returns {Promise<{binaryPath: string, archiveChecksum: string}>} staged
 *   temp binary path and the verified archive checksum.
 * @throws {Error} on download failure, missing/mismatched checksum, or tar failure.
 */
export async function downloadAndExtractBinary({
  archiveURL,
  checksumsURL,
  archiveName,
  destinationPath,
  fetchImpl = globalThis.fetch
}) {
  if (typeof fetchImpl !== "function") {
    throw new Error("fetch implementation is required");
  }

  // Archive and checksums are independent downloads; fetch them in parallel.
  const [archiveResponse, checksumsResponse] = await Promise.all([
    fetchImpl(archiveURL),
    fetchImpl(checksumsURL)
  ]);

  if (!archiveResponse.ok) {
    throw new Error(`failed to download ${archiveURL}`);
  }
  if (!checksumsResponse.ok) {
    throw new Error(`failed to download ${checksumsURL}`);
  }

  // Scratch space for the raw archive and its extraction; removed in the
  // finally block regardless of outcome.
  const tempDir = await fs.mkdtemp(path.join(os.tmpdir(), "deltascope-mcp-archive-"));
  const archivePath = path.join(tempDir, "archive.tar.gz");
  const extractDir = path.join(tempDir, "extract");
  // NOTE: the whole archive is buffered in memory before verification;
  // release archives are assumed small enough for this to be fine.
  const archiveBuffer = Buffer.from(await archiveResponse.arrayBuffer());
  const checksums = parseChecksums(await checksumsResponse.text());
  const expectedChecksum = checksums.get(archiveName);
  const actualChecksum = sha256(archiveBuffer);

  try {
    // Verify BEFORE touching disk near the destination: a missing or
    // mismatched checksum aborts without staging anything.
    if (!expectedChecksum) {
      throw new Error(`missing checksum for ${archiveName}`);
    }
    if (actualChecksum !== expectedChecksum) {
      throw new Error(`checksum mismatch for ${archiveName}`);
    }
    await fs.mkdir(extractDir, { recursive: true });
    await fs.writeFile(archivePath, archiveBuffer);
    await runTar(["-xzf", archivePath, "-C", extractDir]);
    await fs.mkdir(path.dirname(destinationPath), { recursive: true });
    // Stage next to the destination (same filesystem) so the caller's
    // rename is atomic; the pid suffix keeps concurrent processes apart.
    const tempBinaryPath = `${destinationPath}.tmp-${process.pid}`;
    await fs.copyFile(path.join(extractDir, "deltascope-mcp"), tempBinaryPath);
    await fs.chmod(tempBinaryPath, 0o755);
    return {
      binaryPath: tempBinaryPath,
      archiveChecksum: expectedChecksum
    };
  } finally {
    // Scratch cleanup only — the staged tmp binary lives outside tempDir
    // and survives for the caller to rename.
    await fs.rm(tempDir, { recursive: true, force: true });
  }
}
|
package/lib/launcher.js
ADDED
|
@@ -0,0 +1,150 @@
|
|
|
1
|
+
import fs from "node:fs/promises";
|
|
2
|
+
import os from "node:os";
|
|
3
|
+
import path from "node:path";
|
|
4
|
+
import { spawn } from "node:child_process";
|
|
5
|
+
|
|
6
|
+
import { resolveCacheBinaryPath, resolveCacheMetadataPath } from "./cache.js";
|
|
7
|
+
import { resolvePlatform } from "./releases.js";
|
|
8
|
+
|
|
9
|
+
// True when `targetPath` exists and is accessible, false otherwise.
async function fileExists(targetPath) {
  return fs.access(targetPath).then(
    () => true,
    () => false
  );
}
|
|
17
|
+
|
|
18
|
+
// Read and parse a JSON file; any read or parse failure yields null.
async function readJSON(pathname) {
  let parsed = null;
  try {
    const raw = await fs.readFile(pathname, "utf8");
    parsed = JSON.parse(raw);
  } catch {
    parsed = null;
  }
  return parsed;
}
|
|
25
|
+
|
|
26
|
+
// Acquire an exclusive advisory lock by creating `lockDir`: mkdir without
// `recursive` is atomic, so exactly one process wins. Losers poll every
// `lockRetryDelayMs` until `lockTimeoutMs` elapses. A lock directory older
// than `staleLockMs` (e.g. left behind by a crashed process) is removed
// and the attempt retried immediately.
// NOTE(review): stale-lock removal can race when two waiters delete and
// re-create concurrently — presumably acceptable for a best-effort
// download cache; confirm if stronger guarantees are ever needed.
async function acquireLock(lockDir, { staleLockMs = 60000, lockTimeoutMs = 10000, lockRetryDelayMs = 100 } = {}) {
  const startedAt = Date.now();
  for (;;) {
    try {
      // Atomic create: success means we own the lock.
      await fs.mkdir(lockDir);
      return;
    } catch (error) {
      if (error && error.code === "EEXIST") {
        try {
          const stat = await fs.stat(lockDir);
          if (Date.now() - stat.mtimeMs > staleLockMs) {
            // Holder looks dead; break its lock and retry right away.
            await fs.rm(lockDir, { recursive: true, force: true });
            continue;
          }
        } catch {
          // Lock vanished between mkdir and stat — retry immediately.
          continue;
        }
        if (Date.now() - startedAt > lockTimeoutMs) {
          throw new Error("timed out waiting for launcher cache lock");
        }
        await new Promise((resolve) => setTimeout(resolve, lockRetryDelayMs));
        continue;
      }
      // Any error other than EEXIST (permissions, missing parent, …) is fatal.
      throw error;
    }
  }
}
|
|
53
|
+
|
|
54
|
+
// Drop the cache lock directory; `force` makes this a no-op when the
// lock is already gone, so release is safe to call unconditionally.
async function releaseLock(lockDir) {
  const removal = { recursive: true, force: true };
  await fs.rm(lockDir, removal);
}
|
|
57
|
+
|
|
58
|
+
/**
 * Ensure the native DeltaScope binary for (version, platform) is present in
 * the local cache and return its path.
 *
 * Cache validity requires both the binary file and a metadata sidecar whose
 * recorded version/os/arch/archiveURL/checksumsURL all match the current
 * request — so changing the download source invalidates the cache. On a
 * miss, a directory lock serializes concurrent launchers (double-checked:
 * validity is re-tested after the lock is won), then `downloadBinary` stages
 * a temp binary which is renamed into place before the metadata is written.
 *
 * @param {object} opts
 * @param {string} opts.version - DeltaScope release tag (e.g. "v0.8.1").
 * @param {string} [opts.homeDir=os.homedir()] - cache root override (tests).
 * @param {{os: string, arch: string}} [opts.platform=resolvePlatform()]
 * @param {string} [opts.archiveURL=""] - recorded in metadata for validity checks.
 * @param {string} [opts.checksumsURL=""] - recorded in metadata for validity checks.
 * @param {number} [opts.staleLockMs] - see acquireLock.
 * @param {number} [opts.lockTimeoutMs] - see acquireLock.
 * @param {number} [opts.lockRetryDelayMs] - see acquireLock.
 * @param {(destinationPath: string) => Promise<{binaryPath: string, archiveChecksum: string}>} opts.downloadBinary
 *   - stages the binary at a temp path and reports the verified checksum.
 * @returns {Promise<string>} absolute path of the cached executable.
 * @throws {Error} when the cache misses and no downloadBinary is supplied,
 *   or when locking/downloading fails.
 */
export async function ensureExecutable({
  version,
  homeDir = os.homedir(),
  platform = resolvePlatform(),
  archiveURL = "",
  checksumsURL = "",
  staleLockMs,
  lockTimeoutMs,
  lockRetryDelayMs,
  downloadBinary
}) {
  const binaryPath = resolveCacheBinaryPath({
    homeDir,
    version,
    os: platform.os,
    arch: platform.arch
  });
  const metadataPath = resolveCacheMetadataPath({
    homeDir,
    version,
    os: platform.os,
    arch: platform.arch
  });
  const cacheDir = path.dirname(binaryPath);
  const lockDir = path.join(cacheDir, ".lock");
  // Every key here must match the on-disk metadata for the cache to count
  // as valid.
  const expectedMetadata = {
    version,
    os: platform.os,
    arch: platform.arch,
    archiveURL,
    checksumsURL
  };

  const isCacheValid = async () => {
    if (!(await fileExists(binaryPath))) {
      return false;
    }
    const metadata = await readJSON(metadataPath);
    if (!metadata) {
      return false;
    }
    return Object.entries(expectedMetadata).every(([key, value]) => metadata[key] === value);
  };

  // Fast path: no lock needed when the cache is already good.
  if (await isCacheValid()) {
    return binaryPath;
  }

  if (typeof downloadBinary !== "function") {
    throw new Error("downloadBinary is required when the DeltaScope MCP binary is not cached");
  }

  // The lock directory lives inside cacheDir, so the parent must exist
  // before acquireLock's mkdir can succeed.
  await fs.mkdir(cacheDir, { recursive: true });
  await acquireLock(lockDir, {
    staleLockMs,
    lockTimeoutMs,
    lockRetryDelayMs
  });
  try {
    // Double-check under the lock: another process may have populated the
    // cache while we were waiting.
    if (await isCacheValid()) {
      return binaryPath;
    }

    const { binaryPath: downloadedBinaryPath, archiveChecksum } = await downloadBinary(binaryPath);
    const tempMetadataPath = `${metadataPath}.tmp-${process.pid}`;
    const finalMetadata = {
      ...expectedMetadata,
      archiveChecksum
    };
    // Write metadata to a temp file, then rename binary first and metadata
    // second: a crash in between leaves a binary without metadata, which
    // isCacheValid treats as a miss and simply re-downloads.
    await fs.writeFile(tempMetadataPath, `${JSON.stringify(finalMetadata, null, 2)}\n`)
    await fs.rename(downloadedBinaryPath, binaryPath);
    await fs.rename(tempMetadataPath, metadataPath);
    return binaryPath;
  } finally {
    await releaseLock(lockDir);
  }
}
|
|
135
|
+
|
|
136
|
+
// Thin wrapper over child_process.spawn; exists so callers (and tests)
// have a single seam for launching the native binary.
export function spawnBinary(binaryPath, args = [], options = {}) {
  const child = spawn(binaryPath, args, options);
  return child;
}
|
|
139
|
+
|
|
140
|
+
// Render a multi-line diagnostic block describing the bootstrap attempt,
// printed to stderr when downloading/verifying the native binary fails.
export function formatBootstrapContext({ version, platform, archiveURL, destinationPath }) {
  const lines = [];
  lines.push("DeltaScope MCP launcher context:");
  lines.push(`  version: ${version}`);
  lines.push(`  platform: ${platform.os}-${platform.arch}`);
  lines.push(`  archive: ${archiveURL}`);
  lines.push(`  cache target: ${destinationPath}`);
  lines.push("  checksums: always verified against the official GitHub release checksums file");
  lines.push("  hint: if your network requires a proxy, set HTTP_PROXY / HTTPS_PROXY and NODE_USE_ENV_PROXY=1");
  return lines.join("\n");
}
|
package/lib/releases.js
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
import os from "node:os";
|
|
2
|
+
|
|
3
|
+
// Translate Node's os.platform()/os.arch() values into the platform and
// architecture labels used in DeltaScope release artifact names.
// Throws for anything other than darwin/linux on amd64/arm64.
export function resolvePlatform({ platform = os.platform(), arch = os.arch() } = {}) {
  const supportedPlatforms = new Set(["darwin", "linux"]);
  if (!supportedPlatforms.has(platform)) {
    throw new Error(`unsupported platform: ${platform}`);
  }

  // Map lookup replaces the switch; Map.get has no prototype-chain
  // fallbacks, so unknown inputs stay unknown.
  const archAliases = new Map([
    ["x64", "amd64"],
    ["amd64", "amd64"],
    ["arm64", "arm64"],
    ["aarch64", "arm64"]
  ]);
  const resolvedArch = archAliases.get(arch);
  if (resolvedArch === undefined) {
    throw new Error(`unsupported architecture: ${arch}`);
  }

  return { os: platform, arch: resolvedArch };
}
|
|
28
|
+
|
|
29
|
+
// Pick the DeltaScope release tag: a non-empty env override wins,
// otherwise the npm package version. The result is normalized to a
// leading-"v" release tag.
export function resolveDeltaScopeVersion({ packageVersion, envVersion = "" }) {
  const candidate = envVersion !== "" ? envVersion : packageVersion;
  if (!candidate) {
    throw new Error("could not resolve DeltaScope version");
  }
  if (candidate.startsWith("v")) {
    return candidate;
  }
  return `v${candidate}`;
}
|
|
37
|
+
|
|
38
|
+
// Release archive filename, e.g. "deltascope_0.8.1_linux_amd64.tar.gz".
// The leading "v" of the release tag is not part of the artifact name.
export function resolveArchiveName({ version, os, arch }) {
  const bareVersion = version.replace(/^v/, "");
  const stem = ["deltascope", bareVersion, os, arch].join("_");
  return `${stem}.tar.gz`;
}
|
|
41
|
+
|
|
42
|
+
// Release checksums filename, e.g. "deltascope_0.8.1_checksums.txt";
// the tag's leading "v" is stripped to match the artifact naming.
export function resolveChecksumsName({ version }) {
  const bareVersion = version.replace(/^v/, "");
  return `deltascope_${bareVersion}_checksums.txt`;
}
|
|
45
|
+
|
|
46
|
+
/**
 * Build the download URL for a release archive.
 *
 * @param {object} opts
 * @param {string} [opts.repo="Fanduzi/DeltaScope"] - GitHub "owner/name" for the default URL.
 * @param {string} [opts.baseURL=""] - optional mirror base; when non-empty, the
 *   release version and archive name are appended to it instead of the GitHub URL.
 * @param {string} opts.version - release tag (e.g. "v0.8.1").
 * @param {string} opts.os - release OS label ("linux" | "darwin").
 * @param {string} opts.arch - release arch label ("amd64" | "arm64").
 * @returns {string} fully-qualified archive URL.
 */
export function resolveArchiveURL({ repo = "Fanduzi/DeltaScope", baseURL = "", version, os, arch }) {
  // Fix: baseURL used to be read via `arguments[0].baseURL`, bypassing the
  // destructured parameter list — invisible in the signature and an
  // `arguments` anti-pattern. Destructuring with a default is equivalent
  // for all callers and self-documenting.
  const archiveName = resolveArchiveName({ version, os, arch });
  if (baseURL) {
    // Strip any run of trailing slashes so overrides with or without one work.
    return `${baseURL.replace(/\/+$/, "")}/${version}/${archiveName}`;
  }
  return `https://github.com/${repo}/releases/download/${version}/${archiveName}`;
}
|
|
53
|
+
|
|
54
|
+
// URL of the release checksums file. Unlike the archive URL, this is
// always built from the GitHub repository — there is no base-URL override —
// so mirror settings cannot redirect checksum verification.
export function resolveChecksumsURL({ repo = "Fanduzi/DeltaScope", version }) {
  const fileName = resolveChecksumsName({ version });
  const releaseBase = `https://github.com/${repo}/releases/download`;
  return `${releaseBase}/${version}/${fileName}`;
}
|
package/package.json
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@fanduzi/deltascope-mcp",
|
|
3
|
+
"version": "0.8.1",
|
|
4
|
+
"description": "Launcher package for the DeltaScope MCP stdio server",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"repository": {
|
|
7
|
+
"type": "git",
|
|
8
|
+
"url": "git+https://github.com/Fanduzi/DeltaScope.git"
|
|
9
|
+
},
|
|
10
|
+
"publishConfig": {
|
|
11
|
+
"access": "public"
|
|
12
|
+
},
|
|
13
|
+
"bin": {
|
|
14
|
+
"deltascope-mcp": "bin/deltascope-mcp.js"
|
|
15
|
+
},
|
|
16
|
+
"files": [
|
|
17
|
+
"bin",
|
|
18
|
+
"lib",
|
|
19
|
+
"README.md"
|
|
20
|
+
],
|
|
21
|
+
"scripts": {
|
|
22
|
+
"test": "node --test"
|
|
23
|
+
},
|
|
24
|
+
"engines": {
|
|
25
|
+
"node": ">=24"
|
|
26
|
+
},
|
|
27
|
+
"keywords": [
|
|
28
|
+
"mcp",
|
|
29
|
+
"deltascope",
|
|
30
|
+
"sql",
|
|
31
|
+
"audit"
|
|
32
|
+
],
|
|
33
|
+
"license": "Apache-2.0"
|
|
34
|
+
}
|