@alcyone-labs/arg-parser 2.2.1 → 2.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +146 -20
- package/dist/assets/.dxtignore.template +0 -1
- package/dist/core/ArgParserBase.d.ts.map +1 -1
- package/dist/dxt/DxtGenerator-testUtils.d.ts +22 -0
- package/dist/dxt/DxtGenerator-testUtils.d.ts.map +1 -0
- package/dist/dxt/DxtGenerator.d.ts +9 -67
- package/dist/dxt/DxtGenerator.d.ts.map +1 -1
- package/dist/index.cjs +367 -1382
- package/dist/index.cjs.map +1 -1
- package/dist/index.min.mjs +6059 -6986
- package/dist/index.min.mjs.map +1 -1
- package/dist/index.mjs +367 -1382
- package/dist/index.mjs.map +1 -1
- package/package.json +2 -3
- package/dist/assets/tsdown.dxt.config.ts +0 -37
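The diff below centers on two system flags that appear throughout dist/index.mjs: --s-build-dxt (the following argument, when present, is the output directory, defaulting to "./dxt") and the new --s-with-node-modules, which expects a hoisted production install. The sketch below is illustrative only: the file name and the options object passed to ArgParser.withMcp() are hypothetical, while the flags and the pnpm command are taken verbatim from the diff.

```js
// Minimal sketch, not from this package's documentation.
// Assumes the named ArgParser export and an entry file called my-cli.mjs;
// the generator in this diff locates the CLI via process.argv[1].
import { ArgParser } from "@alcyone-labs/arg-parser";

const cli = ArgParser.withMcp({
  // appName / flags / tools configuration is hypothetical here
});

cli.parse(process.argv.slice(2));

// Per handleBuildDxtFlag in the diff, a DXT build would then be invoked roughly as:
//   pnpm install --prod --node-linker=hoisted   # required before --s-with-node-modules
//   node my-cli.mjs --s-build-dxt ./dxt --s-with-node-modules
// The argument after --s-build-dxt is the output directory (default "./dxt").
```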
package/dist/index.mjs
CHANGED
@@ -1197,86 +1197,22 @@ function createOutputSchema(pattern2) {
 }
 return OutputSchemaPatterns.successError();
 }
-class
-constructor(argParserInstance) {
+class DxtGeneratorTestUtils {
+constructor(argParserInstance, extractMcpServerInfo, handleExit) {
 this.argParserInstance = argParserInstance;
-
-
-* Helper method to handle exit logic based on autoExit setting
-*/
-_handleExit(exitCode, message, type2, data2) {
-const result = {
-success: exitCode === 0,
-exitCode,
-message,
-type: type2 || (exitCode === 0 ? "success" : "error"),
-shouldExit: true,
-data: data2
-};
-if (this.argParserInstance.getAutoExit() && typeof process === "object" && typeof process.exit === "function") {
-process.exit(exitCode);
-}
-return result;
-}
-/**
-* Handles the --s-build-dxt system flag to generate DXT packages for MCP servers
-*/
-async handleBuildDxtFlag(processArgs, buildDxtIndex) {
-var _a, _b, _c;
-try {
-const isTestMode = process.env["NODE_ENV"] === "test" || ((_a = process.argv[0]) == null ? void 0 : _a.includes("vitest")) || ((_b = process.argv[1]) == null ? void 0 : _b.includes("vitest")) || ((_c = process.argv[1]) == null ? void 0 : _c.includes("tinypool"));
-if (isTestMode) {
-return await this.handleTestModeDxtGeneration(
-processArgs,
-buildDxtIndex
-);
-}
-const entryPointFile = process.argv[1];
-if (!entryPointFile || !fs.existsSync(entryPointFile)) {
-console.error(
-simpleChalk.red(`Error: Entry point file not found: ${entryPointFile}`)
-);
-return this._handleExit(1, "Entry point file not found", "error");
-}
-const outputDir = processArgs[buildDxtIndex + 1] || "./dxt";
-console.log(
-simpleChalk.cyan(
-`
-🔧 Building DXT package for entry point: ${path.basename(entryPointFile)}`
-)
-);
-console.log(simpleChalk.gray(`Output directory: ${outputDir}`));
-await this.buildDxtWithTsdown(entryPointFile, outputDir);
-console.log(simpleChalk.green(`
-✅ DXT package generation completed!`));
-return this._handleExit(
-0,
-"DXT package generation completed",
-"success",
-{ entryPoint: entryPointFile, outputDir }
-);
-} catch (error) {
-console.error(
-simpleChalk.red(
-`Error generating DXT package: ${error instanceof Error ? error.message : String(error)}`
-)
-);
-return this._handleExit(
-1,
-`Error generating DXT package: ${error instanceof Error ? error.message : String(error)}`,
-"error"
-);
-}
+this.extractMcpServerInfo = extractMcpServerInfo;
+this.handleExit = handleExit;
 }
 /**
 * Handles DXT generation in test mode by creating mock DXT package structure
+* This method creates simplified mock files for testing purposes
 */
 async handleTestModeDxtGeneration(processArgs, buildDxtIndex) {
 try {
 const outputDir = processArgs[buildDxtIndex + 1] || "./dxt-packages";
 const mcpTools = this.argParserInstance.toMcpTools();
 if (mcpTools.length === 0) {
-return this.
+return this.handleExit(0, "No MCP servers found", "success");
 }
 const serverInfo = this.extractMcpServerInfo();
 const folderName = `${serverInfo.name.replace(/[^a-zA-Z0-9_-]/g, "_")}-dxt`;
@@ -1332,17 +1268,12 @@ echo "Mock DXT build script for ${serverInfo.name}"`;
 path.join(buildDir, "build-dxt-package.sh"),
 buildScript
 );
-return this.
-
-
-
-{
-entryPoint: "test-mode",
-outputDir: buildDir
-}
-);
+return this.handleExit(0, "DXT package generation completed", "success", {
+entryPoint: "test-mode",
+outputDir: buildDir
+});
 } catch (error) {
-return this.
+return this.handleExit(
 1,
 `Test mode DXT generation failed: ${error instanceof Error ? error.message : String(error)}`,
 "error"
@@ -1350,96 +1281,155 @@ echo "Mock DXT build script for ${serverInfo.name}"`;
 }
 }
 /**
-*
-*
+* Checks if the current environment is in test mode
+* Used to determine whether to use test utilities or production code
 */
-
-
-
-
-
-
-
-
-
-
-
-
-
-const
-
-
-
-
-
-
-
-
-
-JSON.stringify(manifest, null, 2)
-);
-this.addOriginalCliToFolder(buildDir);
-const bundledCliPath = await this.bundleOriginalCliWithTsdown(serverDir);
-const serverScript = this.createServerScript(serverInfo, bundledCliPath);
-const serverScriptPath = path.join(serverDir, "index.mjs");
-fs.writeFileSync(serverScriptPath, serverScript);
-try {
-fs.chmodSync(serverScriptPath, 493);
-} catch (error) {
-console.warn(
-"⚠ Could not set executable permission on server script:",
-error instanceof Error ? error.message : String(error)
-);
-}
-const packageJson = this.createDxtPackageJson(serverInfo);
-fs.writeFileSync(
-path.join(buildDir, "package.json"),
-JSON.stringify(packageJson, null, 2)
-);
-const readme = this.createDxtReadme(serverInfo);
-fs.writeFileSync(path.join(buildDir, "README.md"), readme);
-const buildScript = this.createSimpleBuildScript(serverInfo);
-fs.writeFileSync(path.join(buildDir, "build-dxt.sh"), buildScript);
-const dxtIgnore = this.createDxtIgnore();
-fs.writeFileSync(path.join(buildDir, ".dxtignore"), dxtIgnore);
-try {
-fs.chmodSync(path.join(buildDir, "build-dxt.sh"), 493);
-} catch (error) {
+static isTestMode() {
+var _a, _b, _c;
+return process.env["NODE_ENV"] === "test" || ((_a = process.argv[0]) == null ? void 0 : _a.includes("vitest")) || ((_b = process.argv[1]) == null ? void 0 : _b.includes("vitest")) || ((_c = process.argv[1]) == null ? void 0 : _c.includes("tinypool"));
+}
+}
+class DxtGenerator {
+constructor(argParserInstance) {
+this.argParserInstance = argParserInstance;
+}
+/**
+* Helper method to handle exit logic based on autoExit setting
+*/
+_handleExit(exitCode, message, type2, data2) {
+const result = {
+success: exitCode === 0,
+exitCode,
+message,
+type: type2 || (exitCode === 0 ? "success" : "error"),
+shouldExit: true,
+data: data2
+};
+if (this.argParserInstance.getAutoExit() && typeof process === "object" && typeof process.exit === "function") {
+process.exit(exitCode);
 }
-
-console.log(
-simpleChalk.gray(` Server: ${serverInfo.name} v${serverInfo.version}`)
-);
-console.log(simpleChalk.gray(` Tools: ${tools.length} tool(s)`));
-console.log(simpleChalk.gray(` Location: ${buildDir}`));
-console.log(
-simpleChalk.cyan(`
-📦 Creating DXT package using Anthropic's dxt pack...`)
-);
-console.log(simpleChalk.cyan(`
-📋 Manual steps to create your DXT package:`));
-console.log(simpleChalk.white(` cd ${path.relative(process.cwd(), buildDir)}`));
-console.log(simpleChalk.white(` ./build-dxt.sh`));
+return result;
 }
 /**
-*
+* Handles the --s-build-dxt system flag to generate DXT packages for MCP servers
 */
-
+async handleBuildDxtFlag(processArgs, buildDxtIndex) {
 try {
-
-
-
-
-
-
-
-
-
-
+if (DxtGeneratorTestUtils.isTestMode()) {
+const testUtils = new DxtGeneratorTestUtils(
+this.argParserInstance,
+() => this.extractMcpServerInfo(),
+(exitCode, message, type2, data2) => this._handleExit(exitCode, message, type2, data2)
+);
+return await testUtils.handleTestModeDxtGeneration(
+processArgs,
+buildDxtIndex
+);
+}
+const withNodeModules = processArgs.includes("--s-with-node-modules");
+if (withNodeModules) {
+console.log(
+simpleChalk.yellow(
+"🗂️ --s-with-node-modules detected: will include node_modules in bundle"
+)
+);
+const nodeModulesPath = path.resolve("./node_modules");
+if (!fs.existsSync(nodeModulesPath)) {
+console.error(
+simpleChalk.red(
+"❌ Error: node_modules directory not found. Please run the installation command first."
+)
+);
+console.log(
+simpleChalk.cyan(
+"💡 Required command: pnpm install --prod --node-linker=hoisted"
+)
+);
+return this._handleExit(
+1,
+"node_modules directory not found",
+"error"
+);
+}
+try {
+const nodeModulesContents = fs.readdirSync(nodeModulesPath);
+const hasNestedNodeModules = nodeModulesContents.filter((item) => !item.startsWith(".") && !item.startsWith("@")).some((item) => {
+const itemPath = path.join(nodeModulesPath, item);
+try {
+return fs.statSync(itemPath).isDirectory() && fs.existsSync(path.join(itemPath, "node_modules"));
+} catch {
+return false;
+}
+});
+if (hasNestedNodeModules) {
+console.warn(
+simpleChalk.yellow(
+"⚠️ Warning: Detected nested node_modules. For best results, ensure hoisted installation:"
+)
+);
+console.log(
+simpleChalk.cyan(
+" rm -rf node_modules && pnpm install --prod --node-linker=hoisted"
+)
+);
+} else {
+console.log(
+simpleChalk.green(
+"✅ node_modules appears properly hoisted and ready for bundling"
+)
+);
+}
+} catch (error) {
+console.warn(
+simpleChalk.yellow(
+`⚠️ Could not validate node_modules structure: ${error instanceof Error ? error.message : String(error)}`
+)
+);
+}
+console.log(
+simpleChalk.gray(
+"💡 This will create a fully autonomous DXT with all native dependencies included"
+)
+);
 }
+const entryPointFile = process.argv[1];
+if (!entryPointFile || !fs.existsSync(entryPointFile)) {
+console.error(
+simpleChalk.red(`Error: Entry point file not found: ${entryPointFile}`)
+);
+return this._handleExit(1, "Entry point file not found", "error");
+}
+let outputDir = processArgs[buildDxtIndex + 1] || "./dxt";
+if (outputDir.startsWith("--s-")) outputDir = "./dxt";
+console.log(
+simpleChalk.cyan(
+`
+🔧 Building DXT package for entry point: ${entryPointFile}`
+)
+);
+console.log(simpleChalk.gray(`Output directory: ${outputDir}`));
+console.log(simpleChalk.gray(`Entrypoint file: ${entryPointFile}`));
+await this.buildDxtWithTsdown(entryPointFile, outputDir, withNodeModules);
+console.log(simpleChalk.green(`
+✅ DXT package generation completed!`));
+return this._handleExit(
+0,
+"DXT package generation completed",
+"success",
+{ entryPoint: entryPointFile, outputDir }
+);
 } catch (error) {
+console.error(
+simpleChalk.red(
+`Error generating DXT package: ${error instanceof Error ? error.message : String(error)}`
+)
+);
+return this._handleExit(
+1,
+`Error generating DXT package: ${error instanceof Error ? error.message : String(error)}`,
+"error"
+);
 }
-return null;
 }
 /**
 * Extracts server information from MCP configuration
@@ -1524,518 +1514,9 @@ echo "Mock DXT build script for ${serverInfo.name}"`;
 ];
 }
 }
-createDxtManifest(serverInfo, tools, mcpSubCommand, logoFilename) {
-var _a;
-const packageInfo = this.readPackageJsonInfo();
-let author = serverInfo.author;
-if (!author && (packageInfo == null ? void 0 : packageInfo.author)) {
-if (typeof packageInfo.author === "string") {
-const match = packageInfo.author.match(/^([^<]+?)(?:\s*<([^>]+)>)?$/);
-if (match) {
-author = {
-name: match[1].trim(),
-email: (_a = match[2]) == null ? void 0 : _a.trim()
-};
-} else {
-author = { name: packageInfo.author };
-}
-} else {
-author = packageInfo.author;
-}
-}
-if (!author) {
-throw new Error(
-"DXT manifest requires author information. Please provide it via withMcp() serverInfo.author, addMcpSubCommand serverInfo.author, or in package.json"
-);
-}
-const cliArgs = this.generateCliArgsForDxt(mcpSubCommand);
-const { envVars, userConfig } = this.generateEnvAndUserConfig();
-const manifest = {
-dxt_version: "0.1",
-name: serverInfo.name,
-version: serverInfo.version,
-description: serverInfo.description || "MCP server generated from ArgParser",
-author,
-server: {
-type: "node",
-entry_point: "server/index.mjs",
-mcp_config: {
-command: "node",
-args: ["${__dirname}/server/index.mjs", ...cliArgs],
-env: envVars
-}
-},
-tools: tools.map((tool) => ({
-name: tool.name,
-description: tool.description
-}))
-};
-if (logoFilename) {
-manifest.icon = logoFilename;
-}
-if (userConfig && Object.keys(userConfig).length > 0) {
-manifest.user_config = userConfig;
-}
-if (serverInfo.repository || (packageInfo == null ? void 0 : packageInfo.repository)) {
-manifest.repository = serverInfo.repository || (packageInfo == null ? void 0 : packageInfo.repository);
-}
-if (serverInfo.license || (packageInfo == null ? void 0 : packageInfo.license)) {
-manifest.license = serverInfo.license || (packageInfo == null ? void 0 : packageInfo.license);
-}
-if (serverInfo.homepage || (packageInfo == null ? void 0 : packageInfo.homepage)) {
-manifest.homepage = serverInfo.homepage || (packageInfo == null ? void 0 : packageInfo.homepage);
-}
-return manifest;
-}
-validateDxtManifest(manifest) {
-const errors = [];
-if (!manifest.dxt_version)
-errors.push("Missing required field: dxt_version");
-if (!manifest.name) errors.push("Missing required field: name");
-if (!manifest.version) errors.push("Missing required field: version");
-if (!manifest.server) errors.push("Missing required field: server");
-if (!manifest.author) errors.push("Missing required field: author");
-if (manifest.server) {
-if (!manifest.server.type)
-errors.push("Missing required field: server.type");
-if (!manifest.server.entry_point)
-errors.push("Missing required field: server.entry_point");
-if (!manifest.server.mcp_config)
-errors.push("Missing required field: server.mcp_config");
-if (manifest.server.mcp_config) {
-if (!manifest.server.mcp_config.command)
-errors.push("Missing required field: server.mcp_config.command");
-if (!manifest.server.mcp_config.args || !Array.isArray(manifest.server.mcp_config.args)) {
-errors.push(
-"Missing or invalid field: server.mcp_config.args (must be array)"
-);
-}
-}
-}
-if (manifest.author && typeof manifest.author === "object") {
-if (!manifest.author.name)
-errors.push("Missing required field: author.name");
-}
-if (manifest.dxt_version && manifest.dxt_version !== "0.1") {
-errors.push("Unsupported dxt_version: only '0.1' is currently supported");
-}
-if (errors.length > 0) {
-throw new Error(
-`DXT manifest validation failed:
-${errors.map((e) => ` - ${e}`).join("\n")}`
-);
-}
-}
-createServerScript(serverInfo, bundledCliPath) {
-const cliImportPath = bundledCliPath || "original-cli.mjs";
-return `#!/usr/bin/env node
-
-// Generated MCP server for ${serverInfo.name}
-// This server uses @alcyone-labs/arg-parser's built-in MCP functionality for full protocol compliance
-
-// FIRST: Set up MCP-safe logging to prevent STDOUT contamination
-import { createMcpLogger } from '@alcyone-labs/simple-mcp-logger';
-
-// Auto-detect MCP mode and hijack console to prevent protocol corruption
-const mcpLogger = createMcpLogger('${serverInfo.name}');
-globalThis.console = mcpLogger;
-
-// Now import the CLI which already has MCP functionality configured
-import originalCli from './${cliImportPath}';
-
-// Server configuration
-const serverInfo = ${JSON.stringify(serverInfo, null, 2)};
-
-// Use mcpError for debugging output (safe STDERR, visible in client logs)
-console.error(\`MCP Server: \${serverInfo.name} v\${serverInfo.version}\`);
-console.error(\`Description: \${serverInfo.description}\`);
-console.error(\`Generated from @alcyone-labs/arg-parser with built-in MCP functionality\`);
-${bundledCliPath ? "console.error(`Using bundled CLI for autonomous execution`);" : ""}
-
-// The original CLI has MCP functionality configured via withMcp() or addMcpSubCommand()
-// We use the centralized --s-mcp-serve system flag to start the unified MCP server
-
-// Start the MCP server using the library's built-in centralized serving
-// This works with both withMcp() configuration and legacy addMcpSubCommand() setups
-originalCli.parse(['--s-mcp-serve']);
-`;
-}
-createDxtPackageJson(serverInfo) {
-const useLocalBuild = process.env["LOCAL_BUILD"] === "1";
-const argParserDependency = useLocalBuild ? "file:../../arg-parser-local.tgz" : "^1.3.0";
-let originalDependencies = {};
-try {
-const originalPackageJsonPath = path.join(process.cwd(), "package.json");
-if (fs.existsSync(originalPackageJsonPath)) {
-const originalPackageJson = JSON.parse(
-fs.readFileSync(originalPackageJsonPath, "utf8")
-);
-originalDependencies = originalPackageJson.dependencies || {};
-}
-} catch (error) {
-console.warn(
-"⚠ Could not read original package.json for dependencies:",
-error instanceof Error ? error.message : String(error)
-);
-}
-const dependencies2 = {
-...originalDependencies,
-"@alcyone-labs/arg-parser": argParserDependency,
-"@alcyone-labs/simple-mcp-logger": "^1.0.0",
-"@modelcontextprotocol/sdk": "^1.15.0",
-zod: "^3.22.4"
-};
-const devDependencies = {
-tsup: "^8.3.5"
-};
-Object.keys(dependencies2).forEach((key) => {
-const depValue = dependencies2[key];
-if (key !== "@alcyone-labs/arg-parser" && typeof depValue === "string" && depValue.startsWith("file:")) {
-delete dependencies2[key];
-console.warn(
-`⚠ Removed file: dependency ${key} from DXT package (not suitable for distribution)`
-);
-}
-});
-return {
-name: serverInfo.name,
-version: serverInfo.version,
-description: serverInfo.description,
-main: "server/index.mjs",
-type: "module",
-scripts: {
-start: "node server/index.mjs",
-"build-dxt": "./build-dxt.sh"
-},
-dependencies: dependencies2,
-devDependencies,
-engines: {
-node: ">=22.0.0"
-},
-author: serverInfo.author,
-license: serverInfo.license || "MIT",
-repository: serverInfo.repository
-};
-}
-/**
-* Creates a .dxtignore file to exclude build artifacts and unnecessary files
-*/
-createDxtIgnore() {
-return `# DXT ignore file - exclude these files from the DXT package
-# Generated by @alcyone-labs/arg-parser
-
-# Build artifacts and logs
-*.log
-*.tmp
-temp-dxt-build/
-
-# Build scripts (not needed in final package)
-build-dxt.sh
-arg-parser-local.tgz
-tsup.config.autonomous.js
-tsdown.config.mjs
-
-# Original files (replaced by bundled autonomous build)
-server/index.original.mjs
-server/*.autonomous.mjs
-
-# NOTE: server/original-cli.mjs is NOT excluded because it's needed for the MCP server to function
-# The bundled version (if created) will be server/original-cli.bundled.mjs
-
-# NOTE: node_modules/ is NOT excluded because TSDown bundling may not be 100% autonomous
-# If bundling is successful, node_modules won't be needed, but we include it as fallback
-# The bundled server/index.mjs should be fully autonomous and not require node_modules
-
-# Development files
-.git/
-.gitignore
-.env
-.env.*
-
-# OS files
-.DS_Store
-Thumbs.db
-
-# IDE files
-.vscode/
-.idea/
-*.swp
-*.swo
-`;
-}
-/**
-* Creates a simple build script that uses TSDown bundling and Anthropic's dxt pack
-*/
-createSimpleBuildScript(serverInfo) {
-return `#!/bin/bash
-
-# Simple DXT Build Script for ${serverInfo.name}
-# Generated by @alcyone-labs/arg-parser with TSDown bundling
-
-set -e
-
-echo "📦 Creating DXT package for ${serverInfo.name}..."
-
-# Step 1: Make server executable (required for MCP)
-echo "🔧 Making server executable..."
-chmod +x server/index.mjs
-
-# Step 2: Handle local development dependencies
-if grep -q "file:.*arg-parser-local.tgz" package.json; then
-echo "🔧 Checking for local package tarball..."
-
-# Check if the tarball exists in the parent directory
-if [ -f "../../arg-parser-local.tgz" ]; then
-echo "✅ Found local package tarball: ../../arg-parser-local.tgz"
-else
-echo "⚠️ Local tarball not found, falling back to npm registry"
-echo "💡 To use local build, run: cd /path/to/arg-parser && npm pack && cp *.tgz examples/community/canny-cli/"
-
-# Replace with npm version
-sed -i.bak 's|"file:.*arg-parser-local.tgz"|"^1.3.0"|g' package.json 2>/dev/null || \\
-sed -i 's|"file:.*arg-parser-local.tgz"|"^1.3.0"|g' package.json
-fi
-fi
-
-# Step 3: Install dependencies (for runtime only, bundling was done during generation)
-echo "📦 Installing dependencies..."
-npm install
-
-# Step 4: Validate manifest
-echo "🔍 Validating DXT manifest..."
-if command -v npx >/dev/null 2>&1; then
-if npx @anthropic-ai/dxt validate manifest.json; then
-echo "✅ DXT manifest validation passed"
-else
-echo "❌ DXT manifest validation failed"
-exit 1
-fi
-else
-echo "⚠️ npx not found, skipping DXT validation"
-fi
-
-# Step 5: Create DXT package using Anthropic's official packer
-echo "📦 Creating DXT package..."
-if command -v npx >/dev/null 2>&1; then
-# Use dxt pack directly with .dxtignore for clean packaging
-npx @anthropic-ai/dxt pack . "${serverInfo.name}.dxt"
-else
-# Fallback to standard zip if npx not available
-echo "⚠️ npx not found, using zip fallback"
-zip -r "${serverInfo.name}.dxt" . -x "node_modules/*" "*.log" ".git/*" "build-dxt.sh" "temp-dxt-build/*"
-fi
-
-# Step 6: Sign the DXT package (optional)
-echo "🔐 Signing DXT package..."
-if command -v npx >/dev/null 2>&1 && command -v openssl >/dev/null 2>&1; then
-if npx @anthropic-ai/dxt sign "${serverInfo.name}.dxt" --self-signed; then
-echo "✅ DXT package signed successfully"
-else
-echo "⚠️ DXT signing failed, but package is still usable"
-fi
-else
-echo "⚠️ npx or openssl not found, skipping DXT signing"
-fi
-
-echo "✅ DXT package created: ${serverInfo.name}.dxt"
-echo "🎯 This package includes bundled CLI with all dependencies!"
-echo ""
-echo "🎉 Installation Instructions:"
-echo "You can now take the file '${serverInfo.name}.dxt' and install it on Claude Desktop"
-echo "or supporting applications by using drag & drop on the Extensions Settings page,"
-echo "or directly pointing the file selector to this file."
-echo ""
-echo "📁 DXT file location: $(pwd)/${serverInfo.name}.dxt"
-`;
-}
-createDxtReadme(serverInfo) {
-return `# ${serverInfo.name}
-
-${serverInfo.description}
-
-## Installation
-
-This is a Desktop Extension (DXT) package generated from @alcyone-labs/arg-parser.
-
-### Automatic Installation
-Open this .dxt file with Claude Desktop or other DXT-compatible applications for single-click installation.
-
-### Manual Installation
-1. Extract the .dxt file (it's a ZIP archive)
-2. Run \`npm install\` to install dependencies
-3. Start the server with \`npm start\`
-
-## Tools
-
-This MCP server provides the following tools:
-${this.generateMcpToolsForDxt().map((tool) => `- **${tool.name}**: ${tool.description}`).join("\n")}
-
-## Building DXT Packages
-
-To rebuild the DXT package:
-
-### Prerequisites
-- Node.js 18+ installed
-- npm package manager
-
-### Build Steps
-
-\`\`\`bash
-# 1. Install dependencies
-npm install
-
-# 2. Build DXT package
-npm run build-dxt
-# or
-./build-dxt.sh
-
-# 3. The build script will:
-# - Install dependencies
-# - Validate the manifest
-# - Create the DXT package using Anthropic's official packer
-# - Sign the package (optional)
-\`\`\`
-
-### Manual Build Process
-
-If the automated build script doesn't work, you can build manually:
-
-\`\`\`bash
-# 1. Install dependencies
-npm install
-
-# 2. Create DXT package
-npx @anthropic-ai/dxt pack . ${serverInfo.name}.dxt
-
-# 2. Update manifest.json
-# Change "entry_point" from "server/index.js" to "dist-autonomous/server.cjs"
-
-# 3. Create new DXT with bundled server
-# Replace server/index.js with dist-autonomous/server.cjs
-# Remove package.json dependencies (optional)
-\`\`\`
-
-### Result
-The resulting DXT package will be completely autonomous and won't require \`npm install\`.
-
-## Generated Information
-
-- **Generator**: @alcyone-labs/arg-parser v1.3.0
-- **Generated**: ${(/* @__PURE__ */ new Date()).toISOString()}
-- **DXT Version**: 0.1
-
-## Note
-
-This is a simplified DXT package. For full functionality and the latest features, use the original CLI directly.
-For autonomous packages, follow the build instructions above.
-`;
-}
 /**
 * Maps ArgParser flag types to DXT user config types
 */
-mapFlagTypeToUserConfigType(flagType) {
-if (typeof flagType === "function") {
-if (flagType === String) return "string";
-if (flagType === Number) return "number";
-if (flagType === Boolean) return "boolean";
-if (flagType === Array) return "array";
-if (flagType === Object) return "object";
-return "string";
-}
-switch (String(flagType).toLowerCase()) {
-case "string":
-return "string";
-case "number":
-return "number";
-case "boolean":
-return "boolean";
-case "table":
-return "array";
-case "array":
-return "array";
-case "object":
-return "object";
-default:
-return "string";
-}
-}
-/**
-* Generates CLI arguments for DXT manifest based on ArgParser flags
-*/
-generateCliArgsForDxt(_mcpSubCommand) {
-const args = [];
-args.push("--s-mcp-serve");
-return args;
-}
-/**
-* Generates environment variables and user config for DXT manifest
-*/
-generateEnvAndUserConfig() {
-const envVars = {};
-const userConfig = {};
-const flags = this.argParserInstance.flags || [];
-for (const flag of flags) {
-const flagName = flag["name"];
-if (flagName === "help" || flagName === "mcp") continue;
-if (flag["env"]) {
-const envVarName = flag["env"];
-envVars[envVarName] = `\${user_config.${envVarName}}`;
-userConfig[envVarName] = {
-type: this.mapFlagTypeToUserConfigType(flag["type"]),
-title: this.generateUserConfigTitle(envVarName),
-description: flag["description"] || `${envVarName} environment variable`,
-required: true,
-// Always require env vars in user_config for better UX
-sensitive: this.isSensitiveField(envVarName)
-};
-}
-}
-if (typeof this.argParserInstance.getTools === "function") {
-const tools = this.argParserInstance.getTools();
-for (const [, toolConfig] of tools) {
-const toolFlags = toolConfig.flags || [];
-for (const flag of toolFlags) {
-const flagName = flag["name"];
-if (flagName === "help" || flagName.startsWith("s-")) continue;
-if (flag["env"]) {
-const envVarName = flag["env"];
-if (!envVars[envVarName]) {
-envVars[envVarName] = `\${user_config.${envVarName}}`;
-userConfig[envVarName] = {
-type: this.mapFlagTypeToUserConfigType(flag["type"]),
-title: this.generateUserConfigTitle(envVarName),
-description: flag["description"] || `${envVarName} environment variable`,
-required: true,
-// Always require env vars in user_config for better UX
-sensitive: this.isSensitiveField(envVarName)
-};
-}
-}
-}
-}
-}
-return { envVars, userConfig };
-}
-/**
-* Generates a user-friendly title for user config fields
-*/
-generateUserConfigTitle(flagName) {
-return flagName.split(/[-_]/).map((word) => word.charAt(0).toUpperCase() + word.slice(1).toLowerCase()).join(" ");
-}
-/**
-* Checks if a field should be marked as sensitive in user config
-*/
-isSensitiveField(fieldName) {
-const sensitivePatterns = [
-/key/i,
-/token/i,
-/secret/i,
-/password/i,
-/auth/i,
-/credential/i
-];
-return sensitivePatterns.some((pattern2) => pattern2.test(fieldName));
-}
 /**
 * Adds the logo to the build folder if available
 * @returns The filename of the logo that was added, or undefined if no logo was added
@@ -2055,7 +1536,10 @@ For autonomous packages, follow the build instructions above.
 const urlPath = new URL(customLogo).pathname;
 const urlFilename = path.basename(urlPath);
 if (urlFilename && urlFilename.includes(".")) {
-
+const ext = path.extname(urlFilename);
+logoFilename = `logo${ext}`;
+} else {
+logoFilename = "logo.jpg";
 }
 console.log("✓ Downloaded logo from URL");
 } else {
@@ -2082,7 +1566,8 @@ For autonomous packages, follow the build instructions above.
 }
 if (fs.existsSync(logoPath)) {
 logoBuffer = fs.readFileSync(logoPath);
-
+const ext = path.extname(logoPath);
+logoFilename = `logo${ext}`;
 console.log("✓ Added custom logo from local file");
 } else {
 console.warn(`⚠ Custom logo file not found: ${logoPath}`);
@@ -2111,9 +1596,29 @@ For autonomous packages, follow the build instructions above.
 "logo_1_small.jpg"
 );
 }
+if (!fs.existsSync(logoPath)) {
+logoPath = path.join(
+process.cwd(),
+"node_modules",
+"@alcyone-labs",
+"arg-parser",
+"dist",
+"assets",
+"logo_1_small.jpg"
+);
+}
+if (!fs.existsSync(logoPath)) {
+logoPath = path.join(
+process.cwd(),
+"dist",
+"assets",
+"logo_1_small.jpg"
+);
+}
 if (fs.existsSync(logoPath)) {
 logoBuffer = fs.readFileSync(logoPath);
-
+const ext = path.extname(logoPath);
+logoFilename = `logo${ext}`;
 console.log("✓ Added default logo to build folder");
 } else {
 console.warn(
@@ -2135,317 +1640,82 @@ For autonomous packages, follow the build instructions above.
 return void 0;
 }
 }
-/**
-* Processes CLI source code to replace global console with MCP-compliant Logger
-*/
-processCliSourceForMcp(cliSource) {
-const consoleReplacement = `import { createMcpLogger } from '@alcyone-labs/arg-parser';
-
-// Replace global console with MCP-compliant logger for DXT packages
-const mcpLogger = createMcpLogger('[CLI]');
-const originalConsole = globalThis.console;
-globalThis.console = {
-...originalConsole,
-log: (...args) => mcpLogger.info(...args),
-info: (...args) => mcpLogger.info(...args),
-warn: (...args) => mcpLogger.warn(...args),
-debug: (...args) => mcpLogger.debug(...args),
-// Keep error/trace/etc as-is since they use stderr (MCP-compliant)
-error: originalConsole.error,
-trace: originalConsole.trace,
-assert: originalConsole.assert,
-clear: originalConsole.clear,
-count: originalConsole.count,
-countReset: originalConsole.countReset,
-dir: originalConsole.dir,
-dirxml: originalConsole.dirxml,
-group: originalConsole.group,
-groupCollapsed: originalConsole.groupCollapsed,
-groupEnd: originalConsole.groupEnd,
-table: originalConsole.table,
-time: originalConsole.time,
-timeEnd: originalConsole.timeEnd,
-timeLog: originalConsole.timeLog,
-timeStamp: originalConsole.timeStamp,
-};
-
-`;
-const lines = cliSource.split("\n");
-let lastImportIndex = -1;
-for (let i = 0; i < lines.length; i++) {
-const line = lines[i].trim();
-if (line.startsWith("import ") && line.includes("from")) {
-lastImportIndex = i;
-} else if (line && !line.startsWith("//") && !line.startsWith("/*") && lastImportIndex >= 0) {
-break;
-}
-}
-if (lastImportIndex >= 0) {
-lines.splice(
-lastImportIndex + 1,
-0,
-"",
-...consoleReplacement.trim().split("\n")
-);
-return lines.join("\n");
-} else {
-return consoleReplacement + cliSource;
-}
-}
-/**
-* Adds the original CLI source to the build folder for handler execution
-*/
-addOriginalCliToFolder(buildDir) {
-try {
-const appCommandName = this.argParserInstance.getAppCommandName();
-const appName = this.argParserInstance.getAppName();
-const possibleCliFiles = [
-// Current working directory common patterns
-path.join(process.cwd(), "index.js"),
-path.join(process.cwd(), "index.mjs"),
-path.join(process.cwd(), "cli.js"),
-path.join(process.cwd(), "cli.mjs"),
-path.join(process.cwd(), "main.js"),
-path.join(process.cwd(), "main.mjs"),
-// Look for files with the app command name
-path.join(process.cwd(), `${appCommandName}.js`),
-path.join(process.cwd(), `${appCommandName}.mjs`),
-// Look for files with the app command name (sanitized)
-path.join(
-process.cwd(),
-`${appCommandName.replace(/[^a-zA-Z0-9-]/g, "-")}.js`
-),
-path.join(
-process.cwd(),
-`${appCommandName.replace(/[^a-zA-Z0-9-]/g, "-")}.mjs`
-),
-// Look for files with app name patterns
-path.join(
-process.cwd(),
-`${appName.toLowerCase().replace(/\s+/g, "-")}-cli.js`
-),
-path.join(
-process.cwd(),
-`${appName.toLowerCase().replace(/\s+/g, "-")}-cli.mjs`
-),
-// Look for files with first word of app name + cli
-path.join(
-process.cwd(),
-`${appName.split(" ")[0].toLowerCase()}-cli.js`
-),
-path.join(
-process.cwd(),
-`${appName.split(" ")[0].toLowerCase()}-cli.mjs`
-)
-];
-let cliSourcePath = null;
-for (const filePath of possibleCliFiles) {
-if (fs.existsSync(filePath)) {
-cliSourcePath = filePath;
-break;
-}
-}
-if (cliSourcePath) {
-let cliSource = fs.readFileSync(cliSourcePath, "utf8");
-cliSource = cliSource.replace(
-/import\s*{\s*([^}]+)\s*}\s*from\s*['"][^'"]*\/dist\/index\.mjs['"];?/g,
-"import { $1 } from '@alcyone-labs/arg-parser';"
-);
-cliSource = cliSource.replace(
-/import\s+(\w+)\s+from\s*['"][^'"]*\/dist\/index\.mjs['"];?/g,
-"import $1 from '@alcyone-labs/arg-parser';"
-);
-cliSource = this.processCliSourceForMcp(cliSource);
-const parserVariableMatch = cliSource.match(
-/const\s+(\w+)\s*=\s*ArgParser\.withMcp\(/
-);
-if (parserVariableMatch) {
-const parserVariable = parserVariableMatch[1];
-cliSource += `
-
-// Export the parser instance for MCP server use
-export default ${parserVariable};
-
-// Add debugging for main execution
-console.error('[MCP-DEBUG] CLI source loaded, checking execution context...');
-console.error('[MCP-DEBUG] import.meta.url:', import.meta.url);
-console.error('[MCP-DEBUG] process.argv[1]:', process.argv[1]);
-
-// Ensure MCP server processes don't exit prematurely
-console.error('[MCP-DEBUG] Process argv:', process.argv);
-console.error('[MCP-DEBUG] Checking for serve command...');
-
-if (process.argv.includes('serve')) {
-console.error('[MCP-DEBUG] Detected serve command, setting up MCP server lifecycle...');
-
-// Override the original parse method to handle async MCP server
-const originalParse = ${parserVariable}.parse;
-${parserVariable}.parse = async function(args) {
-console.error('[MCP-DEBUG] Starting parse with args:', args);
-
-try {
-const result = originalParse.call(this, args);
-console.error('[MCP-DEBUG] Parse result:', typeof result, result?.constructor?.name);
-
-// If result is a Promise (MCP server), await it and keep process alive
-if (result && typeof result.then === 'function') {
-console.error('[MCP-DEBUG] Detected Promise result, awaiting...');
-const mcpResult = await result;
-console.error('[MCP-DEBUG] MCP server started, keeping process alive...');
-
-// Keep the process alive indefinitely for MCP server
-const keepAlive = setInterval(() => {
-// Do nothing, just keep the event loop alive
-}, 30000);
-
-// Handle graceful shutdown
-process.on('SIGINT', () => {
-console.error('[MCP-INFO] Received SIGINT, shutting down gracefully...');
-clearInterval(keepAlive);
-process.exit(0);
-});
-
-process.on('SIGTERM', () => {
-console.error('[MCP-INFO] Received SIGTERM, shutting down gracefully...');
-clearInterval(keepAlive);
-process.exit(0);
-});
-
-return mcpResult;
-} else {
-console.error('[MCP-DEBUG] Non-Promise result, returning normally');
-return result;
-}
-} catch (error) {
-console.error('[MCP-ERROR] Error in parse:', error);
-throw error;
-}
-};
-}
-`;
-} else {
-console.warn(
-"⚠ Could not find ArgParser instance in CLI source, MCP server may not work properly"
-);
-}
-const serverDir = path.join(buildDir, "server");
-if (!fs.existsSync(serverDir)) {
-fs.mkdirSync(serverDir, { recursive: true });
-}
-fs.writeFileSync(path.join(serverDir, "original-cli.mjs"), cliSource);
-console.log(
-`✓ Added original CLI source to build folder: ${path.basename(cliSourcePath)}`
-);
-} else {
-console.warn(
-"⚠ Original CLI source not found, handlers may not work properly"
-);
-console.warn(
-" Searched for:",
-possibleCliFiles.map((f) => path.basename(f)).join(", ")
-);
-}
-} catch (error) {
-console.warn(
-"⚠ Failed to add original CLI source:",
-error instanceof Error ? error.message : String(error)
-);
-}
-}
 /**
 * Builds a complete DXT package using TSDown CLI for autonomous execution
 */
-async buildDxtWithTsdown(entryPointFile, outputDir = "./dxt") {
+async buildDxtWithTsdown(entryPointFile, outputDir = "./dxt", withNodeModules = false) {
 try {
 console.log(simpleChalk.cyan("🔧 Building DXT package with TSDown..."));
-const
+const projectRoot = this.findProjectRoot(entryPointFile);
+const absoluteEntryPath = path.resolve(entryPointFile);
+const relativeEntryPath = path.relative(projectRoot, absoluteEntryPath);
 const entryFileName = path.basename(entryPointFile);
 console.log(simpleChalk.gray(`Entry point: ${entryPointFile}`));
-console.log(simpleChalk.gray(`
+console.log(simpleChalk.gray(`Project root: ${projectRoot}`));
+console.log(simpleChalk.gray(`Relative entry path: ${relativeEntryPath}`));
 const dxtIgnorePath = this.getDxtIgnoreTemplatePath();
 if (fs.existsSync(dxtIgnorePath)) {
-fs.copyFileSync(dxtIgnorePath, path.join(
+fs.copyFileSync(dxtIgnorePath, path.join(projectRoot, ".dxtignore"));
 }
+const serverInfo = this.extractMcpServerInfo();
+const logoFilename = await this.addLogoToFolder(
+projectRoot,
+serverInfo,
+entryPointFile
+);
+console.log(
+logoFilename ? simpleChalk.gray(`✓ Logo prepared: ${logoFilename}`) : simpleChalk.gray("⚠ No logo available")
+);
 const originalCwd = process.cwd();
 try {
-process.chdir(
+process.chdir(projectRoot);
 const { build } = await import("tsdown");
-console.log(simpleChalk.gray(`Building with TSDown: ${
+console.log(simpleChalk.gray(`Building with TSDown: ${relativeEntryPath}`));
+console.log(
+simpleChalk.green(
+`${withNodeModules ? "with node_modules" : "without node_modules"}`
+)
+);
 const buildConfig = {
-entry: [
-outDir: path.resolve(
-format: ["
+entry: [relativeEntryPath],
+outDir: path.resolve(originalCwd, outputDir),
+format: ["es"],
 target: "node22",
-
+define: {
+// Define any compile-time constants
+NODE_ENV: '"production"'
+},
 minify: false,
 sourcemap: false,
-
+// Remove all output folders and artefacts
+clean: [outputDir, "./.dxtignore", `${outputDir}.dxt`],
 silent: process.env["NO_SILENCE"] !== "1",
-
-
-
-
-
-
-
-"..",
-"assets",
-"logo_1_small.jpg"
-),
-// From node_modules
-path.join(
-process.cwd(),
-"node_modules",
-"@alcyone-labs",
-"arg-parser",
-"dist",
-"assets",
-"logo_1_small.jpg"
-),
-// From package root dist/assets (for local build)
-path.join(process.cwd(), "dist", "assets", "logo_1_small.jpg"),
-// From library root (development)
-path.join(
-process.cwd(),
-"..",
-"..",
-"..",
-"docs",
-"MCP",
-"icons",
-"logo_1_small.jpg"
-)
-];
-for (const logoPath of possibleLogoPaths) {
-if (fs.existsSync(logoPath)) {
-console.log(simpleChalk.gray(`Found logo at: ${logoPath}`));
-return [{ from: logoPath, to: "logo.jpg" }];
-}
-}
+external: (_, importer) => withNodeModules ? importer == null ? void 0 : importer.includes("node_modules") : false,
+noExternal: (_, importer) => withNodeModules ? (importer == null ? void 0 : importer.includes("node_modules")) === false : true,
+copy: async (options) => {
+const outputPaths = [
+"package.json"
+];
+if (withNodeModules) {
 console.log(
-simpleChalk.
+simpleChalk.gray(
+"📦 Including node_modules in bundle (may take longer)..."
+)
 );
-
-}
-
-
-
-
-
-
-
-
-
-
-
-"process",
-"crypto",
-"http",
-"https",
-"net",
-"zlib"
-],
+outputPaths.push("node_modules");
+}
+if (logoFilename) {
+const logoPath = path.join(process.cwd(), logoFilename);
+if (fs.existsSync(logoPath)) {
+console.log(simpleChalk.gray(`Adding logo from: ${logoPath}`));
+outputPaths.push({
+from: logoPath,
+to: path.join(options.outDir, logoFilename)
+});
+}
+}
+return outputPaths;
+},
 platform: "node",
 plugins: []
 };
@@ -2474,15 +1744,15 @@ export default ${JSON.stringify(buildConfig, null, 2)};
 }
 await build(buildConfig);
 console.log(simpleChalk.green("✅ TSDown bundling completed"));
-const
+const detectedOutputFile = this.detectTsdownOutputFile(
 outputDir,
 entryFileName
 );
-await this.copyLogoManually(outputDir);
 await this.setupDxtPackageFiles(
 entryPointFile,
 outputDir,
-
+detectedOutputFile ?? void 0,
+logoFilename ?? "logo.jpg"
 );
 console.log(simpleChalk.cyan("📦 DXT package ready for packing"));
 console.log(
@@ -2499,292 +1769,6 @@ export default ${JSON.stringify(buildConfig, null, 2)};
 );
 }
 }
-/**
-* Bundles the original CLI using TSDown for autonomous execution (legacy method)
-*/
-async bundleOriginalCliWithTsdown(serverDir) {
-try {
-const { build } = await import("tsdown");
-console.log(
-simpleChalk.cyan("🔧 Bundling CLI with TSDown for autonomous execution...")
-);
-const configContent = this.getTsdownConfigContent();
-const localConfigPath = path.join(serverDir, "tsdown.config.mjs");
-fs.writeFileSync(localConfigPath, configContent);
-const originalCliPath = path.join(serverDir, "original-cli.mjs");
-if (!fs.existsSync(originalCliPath)) {
-console.warn(
-simpleChalk.yellow("⚠ Original CLI not found, skipping TSDown bundling")
-);
-return null;
-}
-const buildOptions = {
-entry: ["original-cli.mjs"],
-// Use relative path since we'll chdir to serverDir
-outDir: ".",
-// Output to current directory (serverDir)
-format: "esm",
-target: "node22",
-// Bundle EVERYTHING except Node.js built-ins for true autonomy
-noExternal: (id) => {
-if (!id.startsWith("node:") && !this.isNodeBuiltin(id)) return true;
-return false;
-},
-minify: false,
-sourcemap: false,
-clean: false,
-outExtension: () => ({ js: ".bundled.mjs" }),
-alias: {
-// Alias chalk to SimpleChalk for autonomous builds
-chalk: path.resolve(
-process.cwd(),
-"node_modules/@alcyone-labs/arg-parser/dist/SimpleChalk.mjs"
-)
-},
-external: [
-// Only Node.js built-ins - everything else gets bundled for true autonomy
-"node:stream",
-"node:fs",
-"node:path",
-"node:url",
-"node:util",
-"node:events",
-"node:child_process",
-"node:os",
-"node:tty",
-"node:process",
-"node:crypto",
-"node:http",
-"node:https",
-"node:net",
-"node:zlib",
-"node:fs/promises",
-"node:timers",
-"stream",
-"fs",
-"path",
-"url",
-"util",
-"events",
-"child_process",
-"os",
-"tty",
-"process",
-"crypto",
-"http",
-"https",
-"net",
-"zlib",
-"fs/promises",
-"timers",
-"timers/promises",
-"perf_hooks",
-"async_hooks",
-"inspector",
-"v8",
-"vm",
-"assert",
-"constants",
-"module",
-"repl",
-"string_decoder",
-"punycode",
-"domain",
-"querystring",
-"readline",
-"worker_threads",
-"cluster",
-"dgram",
-"dns",
-"buffer"
-],
-platform: "node",
-plugins: [],
-// Resolve local dependencies properly
-resolve: {
-alias: {
-// Handle local monorepo dependencies
-"@alcyone-labs/arg-parser": path.resolve(process.cwd())
-}
-}
-};
-const originalCwd = process.cwd();
-try {
-process.chdir(serverDir);
-await build(buildOptions);
-} finally {
-process.chdir(originalCwd);
-}
-const possibleBundledFiles = [
-"original-cli.bundled.mjs",
-"original-cli.js",
-"original-cli.mjs"
-];
-let bundledPath = null;
-let bundledFileName = null;
-for (const fileName of possibleBundledFiles) {
-const filePath = path.join(serverDir, fileName);
-if (fs.existsSync(filePath) && fileName !== "original-cli.mjs") {
-bundledPath = filePath;
-bundledFileName = fileName;
-break;
-}
-}
-if (bundledPath && bundledFileName) {
-console.log(
-simpleChalk.green(
-`✅ TSDown bundling completed successfully: ${bundledFileName}`
-)
-);
-const expectedBundledPath = path.join(
-serverDir,
-"original-cli.bundled.mjs"
-);
-if (bundledPath !== expectedBundledPath) {
-fs.renameSync(bundledPath, expectedBundledPath);
-bundledFileName = "original-cli.bundled.mjs";
-}
-try {
-fs.unlinkSync(localConfigPath);
-} catch (error) {
-}
|
-
try {
|
|
2652
|
-
fs.chmodSync(expectedBundledPath, 493);
|
|
2653
|
-
} catch (error) {
|
|
2654
|
-
console.warn(
|
|
2655
|
-
"⚠ Could not set executable permission on bundled file:",
|
|
2656
|
-
error instanceof Error ? error.message : String(error)
|
|
2657
|
-
);
|
|
2658
|
-
}
|
|
2659
|
-
return bundledFileName;
|
|
2660
|
-
} else {
|
|
2661
|
-
console.warn(
|
|
2662
|
-
simpleChalk.yellow("⚠ TSDown bundling failed, bundled file not found")
|
|
2663
|
-
);
|
|
2664
|
-
return null;
|
|
2665
|
-
}
|
|
2666
|
-
} catch (error) {
|
|
2667
|
-
console.warn(
|
|
2668
|
-
simpleChalk.yellow(
|
|
2669
|
-
`⚠ TSDown bundling failed: ${error instanceof Error ? error.message : String(error)}`
|
|
2670
|
-
)
|
|
2671
|
-
);
|
|
2672
|
-
console.log(simpleChalk.gray(" Falling back to non-bundled approach"));
|
|
2673
|
-
return null;
|
|
2674
|
-
}
|
|
2675
|
-
}
|
|
2676
|
-
/**
|
|
2677
|
-
* Checks if a module ID is a Node.js built-in
|
|
2678
|
-
*/
|
|
2679
|
-
isNodeBuiltin(id) {
|
|
2680
|
-
const nodeBuiltins = [
|
|
2681
|
-
"stream",
|
|
2682
|
-
"fs",
|
|
2683
|
-
"path",
|
|
2684
|
-
"url",
|
|
2685
|
-
"util",
|
|
2686
|
-
"events",
|
|
2687
|
-
"child_process",
|
|
2688
|
-
"os",
|
|
2689
|
-
"tty",
|
|
2690
|
-
"process",
|
|
2691
|
-
"crypto",
|
|
2692
|
-
"http",
|
|
2693
|
-
"https",
|
|
2694
|
-
"net",
|
|
2695
|
-
"zlib",
|
|
2696
|
-
"fs/promises",
|
|
2697
|
-
"timers",
|
|
2698
|
-
"timers/promises",
|
|
2699
|
-
"perf_hooks",
|
|
2700
|
-
"async_hooks",
|
|
2701
|
-
"inspector",
|
|
2702
|
-
"v8",
|
|
2703
|
-
"vm",
|
|
2704
|
-
"assert",
|
|
2705
|
-
"constants",
|
|
2706
|
-
"module",
|
|
2707
|
-
"repl",
|
|
2708
|
-
"string_decoder",
|
|
2709
|
-
"punycode",
|
|
2710
|
-
"domain",
|
|
2711
|
-
"querystring",
|
|
2712
|
-
"readline",
|
|
2713
|
-
"worker_threads",
|
|
2714
|
-
"cluster",
|
|
2715
|
-
"dgram",
|
|
2716
|
-
"dns",
|
|
2717
|
-
"buffer"
|
|
2718
|
-
];
|
|
2719
|
-
return nodeBuiltins.includes(id) || id.startsWith("node:");
|
|
2720
|
-
}
|
|
2721
|
-
/**
|
|
2722
|
-
* Gets the TSDown configuration content as a string
|
|
2723
|
-
*/
|
|
2724
|
-
getTsdownConfigContent() {
|
|
2725
|
-
const currentDir = path.dirname(new URL(import.meta.url).pathname);
|
|
2726
|
-
const assetsConfigPath = path.join(
|
|
2727
|
-
currentDir,
|
|
2728
|
-
"..",
|
|
2729
|
-
"assets",
|
|
2730
|
-
"tsdown.dxt.config.ts"
|
|
2731
|
-
);
|
|
2732
|
-
if (fs.existsSync(assetsConfigPath)) {
|
|
2733
|
-
try {
|
|
2734
|
-
const content = fs.readFileSync(assetsConfigPath, "utf-8");
|
|
2735
|
-
return content.replace('/// <reference types="tsdown" />', "").replace(
|
|
2736
|
-
'import { defineConfig } from "tsdown/config";',
|
|
2737
|
-
'import { defineConfig } from "tsdown";'
|
|
2738
|
-
).replace(
|
|
2739
|
-
"export default defineConfig(",
|
|
2740
|
-
"export default defineConfig("
|
|
2741
|
-
);
|
|
2742
|
-
} catch (error) {
|
|
2743
|
-
console.warn(
|
|
2744
|
-
simpleChalk.yellow(
|
|
2745
|
-
"⚠ Could not read TSDown config from assets, using fallback"
|
|
2746
|
-
)
|
|
2747
|
-
);
|
|
2748
|
-
}
|
|
2749
|
-
}
|
|
2750
|
-
const rootConfigPath = path.join(process.cwd(), "tsdown.dxt.config.ts");
|
|
2751
|
-
if (fs.existsSync(rootConfigPath)) {
|
|
2752
|
-
try {
|
|
2753
|
-
const content = fs.readFileSync(rootConfigPath, "utf-8");
|
|
2754
|
-
return content.replace('/// <reference types="tsdown" />', "").replace(
|
|
2755
|
-
'import { defineConfig } from "tsdown/config";',
|
|
2756
|
-
'import { defineConfig } from "tsdown";'
|
|
2757
|
-
);
|
|
2758
|
-
} catch (error) {
|
|
2759
|
-
console.warn(
|
|
2760
|
-
simpleChalk.yellow(
|
|
2761
|
-
"⚠ Could not read TSDown config from root, using default"
|
|
2762
|
-
)
|
|
2763
|
-
);
|
|
2764
|
-
}
|
|
2765
|
-
}
|
|
2766
|
-
return `import { defineConfig } from "tsdown";
|
|
2767
|
-
import path from "path";
|
|
2768
|
-
|
|
2769
|
-
export default defineConfig({
|
|
2770
|
-
outDir: "server",
|
|
2771
|
-
format: ["esm", "module"],
|
|
2772
|
-
target: "node22",
|
|
2773
|
-
noExternal: () => true,
|
|
2774
|
-
minify: false,
|
|
2775
|
-
sourcemap: false,
|
|
2776
|
-
clean: false,
|
|
2777
|
-
alias: {
|
|
2778
|
-
chalk: path.resolve(process.cwd(), "node_modules/@alcyone-labs/arg-parser/dist/SimpleChalk.mjs"),
|
|
2779
|
-
},
|
|
2780
|
-
external: [
|
|
2781
|
-
"stream", "fs", "path", "url", "util", "events", "child_process",
|
|
2782
|
-
"os", "tty", "process", "crypto", "http", "https", "net", "zlib",
|
|
2783
|
-
],
|
|
2784
|
-
platform: "node",
|
|
2785
|
-
plugins: [],
|
|
2786
|
-
});`;
|
|
2787
|
-
}
|
|
2788
1772
|
/**
|
|
2789
1773
|
* Gets the path to the .dxtignore template file in assets
|
|
2790
1774
|
*/
|
|
@@ -2824,8 +1808,8 @@ export default defineConfig({
   /**
    * Sets up DXT package files (manifest.json) in the output directory
    */
-  async setupDxtPackageFiles(entryPointFile, outputDir = "./dxt", actualOutputFilename) {
-    var _a, _b, _c, _d, _e;
+  async setupDxtPackageFiles(entryPointFile, outputDir = "./dxt", actualOutputFilename, logoFilename = "logo.jpg") {
+    var _a, _b, _c, _d, _e, _f;
     const dxtDir = path.resolve(process.cwd(), outputDir);
     if (!fs.existsSync(dxtDir)) {
       throw new Error(`TSDown output directory (${outputDir}) not found`);
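
setupDxtPackageFiles now receives the logo filename explicitly instead of relying on a post-build copy step. A typed sketch of the new call shape, inferred from this hunk and the call site above rather than from the published .d.ts, which may differ:

    // Inferred shape; parameter names and defaults are taken from the diff.
    interface DxtPackageFileSetup {
      setupDxtPackageFiles(
        entryPointFile: string,
        outputDir?: string,            // defaults to "./dxt"
        actualOutputFilename?: string, // filename detected from the TSDown output
        logoFilename?: string,         // defaults to "logo.jpg"
      ): Promise<void>;
    }
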
@@ -2852,10 +1836,10 @@ export default defineConfig({
         `Warning: Could not generate unified tool list: ${error instanceof Error ? error.message : String(error)}`
       )
     );
-    const
+    const mainFlags = this.argParserInstance.flags;
     const properties2 = {};
     const required2 = [];
-    for (const flag of
+    for (const flag of mainFlags) {
       if (flag.name === "help" || flag.name.startsWith("s-")) continue;
       properties2[flag.name] = {
         type: getJsonSchemaTypeFromFlag(flag.type),
@@ -2879,58 +1863,17 @@ export default defineConfig({
         }
       ];
     }
-    const envVars =
-    const userConfig = {};
-    const mainFlags = this.argParserInstance.flags;
-    for (const flag of mainFlags) {
-      const envVar = flag.env || flag.envVar;
-      if (envVar) {
-        envVars[envVar] = `\${user_config.${envVar}}`;
-        userConfig[envVar] = {
-          type: "string",
-          title: envVar.replace(/_/g, " ").replace(/\b\w/g, (l) => l.toUpperCase()),
-          description: flag.description || `${envVar} environment variable`,
-          required: true,
-          // Always require env vars in user_config for better UX
-          sensitive: true
-          // Assume env vars are sensitive
-        };
-      }
-    }
-    if (typeof this.argParserInstance.getTools === "function") {
-      const tools2 = this.argParserInstance.getTools();
-      for (const [, toolConfig] of tools2) {
-        const toolFlags = toolConfig.flags || [];
-        for (const flag of toolFlags) {
-          const envVar = flag.env || flag.envVar;
-          if (envVar && !envVars[envVar]) {
-            envVars[envVar] = `\${user_config.${envVar}}`;
-            userConfig[envVar] = {
-              type: "string",
-              title: envVar.replace(/_/g, " ").replace(/\b\w/g, (l) => l.toUpperCase()),
-              description: flag.description || `${envVar} environment variable`,
-              required: true,
-              // Always require env vars in user_config for better UX
-              sensitive: true
-              // Assume env vars are sensitive
-            };
-          }
-        }
-      }
-    }
+    const { envVars, userConfig } = this.generateEnvAndUserConfig();
     const serverInfo = this.extractMcpServerInfo();
-    let
-    if (
-
-
-
-
-    );
-
-    logoFilename = customLogoFilename;
-    }
+    let entryFileName;
+    if (actualOutputFilename) {
+      entryFileName = actualOutputFilename;
+    } else {
+      const projectRoot = this.findProjectRoot(entryPointFile);
+      const absoluteEntryPath = path.resolve(entryPointFile);
+      const relativeEntryPath = path.relative(projectRoot, absoluteEntryPath);
+      entryFileName = relativeEntryPath.replace(/\.ts$/, ".js");
     }
-    const entryFileName = actualOutputFilename || path.basename(entryPointFile).replace(/\.ts$/, ".js");
     const manifest = {
       dxt_version: "0.1",
       name: serverInfo.name || packageInfo.name || "mcp-server",
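
With this change the manifest entry point keeps the entry file's path relative to the nearest package.json instead of flattening it to a basename. A small sketch of the difference; the paths are illustrative:

    import * as path from "node:path";

    // Illustrative layout; any project with a nested entry point behaves the same way.
    const projectRoot = "/work/my-cli";              // directory containing package.json
    const entryPointFile = "/work/my-cli/src/cli.ts";

    // Old behaviour: basename only
    const oldEntry = path.basename(entryPointFile).replace(/\.ts$/, ".js"); // "cli.js"

    // New behaviour: path relative to the project root
    const newEntry = path
      .relative(projectRoot, path.resolve(entryPointFile))
      .replace(/\.ts$/, ".js");                                             // "src/cli.js"
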
@@ -2946,17 +1889,23 @@ export default defineConfig({
       entry_point: entryFileName,
       mcp_config: {
         command: "node",
-        args: [
+        args: [
+          `\${__dirname}/${entryFileName}`,
+          "--s-mcp-serve",
+          // Overwrite the CLI config to only use stdio to avoid conflicts
+          "--s-mcp-transport",
+          "stdio"
+        ],
         env: envVars
       }
     },
     tools,
     icon: logoFilename,
     ...Object.keys(userConfig).length > 0 && { user_config: userConfig },
-    repository: {
+    repository: ((_e = packageInfo.repository) == null ? void 0 : _e.url) ? {
       type: "git",
-      url: (
-    },
+      url: (_f = packageInfo.repository) == null ? void 0 : _f.url
+    } : void 0,
     license: packageInfo.license || "MIT"
   };
   fs.writeFileSync(
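
The generated manifest.json now pins the packaged server to stdio transport and only emits a repository block when package.json declares a repository.url. A trimmed, illustrative excerpt written as a TypeScript literal; the values are placeholders, the nesting is flattened for brevity, and only fields visible in this diff are shown:

    // Illustrative excerpt of the generated manifest.json (placeholder values).
    const manifestExcerpt = {
      dxt_version: "0.1",
      name: "my-mcp-server",                 // falls back to the package.json name
      entry_point: "src/cli.js",             // now relative to the project root
      mcp_config: {
        command: "node",
        args: [
          "${__dirname}/src/cli.js",
          "--s-mcp-serve",
          "--s-mcp-transport",
          "stdio",                           // forced to stdio to avoid transport conflicts
        ],
        env: { MY_API_KEY: "${user_config.MY_API_KEY}" }, // hypothetical env var
      },
      icon: "logo.jpg",
      // repository appears only when package.json has repository.url
      repository: { type: "git", url: "https://example.com/my/repo.git" },
      license: "MIT",
    };
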
@@ -2965,66 +1914,6 @@ export default defineConfig({
     );
     console.log(simpleChalk.gray("✅ DXT package files set up"));
   }
-  /**
-   * Manually copy logo since TSDown's copy option doesn't work programmatically
-   */
-  async copyLogoManually(outputDir = "./dxt") {
-    const dxtDir = path.resolve(process.cwd(), outputDir);
-    if (!fs.existsSync(dxtDir)) {
-      console.warn(
-        simpleChalk.yellow(
-          `⚠ Output directory (${outputDir}) not found, skipping logo copy`
-        )
-      );
-      return;
-    }
-    const possibleLogoPaths = [
-      // From built library assets
-      path.join(
-        path.dirname(new URL(import.meta.url).pathname),
-        "..",
-        "assets",
-        "logo_1_small.jpg"
-      ),
-      // From node_modules
-      path.join(
-        process.cwd(),
-        "node_modules",
-        "@alcyone-labs",
-        "arg-parser",
-        "dist",
-        "assets",
-        "logo_1_small.jpg"
-      ),
-      // From package root dist/assets (for local build)
-      path.join(process.cwd(), "dist", "assets", "logo_1_small.jpg"),
-      // From library root (development)
-      path.join(
-        process.cwd(),
-        "..",
-        "..",
-        "..",
-        "docs",
-        "MCP",
-        "icons",
-        "logo_1_small.jpg"
-      )
-    ];
-    for (const logoPath of possibleLogoPaths) {
-      if (fs.existsSync(logoPath)) {
-        try {
-          fs.copyFileSync(logoPath, path.join(dxtDir, "logo.jpg"));
-          console.log(simpleChalk.gray(`✅ Logo copied from: ${logoPath}`));
-          return;
-        } catch (error) {
-          console.warn(
-            simpleChalk.yellow(`⚠ Failed to copy logo from ${logoPath}: ${error}`)
-          );
-        }
-      }
-    }
-    console.warn(simpleChalk.yellow("⚠ Logo not found in any expected location"));
-  }
   /**
    * Detects the actual output filename generated by TSDown
    */
@@ -3089,6 +1978,72 @@ export default defineConfig({
       return null;
     }
   }
+  findProjectRoot(entryPointFile) {
+    let currentDir = path.dirname(path.resolve(entryPointFile));
+    let attempts = 0;
+    const maxAttempts = 5;
+    while (attempts < maxAttempts) {
+      const packageJsonPath = path.join(currentDir, "package.json");
+      if (fs.existsSync(packageJsonPath)) {
+        return currentDir;
+      }
+      const parentDir = path.dirname(currentDir);
+      if (parentDir === currentDir) {
+        break;
+      }
+      currentDir = parentDir;
+      attempts++;
+    }
+    throw new Error(
+      `Could not find package.json within ${maxAttempts} directories up from ${entryPointFile}. Please ensure your entry point is within a project that has a package.json file.`
+    );
+  }
+  /**
+   * Generate environment variables and user configuration from ArgParser flags
+   * @returns Object containing envVars and userConfig
+   */
+  generateEnvAndUserConfig() {
+    const envVars = {};
+    const userConfig = {};
+    const mainFlags = this.argParserInstance.flags;
+    for (const flag of mainFlags) {
+      const envVar = flag.env || flag.envVar;
+      if (envVar) {
+        envVars[envVar] = `\${user_config.${envVar}}`;
+        userConfig[envVar] = {
+          type: "string",
+          title: envVar.replace(/_/g, " ").replace(/\b\w/g, (l) => l.toUpperCase()),
+          description: flag.description || `${envVar} environment variable`,
+          required: true,
+          // Always require env vars in user_config for better UX
+          sensitive: true
+          // Assume env vars are sensitive
+        };
+      }
+    }
+    if (typeof this.argParserInstance.getTools === "function") {
+      const tools = this.argParserInstance.getTools();
+      for (const [, toolConfig] of tools) {
+        const toolFlags = toolConfig.flags || [];
+        for (const flag of toolFlags) {
+          const envVar = flag.env || flag.envVar;
+          if (envVar && !envVars[envVar]) {
+            envVars[envVar] = `\${user_config.${envVar}}`;
+            userConfig[envVar] = {
+              type: "string",
+              title: envVar.replace(/_/g, " ").replace(/\b\w/g, (l) => l.toUpperCase()),
+              description: flag.description || `${envVar} environment variable`,
+              required: true,
+              // Always require env vars in user_config for better UX
+              sensitive: true
+              // Assume env vars are sensitive
-            };
+            };
+          }
+        }
+      }
+    }
+    return { envVars, userConfig };
+  }
 }
 class McpNotificationsManager {
   constructor() {
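
The extracted generateEnvAndUserConfig() makes the flag-to-manifest mapping explicit: any flag that declares an env (or envVar) name produces both an env entry that references user_config and a required, sensitive user_config field. A hedged sketch of that mapping for one hypothetical flag; only the derived shapes follow the code above:

    // Hypothetical flag declaring an environment variable; the field names
    // (name, env, description) follow the shapes visible in the diff above.
    const flag = {
      name: "apiKey",
      env: "MY_API_KEY",
      description: "API key used by the example tool",
    };

    // What the generator derives from it, per the logic shown above:
    const envVars = { MY_API_KEY: "${user_config.MY_API_KEY}" };
    const userConfig = {
      MY_API_KEY: {
        type: "string",
        title: "MY API KEY",           // underscores become spaces, words capitalised
        description: "API key used by the example tool",
        required: true,                // env vars are always required in user_config
        sensitive: true,               // and treated as sensitive by default
      },
    };
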
@@ -5143,9 +4098,7 @@ _handleMcpServeFlag_fn = async function(processArgs, _mcpServeIndex) {
   const resolvedLogPath = resolveLogPath(effectiveLogPath);
   let mcpLogger;
   try {
-    const mcpLoggerModule = await
-      'return import("@alcyone-labs/simple-mcp-logger")'
-    )();
+    const mcpLoggerModule = await import("@alcyone-labs/simple-mcp-logger");
     mcpLogger = mcpLoggerModule.createMcpLogger("MCP Serve", resolvedLogPath);
     globalThis.console = mcpLogger;
   } catch {
@@ -5256,6 +4209,20 @@ _startUnifiedMcpServer_fn = async function(mcpServerConfig, transportOptions) {
         `Error parsing transports configuration: ${error.message}. Expected JSON format: '[{"type":"stdio"},{"type":"sse","port":3001}]'`
       );
     }
+  } else if (transportOptions.transportType) {
+    const transportType = transportOptions.transportType;
+    const finalTransportOptions = {
+      port: transportOptions.port,
+      host: transportOptions.host || "localhost",
+      path: transportOptions.path || "/mcp"
+    };
+    await mcpParser.startMcpServerWithTransport(
+      serverInfo,
+      transportType,
+      finalTransportOptions,
+      toolOptions,
+      transportOptions.logPath
+    );
   } else if (defaultTransports && defaultTransports.length > 0) {
     await mcpParser.startMcpServerWithMultipleTransports(
       serverInfo,
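
Taken together with the fallback change in the next hunk, transport selection for the unified MCP server now follows a clear precedence: an explicit transports JSON array, then a single transportType (with host and path defaults), then the parser's preconfigured defaultTransports, and finally plain stdio. A condensed sketch of that decision order; pickTransportPlan is a stand-in summary, not a function exported by the package:

    // Condensed decision order mirroring the branches above; "single" stands in for
    // startMcpServerWithTransport and "multiple" for startMcpServerWithMultipleTransports.
    type TransportOptions = {
      transports?: string;      // JSON array, e.g. '[{"type":"stdio"},{"type":"sse","port":3001}]'
      transportType?: string;   // single transport name, e.g. "stdio" or "sse"
      port?: number;
      host?: string;
      path?: string;
    };

    function pickTransportPlan(opts: TransportOptions, defaultTransports?: object[]) {
      if (opts.transports) {
        return { mode: "multiple", transports: JSON.parse(opts.transports) };
      }
      if (opts.transportType) {
        return {
          mode: "single",
          type: opts.transportType,
          options: { port: opts.port, host: opts.host || "localhost", path: opts.path || "/mcp" },
        };
      }
      if (defaultTransports && defaultTransports.length > 0) {
        return { mode: "multiple", transports: defaultTransports };
      }
      return { mode: "single", type: "stdio", options: {} }; // final fallback
    }
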
@@ -5277,16 +4244,10 @@ _startUnifiedMcpServer_fn = async function(mcpServerConfig, transportOptions) {
       transportOptions.logPath
     );
   } else {
-    const transportType = transportOptions.transportType || "stdio";
-    const finalTransportOptions = {
-      port: transportOptions.port,
-      host: transportOptions.host || "localhost",
-      path: transportOptions.path || "/mcp"
-    };
     await mcpParser.startMcpServerWithTransport(
       serverInfo,
-
-
+      "stdio",
+      {},
       toolOptions,
       transportOptions.logPath
     );
@@ -8813,6 +7774,7 @@ class Protocol {
     this._responseHandlers = /* @__PURE__ */ new Map();
     this._progressHandlers = /* @__PURE__ */ new Map();
     this._timeoutInfo = /* @__PURE__ */ new Map();
+    this._pendingDebouncedNotifications = /* @__PURE__ */ new Set();
     this.setNotificationHandler(CancelledNotificationSchema, (notification) => {
       const controller = this._requestHandlerAbortControllers.get(notification.params.requestId);
       controller === null || controller === void 0 ? void 0 : controller.abort(notification.params.reason);
@@ -8894,6 +7856,7 @@ class Protocol {
     const responseHandlers = this._responseHandlers;
     this._responseHandlers = /* @__PURE__ */ new Map();
     this._progressHandlers.clear();
+    this._pendingDebouncedNotifications.clear();
     this._transport = void 0;
     (_a = this.onclose) === null || _a === void 0 ? void 0 : _a.call(this);
     const error = new McpError(ErrorCode.ConnectionClosed, "Connection closed");
@@ -9093,10 +8056,32 @@ class Protocol {
    * Emits a notification, which is a one-way message that does not expect a response.
    */
   async notification(notification, options) {
+    var _a, _b;
     if (!this._transport) {
       throw new Error("Not connected");
     }
     this.assertNotificationCapability(notification.method);
+    const debouncedMethods = (_b = (_a = this._options) === null || _a === void 0 ? void 0 : _a.debouncedNotificationMethods) !== null && _b !== void 0 ? _b : [];
+    const canDebounce = debouncedMethods.includes(notification.method) && !notification.params && !(options === null || options === void 0 ? void 0 : options.relatedRequestId);
+    if (canDebounce) {
+      if (this._pendingDebouncedNotifications.has(notification.method)) {
+        return;
+      }
+      this._pendingDebouncedNotifications.add(notification.method);
+      Promise.resolve().then(() => {
+        var _a2;
+        this._pendingDebouncedNotifications.delete(notification.method);
+        if (!this._transport) {
+          return;
+        }
+        const jsonrpcNotification2 = {
+          ...notification,
+          jsonrpc: "2.0"
+        };
+        (_a2 = this._transport) === null || _a2 === void 0 ? void 0 : _a2.send(jsonrpcNotification2, options).catch((error) => this._onerror(error));
+      });
+      return;
+    }
     const jsonrpcNotification = {
       ...notification,
       jsonrpc: "2.0"
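
The bundled MCP SDK Protocol class gains opt-in notification debouncing in the hunks above: methods listed in debouncedNotificationMethods that are sent without params or a relatedRequestId are coalesced within the current microtask, so a burst of identical change notifications results in a single send. A standalone TypeScript illustration of that coalescing pattern; pending, notifyDebounced and send are stand-ins, not SDK APIs:

    // Standalone illustration of the microtask-coalescing pattern used above;
    // `send` stands in for the transport send call.
    const pending = new Set<string>();

    function notifyDebounced(method: string, send: (method: string) => void): void {
      if (pending.has(method)) {
        return; // an identical notification is already queued in this microtask
      }
      pending.add(method);
      Promise.resolve().then(() => {
        pending.delete(method);
        send(method); // only one send per method per microtask burst
      });
    }

    // Three rapid "tools list changed" signals collapse into a single send.
    notifyDebounced("notifications/tools/list_changed", console.log);
    notifyDebounced("notifications/tools/list_changed", console.log);
    notifyDebounced("notifications/tools/list_changed", console.log);
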