@tempad-dev/mcp 0.3.4 → 0.3.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/asset-http-server.mjs +239 -0
- package/dist/asset-http-server.mjs.map +1 -0
- package/dist/asset-store.mjs +167 -0
- package/dist/asset-store.mjs.map +1 -0
- package/dist/cli.mjs +147 -0
- package/dist/cli.mjs.map +1 -0
- package/dist/config.mjs +29 -0
- package/dist/config.mjs.map +1 -0
- package/dist/hub.mjs +464 -0
- package/dist/hub.mjs.map +1 -0
- package/dist/instructions.md +11 -0
- package/dist/package.mjs +38 -0
- package/dist/package.mjs.map +1 -0
- package/dist/request.mjs +68 -0
- package/dist/request.mjs.map +1 -0
- package/dist/shared.mjs +57 -0
- package/dist/shared.mjs.map +1 -0
- package/dist/tools.mjs +146 -0
- package/dist/tools.mjs.map +1 -0
- package/package.json +15 -4
- package/dist/cli.js +0 -236
- package/dist/cli.js.map +0 -7
- package/dist/hub.js +0 -1328
- package/dist/hub.js.map +0 -7
|
@@ -0,0 +1,239 @@
|
|
|
1
|
+
import { ASSET_DIR, log } from "./shared.mjs";
|
|
2
|
+
import { getMcpServerConfig } from "./config.mjs";
|
|
3
|
+
import { join } from "node:path";
|
|
4
|
+
import { URL } from "node:url";
|
|
5
|
+
import { createReadStream, createWriteStream, existsSync, renameSync, statSync, unlinkSync } from "node:fs";
|
|
6
|
+
import { MCP_HASH_PATTERN } from "@tempad-dev/mcp-shared";
|
|
7
|
+
import { nanoid } from "nanoid";
|
|
8
|
+
import { createHash } from "node:crypto";
|
|
9
|
+
import { createServer } from "node:http";
|
|
10
|
+
import { Transform, pipeline } from "node:stream";
|
|
11
|
+
|
|
12
|
+
//#region src/asset-http-server.ts

// Assets are only served to processes on this machine, so bind to loopback.
const LOOPBACK_HOST = "127.0.0.1";
// Upload size cap, resolved once from the server configuration.
const { maxAssetSizeBytes } = getMcpServerConfig();

/**
 * Creates a loopback-only HTTP server for content-addressed asset transfer,
 * backed by the given asset `store`.
 *
 * Routes (`:hash` is the sha256 hex digest of the asset body):
 *   POST /assets/:hash — upload; the body is hashed while streaming and must
 *                        match `:hash`, otherwise the upload is rejected.
 *   GET  /assets/:hash — download a previously stored asset.
 *
 * @param store asset store providing has/get/upsert/touch/remove
 * @returns `{ start, stop, getBaseUrl }`
 */
function createAssetHttpServer(store) {
  const server = createServer(handleRequest);
  let port = null;

  // Bind to an OS-assigned ephemeral port on loopback. Idempotent: calling
  // start() while already listening resolves immediately.
  async function start() {
    if (port !== null) return;
    await new Promise((resolve, reject) => {
      const onError = (error) => {
        server.off("listening", onListening);
        reject(error);
      };
      const onListening = () => {
        server.off("error", onError);
        const address = server.address();
        if (address && typeof address === "object") {
          port = address.port;
          resolve();
        } else reject(new Error("Failed to determine HTTP server port."));
      };
      server.once("error", onError);
      server.once("listening", onListening);
      // Port 0 asks the OS for any free port.
      server.listen(0, LOOPBACK_HOST);
    });
    log.info({ port }, "Asset HTTP server ready.");
  }

  // Stop accepting connections and forget the port. Safe to call when idle.
  function stop() {
    if (port === null) return;
    server.close();
    port = null;
  }

  // Base URL for building asset links; only valid while the server runs.
  function getBaseUrl() {
    if (port === null) throw new Error("Asset HTTP server is not running.");
    return `http://${LOOPBACK_HOST}:${port}`;
  }

  // Router: permissive CORS (acceptable on a loopback-only server), CORS
  // preflight, then dispatch on /assets/:hash for GET/POST.
  function handleRequest(req, res) {
    res.setHeader("Access-Control-Allow-Origin", "*");
    res.setHeader("Access-Control-Allow-Methods", "GET, POST, OPTIONS");
    res.setHeader("Access-Control-Allow-Headers", "Content-Type, X-Asset-Width, X-Asset-Height");
    if (req.method === "OPTIONS") {
      res.writeHead(204);
      res.end();
      return;
    }
    if (!req.url) {
      res.writeHead(400);
      res.end("Missing URL");
      return;
    }
    const segments = new URL(req.url, getBaseUrl()).pathname.split("/").filter(Boolean);
    if (segments.length !== 2 || segments[0] !== "assets") {
      res.writeHead(404);
      res.end("Not Found");
      return;
    }
    const hash = segments[1];
    if (req.method === "POST") {
      handleUpload(req, res, hash);
      return;
    }
    if (req.method === "GET") {
      handleDownload(req, res, hash);
      return;
    }
    res.writeHead(405);
    res.end("Method Not Allowed");
  }

  // Stream a stored asset back to the client.
  function handleDownload(req, res, hash) {
    const record = store.get(hash);
    if (!record) {
      res.writeHead(404);
      res.end("Not Found");
      return;
    }
    let stat;
    try {
      stat = statSync(record.filePath);
    } catch (error) {
      if (error.code === "ENOENT") {
        // File vanished behind the catalog's back: drop the stale record
        // (without attempting to delete the already-missing file) and 404.
        store.remove(hash, { removeFile: false });
        res.writeHead(404);
        res.end("Not Found");
      } else {
        log.error({ error, hash }, "Failed to stat asset file.");
        res.writeHead(500);
        res.end("Internal Server Error");
      }
      return;
    }
    // Content is addressed by hash, so it never changes: cache indefinitely.
    res.writeHead(200, {
      "Content-Type": record.mimeType,
      "Content-Length": stat.size.toString(),
      "Cache-Control": "public, max-age=31536000, immutable"
    });
    const stream = createReadStream(record.filePath);
    stream.on("error", (error) => {
      log.warn({ error, hash }, "Failed to stream asset file.");
      if (!res.headersSent) {
        res.writeHead(500);
        res.end("Internal Server Error");
      } else {
        // FIX: the 200 status and Content-Length are already committed at
        // this point (writeHead above), so appending an error message would
        // corrupt the partially sent body and contradict Content-Length.
        // Abort the connection so the client observes a truncated transfer.
        res.destroy();
      }
    });
    // Only mark the record as accessed once the file actually opened.
    stream.on("open", () => {
      store.touch(hash);
    });
    stream.pipe(res);
  }

  // Receive an asset body, verifying its sha256 against the URL hash and
  // enforcing the configured size cap while streaming to a temp file.
  function handleUpload(req, res, hash) {
    // The hash doubles as the on-disk file name; reject anything that is not
    // a well-formed digest before touching the filesystem.
    if (!MCP_HASH_PATTERN.test(hash)) {
      res.writeHead(400);
      res.end("Invalid Hash Format");
      return;
    }
    const mimeType = req.headers["content-type"] || "application/octet-stream";
    const filePath = join(ASSET_DIR, hash);
    // Optional pixel dimensions supplied by the uploader; only kept when both
    // parse as positive integers.
    const width = parseInt(req.headers["x-asset-width"], 10);
    const height = parseInt(req.headers["x-asset-height"], 10);
    const metadata = !isNaN(width) && !isNaN(height) && width > 0 && height > 0 ? {
      width,
      height
    } : undefined;
    // Fast path: asset already stored and its file is present — just refresh
    // the record instead of rewriting identical bytes.
    if (store.has(hash) && existsSync(filePath)) {
      // Drain the request body so the connection finishes cleanly.
      req.resume();
      const existing = store.get(hash);
      let changed = false;
      if (metadata) {
        existing.metadata = metadata;
        changed = true;
      }
      if (existing.mimeType !== mimeType) {
        existing.mimeType = mimeType;
        changed = true;
      }
      if (changed) store.upsert(existing);
      store.touch(hash);
      res.writeHead(200);
      res.end("OK");
      return;
    }
    // Stream the body to a unique temp file, hashing and size-capping as we
    // go, then atomically rename into place once the digest checks out.
    const tmpPath = `${filePath}.tmp.${nanoid()}`;
    const writeStream = createWriteStream(tmpPath);
    const hasher = createHash("sha256");
    let size = 0;
    // Best-effort removal of the temp file on any failure path.
    const cleanup = () => {
      if (existsSync(tmpPath)) try {
        unlinkSync(tmpPath);
      } catch (e) {
        log.warn({ error: e, tmpPath }, "Failed to cleanup temp file.");
      }
    };
    pipeline(req, new Transform({ transform(chunk, encoding, callback) {
      size += chunk.length;
      if (size > maxAssetSizeBytes) {
        // Erroring the pipeline aborts the upload without buffering more.
        callback(new Error("PayloadTooLarge"));
        return;
      }
      hasher.update(chunk);
      callback(null, chunk);
    } }), writeStream, (err) => {
      if (err) {
        cleanup();
        if (err.message === "PayloadTooLarge") {
          res.writeHead(413);
          res.end("Payload Too Large");
        } else if (err.code === "ERR_STREAM_PREMATURE_CLOSE") log.warn({ hash }, "Upload request closed prematurely.");
        else {
          log.error({ error: err, hash }, "Upload pipeline failed.");
          if (!res.headersSent) {
            res.writeHead(500);
            res.end("Internal Server Error");
          }
        }
        return;
      }
      // The content address must match what was actually received.
      if (hasher.digest("hex") !== hash) {
        cleanup();
        res.writeHead(400);
        res.end("Hash Mismatch");
        return;
      }
      try {
        renameSync(tmpPath, filePath);
      } catch (error) {
        log.error({ error, hash }, "Failed to rename temp file to asset.");
        cleanup();
        res.writeHead(500);
        res.end("Internal Server Error");
        return;
      }
      store.upsert({
        hash,
        filePath,
        mimeType,
        size,
        metadata
      });
      log.info({ hash, size }, "Stored uploaded asset via HTTP.");
      res.writeHead(201);
      res.end("Created");
    });
  }
  return {
    start,
    stop,
    getBaseUrl
  };
}

//#endregion
export { createAssetHttpServer };
//# sourceMappingURL=asset-http-server.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"asset-http-server.mjs","names":["port: number | null"],"sources":["../src/asset-http-server.ts"],"sourcesContent":["import { MCP_HASH_PATTERN } from '@tempad-dev/mcp-shared'\nimport { nanoid } from 'nanoid'\nimport { createHash } from 'node:crypto'\nimport {\n createReadStream,\n createWriteStream,\n existsSync,\n renameSync,\n statSync,\n unlinkSync\n} from 'node:fs'\nimport { createServer, type IncomingMessage, type ServerResponse } from 'node:http'\nimport { join } from 'node:path'\nimport { pipeline, Transform } from 'node:stream'\nimport { URL } from 'node:url'\n\nimport type { AssetStore } from './asset-store'\n\nimport { getMcpServerConfig } from './config'\nimport { ASSET_DIR, log } from './shared'\n\nconst LOOPBACK_HOST = '127.0.0.1'\nconst { maxAssetSizeBytes } = getMcpServerConfig()\n\nexport interface AssetHttpServer {\n start(): Promise<void>\n stop(): void\n getBaseUrl(): string\n}\n\nexport function createAssetHttpServer(store: AssetStore): AssetHttpServer {\n const server = createServer(handleRequest)\n let port: number | null = null\n\n async function start(): Promise<void> {\n if (port !== null) return\n await new Promise<void>((resolve, reject) => {\n const onError = (error: Error) => {\n server.off('listening', onListening)\n reject(error)\n }\n const onListening = () => {\n server.off('error', onError)\n const address = server.address()\n if (address && typeof address === 'object') {\n port = address.port\n resolve()\n } else {\n reject(new Error('Failed to determine HTTP server port.'))\n }\n }\n server.once('error', onError)\n server.once('listening', onListening)\n server.listen(0, LOOPBACK_HOST)\n })\n log.info({ port }, 'Asset HTTP server ready.')\n }\n\n function stop(): void {\n if (port === null) return\n server.close()\n port = null\n }\n\n function getBaseUrl(): string {\n if (port === null) throw new Error('Asset HTTP server is not running.')\n return `http://${LOOPBACK_HOST}:${port}`\n }\n\n function 
handleRequest(req: IncomingMessage, res: ServerResponse): void {\n res.setHeader('Access-Control-Allow-Origin', '*')\n res.setHeader('Access-Control-Allow-Methods', 'GET, POST, OPTIONS')\n res.setHeader('Access-Control-Allow-Headers', 'Content-Type, X-Asset-Width, X-Asset-Height')\n\n if (req.method === 'OPTIONS') {\n res.writeHead(204)\n res.end()\n return\n }\n\n if (!req.url) {\n res.writeHead(400)\n res.end('Missing URL')\n return\n }\n\n const url = new URL(req.url, getBaseUrl())\n const segments = url.pathname.split('/').filter(Boolean)\n if (segments.length !== 2 || segments[0] !== 'assets') {\n res.writeHead(404)\n res.end('Not Found')\n return\n }\n\n const hash = segments[1]\n\n if (req.method === 'POST') {\n handleUpload(req, res, hash)\n return\n }\n\n if (req.method === 'GET') {\n handleDownload(req, res, hash)\n return\n }\n\n res.writeHead(405)\n res.end('Method Not Allowed')\n }\n\n function handleDownload(req: IncomingMessage, res: ServerResponse, hash: string): void {\n const record = store.get(hash)\n if (!record) {\n res.writeHead(404)\n res.end('Not Found')\n return\n }\n\n let stat\n try {\n stat = statSync(record.filePath)\n } catch (error) {\n const err = error as NodeJS.ErrnoException\n if (err.code === 'ENOENT') {\n store.remove(hash, { removeFile: false })\n res.writeHead(404)\n res.end('Not Found')\n } else {\n log.error({ error, hash }, 'Failed to stat asset file.')\n res.writeHead(500)\n res.end('Internal Server Error')\n }\n return\n }\n\n res.writeHead(200, {\n 'Content-Type': record.mimeType,\n 'Content-Length': stat.size.toString(),\n 'Cache-Control': 'public, max-age=31536000, immutable'\n })\n\n const stream = createReadStream(record.filePath)\n stream.on('error', (error) => {\n log.warn({ error, hash }, 'Failed to stream asset file.')\n if (!res.headersSent) {\n res.writeHead(500)\n }\n res.end('Internal Server Error')\n })\n stream.on('open', () => {\n store.touch(hash)\n })\n stream.pipe(res)\n }\n\n function handleUpload(req: 
IncomingMessage, res: ServerResponse, hash: string): void {\n if (!MCP_HASH_PATTERN.test(hash)) {\n res.writeHead(400)\n res.end('Invalid Hash Format')\n return\n }\n\n const mimeType = req.headers['content-type'] || 'application/octet-stream'\n const filePath = join(ASSET_DIR, hash)\n\n const width = parseInt(req.headers['x-asset-width'] as string, 10)\n const height = parseInt(req.headers['x-asset-height'] as string, 10)\n const metadata =\n !isNaN(width) && !isNaN(height) && width > 0 && height > 0 ? { width, height } : undefined\n\n // If asset already exists and file is present, skip write\n if (store.has(hash) && existsSync(filePath)) {\n // Drain request to ensure connection is clean\n req.resume()\n\n const existing = store.get(hash)!\n let changed = false\n if (metadata) {\n existing.metadata = metadata\n changed = true\n }\n if (existing.mimeType !== mimeType) {\n existing.mimeType = mimeType\n changed = true\n }\n if (changed) {\n store.upsert(existing)\n }\n store.touch(hash)\n res.writeHead(200)\n res.end('OK')\n return\n }\n\n const tmpPath = `${filePath}.tmp.${nanoid()}`\n const writeStream = createWriteStream(tmpPath)\n const hasher = createHash('sha256')\n let size = 0\n\n const cleanup = () => {\n if (existsSync(tmpPath)) {\n try {\n unlinkSync(tmpPath)\n } catch (e) {\n log.warn({ error: e, tmpPath }, 'Failed to cleanup temp file.')\n }\n }\n }\n\n const monitor = new Transform({\n transform(chunk, encoding, callback) {\n size += chunk.length\n if (size > maxAssetSizeBytes) {\n callback(new Error('PayloadTooLarge'))\n return\n }\n hasher.update(chunk)\n callback(null, chunk)\n }\n })\n\n pipeline(req, monitor, writeStream, (err) => {\n if (err) {\n cleanup()\n if (err.message === 'PayloadTooLarge') {\n res.writeHead(413)\n res.end('Payload Too Large')\n } else if (err.code === 'ERR_STREAM_PREMATURE_CLOSE') {\n log.warn({ hash }, 'Upload request closed prematurely.')\n } else {\n log.error({ error: err, hash }, 'Upload pipeline failed.')\n if 
(!res.headersSent) {\n res.writeHead(500)\n res.end('Internal Server Error')\n }\n }\n return\n }\n\n const computedHash = hasher.digest('hex')\n if (computedHash !== hash) {\n cleanup()\n res.writeHead(400)\n res.end('Hash Mismatch')\n return\n }\n\n try {\n renameSync(tmpPath, filePath)\n } catch (error) {\n log.error({ error, hash }, 'Failed to rename temp file to asset.')\n cleanup()\n res.writeHead(500)\n res.end('Internal Server Error')\n return\n }\n\n store.upsert({\n hash,\n filePath,\n mimeType,\n size,\n metadata\n })\n log.info({ hash, size }, 'Stored uploaded asset via HTTP.')\n res.writeHead(201)\n res.end('Created')\n })\n }\n\n return {\n start,\n stop,\n getBaseUrl\n }\n}\n"],"mappings":";;;;;;;;;;;;AAqBA,MAAM,gBAAgB;AACtB,MAAM,EAAE,sBAAsB,oBAAoB;AAQlD,SAAgB,sBAAsB,OAAoC;CACxE,MAAM,SAAS,aAAa,cAAc;CAC1C,IAAIA,OAAsB;CAE1B,eAAe,QAAuB;AACpC,MAAI,SAAS,KAAM;AACnB,QAAM,IAAI,SAAe,SAAS,WAAW;GAC3C,MAAM,WAAW,UAAiB;AAChC,WAAO,IAAI,aAAa,YAAY;AACpC,WAAO,MAAM;;GAEf,MAAM,oBAAoB;AACxB,WAAO,IAAI,SAAS,QAAQ;IAC5B,MAAM,UAAU,OAAO,SAAS;AAChC,QAAI,WAAW,OAAO,YAAY,UAAU;AAC1C,YAAO,QAAQ;AACf,cAAS;UAET,wBAAO,IAAI,MAAM,wCAAwC,CAAC;;AAG9D,UAAO,KAAK,SAAS,QAAQ;AAC7B,UAAO,KAAK,aAAa,YAAY;AACrC,UAAO,OAAO,GAAG,cAAc;IAC/B;AACF,MAAI,KAAK,EAAE,MAAM,EAAE,2BAA2B;;CAGhD,SAAS,OAAa;AACpB,MAAI,SAAS,KAAM;AACnB,SAAO,OAAO;AACd,SAAO;;CAGT,SAAS,aAAqB;AAC5B,MAAI,SAAS,KAAM,OAAM,IAAI,MAAM,oCAAoC;AACvE,SAAO,UAAU,cAAc,GAAG;;CAGpC,SAAS,cAAc,KAAsB,KAA2B;AACtE,MAAI,UAAU,+BAA+B,IAAI;AACjD,MAAI,UAAU,gCAAgC,qBAAqB;AACnE,MAAI,UAAU,gCAAgC,8CAA8C;AAE5F,MAAI,IAAI,WAAW,WAAW;AAC5B,OAAI,UAAU,IAAI;AAClB,OAAI,KAAK;AACT;;AAGF,MAAI,CAAC,IAAI,KAAK;AACZ,OAAI,UAAU,IAAI;AAClB,OAAI,IAAI,cAAc;AACtB;;EAIF,MAAM,WADM,IAAI,IAAI,IAAI,KAAK,YAAY,CAAC,CACrB,SAAS,MAAM,IAAI,CAAC,OAAO,QAAQ;AACxD,MAAI,SAAS,WAAW,KAAK,SAAS,OAAO,UAAU;AACrD,OAAI,UAAU,IAAI;AAClB,OAAI,IAAI,YAAY;AACpB;;EAGF,MAAM,OAAO,SAAS;AAEtB,MAAI,IAAI,WAAW,QAAQ;AACzB,gBAAa,KAAK,KAAK,KAAK;AAC5B;;AAGF,MAAI,IAAI,WAAW,OAAO;AACxB,kBAAe,KAAK,KAAK,KAAK;AAC9B;;AAGF,MAAI,UAAU,IAAI;AAC
lB,MAAI,IAAI,qBAAqB;;CAG/B,SAAS,eAAe,KAAsB,KAAqB,MAAoB;EACrF,MAAM,SAAS,MAAM,IAAI,KAAK;AAC9B,MAAI,CAAC,QAAQ;AACX,OAAI,UAAU,IAAI;AAClB,OAAI,IAAI,YAAY;AACpB;;EAGF,IAAI;AACJ,MAAI;AACF,UAAO,SAAS,OAAO,SAAS;WACzB,OAAO;AAEd,OADY,MACJ,SAAS,UAAU;AACzB,UAAM,OAAO,MAAM,EAAE,YAAY,OAAO,CAAC;AACzC,QAAI,UAAU,IAAI;AAClB,QAAI,IAAI,YAAY;UACf;AACL,QAAI,MAAM;KAAE;KAAO;KAAM,EAAE,6BAA6B;AACxD,QAAI,UAAU,IAAI;AAClB,QAAI,IAAI,wBAAwB;;AAElC;;AAGF,MAAI,UAAU,KAAK;GACjB,gBAAgB,OAAO;GACvB,kBAAkB,KAAK,KAAK,UAAU;GACtC,iBAAiB;GAClB,CAAC;EAEF,MAAM,SAAS,iBAAiB,OAAO,SAAS;AAChD,SAAO,GAAG,UAAU,UAAU;AAC5B,OAAI,KAAK;IAAE;IAAO;IAAM,EAAE,+BAA+B;AACzD,OAAI,CAAC,IAAI,YACP,KAAI,UAAU,IAAI;AAEpB,OAAI,IAAI,wBAAwB;IAChC;AACF,SAAO,GAAG,cAAc;AACtB,SAAM,MAAM,KAAK;IACjB;AACF,SAAO,KAAK,IAAI;;CAGlB,SAAS,aAAa,KAAsB,KAAqB,MAAoB;AACnF,MAAI,CAAC,iBAAiB,KAAK,KAAK,EAAE;AAChC,OAAI,UAAU,IAAI;AAClB,OAAI,IAAI,sBAAsB;AAC9B;;EAGF,MAAM,WAAW,IAAI,QAAQ,mBAAmB;EAChD,MAAM,WAAW,KAAK,WAAW,KAAK;EAEtC,MAAM,QAAQ,SAAS,IAAI,QAAQ,kBAA4B,GAAG;EAClE,MAAM,SAAS,SAAS,IAAI,QAAQ,mBAA6B,GAAG;EACpE,MAAM,WACJ,CAAC,MAAM,MAAM,IAAI,CAAC,MAAM,OAAO,IAAI,QAAQ,KAAK,SAAS,IAAI;GAAE;GAAO;GAAQ,GAAG;AAGnF,MAAI,MAAM,IAAI,KAAK,IAAI,WAAW,SAAS,EAAE;AAE3C,OAAI,QAAQ;GAEZ,MAAM,WAAW,MAAM,IAAI,KAAK;GAChC,IAAI,UAAU;AACd,OAAI,UAAU;AACZ,aAAS,WAAW;AACpB,cAAU;;AAEZ,OAAI,SAAS,aAAa,UAAU;AAClC,aAAS,WAAW;AACpB,cAAU;;AAEZ,OAAI,QACF,OAAM,OAAO,SAAS;AAExB,SAAM,MAAM,KAAK;AACjB,OAAI,UAAU,IAAI;AAClB,OAAI,IAAI,KAAK;AACb;;EAGF,MAAM,UAAU,GAAG,SAAS,OAAO,QAAQ;EAC3C,MAAM,cAAc,kBAAkB,QAAQ;EAC9C,MAAM,SAAS,WAAW,SAAS;EACnC,IAAI,OAAO;EAEX,MAAM,gBAAgB;AACpB,OAAI,WAAW,QAAQ,CACrB,KAAI;AACF,eAAW,QAAQ;YACZ,GAAG;AACV,QAAI,KAAK;KAAE,OAAO;KAAG;KAAS,EAAE,+BAA+B;;;AAiBrE,WAAS,KAZO,IAAI,UAAU,EAC5B,UAAU,OAAO,UAAU,UAAU;AACnC,WAAQ,MAAM;AACd,OAAI,OAAO,mBAAmB;AAC5B,6BAAS,IAAI,MAAM,kBAAkB,CAAC;AACtC;;AAEF,UAAO,OAAO,MAAM;AACpB,YAAS,MAAM,MAAM;KAExB,CAAC,EAEqB,cAAc,QAAQ;AAC3C,OAAI,KAAK;AACP,aAAS;AACT,QAAI,IAAI,YAAY,mBAAmB;AACrC,SAAI,UAAU,IAAI;AAClB,SAAI,IAAI,oBAAoB;eACnB,IAAI,SAAS,6BACtB,KAAI,KAAK,EAAE,MAAM,EAAE,qCAAqC;SAC
nD;AACL,SAAI,MAAM;MAAE,OAAO;MAAK;MAAM,EAAE,0BAA0B;AAC1D,SAAI,CAAC,IAAI,aAAa;AACpB,UAAI,UAAU,IAAI;AAClB,UAAI,IAAI,wBAAwB;;;AAGpC;;AAIF,OADqB,OAAO,OAAO,MAAM,KACpB,MAAM;AACzB,aAAS;AACT,QAAI,UAAU,IAAI;AAClB,QAAI,IAAI,gBAAgB;AACxB;;AAGF,OAAI;AACF,eAAW,SAAS,SAAS;YACtB,OAAO;AACd,QAAI,MAAM;KAAE;KAAO;KAAM,EAAE,uCAAuC;AAClE,aAAS;AACT,QAAI,UAAU,IAAI;AAClB,QAAI,IAAI,wBAAwB;AAChC;;AAGF,SAAM,OAAO;IACX;IACA;IACA;IACA;IACA;IACD,CAAC;AACF,OAAI,KAAK;IAAE;IAAM;IAAM,EAAE,kCAAkC;AAC3D,OAAI,UAAU,IAAI;AAClB,OAAI,IAAI,UAAU;IAClB;;AAGJ,QAAO;EACL;EACA;EACA;EACD"}
|
|
@@ -0,0 +1,167 @@
|
|
|
1
|
+
import { ASSET_DIR, ensureDir, ensureFile, log } from "./shared.mjs";
|
|
2
|
+
import { join } from "node:path";
|
|
3
|
+
import { existsSync, readFileSync, readdirSync, rmSync, statSync, writeFileSync } from "node:fs";
|
|
4
|
+
import { MCP_HASH_PATTERN } from "@tempad-dev/mcp-shared";
|
|
5
|
+
|
|
6
|
+
//#region src/asset-store.ts

// Name of the JSON catalog file that persists asset records across runs.
const INDEX_FILENAME = "assets.json";
// The catalog lives alongside the asset files unless overridden via options.
const DEFAULT_INDEX_PATH = join(ASSET_DIR, INDEX_FILENAME);
|
|
9
|
+
// Loads the asset catalog from disk. Any problem — missing file, empty file,
// unparsable JSON, or JSON that is not an array — yields an empty catalog
// (corruption is logged, then the store simply starts fresh).
function readIndex(indexPath) {
  if (!existsSync(indexPath)) return [];
  try {
    const contents = readFileSync(indexPath, "utf8").trim();
    if (contents) {
      const decoded = JSON.parse(contents);
      if (Array.isArray(decoded)) return decoded;
    }
    return [];
  } catch (error) {
    log.warn({
      error,
      indexPath
    }, "Failed to read asset catalog; starting fresh.");
    return [];
  }
}
|
|
24
|
+
// Serializes the full record list as pretty-printed JSON and writes it to the
// catalog path in one synchronous call.
function writeIndex(indexPath, values) {
  const payload = JSON.stringify(values, null, 2);
  writeFileSync(indexPath, payload, "utf8");
}
|
|
27
|
+
/**
 * Creates the asset catalog: an in-memory Map of hash -> record, persisted to
 * a JSON index file with debounced writes. On construction it loads the
 * existing index and reconciles it against the files actually on disk.
 *
 * @param options optional `{ indexPath }` override for the catalog file
 * @returns store API: list/has/get/getMany/upsert/touch/remove/reconcile/flush
 */
function createAssetStore(options = {}) {
  ensureDir(ASSET_DIR);
  const indexPath = options.indexPath ?? DEFAULT_INDEX_PATH;
  ensureFile(indexPath);
  // hash -> record; the authoritative in-memory view of the catalog.
  const records = /* @__PURE__ */ new Map();
  // Debounce handle for deferred writes of the index file.
  let persistTimer = null;
  // Populate `records` from the on-disk index, skipping malformed entries.
  function loadExisting() {
    const list$1 = readIndex(indexPath);
    for (const record of list$1) if (record?.hash && record?.filePath) records.set(record.hash, record);
  }
  // Schedule a debounced index write (5 s); unref'd so a pending write does
  // not keep the process alive.
  function persist() {
    if (persistTimer) return;
    persistTimer = setTimeout(() => {
      persistTimer = null;
      writeIndex(indexPath, [...records.values()]);
    }, 5e3);
    if (typeof persistTimer.unref === "function") persistTimer.unref();
  }
  // Write the index immediately, cancelling any pending debounced write.
  function flush() {
    if (persistTimer) {
      clearTimeout(persistTimer);
      persistTimer = null;
    }
    writeIndex(indexPath, [...records.values()]);
  }
  // Snapshot of all records.
  function list() {
    return [...records.values()];
  }
  function has(hash) {
    return records.has(hash);
  }
  function get(hash) {
    return records.get(hash);
  }
  // Resolve a batch of hashes, silently dropping unknown ones.
  function getMany(hashes) {
    return hashes.map((hash) => records.get(hash)).filter((record) => !!record);
  }
  // Insert or replace a record, defaulting uploadedAt/lastAccess to now.
  function upsert(input) {
    const now = Date.now();
    const record = {
      ...input,
      uploadedAt: input.uploadedAt ?? now,
      lastAccess: input.lastAccess ?? now
    };
    records.set(record.hash, record);
    persist();
    return record;
  }
  // Bump lastAccess for an existing record; no-op for unknown hashes.
  function touch(hash) {
    const existing = records.get(hash);
    if (!existing) return void 0;
    existing.lastAccess = Date.now();
    persist();
    return existing;
  }
  // Delete a record and (by default) its backing file; file removal failures
  // are logged but do not fail the delete.
  function remove(hash, { removeFile = true } = {}) {
    const record = records.get(hash);
    if (!record) return;
    records.delete(hash);
    persist();
    if (removeFile) try {
      rmSync(record.filePath, { force: true });
    } catch (error) {
      log.warn({
        hash,
        error
      }, "Failed to remove asset file on delete.");
    }
  }
  // Bring the catalog and the asset directory back in sync:
  //  - drop records whose file is gone,
  //  - delete temp files older than one hour,
  //  - adopt hash-named files on disk that have no record ("orphans").
  function reconcile() {
    let changed = false;
    for (const [hash, record] of records) if (!existsSync(record.filePath)) {
      records.delete(hash);
      changed = true;
    }
    try {
      const files = readdirSync(ASSET_DIR);
      const now = Date.now();
      for (const file of files) {
        if (file === INDEX_FILENAME) continue;
        if (file.includes(".tmp.")) {
          try {
            const filePath = join(ASSET_DIR, file);
            // Temp files untouched for over an hour are abandoned uploads.
            if (now - statSync(filePath).mtimeMs > 3600 * 1e3) {
              rmSync(filePath, { force: true });
              log.info({ file }, "Cleaned up stale temp file.");
            }
          } catch (e) {
            log.debug({
              error: e,
              file
            }, "Failed to cleanup stale temp file.");
          }
          continue;
        }
        if (!MCP_HASH_PATTERN.test(file)) continue;
        if (!records.has(file)) {
          const filePath = join(ASSET_DIR, file);
          try {
            const stat = statSync(filePath);
            // Re-register the orphan with a generic MIME type and timestamps
            // recovered from the filesystem.
            records.set(file, {
              hash: file,
              filePath,
              mimeType: "application/octet-stream",
              size: stat.size,
              uploadedAt: stat.birthtimeMs,
              lastAccess: stat.atimeMs
            });
            changed = true;
            log.info({ hash: file }, "Recovered orphan asset file.");
          } catch (e) {
            log.warn({
              error: e,
              file
            }, "Failed to stat orphan file.");
          }
        }
      }
    } catch (error) {
      log.warn({ error }, "Failed to scan asset directory for orphans.");
    }
    if (changed) flush();
  }
  loadExisting();
  reconcile();
  return {
    list,
    has,
    get,
    getMany,
    upsert,
    touch,
    remove,
    reconcile,
    flush
  };
}

//#endregion
export { createAssetStore };
//# sourceMappingURL=asset-store.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"asset-store.mjs","names":["persistTimer: NodeJS.Timeout | null","list","record: AssetRecord"],"sources":["../src/asset-store.ts"],"sourcesContent":["import { MCP_HASH_PATTERN } from '@tempad-dev/mcp-shared'\nimport { existsSync, readFileSync, rmSync, writeFileSync, readdirSync, statSync } from 'node:fs'\nimport { join } from 'node:path'\n\nimport type { AssetRecord } from './types'\n\nimport { ASSET_DIR, ensureDir, ensureFile, log } from './shared'\n\nconst INDEX_FILENAME = 'assets.json'\nconst DEFAULT_INDEX_PATH = join(ASSET_DIR, INDEX_FILENAME)\n\nexport interface AssetStoreOptions {\n indexPath?: string\n}\n\nexport interface AssetStore {\n list(): AssetRecord[]\n has(hash: string): boolean\n get(hash: string): AssetRecord | undefined\n getMany(hashes: string[]): AssetRecord[]\n upsert(\n input: Omit<AssetRecord, 'uploadedAt' | 'lastAccess'> &\n Partial<Pick<AssetRecord, 'uploadedAt' | 'lastAccess'>>\n ): AssetRecord\n touch(hash: string): AssetRecord | undefined\n remove(hash: string, opts?: { removeFile?: boolean }): void\n reconcile(): void\n flush(): void\n}\n\nfunction readIndex(indexPath: string): AssetRecord[] {\n if (!existsSync(indexPath)) return []\n try {\n const raw = readFileSync(indexPath, 'utf8').trim()\n if (!raw) return []\n const parsed = JSON.parse(raw)\n return Array.isArray(parsed) ? (parsed as AssetRecord[]) : []\n } catch (error) {\n log.warn({ error, indexPath }, 'Failed to read asset catalog; starting fresh.')\n return []\n }\n}\n\nfunction writeIndex(indexPath: string, values: AssetRecord[]): void {\n const payload = JSON.stringify(values, null, 2)\n writeFileSync(indexPath, payload, 'utf8')\n}\n\nexport function createAssetStore(options: AssetStoreOptions = {}): AssetStore {\n ensureDir(ASSET_DIR)\n const indexPath = options.indexPath ?? 
DEFAULT_INDEX_PATH\n ensureFile(indexPath)\n const records = new Map<string, AssetRecord>()\n let persistTimer: NodeJS.Timeout | null = null\n\n function loadExisting(): void {\n const list = readIndex(indexPath)\n for (const record of list) {\n if (record?.hash && record?.filePath) {\n records.set(record.hash, record)\n }\n }\n }\n\n function persist(): void {\n if (persistTimer) return\n persistTimer = setTimeout(() => {\n persistTimer = null\n writeIndex(indexPath, [...records.values()])\n }, 5000)\n if (typeof persistTimer.unref === 'function') {\n persistTimer.unref()\n }\n }\n\n function flush(): void {\n if (persistTimer) {\n clearTimeout(persistTimer)\n persistTimer = null\n }\n writeIndex(indexPath, [...records.values()])\n }\n\n function list(): AssetRecord[] {\n return [...records.values()]\n }\n\n function has(hash: string): boolean {\n return records.has(hash)\n }\n\n function get(hash: string): AssetRecord | undefined {\n return records.get(hash)\n }\n\n function getMany(hashes: string[]): AssetRecord[] {\n return hashes\n .map((hash) => records.get(hash))\n .filter((record): record is AssetRecord => !!record)\n }\n\n function upsert(\n input: Omit<AssetRecord, 'uploadedAt' | 'lastAccess'> &\n Partial<Pick<AssetRecord, 'uploadedAt' | 'lastAccess'>>\n ): AssetRecord {\n const now = Date.now()\n const record: AssetRecord = {\n ...input,\n uploadedAt: input.uploadedAt ?? now,\n lastAccess: input.lastAccess ?? 
now\n }\n records.set(record.hash, record)\n persist()\n return record\n }\n\n function touch(hash: string): AssetRecord | undefined {\n const existing = records.get(hash)\n if (!existing) return undefined\n existing.lastAccess = Date.now()\n persist()\n return existing\n }\n\n function remove(hash: string, { removeFile = true } = {}): void {\n const record = records.get(hash)\n if (!record) return\n records.delete(hash)\n persist()\n\n if (removeFile) {\n try {\n rmSync(record.filePath, { force: true })\n } catch (error) {\n log.warn({ hash, error }, 'Failed to remove asset file on delete.')\n }\n }\n }\n\n function reconcile(): void {\n let changed = false\n for (const [hash, record] of records) {\n if (!existsSync(record.filePath)) {\n records.delete(hash)\n changed = true\n }\n }\n\n try {\n const files = readdirSync(ASSET_DIR)\n const now = Date.now()\n for (const file of files) {\n if (file === INDEX_FILENAME) continue\n\n // Cleanup stale tmp files (> 1 hour)\n if (file.includes('.tmp.')) {\n try {\n const filePath = join(ASSET_DIR, file)\n const stat = statSync(filePath)\n if (now - stat.mtimeMs > 3600 * 1000) {\n rmSync(filePath, { force: true })\n log.info({ file }, 'Cleaned up stale temp file.')\n }\n } catch (e) {\n // Ignore errors during cleanup\n log.debug({ error: e, file }, 'Failed to cleanup stale temp file.')\n }\n continue\n }\n\n if (!MCP_HASH_PATTERN.test(file)) continue\n\n if (!records.has(file)) {\n const filePath = join(ASSET_DIR, file)\n try {\n const stat = statSync(filePath)\n records.set(file, {\n hash: file,\n filePath,\n mimeType: 'application/octet-stream',\n size: stat.size,\n uploadedAt: stat.birthtimeMs,\n lastAccess: stat.atimeMs\n })\n changed = true\n log.info({ hash: file }, 'Recovered orphan asset file.')\n } catch (e) {\n log.warn({ error: e, file }, 'Failed to stat orphan file.')\n }\n }\n }\n } catch (error) {\n log.warn({ error }, 'Failed to scan asset directory for orphans.')\n }\n\n if (changed) flush()\n }\n\n 
loadExisting()\n reconcile()\n\n return {\n list,\n has,\n get,\n getMany,\n upsert,\n touch,\n remove,\n reconcile,\n flush\n }\n}\n"],"mappings":";;;;;;AAQA,MAAM,iBAAiB;AACvB,MAAM,qBAAqB,KAAK,WAAW,eAAe;AAqB1D,SAAS,UAAU,WAAkC;AACnD,KAAI,CAAC,WAAW,UAAU,CAAE,QAAO,EAAE;AACrC,KAAI;EACF,MAAM,MAAM,aAAa,WAAW,OAAO,CAAC,MAAM;AAClD,MAAI,CAAC,IAAK,QAAO,EAAE;EACnB,MAAM,SAAS,KAAK,MAAM,IAAI;AAC9B,SAAO,MAAM,QAAQ,OAAO,GAAI,SAA2B,EAAE;UACtD,OAAO;AACd,MAAI,KAAK;GAAE;GAAO;GAAW,EAAE,gDAAgD;AAC/E,SAAO,EAAE;;;AAIb,SAAS,WAAW,WAAmB,QAA6B;AAElE,eAAc,WADE,KAAK,UAAU,QAAQ,MAAM,EAAE,EACb,OAAO;;AAG3C,SAAgB,iBAAiB,UAA6B,EAAE,EAAc;AAC5E,WAAU,UAAU;CACpB,MAAM,YAAY,QAAQ,aAAa;AACvC,YAAW,UAAU;CACrB,MAAM,0BAAU,IAAI,KAA0B;CAC9C,IAAIA,eAAsC;CAE1C,SAAS,eAAqB;EAC5B,MAAMC,SAAO,UAAU,UAAU;AACjC,OAAK,MAAM,UAAUA,OACnB,KAAI,QAAQ,QAAQ,QAAQ,SAC1B,SAAQ,IAAI,OAAO,MAAM,OAAO;;CAKtC,SAAS,UAAgB;AACvB,MAAI,aAAc;AAClB,iBAAe,iBAAiB;AAC9B,kBAAe;AACf,cAAW,WAAW,CAAC,GAAG,QAAQ,QAAQ,CAAC,CAAC;KAC3C,IAAK;AACR,MAAI,OAAO,aAAa,UAAU,WAChC,cAAa,OAAO;;CAIxB,SAAS,QAAc;AACrB,MAAI,cAAc;AAChB,gBAAa,aAAa;AAC1B,kBAAe;;AAEjB,aAAW,WAAW,CAAC,GAAG,QAAQ,QAAQ,CAAC,CAAC;;CAG9C,SAAS,OAAsB;AAC7B,SAAO,CAAC,GAAG,QAAQ,QAAQ,CAAC;;CAG9B,SAAS,IAAI,MAAuB;AAClC,SAAO,QAAQ,IAAI,KAAK;;CAG1B,SAAS,IAAI,MAAuC;AAClD,SAAO,QAAQ,IAAI,KAAK;;CAG1B,SAAS,QAAQ,QAAiC;AAChD,SAAO,OACJ,KAAK,SAAS,QAAQ,IAAI,KAAK,CAAC,CAChC,QAAQ,WAAkC,CAAC,CAAC,OAAO;;CAGxD,SAAS,OACP,OAEa;EACb,MAAM,MAAM,KAAK,KAAK;EACtB,MAAMC,SAAsB;GAC1B,GAAG;GACH,YAAY,MAAM,cAAc;GAChC,YAAY,MAAM,cAAc;GACjC;AACD,UAAQ,IAAI,OAAO,MAAM,OAAO;AAChC,WAAS;AACT,SAAO;;CAGT,SAAS,MAAM,MAAuC;EACpD,MAAM,WAAW,QAAQ,IAAI,KAAK;AAClC,MAAI,CAAC,SAAU,QAAO;AACtB,WAAS,aAAa,KAAK,KAAK;AAChC,WAAS;AACT,SAAO;;CAGT,SAAS,OAAO,MAAc,EAAE,aAAa,SAAS,EAAE,EAAQ;EAC9D,MAAM,SAAS,QAAQ,IAAI,KAAK;AAChC,MAAI,CAAC,OAAQ;AACb,UAAQ,OAAO,KAAK;AACpB,WAAS;AAET,MAAI,WACF,KAAI;AACF,UAAO,OAAO,UAAU,EAAE,OAAO,MAAM,CAAC;WACjC,OAAO;AACd,OAAI,KAAK;IAAE;IAAM;IAAO,EAAE,yCAAyC;;;CAKzE,SAAS,YAAkB;EACzB,IAAI,UAAU;AACd,OAAK,MAAM,CAAC,MAAM,WAAW,QAC3B,KAAI,CAAC,WAAW,OAAO,SAAS,EAA
E;AAChC,WAAQ,OAAO,KAAK;AACpB,aAAU;;AAId,MAAI;GACF,MAAM,QAAQ,YAAY,UAAU;GACpC,MAAM,MAAM,KAAK,KAAK;AACtB,QAAK,MAAM,QAAQ,OAAO;AACxB,QAAI,SAAS,eAAgB;AAG7B,QAAI,KAAK,SAAS,QAAQ,EAAE;AAC1B,SAAI;MACF,MAAM,WAAW,KAAK,WAAW,KAAK;AAEtC,UAAI,MADS,SAAS,SAAS,CAChB,UAAU,OAAO,KAAM;AACpC,cAAO,UAAU,EAAE,OAAO,MAAM,CAAC;AACjC,WAAI,KAAK,EAAE,MAAM,EAAE,8BAA8B;;cAE5C,GAAG;AAEV,UAAI,MAAM;OAAE,OAAO;OAAG;OAAM,EAAE,qCAAqC;;AAErE;;AAGF,QAAI,CAAC,iBAAiB,KAAK,KAAK,CAAE;AAElC,QAAI,CAAC,QAAQ,IAAI,KAAK,EAAE;KACtB,MAAM,WAAW,KAAK,WAAW,KAAK;AACtC,SAAI;MACF,MAAM,OAAO,SAAS,SAAS;AAC/B,cAAQ,IAAI,MAAM;OAChB,MAAM;OACN;OACA,UAAU;OACV,MAAM,KAAK;OACX,YAAY,KAAK;OACjB,YAAY,KAAK;OAClB,CAAC;AACF,gBAAU;AACV,UAAI,KAAK,EAAE,MAAM,MAAM,EAAE,+BAA+B;cACjD,GAAG;AACV,UAAI,KAAK;OAAE,OAAO;OAAG;OAAM,EAAE,8BAA8B;;;;WAI1D,OAAO;AACd,OAAI,KAAK,EAAE,OAAO,EAAE,8CAA8C;;AAGpE,MAAI,QAAS,QAAO;;AAGtB,eAAc;AACd,YAAW;AAEX,QAAO;EACL;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACD"}
|
package/dist/cli.mjs
ADDED
|
@@ -0,0 +1,147 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { LOCK_PATH, PACKAGE_VERSION, RUNTIME_DIR, SOCK_PATH, ensureDir, log } from "./shared.mjs";
|
|
3
|
+
import { spawn } from "node:child_process";
|
|
4
|
+
import { connect } from "node:net";
|
|
5
|
+
import { join } from "node:path";
|
|
6
|
+
import { fileURLToPath } from "node:url";
|
|
7
|
+
import lockfile from "proper-lockfile";
|
|
8
|
+
|
|
9
|
+
//#region src/cli.ts
|
|
10
|
+
// The socket currently bridged to the Hub, if any.
let activeSocket = null;
// Latch so shutdown runs at most once even if several signals arrive.
let shuttingDown = false;

/**
 * Best-effort teardown of the active Hub socket. Idempotent: a missing
 * socket is a no-op, and errors from an already-closed socket are ignored.
 */
function closeActiveSocket() {
  const socket = activeSocket;
  if (socket === null) return;
  try {
    socket.end();
  } catch {
    // socket may already be half-closed; ignore
  }
  try {
    socket.destroy();
  } catch {
    // destroy on a dead socket is harmless; ignore
  }
  activeSocket = null;
}

/**
 * Terminate the CLI process: log the reason, close the bridge socket,
 * and exit with status 0. Re-entrant calls are ignored.
 */
function shutdownCli(reason) {
  if (shuttingDown) return;
  shuttingDown = true;
  log.info(`${reason} Shutting down CLI.`);
  closeActiveSocket();
  process.exit(0);
}
|
|
29
|
+
// Graceful-exit handlers: both signals tear down the bridge socket first.
process.on("SIGINT", () => shutdownCli("SIGINT received."));
process.on("SIGTERM", () => shutdownCli("SIGTERM received."));
// How long to wait for a freshly spawned Hub to accept a connection.
const HUB_STARTUP_TIMEOUT = 5e3;
// Initial retry delay; grows by 1.5x per attempt, capped at 1s (see connectWithRetry).
const CONNECT_RETRY_DELAY = 200;
// Pause before restarting the whole connect sequence after a hard failure.
const FAILED_RESTART_DELAY = 5e3;
// Hub entry point resolved relative to this module.
// FIX: this release ships the Hub as "hub.mjs" (dist/hub.js was removed in
// 0.3.5 per the package file listing), so spawn the .mjs entry — the old
// "hub.js" path would ENOENT and every startup would time out.
const HUB_ENTRY = join(fileURLToPath(new URL(".", import.meta.url)), "hub.mjs");
ensureDir(RUNTIME_DIR);
|
|
36
|
+
/**
 * Forward stdio between this CLI and an established Hub socket.
 * Resolves (never rejects) when the socket closes, signalling the caller
 * (main loop) to attempt a reconnect. Ends the whole process instead if
 * the consumer closes our stdin.
 */
function bridge(socket) {
  return new Promise((resolve) => {
    log.info("Bridge established with Hub. Forwarding I/O.");
    // Track the socket so signal handlers can tear it down (shutdownCli).
    activeSocket = socket;
    const onStdinEnd = () => {
      // Consumer went away: exit the CLI entirely rather than reconnect.
      shutdownCli("Consumer stream ended.");
    };
    process.stdin.once("end", onStdinEnd);
    const onSocketClose = () => {
      log.warn("Connection to Hub lost. Attempting to reconnect...");
      activeSocket = null;
      // Detach everything wired up below so a future bridge() starts clean:
      // the stdin 'end' handler, both pipe directions, and all socket listeners.
      process.stdin.removeListener("end", onStdinEnd);
      process.stdin.unpipe(socket);
      socket.unpipe(process.stdout);
      socket.removeAllListeners();
      resolve();
    };
    socket.once("close", onSocketClose);
    // Socket errors are logged only; the subsequent 'close' drives recovery.
    socket.on("error", (err) => log.warn({ err }, "Socket error occurred."));
    // `{ end: false }` keeps the socket open if stdin ends (shutdown is
    // handled explicitly above). pipe() returns its destination, so this
    // chains stdin -> socket and socket -> stdout.
    process.stdin.pipe(socket, { end: false }).pipe(process.stdout);
  });
}
|
|
58
|
+
/**
 * Attempt a single connection to the Hub's unix socket.
 * Resolves with the connected socket; rejects with the connection error.
 * On success the temporary 'error' listener is removed so later socket
 * errors are handled by the bridge, not this promise.
 */
function connectHub() {
  return new Promise((resolve, reject) => {
    const socket = connect(SOCK_PATH);
    const onConnected = () => {
      socket.removeAllListeners("error");
      resolve(socket);
    };
    socket.on("connect", onConnected);
    socket.on("error", reject);
  });
}
|
|
68
|
+
/**
 * Repeatedly try to connect to the Hub until `timeout` ms have elapsed.
 * Retries only on ENOENT / ECONNREFUSED (socket not created yet or Hub
 * not listening yet) with exponential backoff (x1.5, capped at 1s);
 * any other error is rethrown immediately.
 * @throws Error when the deadline passes without a connection.
 */
async function connectWithRetry(timeout) {
  const deadline = Date.now() + timeout;
  let backoff = CONNECT_RETRY_DELAY;
  while (Date.now() < deadline) {
    try {
      return await connectHub();
    } catch (err) {
      const retriable =
        err != null &&
        typeof err === "object" &&
        "code" in err &&
        (err.code === "ENOENT" || err.code === "ECONNREFUSED");
      if (!retriable) throw err;
      // Never sleep past the deadline.
      const wait = Math.min(backoff, deadline - Date.now());
      if (wait <= 0) break;
      await new Promise((wake) => setTimeout(wake, wait));
      backoff = Math.min(backoff * 1.5, 1e3);
    }
  }
  throw new Error(`Failed to connect to Hub within ${timeout}ms.`);
}
|
|
84
|
+
/**
 * Spawn the Hub as a detached background process with no inherited stdio,
 * so it outlives this CLI. The caller unrefs it once connected.
 */
function startHub() {
  log.info("Spawning new Hub process...");
  const spawnOptions = {
    detached: true,
    stdio: "ignore"
  };
  return spawn(process.execPath, [HUB_ENTRY], spawnOptions);
}
|
|
91
|
+
/**
 * Single-leader Hub startup. Exactly one CLI process should spawn the Hub;
 * contenders take a file lock. The loser waits and connects; the winner
 * double-checks for an existing Hub, spawns one if needed, connects, and
 * always releases the lock.
 * @returns a connected Hub socket
 * @throws if the Hub cannot be started or connected to within the timeout
 */
async function tryBecomeLeaderAndStartHub() {
  let releaseLock = null;
  try {
    releaseLock = await lockfile.lock(LOCK_PATH, {
      retries: {
        retries: 5,
        factor: 1.2,
        minTimeout: 50
      },
      // Locks older than 15s are considered stale (holder presumed dead).
      stale: 15e3
    });
  } catch {
    // Lost the race: another process holds the lock and is starting the Hub.
    log.info("Another process is starting the Hub. Waiting...");
    return connectWithRetry(HUB_STARTUP_TIMEOUT);
  }
  log.info("Acquired lock. Starting Hub as the leader...");
  let child = null;
  try {
    // Double-check: the Hub may have come up between our first failed
    // connect and acquiring the lock.
    try {
      return await connectHub();
    } catch {
      log.info("Hub not running. Proceeding to start it...");
    }
    child = startHub();
    child.on("error", (err) => log.error({ err }, "Hub child process error."));
    const socket = await connectWithRetry(HUB_STARTUP_TIMEOUT);
    // Connected: let the detached Hub keep running after we exit.
    child.unref();
    return socket;
  } catch (err) {
    log.error({ err }, "Failed to start or connect to the Hub.");
    // Don't leave a half-started Hub behind if we never managed to connect.
    if (child && !child.killed) {
      log.warn(`Killing stale Hub process (PID: ${child.pid})...`);
      child.kill("SIGTERM");
    }
    throw err;
  } finally {
    // Release the leader lock on every path so other CLIs aren't blocked.
    if (releaseLock) await releaseLock();
  }
}
|
|
130
|
+
/**
 * CLI entry loop: connect to the Hub (starting it via leader election if it
 * isn't running), bridge stdio until the connection drops, then reconnect.
 * Hard failures pause FAILED_RESTART_DELAY ms before the next attempt.
 * Runs forever; exit happens via shutdownCli (signals / stdin end).
 */
async function main() {
  log.info({ version: PACKAGE_VERSION }, "TemPad MCP Client starting...");
  for (;;) {
    try {
      const hubSocket = await connectHub().catch(() => {
        log.info("Hub not running. Initiating startup sequence...");
        return tryBecomeLeaderAndStartHub();
      });
      await bridge(hubSocket);
      log.info("Bridge disconnected. Restarting connection process...");
    } catch (err) {
      log.error({ err }, `Connection attempt failed. Retrying in ${FAILED_RESTART_DELAY / 1e3}s...`);
      await new Promise((wake) => setTimeout(wake, FAILED_RESTART_DELAY));
    }
  }
}
main();
|
|
144
|
+
|
|
145
|
+
//#endregion
|
|
146
|
+
export { };
|
|
147
|
+
//# sourceMappingURL=cli.mjs.map
|
package/dist/cli.mjs.map
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"cli.mjs","names":["activeSocket: Socket | null","err: unknown","releaseLock: (() => Promise<void>) | null","child: ChildProcess | null"],"sources":["../src/cli.ts"],"sourcesContent":["#!/usr/bin/env node\n\nimport type { ChildProcess } from 'node:child_process'\nimport type { Socket } from 'node:net'\n\nimport { spawn } from 'node:child_process'\nimport { connect } from 'node:net'\nimport { join } from 'node:path'\nimport { fileURLToPath } from 'node:url'\nimport lockfile from 'proper-lockfile'\n\nimport { PACKAGE_VERSION, log, LOCK_PATH, RUNTIME_DIR, SOCK_PATH, ensureDir } from './shared'\n\nlet activeSocket: Socket | null = null\nlet shuttingDown = false\n\nfunction closeActiveSocket() {\n if (!activeSocket) return\n try {\n activeSocket.end()\n } catch {\n // ignore\n }\n try {\n activeSocket.destroy()\n } catch {\n // ignore\n }\n activeSocket = null\n}\n\nfunction shutdownCli(reason: string) {\n if (shuttingDown) return\n shuttingDown = true\n log.info(`${reason} Shutting down CLI.`)\n closeActiveSocket()\n process.exit(0)\n}\n\nprocess.on('SIGINT', () => shutdownCli('SIGINT received.'))\nprocess.on('SIGTERM', () => shutdownCli('SIGTERM received.'))\n\nconst HUB_STARTUP_TIMEOUT = 5000\nconst CONNECT_RETRY_DELAY = 200\nconst FAILED_RESTART_DELAY = 5000\nconst HERE = fileURLToPath(new URL('.', import.meta.url))\nconst HUB_ENTRY = join(HERE, 'hub.js')\n\nensureDir(RUNTIME_DIR)\n\nfunction bridge(socket: Socket): Promise<void> {\n return new Promise((resolve) => {\n log.info('Bridge established with Hub. Forwarding I/O.')\n activeSocket = socket\n\n const onStdinEnd = () => {\n shutdownCli('Consumer stream ended.')\n }\n process.stdin.once('end', onStdinEnd)\n\n const onSocketClose = () => {\n log.warn('Connection to Hub lost. 
Attempting to reconnect...')\n activeSocket = null\n process.stdin.removeListener('end', onStdinEnd)\n process.stdin.unpipe(socket)\n socket.unpipe(process.stdout)\n socket.removeAllListeners()\n resolve()\n }\n socket.once('close', onSocketClose)\n socket.on('error', (err) => log.warn({ err }, 'Socket error occurred.'))\n\n // The `{ end: false }` option prevents stdin from closing the socket.\n process.stdin.pipe(socket, { end: false }).pipe(process.stdout)\n })\n}\n\nfunction connectHub(): Promise<Socket> {\n return new Promise((resolve, reject) => {\n const socket = connect(SOCK_PATH)\n socket.on('connect', () => {\n socket.removeAllListeners('error')\n resolve(socket)\n })\n socket.on('error', reject)\n })\n}\n\nasync function connectWithRetry(timeout: number): Promise<Socket> {\n const startTime = Date.now()\n let delay = CONNECT_RETRY_DELAY\n while (Date.now() - startTime < timeout) {\n try {\n return await connectHub()\n } catch (err: unknown) {\n if (\n err &&\n typeof err === 'object' &&\n 'code' in err &&\n (err.code === 'ENOENT' || err.code === 'ECONNREFUSED')\n ) {\n const remainingTime = timeout - (Date.now() - startTime)\n const waitTime = Math.min(delay, remainingTime)\n if (waitTime <= 0) break\n await new Promise((r) => setTimeout(r, waitTime))\n delay = Math.min(delay * 1.5, 1000)\n } else {\n throw err\n }\n }\n }\n throw new Error(`Failed to connect to Hub within ${timeout}ms.`)\n}\n\nfunction startHub(): ChildProcess {\n log.info('Spawning new Hub process...')\n return spawn(process.execPath, [HUB_ENTRY], {\n detached: true,\n stdio: 'ignore'\n })\n}\n\nasync function tryBecomeLeaderAndStartHub(): Promise<Socket> {\n let releaseLock: (() => Promise<void>) | null = null\n try {\n releaseLock = await lockfile.lock(LOCK_PATH, {\n retries: { retries: 5, factor: 1.2, minTimeout: 50 },\n stale: 15000\n })\n } catch {\n log.info('Another process is starting the Hub. 
Waiting...')\n return connectWithRetry(HUB_STARTUP_TIMEOUT)\n }\n\n log.info('Acquired lock. Starting Hub as the leader...')\n let child: ChildProcess | null = null\n try {\n try {\n return await connectHub()\n } catch {\n // If the Hub is not running, we proceed to start it.\n log.info('Hub not running. Proceeding to start it...')\n }\n child = startHub()\n child.on('error', (err) => log.error({ err }, 'Hub child process error.'))\n const socket = await connectWithRetry(HUB_STARTUP_TIMEOUT)\n child.unref()\n return socket\n } catch (err: unknown) {\n log.error({ err }, 'Failed to start or connect to the Hub.')\n if (child && !child.killed) {\n log.warn(`Killing stale Hub process (PID: ${child.pid})...`)\n child.kill('SIGTERM')\n }\n throw err\n } finally {\n if (releaseLock) await releaseLock()\n }\n}\n\nasync function main() {\n log.info({ version: PACKAGE_VERSION }, 'TemPad MCP Client starting...')\n\n while (true) {\n try {\n const socket = await connectHub().catch(() => {\n log.info('Hub not running. Initiating startup sequence...')\n return tryBecomeLeaderAndStartHub()\n })\n await bridge(socket)\n log.info('Bridge disconnected. Restarting connection process...')\n } catch (err: unknown) {\n log.error(\n { err },\n `Connection attempt failed. 
Retrying in ${FAILED_RESTART_DELAY / 1000}s...`\n )\n await new Promise((r) => setTimeout(r, FAILED_RESTART_DELAY))\n }\n }\n}\n\nmain()\n"],"mappings":";;;;;;;;;AAaA,IAAIA,eAA8B;AAClC,IAAI,eAAe;AAEnB,SAAS,oBAAoB;AAC3B,KAAI,CAAC,aAAc;AACnB,KAAI;AACF,eAAa,KAAK;SACZ;AAGR,KAAI;AACF,eAAa,SAAS;SAChB;AAGR,gBAAe;;AAGjB,SAAS,YAAY,QAAgB;AACnC,KAAI,aAAc;AAClB,gBAAe;AACf,KAAI,KAAK,GAAG,OAAO,qBAAqB;AACxC,oBAAmB;AACnB,SAAQ,KAAK,EAAE;;AAGjB,QAAQ,GAAG,gBAAgB,YAAY,mBAAmB,CAAC;AAC3D,QAAQ,GAAG,iBAAiB,YAAY,oBAAoB,CAAC;AAE7D,MAAM,sBAAsB;AAC5B,MAAM,sBAAsB;AAC5B,MAAM,uBAAuB;AAE7B,MAAM,YAAY,KADL,cAAc,IAAI,IAAI,KAAK,OAAO,KAAK,IAAI,CAAC,EAC5B,SAAS;AAEtC,UAAU,YAAY;AAEtB,SAAS,OAAO,QAA+B;AAC7C,QAAO,IAAI,SAAS,YAAY;AAC9B,MAAI,KAAK,+CAA+C;AACxD,iBAAe;EAEf,MAAM,mBAAmB;AACvB,eAAY,yBAAyB;;AAEvC,UAAQ,MAAM,KAAK,OAAO,WAAW;EAErC,MAAM,sBAAsB;AAC1B,OAAI,KAAK,qDAAqD;AAC9D,kBAAe;AACf,WAAQ,MAAM,eAAe,OAAO,WAAW;AAC/C,WAAQ,MAAM,OAAO,OAAO;AAC5B,UAAO,OAAO,QAAQ,OAAO;AAC7B,UAAO,oBAAoB;AAC3B,YAAS;;AAEX,SAAO,KAAK,SAAS,cAAc;AACnC,SAAO,GAAG,UAAU,QAAQ,IAAI,KAAK,EAAE,KAAK,EAAE,yBAAyB,CAAC;AAGxE,UAAQ,MAAM,KAAK,QAAQ,EAAE,KAAK,OAAO,CAAC,CAAC,KAAK,QAAQ,OAAO;GAC/D;;AAGJ,SAAS,aAA8B;AACrC,QAAO,IAAI,SAAS,SAAS,WAAW;EACtC,MAAM,SAAS,QAAQ,UAAU;AACjC,SAAO,GAAG,iBAAiB;AACzB,UAAO,mBAAmB,QAAQ;AAClC,WAAQ,OAAO;IACf;AACF,SAAO,GAAG,SAAS,OAAO;GAC1B;;AAGJ,eAAe,iBAAiB,SAAkC;CAChE,MAAM,YAAY,KAAK,KAAK;CAC5B,IAAI,QAAQ;AACZ,QAAO,KAAK,KAAK,GAAG,YAAY,QAC9B,KAAI;AACF,SAAO,MAAM,YAAY;UAClBC,KAAc;AACrB,MACE,OACA,OAAO,QAAQ,YACf,UAAU,QACT,IAAI,SAAS,YAAY,IAAI,SAAS,iBACvC;GACA,MAAM,gBAAgB,WAAW,KAAK,KAAK,GAAG;GAC9C,MAAM,WAAW,KAAK,IAAI,OAAO,cAAc;AAC/C,OAAI,YAAY,EAAG;AACnB,SAAM,IAAI,SAAS,MAAM,WAAW,GAAG,SAAS,CAAC;AACjD,WAAQ,KAAK,IAAI,QAAQ,KAAK,IAAK;QAEnC,OAAM;;AAIZ,OAAM,IAAI,MAAM,mCAAmC,QAAQ,KAAK;;AAGlE,SAAS,WAAyB;AAChC,KAAI,KAAK,8BAA8B;AACvC,QAAO,MAAM,QAAQ,UAAU,CAAC,UAAU,EAAE;EAC1C,UAAU;EACV,OAAO;EACR,CAAC;;AAGJ,eAAe,6BAA8C;CAC3D,IAAIC,cAA4C;AAChD,KAAI;AACF,gBAAc,MAAM,SAAS,KAAK,WAAW;GAC3C,SAAS;IAAE,SAAS;IAAG,QAAQ;IAAK,YAAY;IAAI;GACpD,OAAO;GACR,CAAC;SACI;AACN,M
AAI,KAAK,kDAAkD;AAC3D,SAAO,iBAAiB,oBAAoB;;AAG9C,KAAI,KAAK,+CAA+C;CACxD,IAAIC,QAA6B;AACjC,KAAI;AACF,MAAI;AACF,UAAO,MAAM,YAAY;UACnB;AAEN,OAAI,KAAK,6CAA6C;;AAExD,UAAQ,UAAU;AAClB,QAAM,GAAG,UAAU,QAAQ,IAAI,MAAM,EAAE,KAAK,EAAE,2BAA2B,CAAC;EAC1E,MAAM,SAAS,MAAM,iBAAiB,oBAAoB;AAC1D,QAAM,OAAO;AACb,SAAO;UACAF,KAAc;AACrB,MAAI,MAAM,EAAE,KAAK,EAAE,yCAAyC;AAC5D,MAAI,SAAS,CAAC,MAAM,QAAQ;AAC1B,OAAI,KAAK,mCAAmC,MAAM,IAAI,MAAM;AAC5D,SAAM,KAAK,UAAU;;AAEvB,QAAM;WACE;AACR,MAAI,YAAa,OAAM,aAAa;;;AAIxC,eAAe,OAAO;AACpB,KAAI,KAAK,EAAE,SAAS,iBAAiB,EAAE,gCAAgC;AAEvE,QAAO,KACL,KAAI;AAKF,QAAM,OAJS,MAAM,YAAY,CAAC,YAAY;AAC5C,OAAI,KAAK,kDAAkD;AAC3D,UAAO,4BAA4B;IACnC,CACkB;AACpB,MAAI,KAAK,wDAAwD;UAC1DA,KAAc;AACrB,MAAI,MACF,EAAE,KAAK,EACP,0CAA0C,uBAAuB,IAAK,MACvE;AACD,QAAM,IAAI,SAAS,MAAM,WAAW,GAAG,qBAAqB,CAAC;;;AAKnE,MAAM"}
|
package/dist/config.mjs
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
import { MCP_AUTO_ACTIVATE_GRACE_MS, MCP_MAX_ASSET_BYTES, MCP_MAX_PAYLOAD_BYTES, MCP_PORT_CANDIDATES, MCP_TOOL_TIMEOUT_MS } from "@tempad-dev/mcp-shared";
|
|
2
|
+
|
|
3
|
+
//#region src/config.ts
|
|
4
|
+
/**
 * Parse an environment-variable string as a strictly positive base-10
 * integer. Returns `fallback` when the value is absent/empty, not a
 * number, zero, or negative.
 */
function parsePositiveInt(envValue, fallback) {
  if (!envValue) return fallback;
  const value = Number.parseInt(envValue, 10);
  if (Number.isFinite(value) && value > 0) {
    return value;
  }
  return fallback;
}
|
|
8
|
+
/** Tool-call timeout in ms; overridable via TEMPAD_MCP_TOOL_TIMEOUT. */
function resolveToolTimeoutMs() {
  const raw = process.env.TEMPAD_MCP_TOOL_TIMEOUT;
  return parsePositiveInt(raw, MCP_TOOL_TIMEOUT_MS);
}
|
|
11
|
+
/** Auto-activate grace period in ms; overridable via TEMPAD_MCP_AUTO_ACTIVATE_GRACE. */
function resolveAutoActivateGraceMs() {
  const raw = process.env.TEMPAD_MCP_AUTO_ACTIVATE_GRACE;
  return parsePositiveInt(raw, MCP_AUTO_ACTIVATE_GRACE_MS);
}
|
|
14
|
+
/** Max accepted asset size in bytes; overridable via TEMPAD_MCP_MAX_ASSET_BYTES. */
function resolveMaxAssetSizeBytes() {
  const raw = process.env.TEMPAD_MCP_MAX_ASSET_BYTES;
  return parsePositiveInt(raw, MCP_MAX_ASSET_BYTES);
}
|
|
17
|
+
/**
 * Build the effective MCP server configuration: package defaults with
 * env-var overrides applied for timeout, grace period and asset size.
 * A fresh object (and a fresh port-candidate array copy) is returned on
 * every call, so callers may mutate the result safely.
 */
function getMcpServerConfig() {
  const config = {
    wsPortCandidates: Array.from(MCP_PORT_CANDIDATES),
    toolTimeoutMs: resolveToolTimeoutMs(),
    maxPayloadBytes: MCP_MAX_PAYLOAD_BYTES,
    autoActivateGraceMs: resolveAutoActivateGraceMs(),
    maxAssetSizeBytes: resolveMaxAssetSizeBytes()
  };
  return config;
}
|
|
26
|
+
|
|
27
|
+
//#endregion
|
|
28
|
+
export { getMcpServerConfig };
|
|
29
|
+
//# sourceMappingURL=config.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"config.mjs","names":[],"sources":["../src/config.ts"],"sourcesContent":["import {\n MCP_AUTO_ACTIVATE_GRACE_MS,\n MCP_MAX_ASSET_BYTES,\n MCP_MAX_PAYLOAD_BYTES,\n MCP_PORT_CANDIDATES,\n MCP_TOOL_TIMEOUT_MS\n} from '@tempad-dev/mcp-shared'\n\nfunction parsePositiveInt(envValue: string | undefined, fallback: number): number {\n const parsed = envValue ? Number.parseInt(envValue, 10) : Number.NaN\n return Number.isFinite(parsed) && parsed > 0 ? parsed : fallback\n}\n\nfunction resolveToolTimeoutMs(): number {\n return parsePositiveInt(process.env.TEMPAD_MCP_TOOL_TIMEOUT, MCP_TOOL_TIMEOUT_MS)\n}\n\nfunction resolveAutoActivateGraceMs(): number {\n return parsePositiveInt(process.env.TEMPAD_MCP_AUTO_ACTIVATE_GRACE, MCP_AUTO_ACTIVATE_GRACE_MS)\n}\n\nfunction resolveMaxAssetSizeBytes(): number {\n return parsePositiveInt(process.env.TEMPAD_MCP_MAX_ASSET_BYTES, MCP_MAX_ASSET_BYTES)\n}\n\nexport function getMcpServerConfig() {\n return {\n wsPortCandidates: [...MCP_PORT_CANDIDATES],\n toolTimeoutMs: resolveToolTimeoutMs(),\n maxPayloadBytes: MCP_MAX_PAYLOAD_BYTES,\n autoActivateGraceMs: resolveAutoActivateGraceMs(),\n maxAssetSizeBytes: resolveMaxAssetSizeBytes()\n }\n}\n"],"mappings":";;;AAQA,SAAS,iBAAiB,UAA8B,UAA0B;CAChF,MAAM,SAAS,WAAW,OAAO,SAAS,UAAU,GAAG,GAAG;AAC1D,QAAO,OAAO,SAAS,OAAO,IAAI,SAAS,IAAI,SAAS;;AAG1D,SAAS,uBAA+B;AACtC,QAAO,iBAAiB,QAAQ,IAAI,yBAAyB,oBAAoB;;AAGnF,SAAS,6BAAqC;AAC5C,QAAO,iBAAiB,QAAQ,IAAI,gCAAgC,2BAA2B;;AAGjG,SAAS,2BAAmC;AAC1C,QAAO,iBAAiB,QAAQ,IAAI,4BAA4B,oBAAoB;;AAGtF,SAAgB,qBAAqB;AACnC,QAAO;EACL,kBAAkB,CAAC,GAAG,oBAAoB;EAC1C,eAAe,sBAAsB;EACrC,iBAAiB;EACjB,qBAAqB,4BAA4B;EACjD,mBAAmB,0BAA0B;EAC9C"}
|