@neverprepared/mcp-markdown-to-confluence 1.4.1 → 1.4.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +207 -136
- package/dist/kroki/KrokiDiagramPlugin.js +12 -7
- package/dist/loader.js +4 -1
- package/package.json +1 -1
package/dist/index.js
CHANGED
```diff
@@ -4,8 +4,8 @@ import { CallToolRequestSchema, ListToolsRequestSchema, } from '@modelcontextpro
 import { z } from 'zod';
 import { ConfluenceClient } from 'confluence.js';
 import matter from 'gray-matter';
-import { readFile, readdir } from 'fs/promises';
-import { join, extname, basename, relative } from 'path';
+import { readFile, readdir, realpath } from 'fs/promises';
+import { join, extname, basename, relative, isAbsolute } from 'path';
 // Deep imports to avoid loading adaptors/filesystem.js which has broken CJS named exports.
 // Pin @markdown-confluence/lib version if these paths change.
 import { parseMarkdownToADF } from '@markdown-confluence/lib/dist/MdToADF.js';
```
```diff
@@ -16,12 +16,18 @@ import { KrokiClient, KrokiMermaidRenderer, KrokiDiagramPlugin } from './kroki/i
 // ---------------------------------------------------------------------------
 // Environment
 // ---------------------------------------------------------------------------
-const CONFLUENCE_BASE_URL = (process.env.CONFLUENCE_URL ??
+const CONFLUENCE_BASE_URL = (process.env.CONFLUENCE_URL ?? '')
     .replace(/\/wiki\/?$/, '');
 const CONFLUENCE_USERNAME = process.env.CONFLUENCE_USERNAME ?? '';
 const CONFLUENCE_API_TOKEN = process.env.CONFLUENCE_API_TOKEN ?? '';
 const KROKI_URL = process.env.KROKI_URL ?? 'http://localhost:8371';
 // ---------------------------------------------------------------------------
+// Constants
+// ---------------------------------------------------------------------------
+const ADF_CONTENT_TYPE = 'atlas_doc_format';
+const DEFAULT_PUBLISH_CONCURRENCY = 5;
+const MAX_PUBLISH_CONCURRENCY = 20;
+// ---------------------------------------------------------------------------
 // Kroki client
 // ---------------------------------------------------------------------------
 const krokiClient = new KrokiClient(KROKI_URL);
```
```diff
@@ -62,8 +68,18 @@ const confluenceClient = new ConfluenceClient({
     },
 });
 // ---------------------------------------------------------------------------
-//
+// Helpers
+// ---------------------------------------------------------------------------
+/** Extracts a readable message from an unknown thrown value. */
+function errorMessage(err) {
+    return err instanceof Error ? err.message : String(err);
+}
 // ---------------------------------------------------------------------------
+// Stub LoaderAdaptor
+// ---------------------------------------------------------------------------
+// The LoaderAdaptor interface requires all methods below, but in MCP context
+// only uploadBuffer is ever invoked (by the diagram pipeline). The remaining
+// methods are stubs required for type compatibility.
 const stubAdaptor = {
     readFile: async (_filePath) => undefined,
     readBinary: async (_filePath) => false,
```
```diff
@@ -71,9 +87,15 @@ const stubAdaptor = {
     listFiles: async () => [],
     uploadBuffer: async (_buffer, _fileName, _mimeType) => undefined,
 };
-
-
-
+/**
+ * Lightweight diagram count over raw markdown — scans for fenced code block
+ * openings matching a supported diagram type. Used during preview mode to avoid
+ * a full ADF parse just to display a diagram count.
+ */
+const DIAGRAM_FENCE_TYPES = SUPPORTED_DIAGRAM_TYPES.join('|');
+function countDiagramsInMarkdown(markdown) {
+    return (markdown.match(new RegExp(`^\`\`\`(?:${DIAGRAM_FENCE_TYPES})[ \\t]*$`, 'gm')) ?? []).length;
+}
 function countDiagramBlocks(adf) {
     if (typeof adf !== 'object' || adf === null)
         return 0;
```
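As a minimal sketch of how that fence-counting regex behaves: the concrete diagram list below is an assumption ('mermaid' and 'plantuml'); the real `SUPPORTED_DIAGRAM_TYPES` comes from `./kroki/index.js` and may differ.

```js
// Sketch only: SUPPORTED_DIAGRAM_TYPES is assumed here, not taken from the package.
const SUPPORTED_DIAGRAM_TYPES = ['mermaid', 'plantuml'];
const DIAGRAM_FENCE_TYPES = SUPPORTED_DIAGRAM_TYPES.join('|');

function countDiagramsInMarkdown(markdown) {
    // Matches only fence openings like "```mermaid" at the start of a line.
    return (markdown.match(new RegExp(`^\`\`\`(?:${DIAGRAM_FENCE_TYPES})[ \\t]*$`, 'gm')) ?? []).length;
}

const FENCE = '`'.repeat(3); // a literal "```"
const doc = [
    '# Title',
    FENCE + 'mermaid',
    'graph TD; A-->B;',
    FENCE,
    FENCE + 'js',
    'console.log("not a diagram");',
    FENCE,
].join('\n');

console.log(countDiagramsInMarkdown(doc)); // 1: the js fence and closing fences are ignored
```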
```diff
@@ -116,6 +138,23 @@ function pLimit(concurrency) {
         next();
     });
 }
+/**
+ * Validates that a user-supplied path is safe to access.
+ * - Must be absolute (prevents relative traversal)
+ * - Must not contain ".." segments (belt-and-suspenders before realpath)
+ * - Resolves symlinks and normalizes via realpath (the canonical defense)
+ * Returns the canonicalized path, or throws if the path is invalid.
+ */
+async function validatePath(inputPath) {
+    if (!isAbsolute(inputPath)) {
+        throw new Error(`Path must be absolute: "${inputPath}"`);
+    }
+    if (inputPath.includes('..')) {
+        throw new Error(`Path must not contain ".." segments: "${inputPath}"`);
+    }
+    // realpath resolves symlinks and normalizes — throws ENOENT if path doesn't exist
+    return realpath(inputPath);
+}
 async function parseMarkdownFile(filePath, overrides) {
     const raw = await readFile(filePath, 'utf-8');
     const parsed = matter(raw);
```
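A minimal standalone sketch of the validatePath contract, using hypothetical paths; the imports mirror the ones added in this version.

```js
import { realpath } from 'fs/promises';
import { isAbsolute } from 'path';

// Same checks as the new validatePath above, shown in isolation.
async function validatePath(inputPath) {
    if (!isAbsolute(inputPath)) {
        throw new Error(`Path must be absolute: "${inputPath}"`);
    }
    if (inputPath.includes('..')) {
        throw new Error(`Path must not contain ".." segments: "${inputPath}"`);
    }
    return realpath(inputPath); // rejects with ENOENT if the path does not exist
}

// Hypothetical inputs:
// await validatePath('docs/readme.md');           // throws: not absolute
// await validatePath('/srv/docs/../etc/passwd');  // throws: contains ".."
// await validatePath('/srv/docs/readme.md');      // resolves symlinks, returns canonical path
```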
```diff
@@ -137,20 +176,26 @@ async function scanDirectoryTree(rootPath, spaceKey, currentPath = rootPath, dep
     const nodes = [];
     const skipped = [];
     const relFromRoot = relative(rootPath, currentPath) || '.';
-
+    // Derive parent path from relFromRoot instead of calling join+relative again.
+    const parentRel = depth === 0 ? null : (relFromRoot.includes('/') ? relFromRoot.substring(0, relFromRoot.lastIndexOf('/')) : '.');
     // Collect subdirectories and markdown files
     const subdirs = entries.filter((e) => e.isDirectory() && !e.name.startsWith('.'));
     const mdFiles = entries.filter((e) => e.isFile() && extname(e.name).toLowerCase() === '.md' && !e.name.startsWith('.'));
     // Check for markdown files that correspond to subdirectories (e.g., "01 - Strategic.md" + "01 - Strategic/")
     const subdirNames = new Set(subdirs.map((d) => d.name));
-
+    // Cache parsed results for dir-matching .md files so we don't read them twice.
+    const parsedDirMd = new Map();
     // Process markdown files
     for (const entry of mdFiles) {
         const filePath = join(currentPath, entry.name);
         const stem = basename(entry.name, extname(entry.name));
-        // If this .md file has a matching subdirectory,
+        // If this .md file has a matching subdirectory, parse and cache it now so
+        // the subdir loop below can reuse the result without a second disk read.
         if (subdirNames.has(stem)) {
-
+            const result = await parseMarkdownFile(filePath, { spaceKey, titleFallback: stem });
+            if (!('skipped' in result)) {
+                parsedDirMd.set(stem, result);
+            }
             continue; // handled when processing the subdirectory
         }
         const result = await parseMarkdownFile(filePath, {
```
```diff
@@ -175,18 +220,8 @@ async function scanDirectoryTree(rootPath, spaceKey, currentPath = rootPath, dep
     for (const dir of subdirs) {
         const dirPath = join(currentPath, dir.name);
         const dirRelPath = relative(rootPath, dirPath);
-        //
-        const
-        let dirMarkdownFile;
-        if (dirMdFiles.has(dir.name)) {
-            const result = await parseMarkdownFile(matchingMdPath, {
-                spaceKey,
-                titleFallback: dir.name,
-            });
-            if (!('skipped' in result)) {
-                dirMarkdownFile = result;
-            }
-        }
+        // Reuse the cached parse result from the first loop (avoids a second disk read).
+        const dirMarkdownFile = parsedDirMd.get(dir.name);
         nodes.push({
             relativePath: dirRelPath,
             title: dirMarkdownFile?.title ?? dir.name,
```
```diff
@@ -206,12 +241,15 @@ async function scanDirectoryTree(rootPath, spaceKey, currentPath = rootPath, dep
 // Wiki link resolution
 // ---------------------------------------------------------------------------
 // Matches [[Page Name]] and [[Page Name#Heading]] and [[Page Name|Display Text]]
-
+// Two separate regex instances: WIKI_LINK_RE_TEST (no /g) is stateless and safe for .test();
+// WIKI_LINK_RE_REPLACE (with /g) is used only by String.replace() which resets lastIndex itself.
+const WIKI_LINK_RE_TEST = /\[\[([^\]|#]+)(?:#([^\]|]+))?(?:\|([^\]]+))?\]\]/;
+const WIKI_LINK_RE_REPLACE = /\[\[([^\]|#]+)(?:#([^\]|]+))?(?:\|([^\]]+))?\]\]/g;
 function hasWikiLinks(markdown) {
-    return
+    return WIKI_LINK_RE_TEST.test(markdown);
 }
 function resolveWikiLinks(markdown, titleToUrl) {
-    return markdown.replace(
+    return markdown.replace(WIKI_LINK_RE_REPLACE, (_match, pageName, heading, displayText) => {
         const trimmedName = pageName.trim();
         const url = titleToUrl.get(trimmedName);
         if (!url) {
```
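The split into two regex instances is easiest to see in isolation. A small sketch with hypothetical inputs, using the same pattern as the diff above:

```js
// A /g regex is stateful: .test() advances lastIndex, so repeated calls on the
// same kind of input can return false spuriously.
const statefulRe = /\[\[([^\]|#]+)(?:#([^\]|]+))?(?:\|([^\]]+))?\]\]/g;
console.log(statefulRe.test('See [[Roadmap]]')); // true, lastIndex now sits past the match
console.log(statefulRe.test('See [[Roadmap]]')); // false: stale lastIndex, nothing left to scan

// The non-global twin is stateless, so hasWikiLinks() is safe to call repeatedly.
const statelessRe = /\[\[([^\]|#]+)(?:#([^\]|]+))?(?:\|([^\]]+))?\]\]/;
console.log(statelessRe.test('See [[Roadmap]]')); // true
console.log(statelessRe.test('See [[Roadmap]]')); // true

// Capture groups, for reference: [[Page Name#Heading|Display Text]]
console.log('[[Docs#Setup|Getting started]]'.match(statelessRe)?.slice(1));
// -> [ 'Docs', 'Setup', 'Getting started' ]
```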
```diff
@@ -229,8 +267,55 @@ function resolveWikiLinks(markdown, titleToUrl) {
 // ---------------------------------------------------------------------------
 // Core publish logic
 // ---------------------------------------------------------------------------
+/**
+ * Fetches the current version of an existing page, or creates a blank placeholder
+ * page and returns its new ID and version. Extracted from publishMarkdown to keep
+ * that function focused on the ADF pipeline.
+ */
+async function getOrCreatePage(pageId, spaceKey, title, parentId) {
+    if (pageId) {
+        const existingPage = await confluenceClient.content.getContentById({
+            id: pageId,
+            expand: ['version'],
+        });
+        const version = existingPage.version?.number;
+        if (version === undefined) {
+            throw new Error(`Could not read version for page ${pageId} — Confluence returned no version info`);
+        }
+        return { resolvedPageId: pageId, currentVersion: version };
+    }
+    // No existing page — create a blank placeholder to obtain a pageId, then
+    // update it with the real content in the caller.
+    const blankAdf = { version: 1, type: 'doc', content: [] };
+    const createParams = {
+        space: { key: spaceKey },
+        title,
+        type: 'page',
+        body: {
+            [ADF_CONTENT_TYPE]: {
+                value: JSON.stringify(blankAdf),
+                representation: ADF_CONTENT_TYPE,
+            },
+        },
+    };
+    if (parentId) {
+        createParams.ancestors = [{ id: parentId }];
+    }
+    const created = await confluenceClient.content.createContent(createParams);
+    const resolvedPageId = created.id;
+    const currentVersion = created.version?.number;
+    if (!resolvedPageId || currentVersion === undefined) {
+        throw new Error('Failed to create page: Confluence response is missing id or version');
+    }
+    return { resolvedPageId, currentVersion };
+}
+// Session-level cache: avoids re-fetching attachments for the same page during
+// the wiki-link second-pass (where publishMarkdown is called again for pages
+// that were already published in the first pass).
+const attachmentCache = new Map();
 async function publishMarkdown(markdown, title, spaceKey, pageId, parentId, skipPreview = false) {
-    // Parse markdown → ADF
+    // Parse markdown → ADF. Cast once here; downstream library calls accept any.
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
     const adf = parseMarkdownToADF(markdown, CONFLUENCE_BASE_URL);
     const diagramCount = countDiagramBlocks(adf);
     if (!skipPreview) {
```
```diff
@@ -238,59 +323,33 @@ async function publishMarkdown(markdown, title, spaceKey, pageId, parentId, skip
         return { isPreview: true, previewText, diagramCount };
     }
     // ----- Full publish -----
-
-
-
-
-
-            id: resolvedPageId,
-            expand: ['version'],
-        });
-        currentVersion = existingPage.version.number;
+    const { resolvedPageId, currentVersion } = await getOrCreatePage(pageId, spaceKey, title, parentId);
+    // Fetch current attachments to build the map (cached per page per session)
+    let currentAttachments;
+    if (attachmentCache.has(resolvedPageId)) {
+        currentAttachments = attachmentCache.get(resolvedPageId);
     }
     else {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        },
-    };
-    if (parentId) {
-        createParams.ancestors = [{ id: parentId }];
-    }
-    const created = await confluenceClient.content.createContent(createParams);
-    resolvedPageId = created.id;
-    currentVersion = created.version.number;
-    }
-    // Fetch current attachments to build the map
-    const attachmentsResult = await confluenceClient.contentAttachments.getAttachments({
-        id: resolvedPageId,
-    });
-    const currentAttachments = {};
-    for (const att of attachmentsResult.results ?? []) {
-        const attTitle = att.title ?? '';
-        const fileId = att.extensions?.fileId ?? '';
-        const collectionName = att.extensions?.collectionName ?? '';
-        if (attTitle) {
-            currentAttachments[attTitle] = {
-                filehash: att.metadata?.comment ?? '',
-                attachmentId: fileId,
-                collectionName,
-            };
+        const attachmentsResult = await confluenceClient.contentAttachments.getAttachments({
+            id: resolvedPageId,
+        });
+        currentAttachments = {};
+        for (const att of attachmentsResult.results ?? []) {
+            const attTitle = att.title ?? '';
+            const fileId = att.extensions?.fileId ?? '';
+            const collectionName = att.extensions?.collectionName ?? '';
+            if (attTitle) {
+                currentAttachments[attTitle] = {
+                    filehash: att.metadata?.comment ?? '',
+                    attachmentId: fileId,
+                    collectionName,
+                };
+            }
         }
+        attachmentCache.set(resolvedPageId, currentAttachments);
     }
     // Build publisher functions
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
     const publisherFunctions = createPublisherFunctions(confluenceClient, stubAdaptor, resolvedPageId, title, currentAttachments);
     // Run ADF processing pipeline (renders diagrams via Kroki)
     const finalAdf = await executeADFProcessingPipeline([
```
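The per-session cache is a plain Map-based memoization. A minimal sketch of that pattern, with a hypothetical fetchAttachments standing in for confluenceClient.contentAttachments.getAttachments:

```js
// Sketch of the session-level attachment cache; fetchAttachments is hypothetical.
const attachmentCache = new Map();

async function getAttachmentsCached(pageId, fetchAttachments) {
    if (attachmentCache.has(pageId)) {
        return attachmentCache.get(pageId); // wiki-link second pass: no network call
    }
    const result = await fetchAttachments(pageId);
    attachmentCache.set(pageId, result);
    return result;
}

// First-pass publish fetches and caches; the second pass for the same page reuses the entry.
// await getAttachmentsCached('123456', fetchAttachments); // network
// await getAttachmentsCached('123456', fetchAttachments); // cache hit
```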
```diff
@@ -304,9 +363,9 @@ async function publishMarkdown(markdown, title, spaceKey, pageId, parentId, skip
         type: 'page',
         version: { number: currentVersion + 1 },
         body: {
-
+            [ADF_CONTENT_TYPE]: {
                 value: JSON.stringify(finalAdf),
-                representation:
+                representation: ADF_CONTENT_TYPE,
             },
         },
     };
```
```diff
@@ -324,6 +383,54 @@ async function publishMarkdown(markdown, title, spaceKey, pageId, parentId, skip
     };
 }
 // ---------------------------------------------------------------------------
+// Directory publish helpers
+// ---------------------------------------------------------------------------
+/**
+ * Builds the preview text for a directory publish operation — the tree
+ * visualization shown to the user before they confirm with skip_preview: true.
+ * Extracted from the tool handler to keep it focused on orchestration.
+ */
+function buildDirectoryPreview(nodes, skipped, directoryPath, rootPageId, spaceKey) {
+    const rootTitle = rootPageId
+        ? `(existing page: ${rootPageId})`
+        : `"${basename(directoryPath)}" (will be created)`;
+    const lines = [
+        `=== DIRECTORY TREE PREVIEW ===`,
+        `Directory: ${directoryPath}`,
+        `Space: ${spaceKey}`,
+        `Root page: ${rootTitle}`,
+        `Total pages: ${nodes.length + (rootPageId ? 0 : 1)}`,
+        '',
+        '--- Page tree ---',
+    ];
+    const maxDepth = nodes.reduce((max, n) => Math.max(max, n.depth), 0);
+    for (let d = 0; d <= maxDepth; d++) {
+        for (const node of nodes.filter((n) => n.depth === d)) {
+            const indent = ' '.repeat(d + 1);
+            const suffix = node.isDirectory ? '/' : '';
+            const pageInfo = node.markdownFile?.pageId
+                ? `update: ${node.markdownFile.pageId}`
+                : 'new page';
+            let diagrams = '';
+            if (node.markdownFile) {
+                const count = countDiagramsInMarkdown(node.markdownFile.content);
+                if (count > 0)
+                    diagrams = `, ${count} diagram(s)`;
+            }
+            const label = node.isDirectory && !node.markdownFile ? 'placeholder' : pageInfo;
+            lines.push(`${indent}${node.title}${suffix} (${label}${diagrams})`);
+        }
+    }
+    if (skipped.length > 0) {
+        lines.push('', '--- Skipped files ---');
+        for (const s of skipped) {
+            lines.push(` ${basename(s.filePath)}: ${s.reason}`);
+        }
+    }
+    lines.push('', `Call again with skip_preview: true to publish.`);
+    return lines.join('\n');
+}
+// ---------------------------------------------------------------------------
 // MCP Server
 // ---------------------------------------------------------------------------
 const server = new Server({ name: 'mcp-markdown-to-confluence', version: '1.0.0' }, { capabilities: { tools: {} } });
```
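An illustrative call to buildDirectoryPreview with hypothetical nodes, assuming the functions from the hunks above are in scope; the node fields (relativePath, title, depth, isDirectory, markdownFile) follow the objects that scanDirectoryTree builds.

```js
// Hypothetical directory of three entries under /srv/docs, space DOCS, no root page yet.
const nodes = [
    { relativePath: 'intro.md', title: 'Intro', depth: 0, isDirectory: false, markdownFile: { content: '# Intro' } },
    { relativePath: 'guides', title: 'Guides', depth: 0, isDirectory: true, markdownFile: undefined },
    { relativePath: 'guides/setup.md', title: 'Setup', depth: 1, isDirectory: false, markdownFile: { content: '# Setup' } },
];
console.log(buildDirectoryPreview(nodes, [], '/srv/docs', undefined, 'DOCS'));
// === DIRECTORY TREE PREVIEW ===
// Directory: /srv/docs
// Space: DOCS
// Root page: "docs" (will be created)
// Total pages: 4
//
// --- Page tree ---
//  Intro (new page)
//  Guides/ (placeholder)
//   Setup (new page)
//
// Call again with skip_preview: true to publish.
```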
```diff
@@ -428,6 +535,7 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
         const input = z
             .object({ markdown: z.string(), title: z.string() })
             .parse(args);
+        // eslint-disable-next-line @typescript-eslint/no-explicit-any
         const adf = parseMarkdownToADF(input.markdown, CONFLUENCE_BASE_URL);
         const diagramCount = countDiagramBlocks(adf);
         const previewText = renderADFDoc(adf);
```
```diff
@@ -480,7 +588,8 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
             skip_preview: z.boolean().default(false),
         })
             .parse(args);
-        const
+        const safeFilePath = await validatePath(input.filePath);
+        const raw = await readFile(safeFilePath, 'utf-8');
         const parsed = matter(raw);
         const title = parsed.data['connie-title'] ?? parsed.data['title'] ?? '';
         const spaceKey = parsed.data['connie-space-key'] ?? '';
```
```diff
@@ -545,11 +654,12 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
             spaceKey: z.string(),
             rootPageId: z.string().optional(),
             skip_preview: z.boolean().default(false),
-            concurrency: z.number().int().min(1).max(
+            concurrency: z.number().int().min(1).max(MAX_PUBLISH_CONCURRENCY).default(DEFAULT_PUBLISH_CONCURRENCY),
         })
             .parse(args);
+        const safeDirectoryPath = await validatePath(input.directoryPath);
         // Scan directory tree
-        const { nodes, skipped } = await scanDirectoryTree(
+        const { nodes, skipped } = await scanDirectoryTree(safeDirectoryPath, input.spaceKey);
         if (nodes.length === 0 && skipped.length === 0) {
             return {
                 content: [{ type: 'text', text: `No files found in ${input.directoryPath}` }],
```
```diff
@@ -557,46 +667,8 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
         }
         // Preview mode
         if (!input.skip_preview) {
-            const
-
-                : `"${basename(input.directoryPath)}" (will be created)`;
-            const lines = [
-                `=== DIRECTORY TREE PREVIEW ===`,
-                `Directory: ${input.directoryPath}`,
-                `Space: ${input.spaceKey}`,
-                `Root page: ${rootTitle}`,
-                `Total pages: ${nodes.length + (input.rootPageId ? 0 : 1)}`,
-                '',
-                '--- Page tree ---',
-            ];
-            // Build tree visualization
-            const maxDepth = nodes.reduce((max, n) => Math.max(max, n.depth), 0);
-            for (let d = 0; d <= maxDepth; d++) {
-                for (const node of nodes.filter((n) => n.depth === d)) {
-                    const indent = ' '.repeat(d + 1);
-                    const suffix = node.isDirectory ? '/' : '';
-                    const pageInfo = node.markdownFile?.pageId
-                        ? `update: ${node.markdownFile.pageId}`
-                        : 'new page';
-                    let diagrams = '';
-                    if (node.markdownFile) {
-                        const adf = parseMarkdownToADF(node.markdownFile.content, CONFLUENCE_BASE_URL);
-                        const count = countDiagramBlocks(adf);
-                        if (count > 0)
-                            diagrams = `, ${count} diagram(s)`;
-                    }
-                    const label = node.isDirectory && !node.markdownFile ? 'placeholder' : pageInfo;
-                    lines.push(`${indent}${node.title}${suffix} (${label}${diagrams})`);
-                }
-            }
-            if (skipped.length > 0) {
-                lines.push('', '--- Skipped files ---');
-                for (const s of skipped) {
-                    lines.push(` ${basename(s.filePath)}: ${s.reason}`);
-                }
-            }
-            lines.push('', `Call again with skip_preview: true to publish.`);
-            return { content: [{ type: 'text', text: lines.join('\n') }] };
+            const preview = buildDirectoryPreview(nodes, skipped, input.directoryPath, input.rootPageId, input.spaceKey);
+            return { content: [{ type: 'text', text: preview }] };
         }
         // Publish mode — process level by level
         const limit = pLimit(input.concurrency);
```
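The pLimit body itself is not part of this diff (only its signature and internal next() call appear as context). A minimal sketch of a limiter with the same calling convention used by the publish loop, assumed rather than copied from the package:

```js
// Sketch only: a promise-concurrency limiter compatible with limit(fn) inside Promise.all.
function pLimit(concurrency) {
    let active = 0;
    const queue = [];
    const next = () => {
        if (active >= concurrency || queue.length === 0) return;
        active++;
        const { fn, resolve, reject } = queue.shift();
        fn().then(resolve, reject).finally(() => {
            active--;
            next();
        });
    };
    return (fn) => new Promise((resolve, reject) => {
        queue.push({ fn, resolve, reject });
        next();
    });
}

// Usage mirrors the publish loop: at most `concurrency` pages are in flight at once.
// const limit = pLimit(5);
// await Promise.all(nodes.map((node) => limit(() => publishOne(node))));
```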
```diff
@@ -620,15 +692,14 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
             catch (err) {
                 return {
                     isError: true,
-                    content: [{
-                        type: 'text',
-                        text: `Error creating root page: ${err instanceof Error ? err.message : String(err)}`,
-                    }],
+                    content: [{ type: 'text', text: `Error creating root page: ${errorMessage(err)}` }],
                 };
             }
         }
-        // Build a map from relativePath
+        // Build a map from relativePath → node for parent lookups, and a separate
+        // map for resolved page IDs so we never mutate the input node objects.
         const nodeMap = new Map();
+        const resolvedIds = new Map(); // relativePath → pageId
        for (const node of nodes) {
            nodeMap.set(node.relativePath, node);
        }
```
```diff
@@ -644,7 +715,7 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
             }
             else {
                 const parentNode = nodeMap.get(node.parentRelativePath);
-                parentId = parentNode
+                parentId = parentNode ? resolvedIds.get(parentNode.relativePath) : undefined;
             }
             if (!parentId) {
                 return {
```
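A small sketch of that parent lookup with hypothetical paths and page IDs: because pages are published level by level, a child's parent has already been published and its Confluence ID recorded in resolvedIds by the time the child runs.

```js
// Hypothetical state after publishing depth 0.
const resolvedIds = new Map([
    ['guides', '100200'], // Confluence page ID assigned to the "guides" directory page
]);
const nodeMap = new Map([
    ['guides', { relativePath: 'guides' }],
]);

const node = { relativePath: 'guides/setup.md', parentRelativePath: 'guides' };
const parentNode = nodeMap.get(node.parentRelativePath);
const parentId = parentNode ? resolvedIds.get(parentNode.relativePath) : undefined;
console.log(parentId); // '100200': the child page is created under the "guides" page
```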
```diff
@@ -659,8 +730,9 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
             const content = node.markdownFile?.content ?? '';
             const pageId = node.markdownFile?.pageId;
             const result = await publishMarkdown(content, node.title, input.spaceKey, pageId, parentId, true);
-
-
+            if (result.pageId) {
+                resolvedIds.set(node.relativePath, result.pageId);
+            }
             return {
                 relativePath: node.relativePath,
                 title: node.title,
```
```diff
@@ -678,7 +750,7 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
                 title: node.title,
                 success: false,
                 isDirectory: node.isDirectory,
-                error:
+                error: errorMessage(err),
             };
         }
     })));
```
```diff
@@ -692,18 +764,18 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
                 titleToUrl.set(r.title, r.url);
             }
         }
-        // Find nodes with wiki links that need re-publishing
-        const nodesWithLinks = nodes.filter((n) => n.markdownFile && hasWikiLinks(n.markdownFile.content) && n.
+        // Find nodes with wiki links that were successfully published and need re-publishing
+        const nodesWithLinks = nodes.filter((n) => n.markdownFile && hasWikiLinks(n.markdownFile.content) && resolvedIds.has(n.relativePath));
         if (nodesWithLinks.length > 0 && titleToUrl.size > 0) {
             const linkResults = await Promise.all(nodesWithLinks.map((node) => limit(async () => {
                 try {
                     const resolvedMarkdown = resolveWikiLinks(node.markdownFile.content, titleToUrl);
-                    const result = await publishMarkdown(resolvedMarkdown, node.title, input.spaceKey, node.
+                    const result = await publishMarkdown(resolvedMarkdown, node.title, input.spaceKey, resolvedIds.get(node.relativePath), undefined, // don't reparent on second pass
                     true);
                     return { relativePath: node.relativePath, title: node.title, success: true, version: result.version };
                 }
-                catch {
-                    return { relativePath: node.relativePath, title: node.title, success: false };
+                catch (err) {
+                    return { relativePath: node.relativePath, title: node.title, success: false, error: errorMessage(err) };
                 }
             })));
             const linkedCount = linkResults.filter((r) => r.success).length;
```
```diff
@@ -758,10 +830,9 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
         };
     }
     catch (err) {
-        const message = err instanceof Error ? err.message : String(err);
         return {
             isError: true,
-            content: [{ type: 'text', text: `Error: ${
+            content: [{ type: 'text', text: `Error: ${errorMessage(err)}` }],
         };
     }
 });
```
package/dist/kroki/KrokiDiagramPlugin.js
CHANGED
```diff
@@ -20,7 +20,7 @@ export class KrokiDiagramPlugin {
         const nodes = filter(adf, (node) => node.type === 'codeBlock' &&
             (node.attrs || {})?.['language'] === this.diagramType);
         const charts = new Set(nodes.map((node) => {
-            const details = getDiagramFileName(this.diagramType, node?.content?.
+            const details = getDiagramFileName(this.diagramType, node?.content?.[0]?.text, this.outputFormat);
             return {
                 name: details.uploadFilename,
                 data: details.text,
```
```diff
@@ -29,17 +29,22 @@ export class KrokiDiagramPlugin {
         return Array.from(charts);
     }
     async transform(charts, supportFunctions) {
-        let imageMap = {};
         if (charts.length === 0) {
-            return
+            return {};
         }
+        // Render all diagrams in parallel, then upload all results in parallel.
+        // Previously uploads were sequential (N+1); now both phases are concurrent.
         const rendered = await Promise.all(charts.map(async (chart) => {
             const buffer = await this.client.renderDiagram(this.diagramType, chart.data, this.outputFormat);
             return [chart.name, buffer];
         }));
-
-        const
-
+        const uploaded = await Promise.all(rendered.map(async ([name, buffer]) => {
+            const image = await supportFunctions.uploadBuffer(name, buffer);
+            return [name, image];
+        }));
+        const imageMap = {};
+        for (const [name, image] of uploaded) {
+            imageMap[name] = image;
         }
         return imageMap;
     }
```
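A minimal sketch of the two-phase pattern introduced above, with hypothetical renderDiagram and uploadBuffer stand-ins: every chart is rendered concurrently, then every rendered buffer is uploaded concurrently, instead of awaiting one chart at a time.

```js
// Sketch only: renderDiagram and uploadBuffer are placeholders for the Kroki client
// call and the supportFunctions.uploadBuffer call shown in the diff.
async function transformCharts(charts, renderDiagram, uploadBuffer) {
    if (charts.length === 0) {
        return {};
    }
    // Phase 1: render every chart in parallel.
    const rendered = await Promise.all(charts.map(async (chart) => {
        const buffer = await renderDiagram(chart.data);
        return [chart.name, buffer];
    }));
    // Phase 2: upload every rendered buffer in parallel.
    const uploaded = await Promise.all(rendered.map(async ([name, buffer]) => {
        const image = await uploadBuffer(name, buffer);
        return [name, image];
    }));
    return Object.fromEntries(uploaded); // chart name -> uploaded image reference
}
```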
```diff
@@ -49,7 +54,7 @@ export class KrokiDiagramPlugin {
         traverse(afterAdf, {
             codeBlock: (node, _parent) => {
                 if (node?.attrs?.['language'] === this.diagramType) {
-                    const content = node?.content?.
+                    const content = node?.content?.[0]?.text;
                     if (!content) {
                         return;
                     }
```
package/dist/loader.js
CHANGED
```diff
@@ -22,7 +22,10 @@ function tryResolveFile(filePath) {
                 return pathToFileURL(resolved).href;
             }
         }
-    } catch {
+    } catch (err) {
+        // Non-fatal: if package.json is unparseable we simply can't resolve this path.
+        process.stderr.write(`[loader] Failed to parse package.json at ${filePath}: ${err?.message ?? err}\n`);
+    }
     }
     return null;
 }
```
package/package.json
CHANGED
```diff
@@ -1,6 +1,6 @@
 {
   "name": "@neverprepared/mcp-markdown-to-confluence",
-  "version": "1.4.1",
+  "version": "1.4.2",
   "description": "MCP server for converting markdown to Confluence ADF and publishing pages with diagram support via Kroki",
   "type": "module",
   "main": "dist/index.js",
```