@neverprepared/mcp-markdown-to-confluence 1.4.0 → 1.4.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +208 -136
- package/dist/kroki/KrokiDiagramPlugin.js +12 -7
- package/dist/loader.js +4 -1
- package/package.json +1 -1
package/dist/index.js
CHANGED
@@ -4,8 +4,8 @@ import { CallToolRequestSchema, ListToolsRequestSchema, } from '@modelcontextpro
 import { z } from 'zod';
 import { ConfluenceClient } from 'confluence.js';
 import matter from 'gray-matter';
-import { readFile, readdir } from 'fs/promises';
-import { join, extname, basename, relative } from 'path';
+import { readFile, readdir, realpath } from 'fs/promises';
+import { join, extname, basename, relative, isAbsolute } from 'path';
 // Deep imports to avoid loading adaptors/filesystem.js which has broken CJS named exports.
 // Pin @markdown-confluence/lib version if these paths change.
 import { parseMarkdownToADF } from '@markdown-confluence/lib/dist/MdToADF.js';
@@ -16,11 +16,18 @@ import { KrokiClient, KrokiMermaidRenderer, KrokiDiagramPlugin } from './kroki/i
 // ---------------------------------------------------------------------------
 // Environment
 // ---------------------------------------------------------------------------
-const CONFLUENCE_BASE_URL = process.env.
+const CONFLUENCE_BASE_URL = (process.env.CONFLUENCE_URL ?? '')
+    .replace(/\/wiki\/?$/, '');
 const CONFLUENCE_USERNAME = process.env.CONFLUENCE_USERNAME ?? '';
 const CONFLUENCE_API_TOKEN = process.env.CONFLUENCE_API_TOKEN ?? '';
 const KROKI_URL = process.env.KROKI_URL ?? 'http://localhost:8371';
 // ---------------------------------------------------------------------------
+// Constants
+// ---------------------------------------------------------------------------
+const ADF_CONTENT_TYPE = 'atlas_doc_format';
+const DEFAULT_PUBLISH_CONCURRENCY = 5;
+const MAX_PUBLISH_CONCURRENCY = 20;
+// ---------------------------------------------------------------------------
 // Kroki client
 // ---------------------------------------------------------------------------
 const krokiClient = new KrokiClient(KROKI_URL);
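The new base-URL normalization tolerates a CONFLUENCE_URL configured with the trailing Atlassian wiki path. A minimal sketch of the replace call's behavior (the normalizeBaseUrl name is illustrative, not part of the package):

// Illustrative sketch of the 1.4.2 base-URL cleanup; not package code.
const normalizeBaseUrl = (raw) => (raw ?? '').replace(/\/wiki\/?$/, '');
normalizeBaseUrl('https://example.atlassian.net/wiki');  // 'https://example.atlassian.net'
normalizeBaseUrl('https://example.atlassian.net/wiki/'); // 'https://example.atlassian.net'
normalizeBaseUrl('https://example.atlassian.net');       // unchanged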
@@ -61,8 +68,18 @@ const confluenceClient = new ConfluenceClient({
     },
 });
 // ---------------------------------------------------------------------------
-//
+// Helpers
+// ---------------------------------------------------------------------------
+/** Extracts a readable message from an unknown thrown value. */
+function errorMessage(err) {
+    return err instanceof Error ? err.message : String(err);
+}
 // ---------------------------------------------------------------------------
+// Stub LoaderAdaptor
+// ---------------------------------------------------------------------------
+// The LoaderAdaptor interface requires all methods below, but in MCP context
+// only uploadBuffer is ever invoked (by the diagram pipeline). The remaining
+// methods are stubs required for type compatibility.
 const stubAdaptor = {
     readFile: async (_filePath) => undefined,
     readBinary: async (_filePath) => false,
@@ -70,9 +87,15 @@ const stubAdaptor = {
     listFiles: async () => [],
     uploadBuffer: async (_buffer, _fileName, _mimeType) => undefined,
 };
-
-
-
+/**
+ * Lightweight diagram count over raw markdown — scans for fenced code block
+ * openings matching a supported diagram type. Used during preview mode to avoid
+ * a full ADF parse just to display a diagram count.
+ */
+const DIAGRAM_FENCE_TYPES = SUPPORTED_DIAGRAM_TYPES.join('|');
+function countDiagramsInMarkdown(markdown) {
+    return (markdown.match(new RegExp(`^\`\`\`(?:${DIAGRAM_FENCE_TYPES})[ \\t]*$`, 'gm')) ?? []).length;
+}
 function countDiagramBlocks(adf) {
     if (typeof adf !== 'object' || adf === null)
         return 0;
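Because the regex anchors on a line that opens with backticks plus a diagram type, closing fences and non-diagram fences never match, so each diagram block contributes exactly one hit. A standalone sketch, with 'mermaid|plantuml' standing in for SUPPORTED_DIAGRAM_TYPES (whose exact contents aren't shown in this diff):

// Standalone sketch of the fence-counting regex; the type alternation is assumed.
const countDiagrams = (md) =>
    (md.match(/^```(?:mermaid|plantuml)[ \t]*$/gm) ?? []).length;
const sample = ['# Doc', '```mermaid', 'graph TD; A-->B;', '```', '```js', 'x();', '```'].join('\n');
countDiagrams(sample); // 1: the js fence and both closing fences don't match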
@@ -115,6 +138,23 @@ function pLimit(concurrency) {
         next();
     });
 }
+/**
+ * Validates that a user-supplied path is safe to access.
+ * - Must be absolute (prevents relative traversal)
+ * - Must not contain ".." segments (belt-and-suspenders before realpath)
+ * - Resolves symlinks and normalizes via realpath (the canonical defense)
+ * Returns the canonicalized path, or throws if the path is invalid.
+ */
+async function validatePath(inputPath) {
+    if (!isAbsolute(inputPath)) {
+        throw new Error(`Path must be absolute: "${inputPath}"`);
+    }
+    if (inputPath.includes('..')) {
+        throw new Error(`Path must not contain ".." segments: "${inputPath}"`);
+    }
+    // realpath resolves symlinks and normalizes — throws ENOENT if path doesn't exist
+    return realpath(inputPath);
+}
 async function parseMarkdownFile(filePath, overrides) {
     const raw = await readFile(filePath, 'utf-8');
     const parsed = matter(raw);
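A few illustrative calls against the new guard (the paths are hypothetical examples):

// Usage sketch for the validatePath guard defined in the hunk above.
await validatePath('/var/docs/readme.md');     // resolves to the canonical path
await validatePath('notes.md')                 // rejects: not absolute
    .catch((e) => console.error(e.message));
await validatePath('/var/docs/../etc/passwd')  // rejects: contains ".."
    .catch((e) => console.error(e.message));
await validatePath('/var/docs/missing.md')     // rejects: ENOENT from realpath
    .catch((e) => console.error(e.message));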
@@ -136,20 +176,26 @@ async function scanDirectoryTree(rootPath, spaceKey, currentPath = rootPath, dep
     const nodes = [];
     const skipped = [];
     const relFromRoot = relative(rootPath, currentPath) || '.';
-
+    // Derive parent path from relFromRoot instead of calling join+relative again.
+    const parentRel = depth === 0 ? null : (relFromRoot.includes('/') ? relFromRoot.substring(0, relFromRoot.lastIndexOf('/')) : '.');
     // Collect subdirectories and markdown files
     const subdirs = entries.filter((e) => e.isDirectory() && !e.name.startsWith('.'));
     const mdFiles = entries.filter((e) => e.isFile() && extname(e.name).toLowerCase() === '.md' && !e.name.startsWith('.'));
     // Check for markdown files that correspond to subdirectories (e.g., "01 - Strategic.md" + "01 - Strategic/")
     const subdirNames = new Set(subdirs.map((d) => d.name));
-
+    // Cache parsed results for dir-matching .md files so we don't read them twice.
+    const parsedDirMd = new Map();
     // Process markdown files
     for (const entry of mdFiles) {
         const filePath = join(currentPath, entry.name);
         const stem = basename(entry.name, extname(entry.name));
-        // If this .md file has a matching subdirectory,
+        // If this .md file has a matching subdirectory, parse and cache it now so
+        // the subdir loop below can reuse the result without a second disk read.
         if (subdirNames.has(stem)) {
-
+            const result = await parseMarkdownFile(filePath, { spaceKey, titleFallback: stem });
+            if (!('skipped' in result)) {
+                parsedDirMd.set(stem, result);
+            }
             continue; // handled when processing the subdirectory
         }
         const result = await parseMarkdownFile(filePath, {
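The parentRel derivation assumes relFromRoot uses forward slashes (path.relative returns backslashes on Windows, so the split is effectively POSIX-oriented). A standalone sketch of the values it produces when depth > 0:

// Sketch of the parentRel derivation, with the depth === 0 branch dropped.
const parentRelOf = (relFromRoot) =>
    relFromRoot.includes('/')
        ? relFromRoot.substring(0, relFromRoot.lastIndexOf('/'))
        : '.';
parentRelOf('guides/setup'); // 'guides'
parentRelOf('guides');       // '.' (parent is the scan root)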
@@ -174,18 +220,8 @@ async function scanDirectoryTree(rootPath, spaceKey, currentPath = rootPath, dep
     for (const dir of subdirs) {
         const dirPath = join(currentPath, dir.name);
         const dirRelPath = relative(rootPath, dirPath);
-        //
-        const
-        let dirMarkdownFile;
-        if (dirMdFiles.has(dir.name)) {
-            const result = await parseMarkdownFile(matchingMdPath, {
-                spaceKey,
-                titleFallback: dir.name,
-            });
-            if (!('skipped' in result)) {
-                dirMarkdownFile = result;
-            }
-        }
+        // Reuse the cached parse result from the first loop (avoids a second disk read).
+        const dirMarkdownFile = parsedDirMd.get(dir.name);
         nodes.push({
             relativePath: dirRelPath,
             title: dirMarkdownFile?.title ?? dir.name,
@@ -205,12 +241,15 @@ async function scanDirectoryTree(rootPath, spaceKey, currentPath = rootPath, dep
 // Wiki link resolution
 // ---------------------------------------------------------------------------
 // Matches [[Page Name]] and [[Page Name#Heading]] and [[Page Name|Display Text]]
-
+// Two separate regex instances: WIKI_LINK_RE_TEST (no /g) is stateless and safe for .test();
+// WIKI_LINK_RE_REPLACE (with /g) is used only by String.replace() which resets lastIndex itself.
+const WIKI_LINK_RE_TEST = /\[\[([^\]|#]+)(?:#([^\]|]+))?(?:\|([^\]]+))?\]\]/;
+const WIKI_LINK_RE_REPLACE = /\[\[([^\]|#]+)(?:#([^\]|]+))?(?:\|([^\]]+))?\]\]/g;
 function hasWikiLinks(markdown) {
-    return
+    return WIKI_LINK_RE_TEST.test(markdown);
 }
 function resolveWikiLinks(markdown, titleToUrl) {
-    return markdown.replace(
+    return markdown.replace(WIKI_LINK_RE_REPLACE, (_match, pageName, heading, displayText) => {
         const trimmedName = pageName.trim();
         const url = titleToUrl.get(trimmedName);
         if (!url) {
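The lastIndex statefulness the new comment warns about is easy to reproduce:

// Why .test() on a /g regex is unsafe across calls: lastIndex persists.
const re = /\[\[([^\]|#]+)(?:#([^\]|]+))?(?:\|([^\]]+))?\]\]/g;
const md = 'See [[Roadmap]] and [[Roadmap]].';
re.test(md); // true  (match at index 4; lastIndex advances past it)
re.test(md); // true  (matches the second occurrence)
re.test(md); // false (nothing left; lastIndex resets to 0)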
@@ -228,8 +267,55 @@ function resolveWikiLinks(markdown, titleToUrl) {
 // ---------------------------------------------------------------------------
 // Core publish logic
 // ---------------------------------------------------------------------------
+/**
+ * Fetches the current version of an existing page, or creates a blank placeholder
+ * page and returns its new ID and version. Extracted from publishMarkdown to keep
+ * that function focused on the ADF pipeline.
+ */
+async function getOrCreatePage(pageId, spaceKey, title, parentId) {
+    if (pageId) {
+        const existingPage = await confluenceClient.content.getContentById({
+            id: pageId,
+            expand: ['version'],
+        });
+        const version = existingPage.version?.number;
+        if (version === undefined) {
+            throw new Error(`Could not read version for page ${pageId} — Confluence returned no version info`);
+        }
+        return { resolvedPageId: pageId, currentVersion: version };
+    }
+    // No existing page — create a blank placeholder to obtain a pageId, then
+    // update it with the real content in the caller.
+    const blankAdf = { version: 1, type: 'doc', content: [] };
+    const createParams = {
+        space: { key: spaceKey },
+        title,
+        type: 'page',
+        body: {
+            [ADF_CONTENT_TYPE]: {
+                value: JSON.stringify(blankAdf),
+                representation: ADF_CONTENT_TYPE,
+            },
+        },
+    };
+    if (parentId) {
+        createParams.ancestors = [{ id: parentId }];
+    }
+    const created = await confluenceClient.content.createContent(createParams);
+    const resolvedPageId = created.id;
+    const currentVersion = created.version?.number;
+    if (!resolvedPageId || currentVersion === undefined) {
+        throw new Error('Failed to create page: Confluence response is missing id or version');
+    }
+    return { resolvedPageId, currentVersion };
+}
+// Session-level cache: avoids re-fetching attachments for the same page during
+// the wiki-link second-pass (where publishMarkdown is called again for pages
+// that were already published in the first pass).
+const attachmentCache = new Map();
 async function publishMarkdown(markdown, title, spaceKey, pageId, parentId, skipPreview = false) {
-    // Parse markdown → ADF
+    // Parse markdown → ADF. Cast once here; downstream library calls accept any.
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
     const adf = parseMarkdownToADF(markdown, CONFLUENCE_BASE_URL);
     const diagramCount = countDiagramBlocks(adf);
     if (!skipPreview) {
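attachmentCache is plain memoization keyed by page ID. A minimal standalone sketch of the pattern (fetchAttachments is a stand-in for the contentAttachments.getAttachments call, not the package's API):

// Session-level memoization sketch; fetchAttachments is a hypothetical stand-in.
const cache = new Map();
async function attachmentsFor(pageId, fetchAttachments) {
    if (cache.has(pageId)) return cache.get(pageId); // wiki-link second pass hits here
    const map = await fetchAttachments(pageId);      // one network call per page
    cache.set(pageId, map);
    return map;
}

Note the cache stores the resolved map rather than the in-flight promise; that is safe here because each page is published at most once per pass, so two concurrent first fetches for the same pageId don't arise.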
@@ -237,59 +323,33 @@ async function publishMarkdown(markdown, title, spaceKey, pageId, parentId, skip
         return { isPreview: true, previewText, diagramCount };
     }
     // ----- Full publish -----
-
-
-
-
-
-        id: resolvedPageId,
-        expand: ['version'],
-    });
-    currentVersion = existingPage.version.number;
+    const { resolvedPageId, currentVersion } = await getOrCreatePage(pageId, spaceKey, title, parentId);
+    // Fetch current attachments to build the map (cached per page per session)
+    let currentAttachments;
+    if (attachmentCache.has(resolvedPageId)) {
+        currentAttachments = attachmentCache.get(resolvedPageId);
     }
     else {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            },
-        };
-        if (parentId) {
-            createParams.ancestors = [{ id: parentId }];
-        }
-        const created = await confluenceClient.content.createContent(createParams);
-        resolvedPageId = created.id;
-        currentVersion = created.version.number;
-    }
-    // Fetch current attachments to build the map
-    const attachmentsResult = await confluenceClient.contentAttachments.getAttachments({
-        id: resolvedPageId,
-    });
-    const currentAttachments = {};
-    for (const att of attachmentsResult.results ?? []) {
-        const attTitle = att.title ?? '';
-        const fileId = att.extensions?.fileId ?? '';
-        const collectionName = att.extensions?.collectionName ?? '';
-        if (attTitle) {
-            currentAttachments[attTitle] = {
-                filehash: att.metadata?.comment ?? '',
-                attachmentId: fileId,
-                collectionName,
-            };
+        const attachmentsResult = await confluenceClient.contentAttachments.getAttachments({
+            id: resolvedPageId,
+        });
+        currentAttachments = {};
+        for (const att of attachmentsResult.results ?? []) {
+            const attTitle = att.title ?? '';
+            const fileId = att.extensions?.fileId ?? '';
+            const collectionName = att.extensions?.collectionName ?? '';
+            if (attTitle) {
+                currentAttachments[attTitle] = {
+                    filehash: att.metadata?.comment ?? '',
+                    attachmentId: fileId,
+                    collectionName,
+                };
+            }
         }
+        attachmentCache.set(resolvedPageId, currentAttachments);
     }
     // Build publisher functions
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
     const publisherFunctions = createPublisherFunctions(confluenceClient, stubAdaptor, resolvedPageId, title, currentAttachments);
     // Run ADF processing pipeline (renders diagrams via Kroki)
     const finalAdf = await executeADFProcessingPipeline([
@@ -303,9 +363,9 @@ async function publishMarkdown(markdown, title, spaceKey, pageId, parentId, skip
             type: 'page',
             version: { number: currentVersion + 1 },
             body: {
-
+                [ADF_CONTENT_TYPE]: {
                     value: JSON.stringify(finalAdf),
-                    representation:
+                    representation: ADF_CONTENT_TYPE,
                 },
             },
         };
@@ -323,6 +383,54 @@ async function publishMarkdown(markdown, title, spaceKey, pageId, parentId, skip
     };
 }
 // ---------------------------------------------------------------------------
+// Directory publish helpers
+// ---------------------------------------------------------------------------
+/**
+ * Builds the preview text for a directory publish operation — the tree
+ * visualization shown to the user before they confirm with skip_preview: true.
+ * Extracted from the tool handler to keep it focused on orchestration.
+ */
+function buildDirectoryPreview(nodes, skipped, directoryPath, rootPageId, spaceKey) {
+    const rootTitle = rootPageId
+        ? `(existing page: ${rootPageId})`
+        : `"${basename(directoryPath)}" (will be created)`;
+    const lines = [
+        `=== DIRECTORY TREE PREVIEW ===`,
+        `Directory: ${directoryPath}`,
+        `Space: ${spaceKey}`,
+        `Root page: ${rootTitle}`,
+        `Total pages: ${nodes.length + (rootPageId ? 0 : 1)}`,
+        '',
+        '--- Page tree ---',
+    ];
+    const maxDepth = nodes.reduce((max, n) => Math.max(max, n.depth), 0);
+    for (let d = 0; d <= maxDepth; d++) {
+        for (const node of nodes.filter((n) => n.depth === d)) {
+            const indent = ' '.repeat(d + 1);
+            const suffix = node.isDirectory ? '/' : '';
+            const pageInfo = node.markdownFile?.pageId
+                ? `update: ${node.markdownFile.pageId}`
+                : 'new page';
+            let diagrams = '';
+            if (node.markdownFile) {
+                const count = countDiagramsInMarkdown(node.markdownFile.content);
+                if (count > 0)
+                    diagrams = `, ${count} diagram(s)`;
+            }
+            const label = node.isDirectory && !node.markdownFile ? 'placeholder' : pageInfo;
+            lines.push(`${indent}${node.title}${suffix} (${label}${diagrams})`);
+        }
+    }
+    if (skipped.length > 0) {
+        lines.push('', '--- Skipped files ---');
+        for (const s of skipped) {
+            lines.push(` ${basename(s.filePath)}: ${s.reason}`);
+        }
+    }
+    lines.push('', `Call again with skip_preview: true to publish.`);
+    return lines.join('\n');
+}
+// ---------------------------------------------------------------------------
 // MCP Server
 // ---------------------------------------------------------------------------
 const server = new Server({ name: 'mcp-markdown-to-confluence', version: '1.0.0' }, { capabilities: { tools: {} } });
@@ -427,6 +535,7 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
             const input = z
                 .object({ markdown: z.string(), title: z.string() })
                 .parse(args);
+            // eslint-disable-next-line @typescript-eslint/no-explicit-any
            const adf = parseMarkdownToADF(input.markdown, CONFLUENCE_BASE_URL);
            const diagramCount = countDiagramBlocks(adf);
            const previewText = renderADFDoc(adf);
@@ -479,7 +588,8 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
                 skip_preview: z.boolean().default(false),
             })
                 .parse(args);
-            const
+            const safeFilePath = await validatePath(input.filePath);
+            const raw = await readFile(safeFilePath, 'utf-8');
             const parsed = matter(raw);
             const title = parsed.data['connie-title'] ?? parsed.data['title'] ?? '';
             const spaceKey = parsed.data['connie-space-key'] ?? '';
@@ -544,11 +654,12 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
                 spaceKey: z.string(),
                 rootPageId: z.string().optional(),
                 skip_preview: z.boolean().default(false),
-                concurrency: z.number().int().min(1).max(
+                concurrency: z.number().int().min(1).max(MAX_PUBLISH_CONCURRENCY).default(DEFAULT_PUBLISH_CONCURRENCY),
             })
                 .parse(args);
+            const safeDirectoryPath = await validatePath(input.directoryPath);
             // Scan directory tree
-            const { nodes, skipped } = await scanDirectoryTree(
+            const { nodes, skipped } = await scanDirectoryTree(safeDirectoryPath, input.spaceKey);
             if (nodes.length === 0 && skipped.length === 0) {
                 return {
                     content: [{ type: 'text', text: `No files found in ${input.directoryPath}` }],
@@ -556,46 +667,8 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
             }
             // Preview mode
             if (!input.skip_preview) {
-                const
-
-                    : `"${basename(input.directoryPath)}" (will be created)`;
-                const lines = [
-                    `=== DIRECTORY TREE PREVIEW ===`,
-                    `Directory: ${input.directoryPath}`,
-                    `Space: ${input.spaceKey}`,
-                    `Root page: ${rootTitle}`,
-                    `Total pages: ${nodes.length + (input.rootPageId ? 0 : 1)}`,
-                    '',
-                    '--- Page tree ---',
-                ];
-                // Build tree visualization
-                const maxDepth = nodes.reduce((max, n) => Math.max(max, n.depth), 0);
-                for (let d = 0; d <= maxDepth; d++) {
-                    for (const node of nodes.filter((n) => n.depth === d)) {
-                        const indent = ' '.repeat(d + 1);
-                        const suffix = node.isDirectory ? '/' : '';
-                        const pageInfo = node.markdownFile?.pageId
-                            ? `update: ${node.markdownFile.pageId}`
-                            : 'new page';
-                        let diagrams = '';
-                        if (node.markdownFile) {
-                            const adf = parseMarkdownToADF(node.markdownFile.content, CONFLUENCE_BASE_URL);
-                            const count = countDiagramBlocks(adf);
-                            if (count > 0)
-                                diagrams = `, ${count} diagram(s)`;
-                        }
-                        const label = node.isDirectory && !node.markdownFile ? 'placeholder' : pageInfo;
-                        lines.push(`${indent}${node.title}${suffix} (${label}${diagrams})`);
-                    }
-                }
-                if (skipped.length > 0) {
-                    lines.push('', '--- Skipped files ---');
-                    for (const s of skipped) {
-                        lines.push(` ${basename(s.filePath)}: ${s.reason}`);
-                    }
-                }
-                lines.push('', `Call again with skip_preview: true to publish.`);
-                return { content: [{ type: 'text', text: lines.join('\n') }] };
+                const preview = buildDirectoryPreview(nodes, skipped, input.directoryPath, input.rootPageId, input.spaceKey);
+                return { content: [{ type: 'text', text: preview }] };
             }
             // Publish mode — process level by level
             const limit = pLimit(input.concurrency);
@@ -619,15 +692,14 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
                 catch (err) {
                     return {
                         isError: true,
-                        content: [{
-                                type: 'text',
-                                text: `Error creating root page: ${err instanceof Error ? err.message : String(err)}`,
-                            }],
+                        content: [{ type: 'text', text: `Error creating root page: ${errorMessage(err)}` }],
                     };
                 }
             }
-            // Build a map from relativePath
+            // Build a map from relativePath → node for parent lookups, and a separate
+            // map for resolved page IDs so we never mutate the input node objects.
             const nodeMap = new Map();
+            const resolvedIds = new Map(); // relativePath → pageId
             for (const node of nodes) {
                 nodeMap.set(node.relativePath, node);
             }
@@ -643,7 +715,7 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
                 }
                 else {
                     const parentNode = nodeMap.get(node.parentRelativePath);
-                    parentId = parentNode
+                    parentId = parentNode ? resolvedIds.get(parentNode.relativePath) : undefined;
                 }
                 if (!parentId) {
                     return {
@@ -658,8 +730,9 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
                 const content = node.markdownFile?.content ?? '';
                 const pageId = node.markdownFile?.pageId;
                 const result = await publishMarkdown(content, node.title, input.spaceKey, pageId, parentId, true);
-
-
+                if (result.pageId) {
+                    resolvedIds.set(node.relativePath, result.pageId);
+                }
                 return {
                     relativePath: node.relativePath,
                     title: node.title,
@@ -677,7 +750,7 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
                         title: node.title,
                         success: false,
                         isDirectory: node.isDirectory,
-                        error:
+                        error: errorMessage(err),
                     };
                 }
             })));
@@ -691,18 +764,18 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
                     titleToUrl.set(r.title, r.url);
                 }
             }
-            // Find nodes with wiki links that need re-publishing
-            const nodesWithLinks = nodes.filter((n) => n.markdownFile && hasWikiLinks(n.markdownFile.content) && n.
+            // Find nodes with wiki links that were successfully published and need re-publishing
+            const nodesWithLinks = nodes.filter((n) => n.markdownFile && hasWikiLinks(n.markdownFile.content) && resolvedIds.has(n.relativePath));
             if (nodesWithLinks.length > 0 && titleToUrl.size > 0) {
                 const linkResults = await Promise.all(nodesWithLinks.map((node) => limit(async () => {
                     try {
                         const resolvedMarkdown = resolveWikiLinks(node.markdownFile.content, titleToUrl);
-                        const result = await publishMarkdown(resolvedMarkdown, node.title, input.spaceKey, node.
+                        const result = await publishMarkdown(resolvedMarkdown, node.title, input.spaceKey, resolvedIds.get(node.relativePath), undefined, // don't reparent on second pass
                         true);
                         return { relativePath: node.relativePath, title: node.title, success: true, version: result.version };
                     }
-                    catch {
-                        return { relativePath: node.relativePath, title: node.title, success: false };
+                    catch (err) {
+                        return { relativePath: node.relativePath, title: node.title, success: false, error: errorMessage(err) };
                     }
                 })));
                 const linkedCount = linkResults.filter((r) => r.success).length;
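The second pass is narrow by construction: only nodes that both contain [[wiki links]] and were successfully published in the first pass (present in resolvedIds) are re-published. A toy sketch of that selection, with invented data and no Confluence calls:

// Toy sketch of the second-pass filter; the data here is purely illustrative.
const resolvedIds = new Map([['a.md', '101'], ['b.md', '102']]);
const nodes = [
    { relativePath: 'a.md', markdownFile: { content: 'See [[B]]' } },
    { relativePath: 'b.md', markdownFile: { content: 'plain text' } },
];
const hasWikiLinks = (md) => /\[\[([^\]|#]+)(?:#([^\]|]+))?(?:\|([^\]]+))?\]\]/.test(md);
nodes.filter((n) => n.markdownFile && hasWikiLinks(n.markdownFile.content)
    && resolvedIds.has(n.relativePath)); // only a.md is re-published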
@@ -757,10 +830,9 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
         };
     }
     catch (err) {
-        const message = err instanceof Error ? err.message : String(err);
         return {
             isError: true,
-            content: [{ type: 'text', text: `Error: ${
+            content: [{ type: 'text', text: `Error: ${errorMessage(err)}` }],
         };
     }
 });
package/dist/kroki/KrokiDiagramPlugin.js
CHANGED

@@ -20,7 +20,7 @@ export class KrokiDiagramPlugin {
         const nodes = filter(adf, (node) => node.type === 'codeBlock' &&
             (node.attrs || {})?.['language'] === this.diagramType);
         const charts = new Set(nodes.map((node) => {
-            const details = getDiagramFileName(this.diagramType, node?.content?.
+            const details = getDiagramFileName(this.diagramType, node?.content?.[0]?.text, this.outputFormat);
             return {
                 name: details.uploadFilename,
                 data: details.text,
@@ -29,17 +29,22 @@ export class KrokiDiagramPlugin {
         return Array.from(charts);
     }
     async transform(charts, supportFunctions) {
-        let imageMap = {};
         if (charts.length === 0) {
-            return
+            return {};
         }
+        // Render all diagrams in parallel, then upload all results in parallel.
+        // Previously uploads were sequential (N+1); now both phases are concurrent.
         const rendered = await Promise.all(charts.map(async (chart) => {
             const buffer = await this.client.renderDiagram(this.diagramType, chart.data, this.outputFormat);
             return [chart.name, buffer];
         }));
-
-        const
-
+        const uploaded = await Promise.all(rendered.map(async ([name, buffer]) => {
+            const image = await supportFunctions.uploadBuffer(name, buffer);
+            return [name, image];
+        }));
+        const imageMap = {};
+        for (const [name, image] of uploaded) {
+            imageMap[name] = image;
         }
         return imageMap;
     }
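The reworked transform is a two-phase fan-out: render every chart concurrently, then upload every buffer concurrently. A condensed standalone sketch (renderOne and uploadOne are hypothetical stand-ins for the Kroki client and supportFunctions.uploadBuffer):

// Two-phase fan-out sketch; renderOne/uploadOne are stand-ins, not package API.
async function transformAll(charts, renderOne, uploadOne) {
    const rendered = await Promise.all(
        charts.map(async (c) => [c.name, await renderOne(c.data)]));
    const uploaded = await Promise.all(
        rendered.map(async ([name, buf]) => [name, await uploadOne(name, buf)]));
    return Object.fromEntries(uploaded); // name to uploaded-image descriptor
}

One caveat of Promise.all in both phases: a single failed render or upload rejects the whole transform; Promise.allSettled would be the alternative if partial success were acceptable.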
@@ -49,7 +54,7 @@ export class KrokiDiagramPlugin {
         traverse(afterAdf, {
             codeBlock: (node, _parent) => {
                 if (node?.attrs?.['language'] === this.diagramType) {
-                    const content = node?.content?.
+                    const content = node?.content?.[0]?.text;
                     if (!content) {
                         return;
                     }
package/dist/loader.js
CHANGED

@@ -22,7 +22,10 @@ function tryResolveFile(filePath) {
             return pathToFileURL(resolved).href;
         }
     }
-    } catch {
+    } catch (err) {
+        // Non-fatal: if package.json is unparseable we simply can't resolve this path.
+        process.stderr.write(`[loader] Failed to parse package.json at ${filePath}: ${err?.message ?? err}\n`);
+    }
     }
     return null;
 }
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
     "name": "@neverprepared/mcp-markdown-to-confluence",
-    "version": "1.4.0",
+    "version": "1.4.2",
     "description": "MCP server for converting markdown to Confluence ADF and publishing pages with diagram support via Kroki",
     "type": "module",
     "main": "dist/index.js",