@neverprepared/mcp-markdown-to-confluence 1.2.0 → 1.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +269 -60
  2. package/package.json +1 -1
package/dist/index.js CHANGED
@@ -5,7 +5,7 @@ import { z } from 'zod';
5
5
  import { ConfluenceClient } from 'confluence.js';
6
6
  import matter from 'gray-matter';
7
7
  import { readFile, readdir } from 'fs/promises';
8
- import { join, extname, basename } from 'path';
8
+ import { join, extname, basename, relative } from 'path';
9
9
  // Deep imports to avoid loading adaptors/filesystem.js which has broken CJS named exports.
10
10
  // Pin @markdown-confluence/lib version if these paths change.
11
11
  import { parseMarkdownToADF } from '@markdown-confluence/lib/dist/MdToADF.js';
@@ -115,11 +115,11 @@ function pLimit(concurrency) {
115
115
  next();
116
116
  });
117
117
  }
118
- async function parseMarkdownFile(filePath) {
118
+ async function parseMarkdownFile(filePath, overrides) {
119
119
  const raw = await readFile(filePath, 'utf-8');
120
120
  const parsed = matter(raw);
121
- const title = parsed.data['connie-title'] ?? parsed.data['title'] ?? '';
122
- const spaceKey = parsed.data['connie-space-key'] ?? '';
121
+ const title = parsed.data['connie-title'] ?? parsed.data['title'] ?? overrides?.titleFallback ?? '';
122
+ const spaceKey = overrides?.spaceKey ?? parsed.data['connie-space-key'] ?? '';
123
123
  const pageId = parsed.data['connie-page-id']
124
124
  ? String(parsed.data['connie-page-id'])
125
125
  : undefined;
@@ -131,6 +131,100 @@ async function parseMarkdownFile(filePath) {
131
131
  }
132
132
  return { filePath, title, spaceKey, pageId, content: parsed.content };
133
133
  }
134
+ async function scanDirectoryTree(rootPath, spaceKey, currentPath = rootPath, depth = 0) {
135
+ const entries = await readdir(currentPath, { withFileTypes: true });
136
+ const nodes = [];
137
+ const skipped = [];
138
+ const relFromRoot = relative(rootPath, currentPath) || '.';
139
+ const parentRel = depth === 0 ? null : (relative(rootPath, join(currentPath, '..')) || '.');
140
+ // Collect subdirectories and markdown files
141
+ const subdirs = entries.filter((e) => e.isDirectory() && !e.name.startsWith('.'));
142
+ const mdFiles = entries.filter((e) => e.isFile() && extname(e.name).toLowerCase() === '.md' && !e.name.startsWith('.'));
143
+ // Check for markdown files that correspond to subdirectories (e.g., "01 - Strategic.md" + "01 - Strategic/")
144
+ const subdirNames = new Set(subdirs.map((d) => d.name));
145
+ const dirMdFiles = new Set();
146
+ // Process markdown files
147
+ for (const entry of mdFiles) {
148
+ const filePath = join(currentPath, entry.name);
149
+ const stem = basename(entry.name, extname(entry.name));
150
+ // If this .md file has a matching subdirectory, it will be used as the directory's content
151
+ if (subdirNames.has(stem)) {
152
+ dirMdFiles.add(stem);
153
+ continue; // handled when processing the subdirectory
154
+ }
155
+ const result = await parseMarkdownFile(filePath, {
156
+ spaceKey,
157
+ titleFallback: stem,
158
+ });
159
+ if ('skipped' in result) {
160
+ skipped.push(result);
161
+ }
162
+ else {
163
+ nodes.push({
164
+ relativePath: relative(rootPath, filePath),
165
+ title: result.title,
166
+ depth,
167
+ parentRelativePath: depth === 0 ? null : relFromRoot,
168
+ markdownFile: result,
169
+ isDirectory: false,
170
+ });
171
+ }
172
+ }
173
+ // Process subdirectories
174
+ for (const dir of subdirs) {
175
+ const dirPath = join(currentPath, dir.name);
176
+ const dirRelPath = relative(rootPath, dirPath);
177
+ // Check for a matching .md file to use as directory content
178
+ const matchingMdPath = join(currentPath, dir.name + '.md');
179
+ let dirMarkdownFile;
180
+ if (dirMdFiles.has(dir.name)) {
181
+ const result = await parseMarkdownFile(matchingMdPath, {
182
+ spaceKey,
183
+ titleFallback: dir.name,
184
+ });
185
+ if (!('skipped' in result)) {
186
+ dirMarkdownFile = result;
187
+ }
188
+ }
189
+ nodes.push({
190
+ relativePath: dirRelPath,
191
+ title: dirMarkdownFile?.title ?? dir.name,
192
+ depth,
193
+ parentRelativePath: depth === 0 ? null : relFromRoot,
194
+ markdownFile: dirMarkdownFile,
195
+ isDirectory: true,
196
+ });
197
+ // Recurse
198
+ const subResult = await scanDirectoryTree(rootPath, spaceKey, dirPath, depth + 1);
199
+ nodes.push(...subResult.nodes);
200
+ skipped.push(...subResult.skipped);
201
+ }
202
+ return { nodes, skipped };
203
+ }
204
+ // ---------------------------------------------------------------------------
205
+ // Wiki link resolution
206
+ // ---------------------------------------------------------------------------
207
+ // Matches [[Page Name]] and [[Page Name#Heading]] and [[Page Name|Display Text]]
208
+ const WIKI_LINK_RE = /\[\[([^\]|#]+)(?:#([^\]|]+))?(?:\|([^\]]+))?\]\]/g;
209
+ function hasWikiLinks(markdown) {
210
+ return WIKI_LINK_RE.test(markdown);
211
+ }
212
+ function resolveWikiLinks(markdown, titleToUrl) {
213
+ return markdown.replace(WIKI_LINK_RE, (_match, pageName, heading, displayText) => {
214
+ const trimmedName = pageName.trim();
215
+ const url = titleToUrl.get(trimmedName);
216
+ if (!url) {
217
+ // No matching page found — leave as plain text
218
+ return displayText?.trim() || trimmedName;
219
+ }
220
+ const label = displayText?.trim() || trimmedName;
221
+ const anchor = heading?.trim();
222
+ const anchorSuffix = anchor
223
+ ? '#' + anchor.replace(/\s+/g, '-')
224
+ : '';
225
+ return `[${label}](${url}${anchorSuffix})`;
226
+ });
227
+ }
134
228
  // ---------------------------------------------------------------------------
135
229
  // Core publish logic
136
230
  // ---------------------------------------------------------------------------
@@ -291,14 +385,23 @@ server.setRequestHandler(ListToolsRequestSchema, async () => ({
291
385
  },
292
386
  {
293
387
  name: 'markdown_publish_directory',
294
- description: 'Scan a directory for markdown files with Confluence frontmatter and publish them all concurrently. ' +
295
- 'Files without required frontmatter (connie-title, connie-space-key) are skipped.',
388
+ description: 'Recursively scan a directory and publish markdown files as a Confluence page tree, ' +
389
+ 'mirroring the folder structure. Directories become parent pages; markdown files become child pages. ' +
390
+ 'Existing pages (with connie-page-id) are updated and reparented to match the directory structure.',
296
391
  inputSchema: {
297
392
  type: 'object',
298
393
  properties: {
299
394
  directoryPath: {
300
395
  type: 'string',
301
- description: 'Absolute path to the directory containing markdown files',
396
+ description: 'Absolute path to the root directory',
397
+ },
398
+ spaceKey: {
399
+ type: 'string',
400
+ description: 'Confluence space key. Overrides file-level connie-space-key.',
401
+ },
402
+ rootPageId: {
403
+ type: 'string',
404
+ description: 'Existing Confluence page ID to use as the root parent. If omitted, a new root page is created.',
302
405
  },
303
406
  skip_preview: {
304
407
  type: 'boolean',
@@ -307,11 +410,11 @@ server.setRequestHandler(ListToolsRequestSchema, async () => ({
307
410
  },
308
411
  concurrency: {
309
412
  type: 'number',
310
- description: 'Maximum number of files to publish concurrently (default: 5)',
413
+ description: 'Maximum concurrent publishes per depth level (default: 5)',
311
414
  default: 5,
312
415
  },
313
416
  },
314
- required: ['directoryPath'],
417
+ required: ['directoryPath', 'spaceKey'],
315
418
  },
316
419
  },
317
420
  ],
@@ -438,47 +541,52 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
438
541
  const input = z
439
542
  .object({
440
543
  directoryPath: z.string(),
544
+ spaceKey: z.string(),
545
+ rootPageId: z.string().optional(),
441
546
  skip_preview: z.boolean().default(false),
442
547
  concurrency: z.number().int().min(1).max(20).default(5),
443
548
  })
444
549
  .parse(args);
445
- const entries = await readdir(input.directoryPath);
446
- const mdFiles = entries
447
- .filter((f) => extname(f).toLowerCase() === '.md')
448
- .map((f) => join(input.directoryPath, f));
449
- if (mdFiles.length === 0) {
550
+ // Scan directory tree
551
+ const { nodes, skipped } = await scanDirectoryTree(input.directoryPath, input.spaceKey);
552
+ if (nodes.length === 0 && skipped.length === 0) {
450
553
  return {
451
- content: [{ type: 'text', text: `No .md files found in ${input.directoryPath}` }],
554
+ content: [{ type: 'text', text: `No files found in ${input.directoryPath}` }],
452
555
  };
453
556
  }
454
- const parseResults = await Promise.all(mdFiles.map(parseMarkdownFile));
455
- const valid = [];
456
- const skipped = [];
457
- for (const r of parseResults) {
458
- if ('skipped' in r) {
459
- skipped.push(r);
460
- }
461
- else {
462
- valid.push(r);
463
- }
464
- }
557
+ // Preview mode
465
558
  if (!input.skip_preview) {
559
+ const rootTitle = input.rootPageId
560
+ ? `(existing page: ${input.rootPageId})`
561
+ : `"${basename(input.directoryPath)}" (will be created)`;
466
562
  const lines = [
467
- `=== DIRECTORY PREVIEW ===`,
563
+ `=== DIRECTORY TREE PREVIEW ===`,
468
564
  `Directory: ${input.directoryPath}`,
469
- `Total .md files: ${mdFiles.length}`,
470
- `Files to publish: ${valid.length}`,
471
- `Files skipped: ${skipped.length}`,
565
+ `Space: ${input.spaceKey}`,
566
+ `Root page: ${rootTitle}`,
567
+ `Total pages: ${nodes.length + (input.rootPageId ? 0 : 1)}`,
472
568
  '',
473
- '--- Files to publish ---',
569
+ '--- Page tree ---',
474
570
  ];
475
- for (const f of valid) {
476
- const adf = parseMarkdownToADF(f.content, CONFLUENCE_BASE_URL);
477
- const diagrams = countDiagramBlocks(adf);
478
- lines.push(` ${basename(f.filePath)}`);
479
- lines.push(` Title: ${f.title} | Space: ${f.spaceKey}` +
480
- (f.pageId ? ` | Page ID: ${f.pageId}` : ' (new page)') +
481
- (diagrams > 0 ? ` | Diagrams: ${diagrams}` : ''));
571
+ // Build tree visualization
572
+ const maxDepth = nodes.reduce((max, n) => Math.max(max, n.depth), 0);
573
+ for (let d = 0; d <= maxDepth; d++) {
574
+ for (const node of nodes.filter((n) => n.depth === d)) {
575
+ const indent = ' '.repeat(d + 1);
576
+ const suffix = node.isDirectory ? '/' : '';
577
+ const pageInfo = node.markdownFile?.pageId
578
+ ? `update: ${node.markdownFile.pageId}`
579
+ : 'new page';
580
+ let diagrams = '';
581
+ if (node.markdownFile) {
582
+ const adf = parseMarkdownToADF(node.markdownFile.content, CONFLUENCE_BASE_URL);
583
+ const count = countDiagramBlocks(adf);
584
+ if (count > 0)
585
+ diagrams = `, ${count} diagram(s)`;
586
+ }
587
+ const label = node.isDirectory && !node.markdownFile ? 'placeholder' : pageInfo;
588
+ lines.push(`${indent}${node.title}${suffix} (${label}${diagrams})`);
589
+ }
482
590
  }
483
591
  if (skipped.length > 0) {
484
592
  lines.push('', '--- Skipped files ---');
@@ -486,51 +594,152 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
486
594
  lines.push(` ${basename(s.filePath)}: ${s.reason}`);
487
595
  }
488
596
  }
489
- lines.push('', `Call again with skip_preview: true to publish all ${valid.length} file(s).`);
597
+ lines.push('', `Call again with skip_preview: true to publish.`);
490
598
  return { content: [{ type: 'text', text: lines.join('\n') }] };
491
599
  }
600
+ // Publish mode — process level by level
492
601
  const limit = pLimit(input.concurrency);
493
- const results = await Promise.all(valid.map((f) => limit(async () => {
602
+ // Create or resolve root page
603
+ let rootPageId = input.rootPageId;
604
+ const allResults = [];
605
+ if (!rootPageId) {
494
606
  try {
495
- const result = await publishMarkdown(f.content, f.title, f.spaceKey, f.pageId, undefined, true);
496
- return {
497
- filePath: f.filePath,
498
- title: f.title,
607
+ const rootResult = await publishMarkdown('', basename(input.directoryPath), input.spaceKey, undefined, undefined, true);
608
+ rootPageId = rootResult.pageId;
609
+ allResults.push({
610
+ relativePath: '.',
611
+ title: basename(input.directoryPath),
499
612
  success: true,
500
- pageId: result.pageId,
501
- version: result.version,
502
- diagramCount: result.diagramCount,
503
- url: result.url,
504
- };
613
+ isDirectory: true,
614
+ pageId: rootResult.pageId,
615
+ version: rootResult.version,
616
+ url: rootResult.url,
617
+ });
505
618
  }
506
619
  catch (err) {
507
620
  return {
508
- filePath: f.filePath,
509
- title: f.title,
510
- success: false,
511
- error: err instanceof Error ? err.message : String(err),
621
+ isError: true,
622
+ content: [{
623
+ type: 'text',
624
+ text: `Error creating root page: ${err instanceof Error ? err.message : String(err)}`,
625
+ }],
512
626
  };
513
627
  }
514
- })));
515
- const succeeded = results.filter((r) => r.success);
516
- const failed = results.filter((r) => !r.success);
628
+ }
629
+ // Build a map from relativePath to node for parent lookups
630
+ const nodeMap = new Map();
631
+ for (const node of nodes) {
632
+ nodeMap.set(node.relativePath, node);
633
+ }
634
+ // Group by depth and process level by level
635
+ const maxDepth = nodes.reduce((max, n) => Math.max(max, n.depth), 0);
636
+ for (let depth = 0; depth <= maxDepth; depth++) {
637
+ const levelNodes = nodes.filter((n) => n.depth === depth);
638
+ const levelResults = await Promise.all(levelNodes.map((node) => limit(async () => {
639
+ // Determine parent page ID
640
+ let parentId;
641
+ if (node.parentRelativePath === null) {
642
+ parentId = rootPageId;
643
+ }
644
+ else {
645
+ const parentNode = nodeMap.get(node.parentRelativePath);
646
+ parentId = parentNode?.resolvedPageId;
647
+ }
648
+ if (!parentId) {
649
+ return {
650
+ relativePath: node.relativePath,
651
+ title: node.title,
652
+ success: false,
653
+ isDirectory: node.isDirectory,
654
+ error: 'Parent page was not created (parent failed)',
655
+ };
656
+ }
657
+ try {
658
+ const content = node.markdownFile?.content ?? '';
659
+ const pageId = node.markdownFile?.pageId;
660
+ const result = await publishMarkdown(content, node.title, input.spaceKey, pageId, parentId, true);
661
+ // Store resolved page ID for children
662
+ node.resolvedPageId = result.pageId;
663
+ return {
664
+ relativePath: node.relativePath,
665
+ title: node.title,
666
+ success: true,
667
+ isDirectory: node.isDirectory,
668
+ pageId: result.pageId,
669
+ version: result.version,
670
+ diagramCount: result.diagramCount,
671
+ url: result.url,
672
+ };
673
+ }
674
+ catch (err) {
675
+ return {
676
+ relativePath: node.relativePath,
677
+ title: node.title,
678
+ success: false,
679
+ isDirectory: node.isDirectory,
680
+ error: err instanceof Error ? err.message : String(err),
681
+ };
682
+ }
683
+ })));
684
+ allResults.push(...levelResults);
685
+ }
686
+ // Second pass: resolve wiki links [[Page Name]] and [[Page Name#Heading]]
687
+ // Build title → URL map from all successfully published pages
688
+ const titleToUrl = new Map();
689
+ for (const r of allResults) {
690
+ if (r.success && r.url) {
691
+ titleToUrl.set(r.title, r.url);
692
+ }
693
+ }
694
+ // Find nodes with wiki links that need re-publishing
695
+ const nodesWithLinks = nodes.filter((n) => n.markdownFile && hasWikiLinks(n.markdownFile.content) && n.resolvedPageId);
696
+ if (nodesWithLinks.length > 0 && titleToUrl.size > 0) {
697
+ const linkResults = await Promise.all(nodesWithLinks.map((node) => limit(async () => {
698
+ try {
699
+ const resolvedMarkdown = resolveWikiLinks(node.markdownFile.content, titleToUrl);
700
+ const result = await publishMarkdown(resolvedMarkdown, node.title, input.spaceKey, node.resolvedPageId, undefined, // don't reparent on second pass
701
+ true);
702
+ return { relativePath: node.relativePath, title: node.title, success: true, version: result.version };
703
+ }
704
+ catch {
705
+ return { relativePath: node.relativePath, title: node.title, success: false };
706
+ }
707
+ })));
708
+ const linkedCount = linkResults.filter((r) => r.success).length;
709
+ if (linkedCount > 0) {
710
+ // Update versions in allResults
711
+ for (const lr of linkResults) {
712
+ if (lr.success && lr.version) {
713
+ const existing = allResults.find((r) => r.relativePath === lr.relativePath);
714
+ if (existing)
715
+ existing.version = lr.version;
716
+ }
717
+ }
718
+ }
719
+ }
720
+ // Build summary
721
+ const succeeded = allResults.filter((r) => r.success);
722
+ const failed = allResults.filter((r) => !r.success);
517
723
  const lines = [
518
724
  `=== DIRECTORY PUBLISH RESULTS ===`,
519
725
  `Directory: ${input.directoryPath}`,
520
- `Succeeded: ${succeeded.length} | Failed: ${failed.length} | Skipped: ${skipped.length}`,
726
+ `Space: ${input.spaceKey}`,
727
+ `Succeeded: ${succeeded.length} | Failed: ${failed.length} | Skipped: ${skipped.length}` +
728
+ (nodesWithLinks.length > 0 ? ` | Wiki links resolved: ${nodesWithLinks.length} page(s)` : ''),
521
729
  '',
522
730
  ];
523
731
  if (succeeded.length > 0) {
524
732
  lines.push('--- Succeeded ---');
525
733
  for (const r of succeeded) {
526
- lines.push(` "${r.title}"`);
527
- lines.push(` Page ID: ${r.pageId} | Version: ${r.version} | Diagrams: ${r.diagramCount} | URL: ${r.url}`);
734
+ const type = r.isDirectory ? ' (folder)' : '';
735
+ lines.push(` "${r.title}"${type}`);
736
+ lines.push(` Page ID: ${r.pageId} | Version: ${r.version}${r.diagramCount ? ` | Diagrams: ${r.diagramCount}` : ''} | URL: ${r.url}`);
528
737
  }
529
738
  }
530
739
  if (failed.length > 0) {
531
740
  lines.push('', '--- Failed ---');
532
741
  for (const r of failed) {
533
- lines.push(` "${r.title}" (${basename(r.filePath)})`);
742
+ lines.push(` "${r.title}" (${r.relativePath})`);
534
743
  lines.push(` Error: ${r.error}`);
535
744
  }
536
745
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@neverprepared/mcp-markdown-to-confluence",
3
- "version": "1.2.0",
3
+ "version": "1.4.0",
4
4
  "description": "MCP server for converting markdown to Confluence ADF and publishing pages with diagram support via Kroki",
5
5
  "type": "module",
6
6
  "main": "dist/index.js",