@neverprepared/mcp-markdown-to-confluence 1.1.4 → 1.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2):
  1. package/dist/index.js +320 -1
  2. package/package.json +1 -1
package/dist/index.js CHANGED
@@ -4,7 +4,8 @@ import { CallToolRequestSchema, ListToolsRequestSchema, } from '@modelcontextpro
4
4
  import { z } from 'zod';
5
5
  import { ConfluenceClient } from 'confluence.js';
6
6
  import matter from 'gray-matter';
7
- import { readFile } from 'fs/promises';
7
+ import { readFile, readdir } from 'fs/promises';
8
+ import { join, extname, basename, relative } from 'path';
8
9
  // Deep imports to avoid loading adaptors/filesystem.js which has broken CJS named exports.
9
10
  // Pin @markdown-confluence/lib version if these paths change.
10
11
  import { parseMarkdownToADF } from '@markdown-confluence/lib/dist/MdToADF.js';
@@ -95,6 +96,111 @@ function countDiagramBlocks(adf) {
95
96
  }
96
97
  return count;
97
98
  }
99
/**
 * Create a concurrency limiter: returns a scheduler that wraps async tasks so
 * that at most `concurrency` of them are in flight at once; excess tasks wait
 * in a FIFO queue.
 *
 * @param {number} concurrency - Maximum number of tasks allowed to run concurrently.
 * @returns {(fn: () => Promise<any>) => Promise<any>} Scheduler; the returned
 *   promise settles with `fn`'s result once the task has been run.
 */
function pLimit(concurrency) {
    let active = 0;
    const queue = [];
    // Start at most one queued task if a slot is free. Each task completion
    // calls next() exactly once, so slots are refilled one-for-one.
    function next() {
        if (queue.length > 0 && active < concurrency) {
            active++;
            queue.shift()();
        }
    }
    return (fn) => new Promise((resolve, reject) => {
        queue.push(() => {
            // Promise.resolve().then(fn) routes a synchronously-thrown error
            // from fn into the rejection path. With a bare fn() call, a sync
            // throw would escape this callback and the `active` slot taken in
            // next() would never be released, permanently shrinking the pool.
            Promise.resolve()
                .then(fn)
                .then(resolve, reject)
                .finally(() => {
                    active--;
                    next();
                });
        });
        next();
    });
}
118
/**
 * Read a markdown file and extract the metadata needed to publish it to
 * Confluence from its frontmatter.
 *
 * Resolution order: title comes from `connie-title`, then `title`, then the
 * caller-supplied fallback; spaceKey comes from the override first, then
 * `connie-space-key`. A file lacking either required field is reported as
 * skipped rather than throwing.
 *
 * @param {string} filePath - Path of the markdown file to read.
 * @param {{spaceKey?: string, titleFallback?: string}} [overrides] - Caller-supplied defaults.
 * @returns {Promise<object>} Either `{skipped: true, filePath, reason}` or
 *   `{filePath, title, spaceKey, pageId, content}` (pageId undefined for new pages).
 */
async function parseMarkdownFile(filePath, overrides) {
    const source = await readFile(filePath, 'utf-8');
    const { data, content } = matter(source);
    const title = data['connie-title'] ?? data['title'] ?? overrides?.titleFallback ?? '';
    const spaceKey = overrides?.spaceKey ?? data['connie-space-key'] ?? '';
    const rawPageId = data['connie-page-id'];
    // Normalize to string: YAML may parse a numeric page id as a number.
    const pageId = rawPageId ? String(rawPageId) : undefined;
    if (!title) {
        return { skipped: true, filePath, reason: 'Missing "connie-title" or "title" in frontmatter' };
    }
    if (!spaceKey) {
        return { skipped: true, filePath, reason: 'Missing "connie-space-key" in frontmatter' };
    }
    return { filePath, title, spaceKey, pageId, content };
}
134
/**
 * Recursively scan `rootPath` and build a flat, parent-before-child list of
 * page nodes mirroring the directory structure.
 *
 * Conventions:
 *  - Hidden entries (names starting with '.') are ignored.
 *  - A markdown file whose stem matches a sibling directory name
 *    (e.g. "01 - Strategic.md" next to "01 - Strategic/") is used as that
 *    directory's page content instead of becoming a standalone page.
 *  - A directory with no matching .md file becomes a placeholder node
 *    (`markdownFile` undefined).
 *
 * @param {string} rootPath - Absolute scan root; all relativePath values are relative to it.
 * @param {string} spaceKey - Confluence space key applied to every parsed file.
 * @param {string} [currentPath=rootPath] - Directory currently being scanned (recursion cursor).
 * @param {number} [depth=0] - Depth of `currentPath` below `rootPath`.
 * @returns {Promise<{nodes: object[], skipped: object[]}>} `nodes` ordered so a
 *   parent directory always precedes its children; `skipped` lists files with
 *   invalid frontmatter (shape from parseMarkdownFile).
 */
async function scanDirectoryTree(rootPath, spaceKey, currentPath = rootPath, depth = 0) {
    const entries = await readdir(currentPath, { withFileTypes: true });
    const nodes = [];
    const skipped = [];
    // '.' when currentPath === rootPath; children of this directory use it as
    // their parentRelativePath key.
    const relFromRoot = relative(rootPath, currentPath) || '.';
    // Collect subdirectories and markdown files, skipping hidden entries.
    const subdirs = entries.filter((e) => e.isDirectory() && !e.name.startsWith('.'));
    const mdFiles = entries.filter((e) => e.isFile() && extname(e.name).toLowerCase() === '.md' && !e.name.startsWith('.'));
    // Markdown files whose stem matches a subdirectory become that directory's content.
    const subdirNames = new Set(subdirs.map((d) => d.name));
    const dirMdFiles = new Set();
    // Process standalone markdown files.
    for (const entry of mdFiles) {
        const filePath = join(currentPath, entry.name);
        const stem = basename(entry.name, extname(entry.name));
        if (subdirNames.has(stem)) {
            dirMdFiles.add(stem);
            continue; // handled when processing the subdirectory
        }
        const result = await parseMarkdownFile(filePath, {
            spaceKey,
            titleFallback: stem,
        });
        if ('skipped' in result) {
            skipped.push(result);
        }
        else {
            nodes.push({
                relativePath: relative(rootPath, filePath),
                title: result.title,
                depth,
                parentRelativePath: depth === 0 ? null : relFromRoot,
                markdownFile: result,
                isDirectory: false,
            });
        }
    }
    // Process subdirectories.
    for (const dir of subdirs) {
        const dirPath = join(currentPath, dir.name);
        const dirRelPath = relative(rootPath, dirPath);
        let dirMarkdownFile;
        if (dirMdFiles.has(dir.name)) {
            // Use the matching .md file as this directory's page content.
            const result = await parseMarkdownFile(join(currentPath, dir.name + '.md'), {
                spaceKey,
                titleFallback: dir.name,
            });
            if ('skipped' in result) {
                // Report the invalid file instead of silently discarding it;
                // the directory still gets a placeholder page below.
                skipped.push(result);
            }
            else {
                dirMarkdownFile = result;
            }
        }
        nodes.push({
            relativePath: dirRelPath,
            title: dirMarkdownFile?.title ?? dir.name,
            depth,
            parentRelativePath: depth === 0 ? null : relFromRoot,
            markdownFile: dirMarkdownFile,
            isDirectory: true,
        });
        // Recurse; children are appended after their parent node.
        const subResult = await scanDirectoryTree(rootPath, spaceKey, dirPath, depth + 1);
        nodes.push(...subResult.nodes);
        skipped.push(...subResult.skipped);
    }
    return { nodes, skipped };
}
98
204
  // ---------------------------------------------------------------------------
99
205
  // Core publish logic
100
206
  // ---------------------------------------------------------------------------
@@ -253,6 +359,40 @@ server.setRequestHandler(ListToolsRequestSchema, async () => ({
253
359
  required: ['filePath'],
254
360
  },
255
361
  },
362
+ {
363
+ name: 'markdown_publish_directory',
364
+ description: 'Recursively scan a directory and publish markdown files as a Confluence page tree, ' +
365
+ 'mirroring the folder structure. Directories become parent pages; markdown files become child pages. ' +
366
+ 'Existing pages (with connie-page-id) are updated and reparented to match the directory structure.',
367
+ inputSchema: {
368
+ type: 'object',
369
+ properties: {
370
+ directoryPath: {
371
+ type: 'string',
372
+ description: 'Absolute path to the root directory',
373
+ },
374
+ spaceKey: {
375
+ type: 'string',
376
+ description: 'Confluence space key. Overrides file-level connie-space-key.',
377
+ },
378
+ rootPageId: {
379
+ type: 'string',
380
+ description: 'Existing Confluence page ID to use as the root parent. If omitted, a new root page is created.',
381
+ },
382
+ skip_preview: {
383
+ type: 'boolean',
384
+ description: 'Set to true to skip preview and publish immediately',
385
+ default: false,
386
+ },
387
+ concurrency: {
388
+ type: 'number',
389
+ description: 'Maximum concurrent publishes per depth level (default: 5)',
390
+ default: 5,
391
+ },
392
+ },
393
+ required: ['directoryPath', 'spaceKey'],
394
+ },
395
+ },
256
396
  ],
257
397
  }));
258
398
  // Tool handlers
@@ -373,6 +513,185 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
373
513
  ],
374
514
  };
375
515
  }
516
// Tool: markdown_publish_directory — mirror a local directory tree into a
// Confluence page tree. Two-phase UX: a dry-run preview (default) and the
// actual publish (skip_preview: true). Publishing proceeds depth level by
// depth level so every parent page exists before its children are created.
if (name === 'markdown_publish_directory') {
    // Validate and default the raw tool arguments; zod throws on bad input.
    const input = z
        .object({
        directoryPath: z.string(),
        spaceKey: z.string(),
        rootPageId: z.string().optional(),
        skip_preview: z.boolean().default(false),
        concurrency: z.number().int().min(1).max(20).default(5),
    })
        .parse(args);
    // Scan directory tree (nodes are ordered parent-before-child).
    const { nodes, skipped } = await scanDirectoryTree(input.directoryPath, input.spaceKey);
    if (nodes.length === 0 && skipped.length === 0) {
        return {
            content: [{ type: 'text', text: `No files found in ${input.directoryPath}` }],
        };
    }
    // Preview mode: render the planned page tree as text without publishing.
    if (!input.skip_preview) {
        const rootTitle = input.rootPageId
            ? `(existing page: ${input.rootPageId})`
            : `"${basename(input.directoryPath)}" (will be created)`;
        const lines = [
            `=== DIRECTORY TREE PREVIEW ===`,
            `Directory: ${input.directoryPath}`,
            `Space: ${input.spaceKey}`,
            `Root page: ${rootTitle}`,
            `Total pages: ${nodes.length + (input.rootPageId ? 0 : 1)}`,
            '',
            '--- Page tree ---',
        ];
        // Build tree visualization: emit nodes grouped by depth, indenting
        // one space per level. Note this lists each level's nodes together
        // rather than strictly interleaving children under their own parent.
        const maxDepth = nodes.reduce((max, n) => Math.max(max, n.depth), 0);
        for (let d = 0; d <= maxDepth; d++) {
            for (const node of nodes.filter((n) => n.depth === d)) {
                const indent = ' '.repeat(d + 1);
                const suffix = node.isDirectory ? '/' : '';
                const pageInfo = node.markdownFile?.pageId
                    ? `update: ${node.markdownFile.pageId}`
                    : 'new page';
                let diagrams = '';
                if (node.markdownFile) {
                    // Convert to ADF just to count diagram blocks for the preview.
                    const adf = parseMarkdownToADF(node.markdownFile.content, CONFLUENCE_BASE_URL);
                    const count = countDiagramBlocks(adf);
                    if (count > 0)
                        diagrams = `, ${count} diagram(s)`;
                }
                // A directory with no matching .md file becomes an empty placeholder page.
                const label = node.isDirectory && !node.markdownFile ? 'placeholder' : pageInfo;
                lines.push(`${indent}${node.title}${suffix} (${label}${diagrams})`);
            }
        }
        if (skipped.length > 0) {
            lines.push('', '--- Skipped files ---');
            for (const s of skipped) {
                lines.push(` ${basename(s.filePath)}: ${s.reason}`);
            }
        }
        lines.push('', `Call again with skip_preview: true to publish.`);
        return { content: [{ type: 'text', text: lines.join('\n') }] };
    }
    // Publish mode — process level by level; the limiter bounds concurrent
    // publishes within each level.
    const limit = pLimit(input.concurrency);
    // Create or resolve root page.
    let rootPageId = input.rootPageId;
    const allResults = [];
    if (!rootPageId) {
        try {
            // Empty-content page named after the directory acts as the root parent.
            // NOTE(review): trailing `true` presumably suppresses preview/confirmation
            // inside publishMarkdown — confirm against its signature.
            const rootResult = await publishMarkdown('', basename(input.directoryPath), input.spaceKey, undefined, undefined, true);
            rootPageId = rootResult.pageId;
            allResults.push({
                relativePath: '.',
                title: basename(input.directoryPath),
                success: true,
                isDirectory: true,
                pageId: rootResult.pageId,
                version: rootResult.version,
                url: rootResult.url,
            });
        }
        catch (err) {
            // Without a root page nothing else can be parented; abort the whole run.
            return {
                isError: true,
                content: [{
                        type: 'text',
                        text: `Error creating root page: ${err instanceof Error ? err.message : String(err)}`,
                    }],
            };
        }
    }
    // Build a map from relativePath to node for parent lookups.
    const nodeMap = new Map();
    for (const node of nodes) {
        nodeMap.set(node.relativePath, node);
    }
    // Group by depth and process level by level: depth d publishes only after
    // depth d-1 finished, so each node's parent already has a page ID.
    const maxDepth = nodes.reduce((max, n) => Math.max(max, n.depth), 0);
    for (let depth = 0; depth <= maxDepth; depth++) {
        const levelNodes = nodes.filter((n) => n.depth === depth);
        const levelResults = await Promise.all(levelNodes.map((node) => limit(async () => {
            // Determine parent page ID: root for top-level nodes, otherwise
            // the page created for the containing directory in a prior level.
            let parentId;
            if (node.parentRelativePath === null) {
                parentId = rootPageId;
            }
            else {
                const parentNode = nodeMap.get(node.parentRelativePath);
                parentId = parentNode?.resolvedPageId;
            }
            if (!parentId) {
                // Parent publish failed earlier; record a failure instead of
                // attaching this page to the wrong place.
                return {
                    relativePath: node.relativePath,
                    title: node.title,
                    success: false,
                    isDirectory: node.isDirectory,
                    error: 'Parent page was not created (parent failed)',
                };
            }
            try {
                // Placeholder directories publish with empty content.
                const content = node.markdownFile?.content ?? '';
                const pageId = node.markdownFile?.pageId;
                const result = await publishMarkdown(content, node.title, input.spaceKey, pageId, parentId, true);
                // Store resolved page ID on the scan node so the next depth
                // level can find its parent via nodeMap.
                node.resolvedPageId = result.pageId;
                return {
                    relativePath: node.relativePath,
                    title: node.title,
                    success: true,
                    isDirectory: node.isDirectory,
                    pageId: result.pageId,
                    version: result.version,
                    diagramCount: result.diagramCount,
                    url: result.url,
                };
            }
            catch (err) {
                // Per-node failure: report it but keep publishing siblings.
                return {
                    relativePath: node.relativePath,
                    title: node.title,
                    success: false,
                    isDirectory: node.isDirectory,
                    error: err instanceof Error ? err.message : String(err),
                };
            }
        })));
        allResults.push(...levelResults);
    }
    // Build summary text grouping results by outcome.
    const succeeded = allResults.filter((r) => r.success);
    const failed = allResults.filter((r) => !r.success);
    const lines = [
        `=== DIRECTORY PUBLISH RESULTS ===`,
        `Directory: ${input.directoryPath}`,
        `Space: ${input.spaceKey}`,
        `Succeeded: ${succeeded.length} | Failed: ${failed.length} | Skipped: ${skipped.length}`,
        '',
    ];
    if (succeeded.length > 0) {
        lines.push('--- Succeeded ---');
        for (const r of succeeded) {
            const type = r.isDirectory ? ' (folder)' : '';
            lines.push(` "${r.title}"${type}`);
            lines.push(` Page ID: ${r.pageId} | Version: ${r.version}${r.diagramCount ? ` | Diagrams: ${r.diagramCount}` : ''} | URL: ${r.url}`);
        }
    }
    if (failed.length > 0) {
        lines.push('', '--- Failed ---');
        for (const r of failed) {
            lines.push(` "${r.title}" (${r.relativePath})`);
            lines.push(` Error: ${r.error}`);
        }
    }
    if (skipped.length > 0) {
        lines.push('', '--- Skipped (invalid frontmatter) ---');
        for (const s of skipped) {
            lines.push(` ${basename(s.filePath)}: ${s.reason}`);
        }
    }
    return { content: [{ type: 'text', text: lines.join('\n') }] };
}
376
695
  return {
377
696
  isError: true,
378
697
  content: [{ type: 'text', text: `Error: Unknown tool "${name}"` }],
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@neverprepared/mcp-markdown-to-confluence",
3
- "version": "1.1.4",
3
+ "version": "1.3.0",
4
4
  "description": "MCP server for converting markdown to Confluence ADF and publishing pages with diagram support via Kroki",
5
5
  "type": "module",
6
6
  "main": "dist/index.js",