@neverprepared/mcp-markdown-to-confluence 1.2.0 → 1.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +209 -59
  2. package/package.json +1 -1
package/dist/index.js CHANGED
@@ -5,7 +5,7 @@ import { z } from 'zod';
5
5
  import { ConfluenceClient } from 'confluence.js';
6
6
  import matter from 'gray-matter';
7
7
  import { readFile, readdir } from 'fs/promises';
8
- import { join, extname, basename } from 'path';
8
+ import { join, extname, basename, relative } from 'path';
9
9
  // Deep imports to avoid loading adaptors/filesystem.js which has broken CJS named exports.
10
10
  // Pin @markdown-confluence/lib version if these paths change.
11
11
  import { parseMarkdownToADF } from '@markdown-confluence/lib/dist/MdToADF.js';
@@ -115,11 +115,11 @@ function pLimit(concurrency) {
115
115
  next();
116
116
  });
117
117
  }
118
- async function parseMarkdownFile(filePath) {
118
+ async function parseMarkdownFile(filePath, overrides) {
119
119
  const raw = await readFile(filePath, 'utf-8');
120
120
  const parsed = matter(raw);
121
- const title = parsed.data['connie-title'] ?? parsed.data['title'] ?? '';
122
- const spaceKey = parsed.data['connie-space-key'] ?? '';
121
+ const title = parsed.data['connie-title'] ?? parsed.data['title'] ?? overrides?.titleFallback ?? '';
122
+ const spaceKey = overrides?.spaceKey ?? parsed.data['connie-space-key'] ?? '';
123
123
  const pageId = parsed.data['connie-page-id']
124
124
  ? String(parsed.data['connie-page-id'])
125
125
  : undefined;
@@ -131,6 +131,76 @@ async function parseMarkdownFile(filePath) {
131
131
  }
132
132
  return { filePath, title, spaceKey, pageId, content: parsed.content };
133
133
  }
134
+ async function scanDirectoryTree(rootPath, spaceKey, currentPath = rootPath, depth = 0) {
135
+ const entries = await readdir(currentPath, { withFileTypes: true });
136
+ const nodes = [];
137
+ const skipped = [];
138
+ const relFromRoot = relative(rootPath, currentPath) || '.';
139
+ const parentRel = depth === 0 ? null : (relative(rootPath, join(currentPath, '..')) || '.');
140
+ // Collect subdirectories and markdown files
141
+ const subdirs = entries.filter((e) => e.isDirectory() && !e.name.startsWith('.'));
142
+ const mdFiles = entries.filter((e) => e.isFile() && extname(e.name).toLowerCase() === '.md' && !e.name.startsWith('.'));
143
+ // Check for markdown files that correspond to subdirectories (e.g., "01 - Strategic.md" + "01 - Strategic/")
144
+ const subdirNames = new Set(subdirs.map((d) => d.name));
145
+ const dirMdFiles = new Set();
146
+ // Process markdown files
147
+ for (const entry of mdFiles) {
148
+ const filePath = join(currentPath, entry.name);
149
+ const stem = basename(entry.name, extname(entry.name));
150
+ // If this .md file has a matching subdirectory, it will be used as the directory's content
151
+ if (subdirNames.has(stem)) {
152
+ dirMdFiles.add(stem);
153
+ continue; // handled when processing the subdirectory
154
+ }
155
+ const result = await parseMarkdownFile(filePath, {
156
+ spaceKey,
157
+ titleFallback: stem,
158
+ });
159
+ if ('skipped' in result) {
160
+ skipped.push(result);
161
+ }
162
+ else {
163
+ nodes.push({
164
+ relativePath: relative(rootPath, filePath),
165
+ title: result.title,
166
+ depth,
167
+ parentRelativePath: depth === 0 ? null : relFromRoot,
168
+ markdownFile: result,
169
+ isDirectory: false,
170
+ });
171
+ }
172
+ }
173
+ // Process subdirectories
174
+ for (const dir of subdirs) {
175
+ const dirPath = join(currentPath, dir.name);
176
+ const dirRelPath = relative(rootPath, dirPath);
177
+ // Check for a matching .md file to use as directory content
178
+ const matchingMdPath = join(currentPath, dir.name + '.md');
179
+ let dirMarkdownFile;
180
+ if (dirMdFiles.has(dir.name)) {
181
+ const result = await parseMarkdownFile(matchingMdPath, {
182
+ spaceKey,
183
+ titleFallback: dir.name,
184
+ });
185
+ if (!('skipped' in result)) {
186
+ dirMarkdownFile = result;
187
+ }
188
+ }
189
+ nodes.push({
190
+ relativePath: dirRelPath,
191
+ title: dirMarkdownFile?.title ?? dir.name,
192
+ depth,
193
+ parentRelativePath: depth === 0 ? null : relFromRoot,
194
+ markdownFile: dirMarkdownFile,
195
+ isDirectory: true,
196
+ });
197
+ // Recurse
198
+ const subResult = await scanDirectoryTree(rootPath, spaceKey, dirPath, depth + 1);
199
+ nodes.push(...subResult.nodes);
200
+ skipped.push(...subResult.skipped);
201
+ }
202
+ return { nodes, skipped };
203
+ }
134
204
  // ---------------------------------------------------------------------------
135
205
  // Core publish logic
136
206
  // ---------------------------------------------------------------------------
@@ -291,14 +361,23 @@ server.setRequestHandler(ListToolsRequestSchema, async () => ({
291
361
  },
292
362
  {
293
363
  name: 'markdown_publish_directory',
294
- description: 'Scan a directory for markdown files with Confluence frontmatter and publish them all concurrently. ' +
295
- 'Files without required frontmatter (connie-title, connie-space-key) are skipped.',
364
+ description: 'Recursively scan a directory and publish markdown files as a Confluence page tree, ' +
365
+ 'mirroring the folder structure. Directories become parent pages; markdown files become child pages. ' +
366
+ 'Existing pages (with connie-page-id) are updated and reparented to match the directory structure.',
296
367
  inputSchema: {
297
368
  type: 'object',
298
369
  properties: {
299
370
  directoryPath: {
300
371
  type: 'string',
301
- description: 'Absolute path to the directory containing markdown files',
372
+ description: 'Absolute path to the root directory',
373
+ },
374
+ spaceKey: {
375
+ type: 'string',
376
+ description: 'Confluence space key. Overrides file-level connie-space-key.',
377
+ },
378
+ rootPageId: {
379
+ type: 'string',
380
+ description: 'Existing Confluence page ID to use as the root parent. If omitted, a new root page is created.',
302
381
  },
303
382
  skip_preview: {
304
383
  type: 'boolean',
@@ -307,11 +386,11 @@ server.setRequestHandler(ListToolsRequestSchema, async () => ({
307
386
  },
308
387
  concurrency: {
309
388
  type: 'number',
310
- description: 'Maximum number of files to publish concurrently (default: 5)',
389
+ description: 'Maximum concurrent publishes per depth level (default: 5)',
311
390
  default: 5,
312
391
  },
313
392
  },
314
- required: ['directoryPath'],
393
+ required: ['directoryPath', 'spaceKey'],
315
394
  },
316
395
  },
317
396
  ],
@@ -438,47 +517,52 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
438
517
  const input = z
439
518
  .object({
440
519
  directoryPath: z.string(),
520
+ spaceKey: z.string(),
521
+ rootPageId: z.string().optional(),
441
522
  skip_preview: z.boolean().default(false),
442
523
  concurrency: z.number().int().min(1).max(20).default(5),
443
524
  })
444
525
  .parse(args);
445
- const entries = await readdir(input.directoryPath);
446
- const mdFiles = entries
447
- .filter((f) => extname(f).toLowerCase() === '.md')
448
- .map((f) => join(input.directoryPath, f));
449
- if (mdFiles.length === 0) {
526
+ // Scan directory tree
527
+ const { nodes, skipped } = await scanDirectoryTree(input.directoryPath, input.spaceKey);
528
+ if (nodes.length === 0 && skipped.length === 0) {
450
529
  return {
451
- content: [{ type: 'text', text: `No .md files found in ${input.directoryPath}` }],
530
+ content: [{ type: 'text', text: `No files found in ${input.directoryPath}` }],
452
531
  };
453
532
  }
454
- const parseResults = await Promise.all(mdFiles.map(parseMarkdownFile));
455
- const valid = [];
456
- const skipped = [];
457
- for (const r of parseResults) {
458
- if ('skipped' in r) {
459
- skipped.push(r);
460
- }
461
- else {
462
- valid.push(r);
463
- }
464
- }
533
+ // Preview mode
465
534
  if (!input.skip_preview) {
535
+ const rootTitle = input.rootPageId
536
+ ? `(existing page: ${input.rootPageId})`
537
+ : `"${basename(input.directoryPath)}" (will be created)`;
466
538
  const lines = [
467
- `=== DIRECTORY PREVIEW ===`,
539
+ `=== DIRECTORY TREE PREVIEW ===`,
468
540
  `Directory: ${input.directoryPath}`,
469
- `Total .md files: ${mdFiles.length}`,
470
- `Files to publish: ${valid.length}`,
471
- `Files skipped: ${skipped.length}`,
541
+ `Space: ${input.spaceKey}`,
542
+ `Root page: ${rootTitle}`,
543
+ `Total pages: ${nodes.length + (input.rootPageId ? 0 : 1)}`,
472
544
  '',
473
- '--- Files to publish ---',
545
+ '--- Page tree ---',
474
546
  ];
475
- for (const f of valid) {
476
- const adf = parseMarkdownToADF(f.content, CONFLUENCE_BASE_URL);
477
- const diagrams = countDiagramBlocks(adf);
478
- lines.push(` ${basename(f.filePath)}`);
479
- lines.push(` Title: ${f.title} | Space: ${f.spaceKey}` +
480
- (f.pageId ? ` | Page ID: ${f.pageId}` : ' (new page)') +
481
- (diagrams > 0 ? ` | Diagrams: ${diagrams}` : ''));
547
+ // Build tree visualization
548
+ const maxDepth = nodes.reduce((max, n) => Math.max(max, n.depth), 0);
549
+ for (let d = 0; d <= maxDepth; d++) {
550
+ for (const node of nodes.filter((n) => n.depth === d)) {
551
+ const indent = ' '.repeat(d + 1);
552
+ const suffix = node.isDirectory ? '/' : '';
553
+ const pageInfo = node.markdownFile?.pageId
554
+ ? `update: ${node.markdownFile.pageId}`
555
+ : 'new page';
556
+ let diagrams = '';
557
+ if (node.markdownFile) {
558
+ const adf = parseMarkdownToADF(node.markdownFile.content, CONFLUENCE_BASE_URL);
559
+ const count = countDiagramBlocks(adf);
560
+ if (count > 0)
561
+ diagrams = `, ${count} diagram(s)`;
562
+ }
563
+ const label = node.isDirectory && !node.markdownFile ? 'placeholder' : pageInfo;
564
+ lines.push(`${indent}${node.title}${suffix} (${label}${diagrams})`);
565
+ }
482
566
  }
483
567
  if (skipped.length > 0) {
484
568
  lines.push('', '--- Skipped files ---');
@@ -486,51 +570,117 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
486
570
  lines.push(` ${basename(s.filePath)}: ${s.reason}`);
487
571
  }
488
572
  }
489
- lines.push('', `Call again with skip_preview: true to publish all ${valid.length} file(s).`);
573
+ lines.push('', `Call again with skip_preview: true to publish.`);
490
574
  return { content: [{ type: 'text', text: lines.join('\n') }] };
491
575
  }
576
+ // Publish mode — process level by level
492
577
  const limit = pLimit(input.concurrency);
493
- const results = await Promise.all(valid.map((f) => limit(async () => {
578
+ // Create or resolve root page
579
+ let rootPageId = input.rootPageId;
580
+ const allResults = [];
581
+ if (!rootPageId) {
494
582
  try {
495
- const result = await publishMarkdown(f.content, f.title, f.spaceKey, f.pageId, undefined, true);
496
- return {
497
- filePath: f.filePath,
498
- title: f.title,
583
+ const rootResult = await publishMarkdown('', basename(input.directoryPath), input.spaceKey, undefined, undefined, true);
584
+ rootPageId = rootResult.pageId;
585
+ allResults.push({
586
+ relativePath: '.',
587
+ title: basename(input.directoryPath),
499
588
  success: true,
500
- pageId: result.pageId,
501
- version: result.version,
502
- diagramCount: result.diagramCount,
503
- url: result.url,
504
- };
589
+ isDirectory: true,
590
+ pageId: rootResult.pageId,
591
+ version: rootResult.version,
592
+ url: rootResult.url,
593
+ });
505
594
  }
506
595
  catch (err) {
507
596
  return {
508
- filePath: f.filePath,
509
- title: f.title,
510
- success: false,
511
- error: err instanceof Error ? err.message : String(err),
597
+ isError: true,
598
+ content: [{
599
+ type: 'text',
600
+ text: `Error creating root page: ${err instanceof Error ? err.message : String(err)}`,
601
+ }],
512
602
  };
513
603
  }
514
- })));
515
- const succeeded = results.filter((r) => r.success);
516
- const failed = results.filter((r) => !r.success);
604
+ }
605
+ // Build a map from relativePath to node for parent lookups
606
+ const nodeMap = new Map();
607
+ for (const node of nodes) {
608
+ nodeMap.set(node.relativePath, node);
609
+ }
610
+ // Group by depth and process level by level
611
+ const maxDepth = nodes.reduce((max, n) => Math.max(max, n.depth), 0);
612
+ for (let depth = 0; depth <= maxDepth; depth++) {
613
+ const levelNodes = nodes.filter((n) => n.depth === depth);
614
+ const levelResults = await Promise.all(levelNodes.map((node) => limit(async () => {
615
+ // Determine parent page ID
616
+ let parentId;
617
+ if (node.parentRelativePath === null) {
618
+ parentId = rootPageId;
619
+ }
620
+ else {
621
+ const parentNode = nodeMap.get(node.parentRelativePath);
622
+ parentId = parentNode?.resolvedPageId;
623
+ }
624
+ if (!parentId) {
625
+ return {
626
+ relativePath: node.relativePath,
627
+ title: node.title,
628
+ success: false,
629
+ isDirectory: node.isDirectory,
630
+ error: 'Parent page was not created (parent failed)',
631
+ };
632
+ }
633
+ try {
634
+ const content = node.markdownFile?.content ?? '';
635
+ const pageId = node.markdownFile?.pageId;
636
+ const result = await publishMarkdown(content, node.title, input.spaceKey, pageId, parentId, true);
637
+ // Store resolved page ID for children
638
+ node.resolvedPageId = result.pageId;
639
+ return {
640
+ relativePath: node.relativePath,
641
+ title: node.title,
642
+ success: true,
643
+ isDirectory: node.isDirectory,
644
+ pageId: result.pageId,
645
+ version: result.version,
646
+ diagramCount: result.diagramCount,
647
+ url: result.url,
648
+ };
649
+ }
650
+ catch (err) {
651
+ return {
652
+ relativePath: node.relativePath,
653
+ title: node.title,
654
+ success: false,
655
+ isDirectory: node.isDirectory,
656
+ error: err instanceof Error ? err.message : String(err),
657
+ };
658
+ }
659
+ })));
660
+ allResults.push(...levelResults);
661
+ }
662
+ // Build summary
663
+ const succeeded = allResults.filter((r) => r.success);
664
+ const failed = allResults.filter((r) => !r.success);
517
665
  const lines = [
518
666
  `=== DIRECTORY PUBLISH RESULTS ===`,
519
667
  `Directory: ${input.directoryPath}`,
668
+ `Space: ${input.spaceKey}`,
520
669
  `Succeeded: ${succeeded.length} | Failed: ${failed.length} | Skipped: ${skipped.length}`,
521
670
  '',
522
671
  ];
523
672
  if (succeeded.length > 0) {
524
673
  lines.push('--- Succeeded ---');
525
674
  for (const r of succeeded) {
526
- lines.push(` "${r.title}"`);
527
- lines.push(` Page ID: ${r.pageId} | Version: ${r.version} | Diagrams: ${r.diagramCount} | URL: ${r.url}`);
675
+ const type = r.isDirectory ? ' (folder)' : '';
676
+ lines.push(` "${r.title}"${type}`);
677
+ lines.push(` Page ID: ${r.pageId} | Version: ${r.version}${r.diagramCount ? ` | Diagrams: ${r.diagramCount}` : ''} | URL: ${r.url}`);
528
678
  }
529
679
  }
530
680
  if (failed.length > 0) {
531
681
  lines.push('', '--- Failed ---');
532
682
  for (const r of failed) {
533
- lines.push(` "${r.title}" (${basename(r.filePath)})`);
683
+ lines.push(` "${r.title}" (${r.relativePath})`);
534
684
  lines.push(` Error: ${r.error}`);
535
685
  }
536
686
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@neverprepared/mcp-markdown-to-confluence",
3
- "version": "1.2.0",
3
+ "version": "1.3.0",
4
4
  "description": "MCP server for converting markdown to Confluence ADF and publishing pages with diagram support via Kroki",
5
5
  "type": "module",
6
6
  "main": "dist/index.js",