@neverprepared/mcp-markdown-to-confluence 1.0.1 → 1.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +52 -21
- package/dist/kroki/KrokiClient.d.ts +5 -0
- package/dist/kroki/KrokiClient.js +19 -0
- package/dist/kroki/KrokiDiagramPlugin.d.ts +23 -0
- package/dist/kroki/KrokiDiagramPlugin.js +87 -0
- package/dist/kroki/KrokiMermaidRenderer.d.ts +7 -0
- package/dist/kroki/KrokiMermaidRenderer.js +13 -0
- package/dist/kroki/index.d.ts +3 -0
- package/dist/kroki/index.js +3 -0
- package/docker/kroki/docker-compose.yml +15 -0
- package/package.json +11 -4
- package/scripts/postinstall.sh +20 -0
- package/.github/workflows/ci.yml +0 -28
- package/.github/workflows/release-please.yml +0 -48
- package/CHANGELOG.md +0 -16
- package/src/index.ts +0 -482
- package/tsconfig.json +0 -16
package/dist/index.js
CHANGED
|
@@ -5,15 +5,43 @@ import { z } from 'zod';
|
|
|
5
5
|
import { ConfluenceClient } from 'confluence.js';
|
|
6
6
|
import matter from 'gray-matter';
|
|
7
7
|
import { readFile } from 'fs/promises';
|
|
8
|
-
import { parseMarkdownToADF, renderADFDoc, executeADFProcessingPipeline, createPublisherFunctions, } from '@markdown-confluence/lib';
|
|
9
|
-
import {
|
|
10
|
-
import { PuppeteerMermaidRenderer } from '@markdown-confluence/mermaid-puppeteer-renderer';
|
|
8
|
+
import { parseMarkdownToADF, renderADFDoc, executeADFProcessingPipeline, createPublisherFunctions, MermaidRendererPlugin, } from '@markdown-confluence/lib';
|
|
9
|
+
import { KrokiClient, KrokiMermaidRenderer, KrokiDiagramPlugin } from './kroki/index.js';
|
|
11
10
|
// ---------------------------------------------------------------------------
|
|
12
11
|
// Environment
|
|
13
12
|
// ---------------------------------------------------------------------------
|
|
14
13
|
const CONFLUENCE_BASE_URL = process.env.CONFLUENCE_BASE_URL ?? '';
|
|
15
14
|
const CONFLUENCE_USERNAME = process.env.CONFLUENCE_USERNAME ?? '';
|
|
16
15
|
const CONFLUENCE_API_TOKEN = process.env.CONFLUENCE_API_TOKEN ?? '';
|
|
16
|
+
const KROKI_URL = process.env.KROKI_URL ?? 'http://localhost:8371';
|
|
17
|
+
// ---------------------------------------------------------------------------
|
|
18
|
+
// Kroki client
|
|
19
|
+
// ---------------------------------------------------------------------------
|
|
20
|
+
const krokiClient = new KrokiClient(KROKI_URL);
|
|
21
|
+
// Diagram types and their preferred output format
|
|
22
|
+
// PNG is preferred when supported; SVG is the universal fallback
|
|
23
|
+
const KROKI_DIAGRAM_CONFIGS = [
|
|
24
|
+
{ type: 'plantuml', format: 'png' },
|
|
25
|
+
{ type: 'graphviz', format: 'png' },
|
|
26
|
+
{ type: 'dot', format: 'png' },
|
|
27
|
+
{ type: 'ditaa', format: 'svg' },
|
|
28
|
+
{ type: 'nomnoml', format: 'svg' },
|
|
29
|
+
{ type: 'd2', format: 'svg' },
|
|
30
|
+
{ type: 'dbml', format: 'svg' },
|
|
31
|
+
{ type: 'erd', format: 'svg' },
|
|
32
|
+
{ type: 'svgbob', format: 'svg' },
|
|
33
|
+
{ type: 'pikchr', format: 'svg' },
|
|
34
|
+
{ type: 'bytefield', format: 'svg' },
|
|
35
|
+
{ type: 'wavedrom', format: 'svg' },
|
|
36
|
+
{ type: 'vega', format: 'svg' },
|
|
37
|
+
{ type: 'vega-lite', format: 'svg' },
|
|
38
|
+
{ type: 'bpmn', format: 'svg' },
|
|
39
|
+
{ type: 'c4plantuml', format: 'png' },
|
|
40
|
+
];
|
|
41
|
+
const SUPPORTED_DIAGRAM_TYPES = [
|
|
42
|
+
'mermaid',
|
|
43
|
+
...KROKI_DIAGRAM_CONFIGS.map((c) => c.type),
|
|
44
|
+
];
|
|
17
45
|
// ---------------------------------------------------------------------------
|
|
18
46
|
// Confluence client
|
|
19
47
|
// ---------------------------------------------------------------------------
|
|
@@ -39,7 +67,7 @@ const stubAdaptor = {
|
|
|
39
67
|
// ---------------------------------------------------------------------------
|
|
40
68
|
// Helpers
|
|
41
69
|
// ---------------------------------------------------------------------------
|
|
42
|
-
function
|
|
70
|
+
function countDiagramBlocks(adf) {
|
|
43
71
|
if (typeof adf !== 'object' || adf === null)
|
|
44
72
|
return 0;
|
|
45
73
|
const node = adf;
|
|
@@ -47,17 +75,17 @@ function countMermaidBlocks(adf) {
|
|
|
47
75
|
if (node['type'] === 'codeBlock' &&
|
|
48
76
|
typeof node['attrs'] === 'object' &&
|
|
49
77
|
node['attrs'] !== null &&
|
|
50
|
-
node['attrs']['language']
|
|
78
|
+
SUPPORTED_DIAGRAM_TYPES.includes(node['attrs']['language'])) {
|
|
51
79
|
count += 1;
|
|
52
80
|
}
|
|
53
81
|
for (const value of Object.values(node)) {
|
|
54
82
|
if (Array.isArray(value)) {
|
|
55
83
|
for (const item of value) {
|
|
56
|
-
count +=
|
|
84
|
+
count += countDiagramBlocks(item);
|
|
57
85
|
}
|
|
58
86
|
}
|
|
59
87
|
else if (typeof value === 'object' && value !== null) {
|
|
60
|
-
count +=
|
|
88
|
+
count += countDiagramBlocks(value);
|
|
61
89
|
}
|
|
62
90
|
}
|
|
63
91
|
return count;
|
|
@@ -68,10 +96,10 @@ function countMermaidBlocks(adf) {
|
|
|
68
96
|
async function publishMarkdown(markdown, title, spaceKey, pageId, parentId, skipPreview = false) {
|
|
69
97
|
// Parse markdown → ADF
|
|
70
98
|
const adf = parseMarkdownToADF(markdown, CONFLUENCE_BASE_URL);
|
|
71
|
-
const
|
|
99
|
+
const diagramCount = countDiagramBlocks(adf);
|
|
72
100
|
if (!skipPreview) {
|
|
73
101
|
const previewText = renderADFDoc(adf);
|
|
74
|
-
return { isPreview: true, previewText,
|
|
102
|
+
return { isPreview: true, previewText, diagramCount };
|
|
75
103
|
}
|
|
76
104
|
// ----- Full publish -----
|
|
77
105
|
let currentVersion = 1;
|
|
@@ -128,8 +156,11 @@ async function publishMarkdown(markdown, title, spaceKey, pageId, parentId, skip
|
|
|
128
156
|
}
|
|
129
157
|
// Build publisher functions
|
|
130
158
|
const publisherFunctions = createPublisherFunctions(confluenceClient, stubAdaptor, resolvedPageId, title, currentAttachments);
|
|
131
|
-
// Run ADF processing pipeline (renders
|
|
132
|
-
const finalAdf = await executeADFProcessingPipeline([
|
|
159
|
+
// Run ADF processing pipeline (renders diagrams via Kroki)
|
|
160
|
+
const finalAdf = await executeADFProcessingPipeline([
|
|
161
|
+
new MermaidRendererPlugin(new KrokiMermaidRenderer(krokiClient)),
|
|
162
|
+
...KROKI_DIAGRAM_CONFIGS.map((c) => new KrokiDiagramPlugin(c.type, krokiClient, c.format)),
|
|
163
|
+
], adf, publisherFunctions);
|
|
133
164
|
// Update the page with the final ADF
|
|
134
165
|
const updateParams = {
|
|
135
166
|
id: resolvedPageId,
|
|
@@ -152,7 +183,7 @@ async function publishMarkdown(markdown, title, spaceKey, pageId, parentId, skip
|
|
|
152
183
|
isPreview: false,
|
|
153
184
|
pageId: resolvedPageId,
|
|
154
185
|
version: currentVersion + 1,
|
|
155
|
-
|
|
186
|
+
diagramCount,
|
|
156
187
|
url,
|
|
157
188
|
};
|
|
158
189
|
}
|
|
@@ -228,11 +259,11 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
|
|
228
259
|
.object({ markdown: z.string(), title: z.string() })
|
|
229
260
|
.parse(args);
|
|
230
261
|
const adf = parseMarkdownToADF(input.markdown, CONFLUENCE_BASE_URL);
|
|
231
|
-
const
|
|
262
|
+
const diagramCount = countDiagramBlocks(adf);
|
|
232
263
|
const previewText = renderADFDoc(adf);
|
|
233
264
|
const lines = [previewText];
|
|
234
|
-
if (
|
|
235
|
-
lines.push(`\n[Note: ${
|
|
265
|
+
if (diagramCount > 0) {
|
|
266
|
+
lines.push(`\n[Note: ${diagramCount} diagram(s) detected — they will be rendered as images when published.]`);
|
|
236
267
|
}
|
|
237
268
|
return { content: [{ type: 'text', text: lines.join('') }] };
|
|
238
269
|
}
|
|
@@ -250,8 +281,8 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
|
|
250
281
|
const result = await publishMarkdown(input.markdown, input.title, input.spaceKey, input.pageId, input.parentId, input.skip_preview);
|
|
251
282
|
if (result.isPreview) {
|
|
252
283
|
const lines = ['=== PREVIEW ===\n', result.previewText ?? ''];
|
|
253
|
-
if ((result.
|
|
254
|
-
lines.push(`\n[Note: ${result.
|
|
284
|
+
if ((result.diagramCount ?? 0) > 0) {
|
|
285
|
+
lines.push(`\n[Note: ${result.diagramCount} diagram(s) detected — they will be rendered when published.]`);
|
|
255
286
|
}
|
|
256
287
|
lines.push('\n\nCall again with skip_preview: true to publish to Confluence.');
|
|
257
288
|
return { content: [{ type: 'text', text: lines.join('') }] };
|
|
@@ -265,7 +296,7 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
|
|
265
296
|
`Title: ${input.title}`,
|
|
266
297
|
`Page ID: ${result.pageId}`,
|
|
267
298
|
`Version: ${result.version}`,
|
|
268
|
-
`
|
|
299
|
+
`Diagrams rendered: ${result.diagramCount}`,
|
|
269
300
|
`URL: ${result.url}`,
|
|
270
301
|
].join('\n'),
|
|
271
302
|
},
|
|
@@ -315,8 +346,8 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
|
|
315
346
|
'=== PREVIEW ===\n',
|
|
316
347
|
result.previewText ?? '',
|
|
317
348
|
];
|
|
318
|
-
if ((result.
|
|
319
|
-
lines.push(`\n[Note: ${result.
|
|
349
|
+
if ((result.diagramCount ?? 0) > 0) {
|
|
350
|
+
lines.push(`\n[Note: ${result.diagramCount} diagram(s) detected — they will be rendered when published.]`);
|
|
320
351
|
}
|
|
321
352
|
lines.push('\n\nCall again with skip_preview: true to publish to Confluence.');
|
|
322
353
|
return { content: [{ type: 'text', text: lines.join('') }] };
|
|
@@ -330,7 +361,7 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
|
|
330
361
|
`File: ${input.filePath}`,
|
|
331
362
|
`Page ID: ${result.pageId}`,
|
|
332
363
|
`Version: ${result.version}`,
|
|
333
|
-
`
|
|
364
|
+
`Diagrams rendered: ${result.diagramCount}`,
|
|
334
365
|
`URL: ${result.url}`,
|
|
335
366
|
].join('\n'),
|
|
336
367
|
},
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
export class KrokiClient {
|
|
2
|
+
baseUrl;
|
|
3
|
+
constructor(baseUrl = 'http://localhost:8371') {
|
|
4
|
+
this.baseUrl = baseUrl.replace(/\/+$/, '');
|
|
5
|
+
}
|
|
6
|
+
async renderDiagram(diagramType, source, outputFormat = 'png') {
|
|
7
|
+
const url = `${this.baseUrl}/${diagramType}/${outputFormat}`;
|
|
8
|
+
const response = await fetch(url, {
|
|
9
|
+
method: 'POST',
|
|
10
|
+
headers: { 'Content-Type': 'text/plain' },
|
|
11
|
+
body: source,
|
|
12
|
+
});
|
|
13
|
+
if (!response.ok) {
|
|
14
|
+
const body = await response.text();
|
|
15
|
+
throw new Error(`Kroki render failed for ${diagramType} (${response.status}): ${body}`);
|
|
16
|
+
}
|
|
17
|
+
return Buffer.from(await response.arrayBuffer());
|
|
18
|
+
}
|
|
19
|
+
}
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
import type { JSONDocNode } from '@atlaskit/editor-json-transformer';
|
|
2
|
+
import type { ADFProcessingPlugin, PublisherFunctions } from '@markdown-confluence/lib';
|
|
3
|
+
import type { KrokiClient } from './KrokiClient.js';
|
|
4
|
+
interface ChartData {
|
|
5
|
+
name: string;
|
|
6
|
+
data: string;
|
|
7
|
+
}
|
|
8
|
+
interface UploadedImage {
|
|
9
|
+
id: string;
|
|
10
|
+
collection: string;
|
|
11
|
+
width: number;
|
|
12
|
+
height: number;
|
|
13
|
+
}
|
|
14
|
+
export declare class KrokiDiagramPlugin implements ADFProcessingPlugin<ChartData[], Record<string, UploadedImage | null>> {
|
|
15
|
+
private diagramType;
|
|
16
|
+
private client;
|
|
17
|
+
private outputFormat;
|
|
18
|
+
constructor(diagramType: string, client: KrokiClient, outputFormat?: string);
|
|
19
|
+
extract(adf: JSONDocNode): ChartData[];
|
|
20
|
+
transform(charts: ChartData[], supportFunctions: PublisherFunctions): Promise<Record<string, UploadedImage | null>>;
|
|
21
|
+
load(adf: JSONDocNode, imageMap: Record<string, UploadedImage | null>): JSONDocNode;
|
|
22
|
+
}
|
|
23
|
+
export {};
|
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
import { filter, traverse } from '@atlaskit/adf-utils/traverse';
|
|
2
|
+
import SparkMD5 from 'spark-md5';
|
|
3
|
+
function getDiagramFileName(diagramType, content, outputFormat) {
|
|
4
|
+
const text = content ?? `${diagramType} placeholder`;
|
|
5
|
+
const hash = SparkMD5.hash(text);
|
|
6
|
+
const ext = outputFormat === 'png' ? 'png' : 'svg';
|
|
7
|
+
const uploadFilename = `RenderedKrokiChart-${diagramType}-${hash}.${ext}`;
|
|
8
|
+
return { uploadFilename, text };
|
|
9
|
+
}
|
|
10
|
+
export class KrokiDiagramPlugin {
|
|
11
|
+
diagramType;
|
|
12
|
+
client;
|
|
13
|
+
outputFormat;
|
|
14
|
+
constructor(diagramType, client, outputFormat = 'svg') {
|
|
15
|
+
this.diagramType = diagramType;
|
|
16
|
+
this.client = client;
|
|
17
|
+
this.outputFormat = outputFormat;
|
|
18
|
+
}
|
|
19
|
+
extract(adf) {
|
|
20
|
+
const nodes = filter(adf, (node) => node.type === 'codeBlock' &&
|
|
21
|
+
(node.attrs || {})?.['language'] === this.diagramType);
|
|
22
|
+
const charts = new Set(nodes.map((node) => {
|
|
23
|
+
const details = getDiagramFileName(this.diagramType, node?.content?.at(0)?.text, this.outputFormat);
|
|
24
|
+
return {
|
|
25
|
+
name: details.uploadFilename,
|
|
26
|
+
data: details.text,
|
|
27
|
+
};
|
|
28
|
+
}));
|
|
29
|
+
return Array.from(charts);
|
|
30
|
+
}
|
|
31
|
+
async transform(charts, supportFunctions) {
|
|
32
|
+
let imageMap = {};
|
|
33
|
+
if (charts.length === 0) {
|
|
34
|
+
return imageMap;
|
|
35
|
+
}
|
|
36
|
+
const rendered = await Promise.all(charts.map(async (chart) => {
|
|
37
|
+
const buffer = await this.client.renderDiagram(this.diagramType, chart.data, this.outputFormat);
|
|
38
|
+
return [chart.name, buffer];
|
|
39
|
+
}));
|
|
40
|
+
for (const [name, buffer] of rendered) {
|
|
41
|
+
const uploaded = await supportFunctions.uploadBuffer(name, buffer);
|
|
42
|
+
imageMap = { ...imageMap, [name]: uploaded };
|
|
43
|
+
}
|
|
44
|
+
return imageMap;
|
|
45
|
+
}
|
|
46
|
+
load(adf, imageMap) {
|
|
47
|
+
let afterAdf = adf;
|
|
48
|
+
afterAdf =
|
|
49
|
+
traverse(afterAdf, {
|
|
50
|
+
codeBlock: (node, _parent) => {
|
|
51
|
+
if (node?.attrs?.['language'] === this.diagramType) {
|
|
52
|
+
const content = node?.content?.at(0)?.text;
|
|
53
|
+
if (!content) {
|
|
54
|
+
return;
|
|
55
|
+
}
|
|
56
|
+
const filename = getDiagramFileName(this.diagramType, content, this.outputFormat);
|
|
57
|
+
if (!imageMap[filename.uploadFilename]) {
|
|
58
|
+
return;
|
|
59
|
+
}
|
|
60
|
+
const mappedImage = imageMap[filename.uploadFilename];
|
|
61
|
+
if (mappedImage) {
|
|
62
|
+
node.type = 'mediaSingle';
|
|
63
|
+
node.attrs['layout'] = 'center';
|
|
64
|
+
if (node.content) {
|
|
65
|
+
node.content = [
|
|
66
|
+
{
|
|
67
|
+
type: 'media',
|
|
68
|
+
attrs: {
|
|
69
|
+
type: 'file',
|
|
70
|
+
collection: mappedImage.collection,
|
|
71
|
+
id: mappedImage.id,
|
|
72
|
+
width: mappedImage.width,
|
|
73
|
+
height: mappedImage.height,
|
|
74
|
+
},
|
|
75
|
+
},
|
|
76
|
+
];
|
|
77
|
+
}
|
|
78
|
+
delete node.attrs['language'];
|
|
79
|
+
return node;
|
|
80
|
+
}
|
|
81
|
+
}
|
|
82
|
+
return;
|
|
83
|
+
},
|
|
84
|
+
}) || afterAdf;
|
|
85
|
+
return afterAdf;
|
|
86
|
+
}
|
|
87
|
+
}
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
import type { ChartData, MermaidRenderer } from '@markdown-confluence/lib';
|
|
2
|
+
import { KrokiClient } from './KrokiClient.js';
|
|
3
|
+
export declare class KrokiMermaidRenderer implements MermaidRenderer {
|
|
4
|
+
private client;
|
|
5
|
+
constructor(client: KrokiClient);
|
|
6
|
+
captureMermaidCharts(charts: ChartData[]): Promise<Map<string, Buffer>>;
|
|
7
|
+
}
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
export class KrokiMermaidRenderer {
|
|
2
|
+
client;
|
|
3
|
+
constructor(client) {
|
|
4
|
+
this.client = client;
|
|
5
|
+
}
|
|
6
|
+
async captureMermaidCharts(charts) {
|
|
7
|
+
const results = await Promise.all(charts.map(async (chart) => {
|
|
8
|
+
const buffer = await this.client.renderDiagram('mermaid', chart.data);
|
|
9
|
+
return [chart.name, buffer];
|
|
10
|
+
}));
|
|
11
|
+
return new Map(results);
|
|
12
|
+
}
|
|
13
|
+
}
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
services:
|
|
2
|
+
kroki:
|
|
3
|
+
image: yuzutech/kroki
|
|
4
|
+
ports:
|
|
5
|
+
- "8371:8000"
|
|
6
|
+
restart: unless-stopped
|
|
7
|
+
environment:
|
|
8
|
+
- KROKI_MERMAID_HOST=mermaid
|
|
9
|
+
depends_on:
|
|
10
|
+
- mermaid
|
|
11
|
+
mermaid:
|
|
12
|
+
image: yuzutech/kroki-mermaid
|
|
13
|
+
restart: unless-stopped
|
|
14
|
+
expose:
|
|
15
|
+
- "8002"
|
package/package.json
CHANGED
|
@@ -1,20 +1,25 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@neverprepared/mcp-markdown-to-confluence",
|
|
3
|
-
"version": "1.
|
|
4
|
-
"description": "MCP server for converting markdown to Confluence ADF and publishing pages with
|
|
3
|
+
"version": "1.1.1",
|
|
4
|
+
"description": "MCP server for converting markdown to Confluence ADF and publishing pages with diagram support via Kroki",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"main": "dist/index.js",
|
|
7
7
|
"bin": {
|
|
8
8
|
"mcp-markdown-to-confluence": "dist/index.js"
|
|
9
9
|
},
|
|
10
|
+
"files": [
|
|
11
|
+
"dist",
|
|
12
|
+
"docker",
|
|
13
|
+
"scripts"
|
|
14
|
+
],
|
|
10
15
|
"scripts": {
|
|
11
16
|
"build": "tsc",
|
|
12
17
|
"start": "node dist/index.js",
|
|
13
|
-
"dev": "node --watch dist/index.js"
|
|
18
|
+
"dev": "node --watch dist/index.js",
|
|
19
|
+
"postinstall": "sh scripts/postinstall.sh"
|
|
14
20
|
},
|
|
15
21
|
"dependencies": {
|
|
16
22
|
"@markdown-confluence/lib": "^5.5.2",
|
|
17
|
-
"@markdown-confluence/mermaid-puppeteer-renderer": "^5.5.2",
|
|
18
23
|
"@modelcontextprotocol/sdk": "^1.10.1",
|
|
19
24
|
"confluence.js": "^1.6.3",
|
|
20
25
|
"gray-matter": "^4.0.3",
|
|
@@ -41,6 +46,8 @@
|
|
|
41
46
|
"markdown",
|
|
42
47
|
"atlassian",
|
|
43
48
|
"mermaid",
|
|
49
|
+
"kroki",
|
|
50
|
+
"diagrams",
|
|
44
51
|
"model-context-protocol"
|
|
45
52
|
],
|
|
46
53
|
"license": "MIT"
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
#!/bin/sh
|
|
2
|
+
# Copy docker-compose files to ~/.config/neverprepared-mcp-servers/
|
|
3
|
+
# Only copies if the target file does not already exist (preserves user modifications)
|
|
4
|
+
|
|
5
|
+
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
|
|
6
|
+
CONFIG_DIR="${HOME}/.config/neverprepared-mcp-servers"
|
|
7
|
+
|
|
8
|
+
# Kroki
|
|
9
|
+
KROKI_DIR="${CONFIG_DIR}/kroki"
|
|
10
|
+
KROKI_SOURCE="${SCRIPT_DIR}/../docker/kroki/docker-compose.yml"
|
|
11
|
+
KROKI_TARGET="${KROKI_DIR}/docker-compose.yml"
|
|
12
|
+
|
|
13
|
+
if [ -f "$KROKI_SOURCE" ]; then
|
|
14
|
+
mkdir -p "$KROKI_DIR"
|
|
15
|
+
if [ ! -f "$KROKI_TARGET" ]; then
|
|
16
|
+
cp "$KROKI_SOURCE" "$KROKI_TARGET"
|
|
17
|
+
echo "mcp-markdown-to-confluence: Installed Kroki docker-compose.yml to ${KROKI_TARGET}"
|
|
18
|
+
echo "mcp-markdown-to-confluence: Run 'docker compose -f ${KROKI_TARGET} up -d' to start Kroki"
|
|
19
|
+
fi
|
|
20
|
+
fi
|
package/.github/workflows/ci.yml
DELETED
|
@@ -1,28 +0,0 @@
|
|
|
1
|
-
name: CI
|
|
2
|
-
|
|
3
|
-
on:
|
|
4
|
-
push:
|
|
5
|
-
branches: [main]
|
|
6
|
-
pull_request:
|
|
7
|
-
branches: [main]
|
|
8
|
-
|
|
9
|
-
jobs:
|
|
10
|
-
ci:
|
|
11
|
-
name: Build
|
|
12
|
-
runs-on: ubuntu-latest
|
|
13
|
-
|
|
14
|
-
steps:
|
|
15
|
-
- uses: actions/checkout@v4
|
|
16
|
-
|
|
17
|
-
- uses: actions/setup-node@v4
|
|
18
|
-
with:
|
|
19
|
-
node-version: 20
|
|
20
|
-
cache: npm
|
|
21
|
-
|
|
22
|
-
- name: Install dependencies
|
|
23
|
-
run: npm ci
|
|
24
|
-
env:
|
|
25
|
-
PUPPETEER_SKIP_DOWNLOAD: "true"
|
|
26
|
-
|
|
27
|
-
- name: Build
|
|
28
|
-
run: npm run build
|
|
@@ -1,48 +0,0 @@
|
|
|
1
|
-
name: Release Please
|
|
2
|
-
|
|
3
|
-
on:
|
|
4
|
-
push:
|
|
5
|
-
branches: [main]
|
|
6
|
-
|
|
7
|
-
permissions:
|
|
8
|
-
contents: write
|
|
9
|
-
pull-requests: write
|
|
10
|
-
packages: write
|
|
11
|
-
|
|
12
|
-
jobs:
|
|
13
|
-
release-please:
|
|
14
|
-
runs-on: ubuntu-latest
|
|
15
|
-
outputs:
|
|
16
|
-
release_created: ${{ steps.release.outputs.release_created }}
|
|
17
|
-
steps:
|
|
18
|
-
- uses: googleapis/release-please-action@v4
|
|
19
|
-
id: release
|
|
20
|
-
with:
|
|
21
|
-
release-type: node
|
|
22
|
-
|
|
23
|
-
publish:
|
|
24
|
-
runs-on: ubuntu-latest
|
|
25
|
-
needs: release-please
|
|
26
|
-
if: needs.release-please.outputs.release_created == 'true'
|
|
27
|
-
|
|
28
|
-
steps:
|
|
29
|
-
- uses: actions/checkout@v4
|
|
30
|
-
|
|
31
|
-
- uses: actions/setup-node@v4
|
|
32
|
-
with:
|
|
33
|
-
node-version: 20
|
|
34
|
-
cache: npm
|
|
35
|
-
registry-url: https://registry.npmjs.org/
|
|
36
|
-
|
|
37
|
-
- name: Install dependencies
|
|
38
|
-
run: npm ci
|
|
39
|
-
env:
|
|
40
|
-
PUPPETEER_SKIP_DOWNLOAD: "true"
|
|
41
|
-
|
|
42
|
-
- name: Build
|
|
43
|
-
run: npm run build
|
|
44
|
-
|
|
45
|
-
- name: Publish to npm
|
|
46
|
-
run: npm publish --access public
|
|
47
|
-
env:
|
|
48
|
-
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
|
package/CHANGELOG.md
DELETED
|
@@ -1,16 +0,0 @@
|
|
|
1
|
-
# Changelog
|
|
2
|
-
|
|
3
|
-
## [1.0.1](https://github.com/neverprepared/mcp-markdown-to-confluence/compare/v1.0.0...v1.0.1) (2026-04-01)
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
### Bug Fixes
|
|
7
|
-
|
|
8
|
-
* resolve packages from npm registry and fix API compatibility ([7b36bd7](https://github.com/neverprepared/mcp-markdown-to-confluence/commit/7b36bd78f9f0ad07c2e0e60658584b443215668e))
|
|
9
|
-
* skip Puppeteer Chromium download in CI ([31da53d](https://github.com/neverprepared/mcp-markdown-to-confluence/commit/31da53d48ea88b5444df5715bf414a4d4d70cca5))
|
|
10
|
-
|
|
11
|
-
## 1.0.0 (2026-04-01)
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
### Bug Fixes
|
|
15
|
-
|
|
16
|
-
* resolve packages from npm registry and fix API compatibility ([7b36bd7](https://github.com/neverprepared/mcp-markdown-to-confluence/commit/7b36bd78f9f0ad07c2e0e60658584b443215668e))
|
package/src/index.ts
DELETED
|
@@ -1,482 +0,0 @@
|
|
|
1
|
-
import { Server } from '@modelcontextprotocol/sdk/server/index.js';
|
|
2
|
-
import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
|
|
3
|
-
import {
|
|
4
|
-
CallToolRequestSchema,
|
|
5
|
-
ListToolsRequestSchema,
|
|
6
|
-
} from '@modelcontextprotocol/sdk/types.js';
|
|
7
|
-
import { z } from 'zod';
|
|
8
|
-
import { ConfluenceClient } from 'confluence.js';
|
|
9
|
-
import matter from 'gray-matter';
|
|
10
|
-
import { readFile } from 'fs/promises';
|
|
11
|
-
|
|
12
|
-
import {
|
|
13
|
-
parseMarkdownToADF,
|
|
14
|
-
renderADFDoc,
|
|
15
|
-
executeADFProcessingPipeline,
|
|
16
|
-
createPublisherFunctions,
|
|
17
|
-
} from '@markdown-confluence/lib';
|
|
18
|
-
import { MermaidRendererPlugin } from '@markdown-confluence/lib';
|
|
19
|
-
import { PuppeteerMermaidRenderer } from '@markdown-confluence/mermaid-puppeteer-renderer';
|
|
20
|
-
|
|
21
|
-
// ---------------------------------------------------------------------------
|
|
22
|
-
// Environment
|
|
23
|
-
// ---------------------------------------------------------------------------
|
|
24
|
-
|
|
25
|
-
const CONFLUENCE_BASE_URL = process.env.CONFLUENCE_BASE_URL ?? '';
|
|
26
|
-
const CONFLUENCE_USERNAME = process.env.CONFLUENCE_USERNAME ?? '';
|
|
27
|
-
const CONFLUENCE_API_TOKEN = process.env.CONFLUENCE_API_TOKEN ?? '';
|
|
28
|
-
|
|
29
|
-
// ---------------------------------------------------------------------------
|
|
30
|
-
// Confluence client
|
|
31
|
-
// ---------------------------------------------------------------------------
|
|
32
|
-
|
|
33
|
-
const confluenceClient = new ConfluenceClient({
|
|
34
|
-
host: CONFLUENCE_BASE_URL,
|
|
35
|
-
authentication: {
|
|
36
|
-
basic: {
|
|
37
|
-
email: CONFLUENCE_USERNAME,
|
|
38
|
-
apiToken: CONFLUENCE_API_TOKEN,
|
|
39
|
-
},
|
|
40
|
-
},
|
|
41
|
-
});
|
|
42
|
-
|
|
43
|
-
// ---------------------------------------------------------------------------
|
|
44
|
-
// Stub LoaderAdaptor — only uploadBuffer is called by the mermaid plugin
|
|
45
|
-
// ---------------------------------------------------------------------------
|
|
46
|
-
|
|
47
|
-
const stubAdaptor = {
|
|
48
|
-
readFile: async (_filePath: string) => undefined,
|
|
49
|
-
readBinary: async (_filePath: string) => false as const,
|
|
50
|
-
fileExists: async (_filePath: string) => false,
|
|
51
|
-
listFiles: async () => [],
|
|
52
|
-
uploadBuffer: async (
|
|
53
|
-
_buffer: Buffer,
|
|
54
|
-
_fileName: string,
|
|
55
|
-
_mimeType: string
|
|
56
|
-
) => undefined,
|
|
57
|
-
} as unknown as import('@markdown-confluence/lib').LoaderAdaptor;
|
|
58
|
-
|
|
59
|
-
// ---------------------------------------------------------------------------
|
|
60
|
-
// Helpers
|
|
61
|
-
// ---------------------------------------------------------------------------
|
|
62
|
-
|
|
63
|
-
function countMermaidBlocks(adf: unknown): number {
|
|
64
|
-
if (typeof adf !== 'object' || adf === null) return 0;
|
|
65
|
-
|
|
66
|
-
const node = adf as Record<string, unknown>;
|
|
67
|
-
let count = 0;
|
|
68
|
-
|
|
69
|
-
if (
|
|
70
|
-
node['type'] === 'codeBlock' &&
|
|
71
|
-
typeof node['attrs'] === 'object' &&
|
|
72
|
-
node['attrs'] !== null &&
|
|
73
|
-
(node['attrs'] as Record<string, unknown>)['language'] === 'mermaid'
|
|
74
|
-
) {
|
|
75
|
-
count += 1;
|
|
76
|
-
}
|
|
77
|
-
|
|
78
|
-
for (const value of Object.values(node)) {
|
|
79
|
-
if (Array.isArray(value)) {
|
|
80
|
-
for (const item of value) {
|
|
81
|
-
count += countMermaidBlocks(item);
|
|
82
|
-
}
|
|
83
|
-
} else if (typeof value === 'object' && value !== null) {
|
|
84
|
-
count += countMermaidBlocks(value);
|
|
85
|
-
}
|
|
86
|
-
}
|
|
87
|
-
|
|
88
|
-
return count;
|
|
89
|
-
}
|
|
90
|
-
|
|
91
|
-
// ---------------------------------------------------------------------------
|
|
92
|
-
// Core publish logic
|
|
93
|
-
// ---------------------------------------------------------------------------
|
|
94
|
-
|
|
95
|
-
async function publishMarkdown(
|
|
96
|
-
markdown: string,
|
|
97
|
-
title: string,
|
|
98
|
-
spaceKey: string,
|
|
99
|
-
pageId?: string,
|
|
100
|
-
parentId?: string,
|
|
101
|
-
skipPreview = false
|
|
102
|
-
): Promise<{ isPreview: boolean; previewText?: string; mermaidCount?: number; pageId?: string; version?: number; url?: string }> {
|
|
103
|
-
// Parse markdown → ADF
|
|
104
|
-
const adf = parseMarkdownToADF(
|
|
105
|
-
markdown,
|
|
106
|
-
CONFLUENCE_BASE_URL
|
|
107
|
-
) as unknown as any;
|
|
108
|
-
|
|
109
|
-
const mermaidCount = countMermaidBlocks(adf);
|
|
110
|
-
|
|
111
|
-
if (!skipPreview) {
|
|
112
|
-
const previewText = renderADFDoc(adf as unknown as any);
|
|
113
|
-
return { isPreview: true, previewText, mermaidCount };
|
|
114
|
-
}
|
|
115
|
-
|
|
116
|
-
// ----- Full publish -----
|
|
117
|
-
|
|
118
|
-
let currentVersion = 1;
|
|
119
|
-
let resolvedPageId = pageId;
|
|
120
|
-
|
|
121
|
-
if (resolvedPageId) {
|
|
122
|
-
// Fetch existing page to get current version
|
|
123
|
-
const existingPage = await confluenceClient.content.getContentById({
|
|
124
|
-
id: resolvedPageId,
|
|
125
|
-
expand: ['version'],
|
|
126
|
-
});
|
|
127
|
-
currentVersion = existingPage.version!.number!;
|
|
128
|
-
} else {
|
|
129
|
-
// Create a placeholder page to obtain a pageId
|
|
130
|
-
const blankAdf = {
|
|
131
|
-
version: 1,
|
|
132
|
-
type: 'doc',
|
|
133
|
-
content: [],
|
|
134
|
-
};
|
|
135
|
-
|
|
136
|
-
const createParams: Parameters<typeof confluenceClient.content.createContent>[0] = {
|
|
137
|
-
space: { key: spaceKey },
|
|
138
|
-
title,
|
|
139
|
-
type: 'page',
|
|
140
|
-
body: {
|
|
141
|
-
atlas_doc_format: {
|
|
142
|
-
value: JSON.stringify(blankAdf),
|
|
143
|
-
representation: 'atlas_doc_format',
|
|
144
|
-
},
|
|
145
|
-
},
|
|
146
|
-
};
|
|
147
|
-
|
|
148
|
-
if (parentId) {
|
|
149
|
-
createParams.ancestors = [{ id: parentId }];
|
|
150
|
-
}
|
|
151
|
-
|
|
152
|
-
const created = await confluenceClient.content.createContent(createParams);
|
|
153
|
-
resolvedPageId = created.id!;
|
|
154
|
-
currentVersion = created.version!.number!;
|
|
155
|
-
}
|
|
156
|
-
|
|
157
|
-
// Fetch current attachments to build the map
|
|
158
|
-
const attachmentsResult = await confluenceClient.contentAttachments.getAttachments({
|
|
159
|
-
id: resolvedPageId,
|
|
160
|
-
});
|
|
161
|
-
|
|
162
|
-
type CurrentAttachments = Record<
|
|
163
|
-
string,
|
|
164
|
-
{ filehash: string; attachmentId: string; collectionName: string }
|
|
165
|
-
>;
|
|
166
|
-
|
|
167
|
-
const currentAttachments: CurrentAttachments = {};
|
|
168
|
-
for (const att of attachmentsResult.results ?? []) {
|
|
169
|
-
const attTitle = att.title ?? '';
|
|
170
|
-
const fileId = (att.extensions as any)?.fileId ?? '';
|
|
171
|
-
const collectionName = (att.extensions as any)?.collectionName ?? '';
|
|
172
|
-
if (attTitle) {
|
|
173
|
-
currentAttachments[attTitle] = {
|
|
174
|
-
filehash: (att.metadata as any)?.comment ?? '',
|
|
175
|
-
attachmentId: fileId,
|
|
176
|
-
collectionName,
|
|
177
|
-
};
|
|
178
|
-
}
|
|
179
|
-
}
|
|
180
|
-
|
|
181
|
-
// Build publisher functions
|
|
182
|
-
const publisherFunctions = createPublisherFunctions(
|
|
183
|
-
confluenceClient as unknown as any,
|
|
184
|
-
stubAdaptor,
|
|
185
|
-
resolvedPageId,
|
|
186
|
-
title,
|
|
187
|
-
currentAttachments
|
|
188
|
-
);
|
|
189
|
-
|
|
190
|
-
// Run ADF processing pipeline (renders mermaid diagrams)
|
|
191
|
-
const finalAdf = await executeADFProcessingPipeline(
|
|
192
|
-
[new MermaidRendererPlugin(new PuppeteerMermaidRenderer())],
|
|
193
|
-
adf as unknown as any,
|
|
194
|
-
publisherFunctions
|
|
195
|
-
);
|
|
196
|
-
|
|
197
|
-
// Update the page with the final ADF
|
|
198
|
-
const updateParams: Parameters<typeof confluenceClient.content.updateContent>[0] = {
|
|
199
|
-
id: resolvedPageId,
|
|
200
|
-
title,
|
|
201
|
-
type: 'page',
|
|
202
|
-
version: { number: currentVersion + 1 },
|
|
203
|
-
body: {
|
|
204
|
-
atlas_doc_format: {
|
|
205
|
-
value: JSON.stringify(finalAdf),
|
|
206
|
-
representation: 'atlas_doc_format',
|
|
207
|
-
},
|
|
208
|
-
},
|
|
209
|
-
};
|
|
210
|
-
|
|
211
|
-
if (parentId) {
|
|
212
|
-
updateParams.ancestors = [{ id: parentId }];
|
|
213
|
-
}
|
|
214
|
-
|
|
215
|
-
await confluenceClient.content.updateContent(updateParams);
|
|
216
|
-
|
|
217
|
-
const url = `${CONFLUENCE_BASE_URL}/wiki/spaces/${spaceKey}/pages/${resolvedPageId}`;
|
|
218
|
-
|
|
219
|
-
return {
|
|
220
|
-
isPreview: false,
|
|
221
|
-
pageId: resolvedPageId,
|
|
222
|
-
version: currentVersion + 1,
|
|
223
|
-
mermaidCount,
|
|
224
|
-
url,
|
|
225
|
-
};
|
|
226
|
-
}
|
|
227
|
-
|
|
228
|
-
// ---------------------------------------------------------------------------
|
|
229
|
-
// MCP Server
|
|
230
|
-
// ---------------------------------------------------------------------------
|
|
231
|
-
|
|
232
|
-
const server = new Server(
|
|
233
|
-
{ name: 'mcp-markdown-to-confluence', version: '1.0.0' },
|
|
234
|
-
{ capabilities: { tools: {} } }
|
|
235
|
-
);
|
|
236
|
-
|
|
237
|
-
// Tool definitions
|
|
238
|
-
server.setRequestHandler(ListToolsRequestSchema, async () => ({
|
|
239
|
-
tools: [
|
|
240
|
-
{
|
|
241
|
-
name: 'markdown_preview',
|
|
242
|
-
description:
|
|
243
|
-
'Convert markdown to Confluence ADF and return a text preview. Does not publish to Confluence.',
|
|
244
|
-
inputSchema: {
|
|
245
|
-
type: 'object',
|
|
246
|
-
properties: {
|
|
247
|
-
markdown: { type: 'string', description: 'Markdown content to preview' },
|
|
248
|
-
title: { type: 'string', description: 'Page title (used during ADF conversion)' },
|
|
249
|
-
},
|
|
250
|
-
required: ['markdown', 'title'],
|
|
251
|
-
},
|
|
252
|
-
},
|
|
253
|
-
{
|
|
254
|
-
name: 'markdown_publish',
|
|
255
|
-
description:
|
|
256
|
-
'Publish markdown to a Confluence page. By default runs a preview first; set skip_preview: true to publish immediately.',
|
|
257
|
-
inputSchema: {
|
|
258
|
-
type: 'object',
|
|
259
|
-
properties: {
|
|
260
|
-
markdown: { type: 'string', description: 'Markdown content to publish' },
|
|
261
|
-
title: { type: 'string', description: 'Confluence page title' },
|
|
262
|
-
spaceKey: { type: 'string', description: 'Confluence space key (e.g. "ENG")' },
|
|
263
|
-
pageId: {
|
|
264
|
-
type: 'string',
|
|
265
|
-
description: 'Existing page ID to update (omit to create a new page)',
|
|
266
|
-
},
|
|
267
|
-
parentId: {
|
|
268
|
-
type: 'string',
|
|
269
|
-
description: 'Parent page ID for new page creation',
|
|
270
|
-
},
|
|
271
|
-
skip_preview: {
|
|
272
|
-
type: 'boolean',
|
|
273
|
-
description: 'Set to true to skip preview and publish immediately',
|
|
274
|
-
default: false,
|
|
275
|
-
},
|
|
276
|
-
},
|
|
277
|
-
required: ['markdown', 'title', 'spaceKey'],
|
|
278
|
-
},
|
|
279
|
-
},
|
|
280
|
-
{
|
|
281
|
-
name: 'markdown_publish_file',
|
|
282
|
-
description:
|
|
283
|
-
'Read a markdown file from disk and publish it to Confluence. Frontmatter keys: connie-title / title, connie-space-key, connie-page-id.',
|
|
284
|
-
inputSchema: {
|
|
285
|
-
type: 'object',
|
|
286
|
-
properties: {
|
|
287
|
-
filePath: { type: 'string', description: 'Absolute path to the markdown file' },
|
|
288
|
-
skip_preview: {
|
|
289
|
-
type: 'boolean',
|
|
290
|
-
description: 'Set to true to skip preview and publish immediately',
|
|
291
|
-
default: false,
|
|
292
|
-
},
|
|
293
|
-
},
|
|
294
|
-
required: ['filePath'],
|
|
295
|
-
},
|
|
296
|
-
},
|
|
297
|
-
],
|
|
298
|
-
}));
|
|
299
|
-
|
|
300
|
-
// Tool handlers
// Dispatches each CallTool request to one of the three tools by name.
// All failures (zod validation, file I/O, Confluence API errors) are caught
// at the bottom and surfaced as an isError text result rather than crashing
// the MCP server process.
server.setRequestHandler(CallToolRequestSchema, async (request) => {
  const { name, arguments: args } = request.params;

  try {
    if (name === 'markdown_preview') {
      // Validate the raw MCP arguments before use.
      const input = z
        .object({ markdown: z.string(), title: z.string() })
        .parse(args);

      // Convert markdown to ADF locally — no Confluence call is made here.
      const adf = parseMarkdownToADF(
        input.markdown,
        CONFLUENCE_BASE_URL
      ) as unknown as any;

      const mermaidCount = countMermaidBlocks(adf);
      const previewText = renderADFDoc(adf as unknown as any);

      // Append a note when mermaid blocks are present, since the preview
      // shows them as raw code while publishing renders them as images.
      const lines: string[] = [previewText];
      if (mermaidCount > 0) {
        lines.push(
          `\n[Note: ${mermaidCount} mermaid diagram(s) detected — they will be rendered as images when published.]`
        );
      }

      return { content: [{ type: 'text', text: lines.join('') }] };
    }

    if (name === 'markdown_publish') {
      const input = z
        .object({
          markdown: z.string(),
          title: z.string(),
          spaceKey: z.string(),
          pageId: z.string().optional(),
          parentId: z.string().optional(),
          // Default of false means a preview pass happens unless the caller
          // explicitly opts out.
          skip_preview: z.boolean().default(false),
        })
        .parse(args);

      const result = await publishMarkdown(
        input.markdown,
        input.title,
        input.spaceKey,
        input.pageId,
        input.parentId,
        input.skip_preview
      );

      // Preview branch: nothing was written to Confluence; tell the caller
      // how to actually publish.
      if (result.isPreview) {
        const lines: string[] = ['=== PREVIEW ===\n', result.previewText ?? ''];
        if ((result.mermaidCount ?? 0) > 0) {
          lines.push(
            `\n[Note: ${result.mermaidCount} mermaid diagram(s) detected — they will be rendered when published.]`
          );
        }
        lines.push(
          '\n\nCall again with skip_preview: true to publish to Confluence.'
        );
        return { content: [{ type: 'text', text: lines.join('') }] };
      }

      // Publish succeeded — report page id, new version, and URL.
      return {
        content: [
          {
            type: 'text',
            text: [
              `Successfully published to Confluence.`,
              `Title: ${input.title}`,
              `Page ID: ${result.pageId}`,
              `Version: ${result.version}`,
              `Mermaid diagrams rendered: ${result.mermaidCount}`,
              `URL: ${result.url}`,
            ].join('\n'),
          },
        ],
      };
    }

    if (name === 'markdown_publish_file') {
      const input = z
        .object({
          filePath: z.string(),
          skip_preview: z.boolean().default(false),
        })
        .parse(args);

      // Read the file and split frontmatter (gray-matter) from the body.
      const raw = await readFile(input.filePath, 'utf-8');
      const parsed = matter(raw);

      // Frontmatter keys: 'connie-title' wins over plain 'title'.
      const title: string =
        parsed.data['connie-title'] ?? parsed.data['title'] ?? '';
      const spaceKey: string = parsed.data['connie-space-key'] ?? '';
      // Coerce page id to string — YAML frontmatter may parse it as a number.
      const pageId: string | undefined = parsed.data['connie-page-id']
        ? String(parsed.data['connie-page-id'])
        : undefined;

      if (!title) {
        return {
          isError: true,
          content: [
            {
              type: 'text',
              text: 'Error: Missing page title. Set "connie-title" or "title" in frontmatter.',
            },
          ],
        };
      }

      if (!spaceKey) {
        return {
          isError: true,
          content: [
            {
              type: 'text',
              text: 'Error: Missing space key. Set "connie-space-key" in frontmatter.',
            },
          ],
        };
      }

      // Note: parentId is not supported via frontmatter, hence undefined.
      const result = await publishMarkdown(
        parsed.content,
        title,
        spaceKey,
        pageId,
        undefined,
        input.skip_preview
      );

      if (result.isPreview) {
        const lines: string[] = [
          `File: ${input.filePath}\n`,
          '=== PREVIEW ===\n',
          result.previewText ?? '',
        ];
        if ((result.mermaidCount ?? 0) > 0) {
          lines.push(
            `\n[Note: ${result.mermaidCount} mermaid diagram(s) detected — they will be rendered when published.]`
          );
        }
        lines.push(
          '\n\nCall again with skip_preview: true to publish to Confluence.'
        );
        return { content: [{ type: 'text', text: lines.join('') }] };
      }

      return {
        content: [
          {
            type: 'text',
            text: [
              `Successfully published "${title}" to Confluence.`,
              `File: ${input.filePath}`,
              `Page ID: ${result.pageId}`,
              `Version: ${result.version}`,
              `Mermaid diagrams rendered: ${result.mermaidCount}`,
              `URL: ${result.url}`,
            ].join('\n'),
          },
        ],
      };
    }

    // Fallthrough: the caller asked for a tool this server does not expose.
    return {
      isError: true,
      content: [{ type: 'text', text: `Error: Unknown tool "${name}"` }],
    };
  } catch (err: unknown) {
    // Narrow unknown before reading .message, per strict catch semantics.
    const message = err instanceof Error ? err.message : String(err);
    return {
      isError: true,
      content: [{ type: 'text', text: `Error: ${message}` }],
    };
  }
});
|
476
|
-
|
|
477
|
-
// ---------------------------------------------------------------------------
|
|
478
|
-
// Start
|
|
479
|
-
// ---------------------------------------------------------------------------
|
|
480
|
-
|
|
481
|
-
const transport = new StdioServerTransport();
|
|
482
|
-
await server.connect(transport);
|
package/tsconfig.json
DELETED
|
@@ -1,16 +0,0 @@
|
|
|
1
|
-
{
|
|
2
|
-
"compilerOptions": {
|
|
3
|
-
"target": "ES2022",
|
|
4
|
-
"module": "ESNext",
|
|
5
|
-
"moduleResolution": "node",
|
|
6
|
-
"allowSyntheticDefaultImports": true,
|
|
7
|
-
"esModuleInterop": true,
|
|
8
|
-
"strict": true,
|
|
9
|
-
"skipLibCheck": true,
|
|
10
|
-
"declaration": true,
|
|
11
|
-
"outDir": "./dist",
|
|
12
|
-
"rootDir": "./src"
|
|
13
|
-
},
|
|
14
|
-
"include": ["src/**/*"],
|
|
15
|
-
"exclude": ["node_modules", "dist"]
|
|
16
|
-
}
|