markdown-structure-checker 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +250 -0
- package/dist/cli.d.mts +4 -0
- package/dist/cli.d.ts +4 -0
- package/dist/cli.js +178 -0
- package/dist/cli.js.map +1 -0
- package/dist/cli.mjs +150 -0
- package/dist/cli.mjs.map +1 -0
- package/dist/index.d.mts +19 -0
- package/dist/index.d.ts +19 -0
- package/dist/index.js +125 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +86 -0
- package/dist/index.mjs.map +1 -0
- package/examples/incident-structure.schema.json +92 -0
- package/package.json +42 -0
package/README.md
ADDED
|
@@ -0,0 +1,250 @@
|
|
|
1
|
+
# markdown-structure-checker
|
|
2
|
+
|
|
3
|
+
[![npm version](https://img.shields.io/npm/v/markdown-structure-checker.svg)](https://www.npmjs.com/package/markdown-structure-checker)
|
|
4
|
+

|
|
5
|
+
[![license: MIT](https://img.shields.io/badge/license-MIT-blue.svg)](./LICENSE)
|
|
6
|
+
|
|
7
|
+
`markdown-structure-checker` is a minimal TypeScript tool that validates the **structural outline** of Markdown files against a user-provided JSON Schema.
|
|
8
|
+
|
|
9
|
+
It extracts heading hierarchy (H1-H6), normalizes heading text, produces a deterministic internal representation (`StructureJSON`), and validates it with AJV.
|
|
10
|
+
|
|
11
|
+
## What It Does
|
|
12
|
+
|
|
13
|
+
- Extracts Markdown headings (`#` through `######`) from a file.
|
|
14
|
+
- Strips YAML front matter before parsing.
|
|
15
|
+
- Builds nested heading structure based on heading depth.
|
|
16
|
+
- Normalizes heading text:
|
|
17
|
+
- trims surrounding whitespace
|
|
18
|
+
- collapses repeated spaces
|
|
19
|
+
- includes only `text` and `inlineCode` content
|
|
20
|
+
- Validates extracted structure against your JSON Schema.
|
|
21
|
+
- Provides a CLI with predictable exit codes for CI usage.
|
|
22
|
+
|
|
23
|
+
## What It Does Not Do (v0.1 Scope)
|
|
24
|
+
|
|
25
|
+
- Does not validate prose/content semantics.
|
|
26
|
+
- Does not auto-fix documents.
|
|
27
|
+
- Does not validate non-heading blocks (lists, tables, paragraphs, etc.).
|
|
28
|
+
- Does not provide plugin/config file systems.
|
|
29
|
+
|
|
30
|
+
## Installation
|
|
31
|
+
|
|
32
|
+
```bash
|
|
33
|
+
npm install markdown-structure-checker
|
|
34
|
+
```
|
|
35
|
+
|
|
36
|
+
Or run without installing globally:
|
|
37
|
+
|
|
38
|
+
```bash
|
|
39
|
+
npx markdown-structure-checker --file ./doc.md --schema ./schema.json
|
|
40
|
+
```
|
|
41
|
+
|
|
42
|
+
## CLI Usage
|
|
43
|
+
|
|
44
|
+
```bash
|
|
45
|
+
markdown-structure-checker --file <path> --schema <schemaPath> [--print-ir] [--verbose]
|
|
46
|
+
```
|
|
47
|
+
|
|
48
|
+
### Flags
|
|
49
|
+
|
|
50
|
+
- `--file <path>`: Markdown file to validate.
|
|
51
|
+
- `--schema <schemaPath>`: JSON Schema used to validate extracted structure.
|
|
52
|
+
- `--print-ir`: Print extracted `StructureJSON` to stdout.
|
|
53
|
+
- `--verbose`: Print success message on pass.
|
|
54
|
+
|
|
55
|
+
### Example
|
|
56
|
+
|
|
57
|
+
```bash
|
|
58
|
+
markdown-structure-checker \
|
|
59
|
+
--file ./examples/incident.md \
|
|
60
|
+
--schema ./examples/incident-structure.schema.json \
|
|
61
|
+
--print-ir \
|
|
62
|
+
--verbose
|
|
63
|
+
```
|
|
64
|
+
|
|
65
|
+
Example output (pass):
|
|
66
|
+
|
|
67
|
+
```json
|
|
68
|
+
{
|
|
69
|
+
"headings": [
|
|
70
|
+
{ "depth": 2, "text": "Summary" },
|
|
71
|
+
{ "depth": 2, "text": "Steps to Reproduce" },
|
|
72
|
+
{ "depth": 2, "text": "Expected Behavior" },
|
|
73
|
+
{ "depth": 2, "text": "Actual Behavior" },
|
|
74
|
+
{ "depth": 2, "text": "Impact" },
|
|
75
|
+
{ "depth": 2, "text": "Workarounds" },
|
|
76
|
+
{
|
|
77
|
+
"depth": 2,
|
|
78
|
+
"text": "Vendor Response Timeline",
|
|
79
|
+
"children": [{ "depth": 3, "text": "2026-02-20" }]
|
|
80
|
+
}
|
|
81
|
+
]
|
|
82
|
+
}
|
|
83
|
+
```
|
|
84
|
+
|
|
85
|
+
```text
|
|
86
|
+
Validation passed.
|
|
87
|
+
```
|
|
88
|
+
|
|
89
|
+
Example output (fail):
|
|
90
|
+
|
|
91
|
+
```text
|
|
92
|
+
Validation failed:
|
|
93
|
+
- /headings/2/text must be equal to constant
|
|
94
|
+
```
|
|
95
|
+
|
|
96
|
+
## StructureJSON Format
|
|
97
|
+
|
|
98
|
+
`extractStructure()` produces:
|
|
99
|
+
|
|
100
|
+
```json
|
|
101
|
+
{
|
|
102
|
+
"headings": [
|
|
103
|
+
{
|
|
104
|
+
"depth": 2,
|
|
105
|
+
"text": "Summary",
|
|
106
|
+
"children": [
|
|
107
|
+
{
|
|
108
|
+
"depth": 3,
|
|
109
|
+
"text": "Details"
|
|
110
|
+
}
|
|
111
|
+
]
|
|
112
|
+
}
|
|
113
|
+
]
|
|
114
|
+
}
|
|
115
|
+
```
|
|
116
|
+
|
|
117
|
+
Shape:
|
|
118
|
+
|
|
119
|
+
```ts
|
|
120
|
+
interface StructureJSON {
|
|
121
|
+
headings: HeadingNode[];
|
|
122
|
+
}
|
|
123
|
+
|
|
124
|
+
interface HeadingNode {
|
|
125
|
+
depth: number; // 1..6
|
|
126
|
+
text: string;
|
|
127
|
+
children?: HeadingNode[];
|
|
128
|
+
}
|
|
129
|
+
```
|
|
130
|
+
|
|
131
|
+
## Example JSON Schema (Required H2 Sequence)
|
|
132
|
+
|
|
133
|
+
This schema enforces exact H2 order and disallows extra top-level headings.
|
|
134
|
+
|
|
135
|
+
```json
|
|
136
|
+
{
|
|
137
|
+
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
|
138
|
+
"type": "object",
|
|
139
|
+
"required": ["headings"],
|
|
140
|
+
"properties": {
|
|
141
|
+
"headings": {
|
|
142
|
+
"type": "array",
|
|
143
|
+
"prefixItems": [
|
|
144
|
+
{
|
|
145
|
+
"type": "object",
|
|
146
|
+
"required": ["depth", "text"],
|
|
147
|
+
"properties": {
|
|
148
|
+
"depth": { "const": 2 },
|
|
149
|
+
"text": { "const": "Summary" }
|
|
150
|
+
}
|
|
151
|
+
},
|
|
152
|
+
{
|
|
153
|
+
"type": "object",
|
|
154
|
+
"required": ["depth", "text"],
|
|
155
|
+
"properties": {
|
|
156
|
+
"depth": { "const": 2 },
|
|
157
|
+
"text": { "const": "Steps to Reproduce" }
|
|
158
|
+
}
|
|
159
|
+
},
|
|
160
|
+
{
|
|
161
|
+
"type": "object",
|
|
162
|
+
"required": ["depth", "text"],
|
|
163
|
+
"properties": {
|
|
164
|
+
"depth": { "const": 2 },
|
|
165
|
+
"text": { "const": "Expected Behavior" }
|
|
166
|
+
}
|
|
167
|
+
}
|
|
168
|
+
],
|
|
169
|
+
"items": false
|
|
170
|
+
}
|
|
171
|
+
}
|
|
172
|
+
}
|
|
173
|
+
```
|
|
174
|
+
|
|
175
|
+
A full example is included at `examples/incident-structure.schema.json`.
|
|
176
|
+
|
|
177
|
+
## Exit Codes
|
|
178
|
+
|
|
179
|
+
- `0`: Validation passed.
|
|
180
|
+
- `1`: Validation failed (schema mismatch).
|
|
181
|
+
- `2`: Usage or runtime error (invalid args, file read failure, JSON parse error, etc.).
|
|
182
|
+
|
|
183
|
+
## API Usage
|
|
184
|
+
|
|
185
|
+
```ts
|
|
186
|
+
import {
|
|
187
|
+
extractStructure,
|
|
188
|
+
validateStructure,
|
|
189
|
+
validateMarkdownFile,
|
|
190
|
+
type StructureJSON,
|
|
191
|
+
type ValidationResult,
|
|
192
|
+
} from "markdown-structure-checker";
|
|
193
|
+
```
|
|
194
|
+
|
|
195
|
+
### Exports
|
|
196
|
+
|
|
197
|
+
```ts
|
|
198
|
+
extractStructure(markdown: string): StructureJSON
|
|
199
|
+
validateStructure(structure: StructureJSON, schema: object): { ok: boolean; errors?: any[] }
|
|
200
|
+
validateMarkdownFile(filePath: string, schemaPath: string): { ok: boolean; errors?: any[] }
|
|
201
|
+
```
|
|
202
|
+
|
|
203
|
+
### Minimal example
|
|
204
|
+
|
|
205
|
+
```ts
|
|
206
|
+
import { extractStructure, validateStructure } from "markdown-structure-checker";
|
|
207
|
+
|
|
208
|
+
const markdown = "## Summary\n## Steps to Reproduce";
|
|
209
|
+
const schema = {
|
|
210
|
+
type: "object",
|
|
211
|
+
required: ["headings"],
|
|
212
|
+
properties: {
|
|
213
|
+
headings: {
|
|
214
|
+
type: "array",
|
|
215
|
+
minItems: 2,
|
|
216
|
+
},
|
|
217
|
+
},
|
|
218
|
+
};
|
|
219
|
+
|
|
220
|
+
const structure = extractStructure(markdown);
|
|
221
|
+
const result = validateStructure(structure, schema);
|
|
222
|
+
|
|
223
|
+
if (!result.ok) {
|
|
224
|
+
console.error(result.errors);
|
|
225
|
+
}
|
|
226
|
+
```
|
|
227
|
+
|
|
228
|
+
## Roadmap
|
|
229
|
+
|
|
230
|
+
Near-term improvements after v0.1:
|
|
231
|
+
|
|
232
|
+
- Better CLI formatting for AJV errors (grouped/context-rich output).
|
|
233
|
+
- Optional schema helper templates for common document formats.
|
|
234
|
+
- Additional deterministic normalization edge-case coverage.
|
|
235
|
+
- Performance benchmarking for large Markdown documents.
|
|
236
|
+
|
|
237
|
+
## Contributing
|
|
238
|
+
|
|
239
|
+
Contributions are welcome. For local validation:
|
|
240
|
+
|
|
241
|
+
```bash
|
|
242
|
+
npm test
|
|
243
|
+
npm run build
|
|
244
|
+
```
|
|
245
|
+
|
|
246
|
+
Please include tests for behavioral changes, especially extractor and CLI exit code behavior.
|
|
247
|
+
|
|
248
|
+
## License
|
|
249
|
+
|
|
250
|
+
MIT. See `LICENSE`.
|
package/dist/cli.d.mts
ADDED
package/dist/cli.d.ts
ADDED
package/dist/cli.js
ADDED
|
@@ -0,0 +1,178 @@
|
|
|
1
|
+
#!/usr/bin/env node
"use strict";
// ---- esbuild-generated CommonJS interop helpers (machine-generated) ----
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Install each entry of `all` on `target` as a live, enumerable getter.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copy own properties of `from` onto `to` as getters, skipping `except`
// and any key `to` already defines; preserves enumerability when a
// descriptor is available.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Wrap a require()'d CommonJS module so it can be consumed like an ES
// module (adds a synthetic "default" when needed).
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
// Mark an exports object as an ES module and copy its members out.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

// src/cli.ts
var cli_exports = {};
__export(cli_exports, {
  runCli: () => runCli
});
module.exports = __toCommonJS(cli_exports);
|
|
37
|
+
var import_node_fs2 = __toESM(require("fs"));
|
|
38
|
+
var import_commander = require("commander");
|
|
39
|
+
|
|
40
|
+
// src/extract.ts
|
|
41
|
+
var import_gray_matter = __toESM(require("gray-matter"));
|
|
42
|
+
var import_unified = require("unified");
|
|
43
|
+
var import_remark_parse = __toESM(require("remark-parse"));
|
|
44
|
+
// Collapse every internal whitespace run to a single space and strip
// leading/trailing whitespace. Deterministic heading-text normalization.
function normalizeText(text) {
  const collapsed = text.replace(/\s+/g, " ");
  return collapsed.trim();
}
|
|
47
|
+
// Depth-first concatenation of the visible text of an mdast subtree.
// Only `text` and `inlineCode` nodes contribute characters; any other
// node contributes whatever its children contribute, and non-object or
// childless nodes contribute nothing.
function extractInlineText(node) {
  if (!node || typeof node !== "object") {
    return "";
  }
  const kind = node.type;
  if (kind === "text" || kind === "inlineCode") {
    return node.value ?? "";
  }
  let combined = "";
  if (Array.isArray(node.children)) {
    for (const child of node.children) {
      combined += extractInlineText(child);
    }
  }
  return combined;
}
|
|
59
|
+
// Walk an mdast tree and append every heading node (document order) onto
// `acc`, which is also returned so callers can invoke it without an
// explicit accumulator.
function collectHeadings(node, acc = []) {
  const isTraversable = node && typeof node === "object";
  if (!isTraversable) {
    return acc;
  }
  if (node.type === "heading" && typeof node.depth === "number") {
    acc.push(node);
  }
  const children = Array.isArray(node.children) ? node.children : [];
  for (const child of children) {
    collectHeadings(child, acc);
  }
  return acc;
}
|
|
73
|
+
// Strip YAML front matter, parse the markdown, and fold the flat heading
// list into a nested StructureJSON via a depth stack: each heading
// becomes a child of the nearest strictly-shallower open heading, or a
// root entry when none is open.
function extractStructure(markdown) {
  const body = (0, import_gray_matter.default)(markdown).content;
  const tree = (0, import_unified.unified)().use(import_remark_parse.default).parse(body);
  const roots = [];
  const openAncestors = [];
  for (const node of collectHeadings(tree)) {
    const depth = node.depth;
    const depthIsValid = typeof depth === "number" && depth >= 1 && depth <= 6;
    if (!depthIsValid) {
      continue;
    }
    const entry = { depth, text: normalizeText(extractInlineText(node)) };
    // Close every open heading at the same or deeper level.
    while (openAncestors.length > 0 && openAncestors[openAncestors.length - 1].depth >= depth) {
      openAncestors.pop();
    }
    const parent = openAncestors[openAncestors.length - 1];
    if (parent) {
      (parent.children ??= []).push(entry);
    } else {
      roots.push(entry);
    }
    openAncestors.push(entry);
  }
  return { headings: roots };
}
|
|
100
|
+
|
|
101
|
+
// src/validate.ts
|
|
102
|
+
var import_node_fs = __toESM(require("fs"));
|
|
103
|
+
var import__ = __toESM(require("ajv/dist/2020"));
|
|
104
|
+
// Compile `schema` with AJV (2020-12 dialect) and evaluate `structure`
// against it. allErrors reports every violation; strict: false tolerates
// unknown schema keywords.
function validateStructure(structure, schema) {
  const ajvInstance = new import__.default({ allErrors: true, strict: false });
  const check = ajvInstance.compile(schema);
  const passed = check(structure);
  return passed ? { ok: true } : { ok: false, errors: check.errors ?? [] };
}
|
|
113
|
+
// Read the markdown and the schema synchronously (in that order), parse
// the schema JSON, and validate the extracted heading structure.
// I/O and JSON errors propagate to the caller (the CLI maps them to
// exit code 2).
function validateMarkdownFile(filePath, schemaPath) {
  const markdownText = import_node_fs.default.readFileSync(filePath, "utf8");
  const schemaText = import_node_fs.default.readFileSync(schemaPath, "utf8");
  const parsedSchema = JSON.parse(schemaText);
  return validateStructure(extractStructure(markdownText), parsedSchema);
}
|
|
120
|
+
|
|
121
|
+
// src/cli.ts
|
|
122
|
+
// Render AJV errors as a newline-joined bulleted list, one
// "- <instancePath> <message>" entry per error. An empty or missing
// instancePath is displayed as "/". A missing/empty error list yields
// the generic fallback line.
function formatErrors(errors) {
  if (!errors || errors.length === 0) {
    return "Validation failed.";
  }
  const lines = [];
  for (const err of errors) {
    const where = err.instancePath && err.instancePath.length > 0 ? err.instancePath : "/";
    const why = err.message ?? "validation error";
    lines.push(`- ${where} ${why}`);
  }
  return lines.join("\n");
}
|
|
131
|
+
// Parse CLI arguments, optionally print the extracted StructureJSON, and
// validate the markdown file against the JSON Schema.
// Returned exit codes (process.exit is the caller's job):
//   0 = validation passed (or --help was displayed)
//   1 = validation failed (schema mismatch)
//   2 = usage error or runtime failure (bad args, unreadable file, bad JSON)
async function runCli(argv) {
  const program = new import_commander.Command();
  // exitOverride() makes commander throw instead of calling process.exit,
  // so argument errors can be mapped onto the exit-code contract above.
  program.name("markdown-structure-checker").requiredOption("--file <path>", "Path to markdown file").requiredOption("--schema <path>", "Path to JSON Schema file").option("--print-ir", "Print extracted StructureJSON").option("--verbose", "Print extra output").exitOverride();
  let opts;
  try {
    // from: "user" — argv is already stripped of the node/script prefix.
    program.parse(argv, { from: "user" });
    opts = program.opts();
  } catch (error) {
    // --help makes commander throw with this code; treat as success.
    if (error?.code === "commander.helpDisplayed") {
      return 0;
    }
    if (error?.message) {
      console.error(error.message);
    }
    return 2;
  }
  try {
    if (opts.printIr) {
      // NOTE(review): with --print-ir the markdown file is read and parsed
      // twice (here and again inside validateMarkdownFile).
      const markdown = import_node_fs2.default.readFileSync(opts.file, "utf8");
      const structure = extractStructure(markdown);
      console.log(JSON.stringify(structure, null, 2));
    }
    const result = validateMarkdownFile(opts.file, opts.schema);
    if (result.ok) {
      if (opts.verbose) {
        console.log("Validation passed.");
      }
      return 0;
    }
    console.error("Validation failed:");
    console.error(formatErrors(result.errors));
    return 1;
  } catch (error) {
    // File-read, JSON-parse, and AJV schema-compile failures land here.
    console.error(error?.message ?? "Runtime error");
    return 2;
  }
}
|
|
168
|
+
// Self-execute only when this file is the process entry point
// (require.main), not when it is require()'d as a library.
var isDirectRun = typeof require !== "undefined" && require.main === module;
if (isDirectRun) {
  runCli(process.argv.slice(2)).then((code) => {
    process.exit(code);
  }).catch((error) => {
    // Defensive: runCli catches its own errors, but if it ever rejects
    // (e.g. a failure before its try blocks), honor the documented
    // "runtime error" exit code instead of dying with an unhandled
    // promise rejection.
    console.error(error?.message ?? "Runtime error");
    process.exit(2);
  });
}
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
  runCli
});
//# sourceMappingURL=cli.js.map
|
package/dist/cli.js.map
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/cli.ts","../src/extract.ts","../src/validate.ts"],"sourcesContent":["#!/usr/bin/env node\nimport fs from \"node:fs\";\nimport { Command } from \"commander\";\nimport { extractStructure, validateMarkdownFile } from \"./index\";\n\nfunction formatErrors(errors: any[] | undefined): string {\n if (!errors || errors.length === 0) {\n return \"Validation failed.\";\n }\n\n return errors\n .map((err) => {\n const path = err.instancePath && err.instancePath.length > 0 ? err.instancePath : \"/\";\n return `- ${path} ${err.message ?? \"validation error\"}`;\n })\n .join(\"\\n\");\n}\n\nexport async function runCli(argv: string[]): Promise<number> {\n const program = new Command();\n\n program\n .name(\"markdown-structure-checker\")\n .requiredOption(\"--file <path>\", \"Path to markdown file\")\n .requiredOption(\"--schema <path>\", \"Path to JSON Schema file\")\n .option(\"--print-ir\", \"Print extracted StructureJSON\")\n .option(\"--verbose\", \"Print extra output\")\n .exitOverride();\n\n let opts: {\n file: string;\n schema: string;\n printIr?: boolean;\n verbose?: boolean;\n };\n\n try {\n program.parse(argv, { from: \"user\" });\n opts = program.opts();\n } catch (error: any) {\n if (error?.code === \"commander.helpDisplayed\") {\n return 0;\n }\n\n if (error?.message) {\n console.error(error.message);\n }\n return 2;\n }\n\n try {\n if (opts.printIr) {\n const markdown = fs.readFileSync(opts.file, \"utf8\");\n const structure = extractStructure(markdown);\n console.log(JSON.stringify(structure, null, 2));\n }\n\n const result = validateMarkdownFile(opts.file, opts.schema);\n\n if (result.ok) {\n if (opts.verbose) {\n console.log(\"Validation passed.\");\n }\n return 0;\n }\n\n console.error(\"Validation failed:\");\n console.error(formatErrors(result.errors));\n return 1;\n } catch (error: any) {\n console.error(error?.message ?? 
\"Runtime error\");\n return 2;\n }\n}\n\nconst isDirectRun = typeof require !== \"undefined\" && require.main === module;\n\nif (isDirectRun) {\n runCli(process.argv.slice(2)).then((code) => {\n process.exit(code);\n });\n}\n","import matter from \"gray-matter\";\nimport { unified } from \"unified\";\nimport remarkParse from \"remark-parse\";\nimport type { HeadingNode, StructureJSON } from \"./types\";\n\ninterface MdastNode {\n type?: string;\n value?: string;\n depth?: number;\n children?: MdastNode[];\n}\n\nfunction normalizeText(text: string): string {\n return text.trim().replace(/\\s+/g, \" \");\n}\n\nfunction extractInlineText(node: MdastNode): string {\n if (!node || typeof node !== \"object\") {\n return \"\";\n }\n\n if (node.type === \"text\" || node.type === \"inlineCode\") {\n return node.value ?? \"\";\n }\n\n if (!Array.isArray(node.children)) {\n return \"\";\n }\n\n return node.children.map(extractInlineText).join(\"\");\n}\n\nfunction collectHeadings(node: MdastNode, acc: MdastNode[] = []): MdastNode[] {\n if (!node || typeof node !== \"object\") {\n return acc;\n }\n\n if (node.type === \"heading\" && typeof node.depth === \"number\") {\n acc.push(node);\n }\n\n if (Array.isArray(node.children)) {\n for (const child of node.children) {\n collectHeadings(child, acc);\n }\n }\n\n return acc;\n}\n\nexport function extractStructure(markdown: string): StructureJSON {\n const { content } = matter(markdown);\n const tree = unified().use(remarkParse).parse(content) as MdastNode;\n const headingNodes = collectHeadings(tree);\n\n const headings: HeadingNode[] = [];\n const stack: HeadingNode[] = [];\n\n for (const heading of headingNodes) {\n const depth = heading.depth;\n if (typeof depth !== \"number\" || depth < 1 || depth > 6) {\n continue;\n }\n\n const text = normalizeText(extractInlineText(heading));\n const current: HeadingNode = { depth, text };\n\n while (stack.length > 0 && stack[stack.length - 1].depth >= depth) {\n stack.pop();\n }\n\n const 
parent = stack[stack.length - 1];\n if (parent) {\n parent.children ??= [];\n parent.children.push(current);\n } else {\n headings.push(current);\n }\n\n stack.push(current);\n }\n\n return { headings };\n}\n","import fs from \"node:fs\";\nimport Ajv2020 from \"ajv/dist/2020\";\nimport { extractStructure } from \"./extract\";\nimport type { StructureJSON, ValidationResult } from \"./types\";\n\nexport function validateStructure(structure: StructureJSON, schema: object): ValidationResult {\n const ajv = new Ajv2020({ allErrors: true, strict: false });\n const validate = ajv.compile(schema);\n const ok = validate(structure) as boolean;\n\n if (ok) {\n return { ok: true };\n }\n\n return { ok: false, errors: validate.errors ?? [] };\n}\n\nexport function validateMarkdownFile(filePath: string, schemaPath: string): ValidationResult {\n const markdown = fs.readFileSync(filePath, \"utf8\");\n const schemaRaw = fs.readFileSync(schemaPath, \"utf8\");\n const schema = JSON.parse(schemaRaw) as object;\n\n const structure = extractStructure(markdown);\n return validateStructure(structure, 
schema);\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,IAAAA,kBAAe;AACf,uBAAwB;;;ACFxB,yBAAmB;AACnB,qBAAwB;AACxB,0BAAwB;AAUxB,SAAS,cAAc,MAAsB;AAC3C,SAAO,KAAK,KAAK,EAAE,QAAQ,QAAQ,GAAG;AACxC;AAEA,SAAS,kBAAkB,MAAyB;AAClD,MAAI,CAAC,QAAQ,OAAO,SAAS,UAAU;AACrC,WAAO;AAAA,EACT;AAEA,MAAI,KAAK,SAAS,UAAU,KAAK,SAAS,cAAc;AACtD,WAAO,KAAK,SAAS;AAAA,EACvB;AAEA,MAAI,CAAC,MAAM,QAAQ,KAAK,QAAQ,GAAG;AACjC,WAAO;AAAA,EACT;AAEA,SAAO,KAAK,SAAS,IAAI,iBAAiB,EAAE,KAAK,EAAE;AACrD;AAEA,SAAS,gBAAgB,MAAiB,MAAmB,CAAC,GAAgB;AAC5E,MAAI,CAAC,QAAQ,OAAO,SAAS,UAAU;AACrC,WAAO;AAAA,EACT;AAEA,MAAI,KAAK,SAAS,aAAa,OAAO,KAAK,UAAU,UAAU;AAC7D,QAAI,KAAK,IAAI;AAAA,EACf;AAEA,MAAI,MAAM,QAAQ,KAAK,QAAQ,GAAG;AAChC,eAAW,SAAS,KAAK,UAAU;AACjC,sBAAgB,OAAO,GAAG;AAAA,IAC5B;AAAA,EACF;AAEA,SAAO;AACT;AAEO,SAAS,iBAAiB,UAAiC;AAChE,QAAM,EAAE,QAAQ,QAAI,mBAAAC,SAAO,QAAQ;AACnC,QAAM,WAAO,wBAAQ,EAAE,IAAI,oBAAAC,OAAW,EAAE,MAAM,OAAO;AACrD,QAAM,eAAe,gBAAgB,IAAI;AAEzC,QAAM,WAA0B,CAAC;AACjC,QAAM,QAAuB,CAAC;AAE9B,aAAW,WAAW,cAAc;AAClC,UAAM,QAAQ,QAAQ;AACtB,QAAI,OAAO,UAAU,YAAY,QAAQ,KAAK,QAAQ,GAAG;AACvD;AAAA,IACF;AAEA,UAAM,OAAO,cAAc,kBAAkB,OAAO,CAAC;AACrD,UAAM,UAAuB,EAAE,OAAO,KAAK;AAE3C,WAAO,MAAM,SAAS,KAAK,MAAM,MAAM,SAAS,CAAC,EAAE,SAAS,OAAO;AACjE,YAAM,IAAI;AAAA,IACZ;AAEA,UAAM,SAAS,MAAM,MAAM,SAAS,CAAC;AACrC,QAAI,QAAQ;AACV,aAAO,aAAa,CAAC;AACrB,aAAO,SAAS,KAAK,OAAO;AAAA,IAC9B,OAAO;AACL,eAAS,KAAK,OAAO;AAAA,IACvB;AAEA,UAAM,KAAK,OAAO;AAAA,EACpB;AAEA,SAAO,EAAE,SAAS;AACpB;;;ACnFA,qBAAe;AACf,eAAoB;AAIb,SAAS,kBAAkB,WAA0B,QAAkC;AAC5F,QAAM,MAAM,IAAI,SAAAC,QAAQ,EAAE,WAAW,MAAM,QAAQ,MAAM,CAAC;AAC1D,QAAM,WAAW,IAAI,QAAQ,MAAM;AACnC,QAAM,KAAK,SAAS,SAAS;AAE7B,MAAI,IAAI;AACN,WAAO,EAAE,IAAI,KAAK;AAAA,EACpB;AAEA,SAAO,EAAE,IAAI,OAAO,QAAQ,SAAS,UAAU,CAAC,EAAE;AACpD;AAEO,SAAS,qBAAqB,UAAkB,YAAsC;AAC3F,QAAM,WAAW,eAAAC,QAAG,aAAa,UAAU,MAAM;AACjD,QAAM,YAAY,eAAAA,QAAG,aAAa,YAAY,MAAM;AACpD,QAAM,SAAS,KAAK,MAAM,SAAS;AAEnC,QAAM,YAAY,iBAAiB,QAAQ;AAC3C,SAAO,kBAAkB,WAAW,MAAM;AAC5C;;;AFnBA,SAAS,aAAa,QAAmC;AACvD,MAAI,CAAC,UAAU,OAAO,WAAW,GAAG;AAClC,WAAO;AA
AA,EACT;AAEA,SAAO,OACJ,IAAI,CAAC,QAAQ;AACZ,UAAM,OAAO,IAAI,gBAAgB,IAAI,aAAa,SAAS,IAAI,IAAI,eAAe;AAClF,WAAO,KAAK,IAAI,IAAI,IAAI,WAAW,kBAAkB;AAAA,EACvD,CAAC,EACA,KAAK,IAAI;AACd;AAEA,eAAsB,OAAO,MAAiC;AAC5D,QAAM,UAAU,IAAI,yBAAQ;AAE5B,UACG,KAAK,4BAA4B,EACjC,eAAe,iBAAiB,uBAAuB,EACvD,eAAe,mBAAmB,0BAA0B,EAC5D,OAAO,cAAc,+BAA+B,EACpD,OAAO,aAAa,oBAAoB,EACxC,aAAa;AAEhB,MAAI;AAOJ,MAAI;AACF,YAAQ,MAAM,MAAM,EAAE,MAAM,OAAO,CAAC;AACpC,WAAO,QAAQ,KAAK;AAAA,EACtB,SAAS,OAAY;AACnB,QAAI,OAAO,SAAS,2BAA2B;AAC7C,aAAO;AAAA,IACT;AAEA,QAAI,OAAO,SAAS;AAClB,cAAQ,MAAM,MAAM,OAAO;AAAA,IAC7B;AACA,WAAO;AAAA,EACT;AAEA,MAAI;AACF,QAAI,KAAK,SAAS;AAChB,YAAM,WAAW,gBAAAC,QAAG,aAAa,KAAK,MAAM,MAAM;AAClD,YAAM,YAAY,iBAAiB,QAAQ;AAC3C,cAAQ,IAAI,KAAK,UAAU,WAAW,MAAM,CAAC,CAAC;AAAA,IAChD;AAEA,UAAM,SAAS,qBAAqB,KAAK,MAAM,KAAK,MAAM;AAE1D,QAAI,OAAO,IAAI;AACb,UAAI,KAAK,SAAS;AAChB,gBAAQ,IAAI,oBAAoB;AAAA,MAClC;AACA,aAAO;AAAA,IACT;AAEA,YAAQ,MAAM,oBAAoB;AAClC,YAAQ,MAAM,aAAa,OAAO,MAAM,CAAC;AACzC,WAAO;AAAA,EACT,SAAS,OAAY;AACnB,YAAQ,MAAM,OAAO,WAAW,eAAe;AAC/C,WAAO;AAAA,EACT;AACF;AAEA,IAAM,cAAc,OAAO,YAAY,eAAe,QAAQ,SAAS;AAEvE,IAAI,aAAa;AACf,SAAO,QAAQ,KAAK,MAAM,CAAC,CAAC,EAAE,KAAK,CAAC,SAAS;AAC3C,YAAQ,KAAK,IAAI;AAAA,EACnB,CAAC;AACH;","names":["import_node_fs","matter","remarkParse","Ajv2020","fs","fs"]}
|
package/dist/cli.mjs
ADDED
|
@@ -0,0 +1,150 @@
|
|
|
1
|
+
#!/usr/bin/env node
// esbuild-generated shim: expose a `__require` that forwards to the real
// CommonJS `require` when one exists (e.g. when this bundle is loaded in
// a CJS-compatible context), and otherwise throws a descriptive error
// for dynamic require calls inside an ESM bundle.
var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, {
  get: (a, b) => (typeof require !== "undefined" ? require : a)[b]
}) : x)(function(x) {
  if (typeof require !== "undefined") return require.apply(this, arguments);
  throw Error('Dynamic require of "' + x + '" is not supported');
});
|
|
8
|
+
|
|
9
|
+
// src/cli.ts
|
|
10
|
+
import fs2 from "fs";
|
|
11
|
+
import { Command } from "commander";
|
|
12
|
+
|
|
13
|
+
// src/extract.ts
|
|
14
|
+
import matter from "gray-matter";
|
|
15
|
+
import { unified } from "unified";
|
|
16
|
+
import remarkParse from "remark-parse";
|
|
17
|
+
// Deterministic heading-text normalization: collapse internal whitespace
// runs to single spaces, then strip the ends.
function normalizeText(text) {
  const singleSpaced = text.replace(/\s+/g, " ");
  return singleSpaced.trim();
}
|
|
20
|
+
// Concatenate the visible text of an mdast subtree, depth-first.
// `text` and `inlineCode` nodes contribute their value; other nodes
// contribute the concatenation of their children; anything else is "".
function extractInlineText(node) {
  if (!node || typeof node !== "object") {
    return "";
  }
  const kind = node.type;
  if (kind === "text" || kind === "inlineCode") {
    return node.value ?? "";
  }
  let pieces = "";
  if (Array.isArray(node.children)) {
    for (const child of node.children) {
      pieces += extractInlineText(child);
    }
  }
  return pieces;
}
|
|
32
|
+
// Recursively gather every mdast heading node (in document order) into
// `acc`; the accumulator is returned for convenient call sites.
function collectHeadings(node, acc = []) {
  const traversable = node && typeof node === "object";
  if (!traversable) {
    return acc;
  }
  if (node.type === "heading" && typeof node.depth === "number") {
    acc.push(node);
  }
  const children = Array.isArray(node.children) ? node.children : [];
  for (const child of children) {
    collectHeadings(child, acc);
  }
  return acc;
}
|
|
46
|
+
// Strip YAML front matter, parse the markdown, and nest the flat heading
// list using a depth stack: each heading attaches to the nearest
// strictly-shallower open heading, or becomes a root entry.
function extractStructure(markdown) {
  const body = matter(markdown).content;
  const tree = unified().use(remarkParse).parse(body);
  const roots = [];
  const openAncestors = [];
  for (const node of collectHeadings(tree)) {
    const depth = node.depth;
    const depthIsValid = typeof depth === "number" && depth >= 1 && depth <= 6;
    if (!depthIsValid) {
      continue;
    }
    const entry = { depth, text: normalizeText(extractInlineText(node)) };
    // Close every open heading at the same or deeper level.
    while (openAncestors.length > 0 && openAncestors[openAncestors.length - 1].depth >= depth) {
      openAncestors.pop();
    }
    const parent = openAncestors[openAncestors.length - 1];
    if (parent) {
      (parent.children ??= []).push(entry);
    } else {
      roots.push(entry);
    }
    openAncestors.push(entry);
  }
  return { headings: roots };
}
|
|
73
|
+
|
|
74
|
+
// src/validate.ts
|
|
75
|
+
import fs from "fs";
|
|
76
|
+
import Ajv2020 from "ajv/dist/2020";
|
|
77
|
+
// Compile `schema` with AJV (2020-12 dialect) and evaluate `structure`.
// allErrors reports every violation; strict: false tolerates unknown
// schema keywords.
function validateStructure(structure, schema) {
  const ajvInstance = new Ajv2020({ allErrors: true, strict: false });
  const check = ajvInstance.compile(schema);
  const passed = check(structure);
  return passed ? { ok: true } : { ok: false, errors: check.errors ?? [] };
}
|
|
86
|
+
// Read the markdown and the schema synchronously (in that order), parse
// the schema JSON, and validate the extracted heading structure.
// I/O and JSON errors propagate to the caller (the CLI maps them to
// exit code 2).
function validateMarkdownFile(filePath, schemaPath) {
  const markdownText = fs.readFileSync(filePath, "utf8");
  const schemaText = fs.readFileSync(schemaPath, "utf8");
  const parsedSchema = JSON.parse(schemaText);
  return validateStructure(extractStructure(markdownText), parsedSchema);
}
|
|
93
|
+
|
|
94
|
+
// src/cli.ts
|
|
95
|
+
// Render AJV errors as a newline-joined bulleted list, one
// "- <instancePath> <message>" entry per error. An empty or missing
// instancePath prints as "/"; an empty/missing list yields the generic
// fallback line.
function formatErrors(errors) {
  if (!errors || errors.length === 0) {
    return "Validation failed.";
  }
  const lines = [];
  for (const err of errors) {
    const where = err.instancePath && err.instancePath.length > 0 ? err.instancePath : "/";
    const why = err.message ?? "validation error";
    lines.push(`- ${where} ${why}`);
  }
  return lines.join("\n");
}
|
|
104
|
+
// Parse CLI arguments, optionally print the extracted StructureJSON, and
// validate the markdown file against the JSON Schema.
// Returned exit codes (process.exit is the caller's job):
//   0 = validation passed (or --help was displayed)
//   1 = validation failed (schema mismatch)
//   2 = usage error or runtime failure (bad args, unreadable file, bad JSON)
async function runCli(argv) {
  const program = new Command();
  // exitOverride() makes commander throw instead of calling process.exit,
  // so argument errors can be mapped onto the exit-code contract above.
  program.name("markdown-structure-checker").requiredOption("--file <path>", "Path to markdown file").requiredOption("--schema <path>", "Path to JSON Schema file").option("--print-ir", "Print extracted StructureJSON").option("--verbose", "Print extra output").exitOverride();
  let opts;
  try {
    // from: "user" — argv is already stripped of the node/script prefix.
    program.parse(argv, { from: "user" });
    opts = program.opts();
  } catch (error) {
    // --help makes commander throw with this code; treat as success.
    if (error?.code === "commander.helpDisplayed") {
      return 0;
    }
    if (error?.message) {
      console.error(error.message);
    }
    return 2;
  }
  try {
    if (opts.printIr) {
      // NOTE(review): with --print-ir the markdown file is read and parsed
      // twice (here and again inside validateMarkdownFile).
      const markdown = fs2.readFileSync(opts.file, "utf8");
      const structure = extractStructure(markdown);
      console.log(JSON.stringify(structure, null, 2));
    }
    const result = validateMarkdownFile(opts.file, opts.schema);
    if (result.ok) {
      if (opts.verbose) {
        console.log("Validation passed.");
      }
      return 0;
    }
    console.error("Validation failed:");
    console.error(formatErrors(result.errors));
    return 1;
  } catch (error) {
    // File-read, JSON-parse, and AJV schema-compile failures land here.
    console.error(error?.message ?? "Runtime error");
    return 2;
  }
}
|
|
141
|
+
var isDirectRun = typeof __require !== "undefined" && __require.main === module;
|
|
142
|
+
if (isDirectRun) {
|
|
143
|
+
runCli(process.argv.slice(2)).then((code) => {
|
|
144
|
+
process.exit(code);
|
|
145
|
+
});
|
|
146
|
+
}
|
|
147
|
+
export {
|
|
148
|
+
runCli
|
|
149
|
+
};
|
|
150
|
+
//# sourceMappingURL=cli.mjs.map
|
package/dist/cli.mjs.map
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/cli.ts","../src/extract.ts","../src/validate.ts"],"sourcesContent":["#!/usr/bin/env node\nimport fs from \"node:fs\";\nimport { Command } from \"commander\";\nimport { extractStructure, validateMarkdownFile } from \"./index\";\n\nfunction formatErrors(errors: any[] | undefined): string {\n if (!errors || errors.length === 0) {\n return \"Validation failed.\";\n }\n\n return errors\n .map((err) => {\n const path = err.instancePath && err.instancePath.length > 0 ? err.instancePath : \"/\";\n return `- ${path} ${err.message ?? \"validation error\"}`;\n })\n .join(\"\\n\");\n}\n\nexport async function runCli(argv: string[]): Promise<number> {\n const program = new Command();\n\n program\n .name(\"markdown-structure-checker\")\n .requiredOption(\"--file <path>\", \"Path to markdown file\")\n .requiredOption(\"--schema <path>\", \"Path to JSON Schema file\")\n .option(\"--print-ir\", \"Print extracted StructureJSON\")\n .option(\"--verbose\", \"Print extra output\")\n .exitOverride();\n\n let opts: {\n file: string;\n schema: string;\n printIr?: boolean;\n verbose?: boolean;\n };\n\n try {\n program.parse(argv, { from: \"user\" });\n opts = program.opts();\n } catch (error: any) {\n if (error?.code === \"commander.helpDisplayed\") {\n return 0;\n }\n\n if (error?.message) {\n console.error(error.message);\n }\n return 2;\n }\n\n try {\n if (opts.printIr) {\n const markdown = fs.readFileSync(opts.file, \"utf8\");\n const structure = extractStructure(markdown);\n console.log(JSON.stringify(structure, null, 2));\n }\n\n const result = validateMarkdownFile(opts.file, opts.schema);\n\n if (result.ok) {\n if (opts.verbose) {\n console.log(\"Validation passed.\");\n }\n return 0;\n }\n\n console.error(\"Validation failed:\");\n console.error(formatErrors(result.errors));\n return 1;\n } catch (error: any) {\n console.error(error?.message ?? 
\"Runtime error\");\n return 2;\n }\n}\n\nconst isDirectRun = typeof require !== \"undefined\" && require.main === module;\n\nif (isDirectRun) {\n runCli(process.argv.slice(2)).then((code) => {\n process.exit(code);\n });\n}\n","import matter from \"gray-matter\";\nimport { unified } from \"unified\";\nimport remarkParse from \"remark-parse\";\nimport type { HeadingNode, StructureJSON } from \"./types\";\n\ninterface MdastNode {\n type?: string;\n value?: string;\n depth?: number;\n children?: MdastNode[];\n}\n\nfunction normalizeText(text: string): string {\n return text.trim().replace(/\\s+/g, \" \");\n}\n\nfunction extractInlineText(node: MdastNode): string {\n if (!node || typeof node !== \"object\") {\n return \"\";\n }\n\n if (node.type === \"text\" || node.type === \"inlineCode\") {\n return node.value ?? \"\";\n }\n\n if (!Array.isArray(node.children)) {\n return \"\";\n }\n\n return node.children.map(extractInlineText).join(\"\");\n}\n\nfunction collectHeadings(node: MdastNode, acc: MdastNode[] = []): MdastNode[] {\n if (!node || typeof node !== \"object\") {\n return acc;\n }\n\n if (node.type === \"heading\" && typeof node.depth === \"number\") {\n acc.push(node);\n }\n\n if (Array.isArray(node.children)) {\n for (const child of node.children) {\n collectHeadings(child, acc);\n }\n }\n\n return acc;\n}\n\nexport function extractStructure(markdown: string): StructureJSON {\n const { content } = matter(markdown);\n const tree = unified().use(remarkParse).parse(content) as MdastNode;\n const headingNodes = collectHeadings(tree);\n\n const headings: HeadingNode[] = [];\n const stack: HeadingNode[] = [];\n\n for (const heading of headingNodes) {\n const depth = heading.depth;\n if (typeof depth !== \"number\" || depth < 1 || depth > 6) {\n continue;\n }\n\n const text = normalizeText(extractInlineText(heading));\n const current: HeadingNode = { depth, text };\n\n while (stack.length > 0 && stack[stack.length - 1].depth >= depth) {\n stack.pop();\n }\n\n const 
parent = stack[stack.length - 1];\n if (parent) {\n parent.children ??= [];\n parent.children.push(current);\n } else {\n headings.push(current);\n }\n\n stack.push(current);\n }\n\n return { headings };\n}\n","import fs from \"node:fs\";\nimport Ajv2020 from \"ajv/dist/2020\";\nimport { extractStructure } from \"./extract\";\nimport type { StructureJSON, ValidationResult } from \"./types\";\n\nexport function validateStructure(structure: StructureJSON, schema: object): ValidationResult {\n const ajv = new Ajv2020({ allErrors: true, strict: false });\n const validate = ajv.compile(schema);\n const ok = validate(structure) as boolean;\n\n if (ok) {\n return { ok: true };\n }\n\n return { ok: false, errors: validate.errors ?? [] };\n}\n\nexport function validateMarkdownFile(filePath: string, schemaPath: string): ValidationResult {\n const markdown = fs.readFileSync(filePath, \"utf8\");\n const schemaRaw = fs.readFileSync(schemaPath, \"utf8\");\n const schema = JSON.parse(schemaRaw) as object;\n\n const structure = extractStructure(markdown);\n return validateStructure(structure, 
schema);\n}\n"],"mappings":";;;;;;;;;AACA,OAAOA,SAAQ;AACf,SAAS,eAAe;;;ACFxB,OAAO,YAAY;AACnB,SAAS,eAAe;AACxB,OAAO,iBAAiB;AAUxB,SAAS,cAAc,MAAsB;AAC3C,SAAO,KAAK,KAAK,EAAE,QAAQ,QAAQ,GAAG;AACxC;AAEA,SAAS,kBAAkB,MAAyB;AAClD,MAAI,CAAC,QAAQ,OAAO,SAAS,UAAU;AACrC,WAAO;AAAA,EACT;AAEA,MAAI,KAAK,SAAS,UAAU,KAAK,SAAS,cAAc;AACtD,WAAO,KAAK,SAAS;AAAA,EACvB;AAEA,MAAI,CAAC,MAAM,QAAQ,KAAK,QAAQ,GAAG;AACjC,WAAO;AAAA,EACT;AAEA,SAAO,KAAK,SAAS,IAAI,iBAAiB,EAAE,KAAK,EAAE;AACrD;AAEA,SAAS,gBAAgB,MAAiB,MAAmB,CAAC,GAAgB;AAC5E,MAAI,CAAC,QAAQ,OAAO,SAAS,UAAU;AACrC,WAAO;AAAA,EACT;AAEA,MAAI,KAAK,SAAS,aAAa,OAAO,KAAK,UAAU,UAAU;AAC7D,QAAI,KAAK,IAAI;AAAA,EACf;AAEA,MAAI,MAAM,QAAQ,KAAK,QAAQ,GAAG;AAChC,eAAW,SAAS,KAAK,UAAU;AACjC,sBAAgB,OAAO,GAAG;AAAA,IAC5B;AAAA,EACF;AAEA,SAAO;AACT;AAEO,SAAS,iBAAiB,UAAiC;AAChE,QAAM,EAAE,QAAQ,IAAI,OAAO,QAAQ;AACnC,QAAM,OAAO,QAAQ,EAAE,IAAI,WAAW,EAAE,MAAM,OAAO;AACrD,QAAM,eAAe,gBAAgB,IAAI;AAEzC,QAAM,WAA0B,CAAC;AACjC,QAAM,QAAuB,CAAC;AAE9B,aAAW,WAAW,cAAc;AAClC,UAAM,QAAQ,QAAQ;AACtB,QAAI,OAAO,UAAU,YAAY,QAAQ,KAAK,QAAQ,GAAG;AACvD;AAAA,IACF;AAEA,UAAM,OAAO,cAAc,kBAAkB,OAAO,CAAC;AACrD,UAAM,UAAuB,EAAE,OAAO,KAAK;AAE3C,WAAO,MAAM,SAAS,KAAK,MAAM,MAAM,SAAS,CAAC,EAAE,SAAS,OAAO;AACjE,YAAM,IAAI;AAAA,IACZ;AAEA,UAAM,SAAS,MAAM,MAAM,SAAS,CAAC;AACrC,QAAI,QAAQ;AACV,aAAO,aAAa,CAAC;AACrB,aAAO,SAAS,KAAK,OAAO;AAAA,IAC9B,OAAO;AACL,eAAS,KAAK,OAAO;AAAA,IACvB;AAEA,UAAM,KAAK,OAAO;AAAA,EACpB;AAEA,SAAO,EAAE,SAAS;AACpB;;;ACnFA,OAAO,QAAQ;AACf,OAAO,aAAa;AAIb,SAAS,kBAAkB,WAA0B,QAAkC;AAC5F,QAAM,MAAM,IAAI,QAAQ,EAAE,WAAW,MAAM,QAAQ,MAAM,CAAC;AAC1D,QAAM,WAAW,IAAI,QAAQ,MAAM;AACnC,QAAM,KAAK,SAAS,SAAS;AAE7B,MAAI,IAAI;AACN,WAAO,EAAE,IAAI,KAAK;AAAA,EACpB;AAEA,SAAO,EAAE,IAAI,OAAO,QAAQ,SAAS,UAAU,CAAC,EAAE;AACpD;AAEO,SAAS,qBAAqB,UAAkB,YAAsC;AAC3F,QAAM,WAAW,GAAG,aAAa,UAAU,MAAM;AACjD,QAAM,YAAY,GAAG,aAAa,YAAY,MAAM;AACpD,QAAM,SAAS,KAAK,MAAM,SAAS;AAEnC,QAAM,YAAY,iBAAiB,QAAQ;AAC3C,SAAO,kBAAkB,WAAW,MAAM;AAC5C;;;AFnBA,SAAS,aAAa,QAAmC;AACvD,MAAI,CAAC,UAAU,OAAO,WAAW,GAAG;AAClC,WAAO;AAAA,EACT;AAEA,SAAO,OACJ,IAAI,CAAC,QAAQ;AACZ,UAAM,OAAO,IAAI,g
BAAgB,IAAI,aAAa,SAAS,IAAI,IAAI,eAAe;AAClF,WAAO,KAAK,IAAI,IAAI,IAAI,WAAW,kBAAkB;AAAA,EACvD,CAAC,EACA,KAAK,IAAI;AACd;AAEA,eAAsB,OAAO,MAAiC;AAC5D,QAAM,UAAU,IAAI,QAAQ;AAE5B,UACG,KAAK,4BAA4B,EACjC,eAAe,iBAAiB,uBAAuB,EACvD,eAAe,mBAAmB,0BAA0B,EAC5D,OAAO,cAAc,+BAA+B,EACpD,OAAO,aAAa,oBAAoB,EACxC,aAAa;AAEhB,MAAI;AAOJ,MAAI;AACF,YAAQ,MAAM,MAAM,EAAE,MAAM,OAAO,CAAC;AACpC,WAAO,QAAQ,KAAK;AAAA,EACtB,SAAS,OAAY;AACnB,QAAI,OAAO,SAAS,2BAA2B;AAC7C,aAAO;AAAA,IACT;AAEA,QAAI,OAAO,SAAS;AAClB,cAAQ,MAAM,MAAM,OAAO;AAAA,IAC7B;AACA,WAAO;AAAA,EACT;AAEA,MAAI;AACF,QAAI,KAAK,SAAS;AAChB,YAAM,WAAWC,IAAG,aAAa,KAAK,MAAM,MAAM;AAClD,YAAM,YAAY,iBAAiB,QAAQ;AAC3C,cAAQ,IAAI,KAAK,UAAU,WAAW,MAAM,CAAC,CAAC;AAAA,IAChD;AAEA,UAAM,SAAS,qBAAqB,KAAK,MAAM,KAAK,MAAM;AAE1D,QAAI,OAAO,IAAI;AACb,UAAI,KAAK,SAAS;AAChB,gBAAQ,IAAI,oBAAoB;AAAA,MAClC;AACA,aAAO;AAAA,IACT;AAEA,YAAQ,MAAM,oBAAoB;AAClC,YAAQ,MAAM,aAAa,OAAO,MAAM,CAAC;AACzC,WAAO;AAAA,EACT,SAAS,OAAY;AACnB,YAAQ,MAAM,OAAO,WAAW,eAAe;AAC/C,WAAO;AAAA,EACT;AACF;AAEA,IAAM,cAAc,OAAO,cAAY,eAAe,UAAQ,SAAS;AAEvE,IAAI,aAAa;AACf,SAAO,QAAQ,KAAK,MAAM,CAAC,CAAC,EAAE,KAAK,CAAC,SAAS;AAC3C,YAAQ,KAAK,IAAI;AAAA,EACnB,CAAC;AACH;","names":["fs","fs"]}
|
package/dist/index.d.mts
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
interface HeadingNode {
|
|
2
|
+
depth: number;
|
|
3
|
+
text: string;
|
|
4
|
+
children?: HeadingNode[];
|
|
5
|
+
}
|
|
6
|
+
interface StructureJSON {
|
|
7
|
+
headings: HeadingNode[];
|
|
8
|
+
}
|
|
9
|
+
interface ValidationResult {
|
|
10
|
+
ok: boolean;
|
|
11
|
+
errors?: any[];
|
|
12
|
+
}
|
|
13
|
+
|
|
14
|
+
declare function extractStructure(markdown: string): StructureJSON;
|
|
15
|
+
|
|
16
|
+
declare function validateStructure(structure: StructureJSON, schema: object): ValidationResult;
|
|
17
|
+
declare function validateMarkdownFile(filePath: string, schemaPath: string): ValidationResult;
|
|
18
|
+
|
|
19
|
+
export { type HeadingNode, type StructureJSON, type ValidationResult, extractStructure, validateMarkdownFile, validateStructure };
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
interface HeadingNode {
|
|
2
|
+
depth: number;
|
|
3
|
+
text: string;
|
|
4
|
+
children?: HeadingNode[];
|
|
5
|
+
}
|
|
6
|
+
interface StructureJSON {
|
|
7
|
+
headings: HeadingNode[];
|
|
8
|
+
}
|
|
9
|
+
interface ValidationResult {
|
|
10
|
+
ok: boolean;
|
|
11
|
+
errors?: any[];
|
|
12
|
+
}
|
|
13
|
+
|
|
14
|
+
declare function extractStructure(markdown: string): StructureJSON;
|
|
15
|
+
|
|
16
|
+
declare function validateStructure(structure: StructureJSON, schema: object): ValidationResult;
|
|
17
|
+
declare function validateMarkdownFile(filePath: string, schemaPath: string): ValidationResult;
|
|
18
|
+
|
|
19
|
+
export { type HeadingNode, type StructureJSON, type ValidationResult, extractStructure, validateMarkdownFile, validateStructure };
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,125 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __create = Object.create;
|
|
3
|
+
var __defProp = Object.defineProperty;
|
|
4
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
5
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
6
|
+
var __getProtoOf = Object.getPrototypeOf;
|
|
7
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
8
|
+
var __export = (target, all) => {
|
|
9
|
+
for (var name in all)
|
|
10
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
11
|
+
};
|
|
12
|
+
var __copyProps = (to, from, except, desc) => {
|
|
13
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
14
|
+
for (let key of __getOwnPropNames(from))
|
|
15
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
16
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
17
|
+
}
|
|
18
|
+
return to;
|
|
19
|
+
};
|
|
20
|
+
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
|
21
|
+
// If the importer is in node compatibility mode or this is not an ESM
|
|
22
|
+
// file that has been converted to a CommonJS file using a Babel-
|
|
23
|
+
// compatible transform (i.e. "__esModule" has not been set), then set
|
|
24
|
+
// "default" to the CommonJS "module.exports" for node compatibility.
|
|
25
|
+
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
|
26
|
+
mod
|
|
27
|
+
));
|
|
28
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
29
|
+
|
|
30
|
+
// src/index.ts
|
|
31
|
+
var index_exports = {};
|
|
32
|
+
__export(index_exports, {
|
|
33
|
+
extractStructure: () => extractStructure,
|
|
34
|
+
validateMarkdownFile: () => validateMarkdownFile,
|
|
35
|
+
validateStructure: () => validateStructure
|
|
36
|
+
});
|
|
37
|
+
module.exports = __toCommonJS(index_exports);
|
|
38
|
+
|
|
39
|
+
// src/extract.ts
|
|
40
|
+
var import_gray_matter = __toESM(require("gray-matter"));
|
|
41
|
+
var import_unified = require("unified");
|
|
42
|
+
var import_remark_parse = __toESM(require("remark-parse"));
|
|
43
|
+
function normalizeText(text) {
  // Collapse every run of whitespace to a single space, then strip the
  // leading/trailing space that collapsing may leave behind.
  const collapsed = text.replace(/\s+/g, " ");
  return collapsed.trim();
}
|
|
46
|
+
function extractInlineText(node) {
  // Concatenate the visible text of an mdast subtree: only `text` and
  // `inlineCode` nodes contribute; container nodes are recursed into and
  // anything else yields the empty string.
  if (!node || typeof node !== "object") {
    return "";
  }
  switch (node.type) {
    case "text":
    case "inlineCode":
      return node.value ?? "";
    default:
      break;
  }
  if (!Array.isArray(node.children)) {
    return "";
  }
  let out = "";
  for (const child of node.children) {
    out += extractInlineText(child);
  }
  return out;
}
|
|
58
|
+
function collectHeadings(node, acc = []) {
  // Depth-first walk appending every heading node (type "heading" with a
  // numeric depth) to `acc` in document order; returns `acc`.
  if (node && typeof node === "object") {
    const isHeading = node.type === "heading" && typeof node.depth === "number";
    if (isHeading) {
      acc.push(node);
    }
    const children = Array.isArray(node.children) ? node.children : [];
    for (const child of children) {
      collectHeadings(child, acc);
    }
  }
  return acc;
}
|
|
72
|
+
function extractStructure(markdown) {
  // Strip YAML front matter, parse the remainder into an mdast tree, then
  // fold the flat heading list into a nested StructureJSON using a stack of
  // currently-open ancestor headings.
  const { content } = (0, import_gray_matter.default)(markdown);
  const tree = (0, import_unified.unified)().use(import_remark_parse.default).parse(content);

  const roots = [];
  const openAncestors = [];
  for (const node of collectHeadings(tree)) {
    const depth = node.depth;
    if (typeof depth !== "number" || depth < 1 || depth > 6) {
      continue; // ignore malformed heading nodes outside H1..H6
    }

    const entry = { depth, text: normalizeText(extractInlineText(node)) };

    // Close every ancestor that is not strictly shallower than this heading.
    while (openAncestors.length > 0 && openAncestors[openAncestors.length - 1].depth >= depth) {
      openAncestors.pop();
    }

    const parent = openAncestors[openAncestors.length - 1];
    if (parent === undefined) {
      roots.push(entry);
    } else {
      (parent.children ??= []).push(entry);
    }

    openAncestors.push(entry);
  }

  return { headings: roots };
}
|
|
99
|
+
|
|
100
|
+
// src/validate.ts
|
|
101
|
+
var import_node_fs = __toESM(require("fs"));
|
|
102
|
+
var import__ = __toESM(require("ajv/dist/2020"));
|
|
103
|
+
function validateStructure(structure, schema) {
  // Compile the user schema with AJV (draft 2020-12; allErrors collects every
  // violation, strict:false tolerates unknown keywords in user schemas) and
  // run it over the extracted structure.
  const ajv = new import__.default({ allErrors: true, strict: false });
  const check = ajv.compile(schema);
  if (!check(structure)) {
    return { ok: false, errors: check.errors ?? [] };
  }
  return { ok: true };
}
|
|
112
|
+
function validateMarkdownFile(filePath, schemaPath) {
  // Read both files synchronously, extract the heading structure from the
  // markdown, and validate it against the parsed JSON Schema.
  const markdown = import_node_fs.default.readFileSync(filePath, "utf8");
  const schema = JSON.parse(import_node_fs.default.readFileSync(schemaPath, "utf8"));
  return validateStructure(extractStructure(markdown), schema);
}
|
|
119
|
+
// Annotate the CommonJS export names for ESM import in node:
|
|
120
|
+
0 && (module.exports = {
|
|
121
|
+
extractStructure,
|
|
122
|
+
validateMarkdownFile,
|
|
123
|
+
validateStructure
|
|
124
|
+
});
|
|
125
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/index.ts","../src/extract.ts","../src/validate.ts"],"sourcesContent":["export { extractStructure } from \"./extract\";\nexport { validateStructure, validateMarkdownFile } from \"./validate\";\nexport type { HeadingNode, StructureJSON, ValidationResult } from \"./types\";\n","import matter from \"gray-matter\";\nimport { unified } from \"unified\";\nimport remarkParse from \"remark-parse\";\nimport type { HeadingNode, StructureJSON } from \"./types\";\n\ninterface MdastNode {\n type?: string;\n value?: string;\n depth?: number;\n children?: MdastNode[];\n}\n\nfunction normalizeText(text: string): string {\n return text.trim().replace(/\\s+/g, \" \");\n}\n\nfunction extractInlineText(node: MdastNode): string {\n if (!node || typeof node !== \"object\") {\n return \"\";\n }\n\n if (node.type === \"text\" || node.type === \"inlineCode\") {\n return node.value ?? \"\";\n }\n\n if (!Array.isArray(node.children)) {\n return \"\";\n }\n\n return node.children.map(extractInlineText).join(\"\");\n}\n\nfunction collectHeadings(node: MdastNode, acc: MdastNode[] = []): MdastNode[] {\n if (!node || typeof node !== \"object\") {\n return acc;\n }\n\n if (node.type === \"heading\" && typeof node.depth === \"number\") {\n acc.push(node);\n }\n\n if (Array.isArray(node.children)) {\n for (const child of node.children) {\n collectHeadings(child, acc);\n }\n }\n\n return acc;\n}\n\nexport function extractStructure(markdown: string): StructureJSON {\n const { content } = matter(markdown);\n const tree = unified().use(remarkParse).parse(content) as MdastNode;\n const headingNodes = collectHeadings(tree);\n\n const headings: HeadingNode[] = [];\n const stack: HeadingNode[] = [];\n\n for (const heading of headingNodes) {\n const depth = heading.depth;\n if (typeof depth !== \"number\" || depth < 1 || depth > 6) {\n continue;\n }\n\n const text = normalizeText(extractInlineText(heading));\n const current: HeadingNode = { depth, text };\n\n while 
(stack.length > 0 && stack[stack.length - 1].depth >= depth) {\n stack.pop();\n }\n\n const parent = stack[stack.length - 1];\n if (parent) {\n parent.children ??= [];\n parent.children.push(current);\n } else {\n headings.push(current);\n }\n\n stack.push(current);\n }\n\n return { headings };\n}\n","import fs from \"node:fs\";\nimport Ajv2020 from \"ajv/dist/2020\";\nimport { extractStructure } from \"./extract\";\nimport type { StructureJSON, ValidationResult } from \"./types\";\n\nexport function validateStructure(structure: StructureJSON, schema: object): ValidationResult {\n const ajv = new Ajv2020({ allErrors: true, strict: false });\n const validate = ajv.compile(schema);\n const ok = validate(structure) as boolean;\n\n if (ok) {\n return { ok: true };\n }\n\n return { ok: false, errors: validate.errors ?? [] };\n}\n\nexport function validateMarkdownFile(filePath: string, schemaPath: string): ValidationResult {\n const markdown = fs.readFileSync(filePath, \"utf8\");\n const schemaRaw = fs.readFileSync(schemaPath, \"utf8\");\n const schema = JSON.parse(schemaRaw) as object;\n\n const structure = extractStructure(markdown);\n return validateStructure(structure, 
schema);\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,yBAAmB;AACnB,qBAAwB;AACxB,0BAAwB;AAUxB,SAAS,cAAc,MAAsB;AAC3C,SAAO,KAAK,KAAK,EAAE,QAAQ,QAAQ,GAAG;AACxC;AAEA,SAAS,kBAAkB,MAAyB;AAClD,MAAI,CAAC,QAAQ,OAAO,SAAS,UAAU;AACrC,WAAO;AAAA,EACT;AAEA,MAAI,KAAK,SAAS,UAAU,KAAK,SAAS,cAAc;AACtD,WAAO,KAAK,SAAS;AAAA,EACvB;AAEA,MAAI,CAAC,MAAM,QAAQ,KAAK,QAAQ,GAAG;AACjC,WAAO;AAAA,EACT;AAEA,SAAO,KAAK,SAAS,IAAI,iBAAiB,EAAE,KAAK,EAAE;AACrD;AAEA,SAAS,gBAAgB,MAAiB,MAAmB,CAAC,GAAgB;AAC5E,MAAI,CAAC,QAAQ,OAAO,SAAS,UAAU;AACrC,WAAO;AAAA,EACT;AAEA,MAAI,KAAK,SAAS,aAAa,OAAO,KAAK,UAAU,UAAU;AAC7D,QAAI,KAAK,IAAI;AAAA,EACf;AAEA,MAAI,MAAM,QAAQ,KAAK,QAAQ,GAAG;AAChC,eAAW,SAAS,KAAK,UAAU;AACjC,sBAAgB,OAAO,GAAG;AAAA,IAC5B;AAAA,EACF;AAEA,SAAO;AACT;AAEO,SAAS,iBAAiB,UAAiC;AAChE,QAAM,EAAE,QAAQ,QAAI,mBAAAA,SAAO,QAAQ;AACnC,QAAM,WAAO,wBAAQ,EAAE,IAAI,oBAAAC,OAAW,EAAE,MAAM,OAAO;AACrD,QAAM,eAAe,gBAAgB,IAAI;AAEzC,QAAM,WAA0B,CAAC;AACjC,QAAM,QAAuB,CAAC;AAE9B,aAAW,WAAW,cAAc;AAClC,UAAM,QAAQ,QAAQ;AACtB,QAAI,OAAO,UAAU,YAAY,QAAQ,KAAK,QAAQ,GAAG;AACvD;AAAA,IACF;AAEA,UAAM,OAAO,cAAc,kBAAkB,OAAO,CAAC;AACrD,UAAM,UAAuB,EAAE,OAAO,KAAK;AAE3C,WAAO,MAAM,SAAS,KAAK,MAAM,MAAM,SAAS,CAAC,EAAE,SAAS,OAAO;AACjE,YAAM,IAAI;AAAA,IACZ;AAEA,UAAM,SAAS,MAAM,MAAM,SAAS,CAAC;AACrC,QAAI,QAAQ;AACV,aAAO,aAAa,CAAC;AACrB,aAAO,SAAS,KAAK,OAAO;AAAA,IAC9B,OAAO;AACL,eAAS,KAAK,OAAO;AAAA,IACvB;AAEA,UAAM,KAAK,OAAO;AAAA,EACpB;AAEA,SAAO,EAAE,SAAS;AACpB;;;ACnFA,qBAAe;AACf,eAAoB;AAIb,SAAS,kBAAkB,WAA0B,QAAkC;AAC5F,QAAM,MAAM,IAAI,SAAAC,QAAQ,EAAE,WAAW,MAAM,QAAQ,MAAM,CAAC;AAC1D,QAAM,WAAW,IAAI,QAAQ,MAAM;AACnC,QAAM,KAAK,SAAS,SAAS;AAE7B,MAAI,IAAI;AACN,WAAO,EAAE,IAAI,KAAK;AAAA,EACpB;AAEA,SAAO,EAAE,IAAI,OAAO,QAAQ,SAAS,UAAU,CAAC,EAAE;AACpD;AAEO,SAAS,qBAAqB,UAAkB,YAAsC;AAC3F,QAAM,WAAW,eAAAC,QAAG,aAAa,UAAU,MAAM;AACjD,QAAM,YAAY,eAAAA,QAAG,aAAa,YAAY,MAAM;AACpD,QAAM,SAAS,KAAK,MAAM,SAAS;AAEnC,QAAM,YAAY,iBAAiB,QAAQ;AAC3C,SAAO,kBAAkB,WAAW,MAAM;AAC5C;","names":["matter","remarkParse","Ajv2020","fs"]}
|
package/dist/index.mjs
ADDED
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
// src/extract.ts
|
|
2
|
+
import matter from "gray-matter";
|
|
3
|
+
import { unified } from "unified";
|
|
4
|
+
import remarkParse from "remark-parse";
|
|
5
|
+
function normalizeText(text) {
  // Collapse every run of whitespace to a single space, then strip the
  // leading/trailing space that collapsing may leave behind.
  const collapsed = text.replace(/\s+/g, " ");
  return collapsed.trim();
}
|
|
8
|
+
function extractInlineText(node) {
  // Concatenate the visible text of an mdast subtree: only `text` and
  // `inlineCode` nodes contribute; container nodes are recursed into and
  // anything else yields the empty string.
  if (!node || typeof node !== "object") {
    return "";
  }
  switch (node.type) {
    case "text":
    case "inlineCode":
      return node.value ?? "";
    default:
      break;
  }
  if (!Array.isArray(node.children)) {
    return "";
  }
  let out = "";
  for (const child of node.children) {
    out += extractInlineText(child);
  }
  return out;
}
|
|
20
|
+
function collectHeadings(node, acc = []) {
  // Depth-first walk appending every heading node (type "heading" with a
  // numeric depth) to `acc` in document order; returns `acc`.
  if (node && typeof node === "object") {
    const isHeading = node.type === "heading" && typeof node.depth === "number";
    if (isHeading) {
      acc.push(node);
    }
    const children = Array.isArray(node.children) ? node.children : [];
    for (const child of children) {
      collectHeadings(child, acc);
    }
  }
  return acc;
}
|
|
34
|
+
function extractStructure(markdown) {
  // Strip YAML front matter, parse the remainder into an mdast tree, then
  // fold the flat heading list into a nested StructureJSON using a stack of
  // currently-open ancestor headings.
  const { content } = matter(markdown);
  const tree = unified().use(remarkParse).parse(content);

  const roots = [];
  const openAncestors = [];
  for (const node of collectHeadings(tree)) {
    const depth = node.depth;
    if (typeof depth !== "number" || depth < 1 || depth > 6) {
      continue; // ignore malformed heading nodes outside H1..H6
    }

    const entry = { depth, text: normalizeText(extractInlineText(node)) };

    // Close every ancestor that is not strictly shallower than this heading.
    while (openAncestors.length > 0 && openAncestors[openAncestors.length - 1].depth >= depth) {
      openAncestors.pop();
    }

    const parent = openAncestors[openAncestors.length - 1];
    if (parent === undefined) {
      roots.push(entry);
    } else {
      (parent.children ??= []).push(entry);
    }

    openAncestors.push(entry);
  }

  return { headings: roots };
}
|
|
61
|
+
|
|
62
|
+
// src/validate.ts
|
|
63
|
+
import fs from "fs";
|
|
64
|
+
import Ajv2020 from "ajv/dist/2020";
|
|
65
|
+
function validateStructure(structure, schema) {
  // Compile the user schema with AJV (draft 2020-12; allErrors collects every
  // violation, strict:false tolerates unknown keywords in user schemas) and
  // run it over the extracted structure.
  const ajv = new Ajv2020({ allErrors: true, strict: false });
  const check = ajv.compile(schema);
  if (!check(structure)) {
    return { ok: false, errors: check.errors ?? [] };
  }
  return { ok: true };
}
|
|
74
|
+
function validateMarkdownFile(filePath, schemaPath) {
  // Read both files synchronously, extract the heading structure from the
  // markdown, and validate it against the parsed JSON Schema.
  const markdown = fs.readFileSync(filePath, "utf8");
  const schema = JSON.parse(fs.readFileSync(schemaPath, "utf8"));
  return validateStructure(extractStructure(markdown), schema);
}
|
|
81
|
+
export {
|
|
82
|
+
extractStructure,
|
|
83
|
+
validateMarkdownFile,
|
|
84
|
+
validateStructure
|
|
85
|
+
};
|
|
86
|
+
//# sourceMappingURL=index.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/extract.ts","../src/validate.ts"],"sourcesContent":["import matter from \"gray-matter\";\nimport { unified } from \"unified\";\nimport remarkParse from \"remark-parse\";\nimport type { HeadingNode, StructureJSON } from \"./types\";\n\ninterface MdastNode {\n type?: string;\n value?: string;\n depth?: number;\n children?: MdastNode[];\n}\n\nfunction normalizeText(text: string): string {\n return text.trim().replace(/\\s+/g, \" \");\n}\n\nfunction extractInlineText(node: MdastNode): string {\n if (!node || typeof node !== \"object\") {\n return \"\";\n }\n\n if (node.type === \"text\" || node.type === \"inlineCode\") {\n return node.value ?? \"\";\n }\n\n if (!Array.isArray(node.children)) {\n return \"\";\n }\n\n return node.children.map(extractInlineText).join(\"\");\n}\n\nfunction collectHeadings(node: MdastNode, acc: MdastNode[] = []): MdastNode[] {\n if (!node || typeof node !== \"object\") {\n return acc;\n }\n\n if (node.type === \"heading\" && typeof node.depth === \"number\") {\n acc.push(node);\n }\n\n if (Array.isArray(node.children)) {\n for (const child of node.children) {\n collectHeadings(child, acc);\n }\n }\n\n return acc;\n}\n\nexport function extractStructure(markdown: string): StructureJSON {\n const { content } = matter(markdown);\n const tree = unified().use(remarkParse).parse(content) as MdastNode;\n const headingNodes = collectHeadings(tree);\n\n const headings: HeadingNode[] = [];\n const stack: HeadingNode[] = [];\n\n for (const heading of headingNodes) {\n const depth = heading.depth;\n if (typeof depth !== \"number\" || depth < 1 || depth > 6) {\n continue;\n }\n\n const text = normalizeText(extractInlineText(heading));\n const current: HeadingNode = { depth, text };\n\n while (stack.length > 0 && stack[stack.length - 1].depth >= depth) {\n stack.pop();\n }\n\n const parent = stack[stack.length - 1];\n if (parent) {\n parent.children ??= [];\n parent.children.push(current);\n } else {\n 
headings.push(current);\n }\n\n stack.push(current);\n }\n\n return { headings };\n}\n","import fs from \"node:fs\";\nimport Ajv2020 from \"ajv/dist/2020\";\nimport { extractStructure } from \"./extract\";\nimport type { StructureJSON, ValidationResult } from \"./types\";\n\nexport function validateStructure(structure: StructureJSON, schema: object): ValidationResult {\n const ajv = new Ajv2020({ allErrors: true, strict: false });\n const validate = ajv.compile(schema);\n const ok = validate(structure) as boolean;\n\n if (ok) {\n return { ok: true };\n }\n\n return { ok: false, errors: validate.errors ?? [] };\n}\n\nexport function validateMarkdownFile(filePath: string, schemaPath: string): ValidationResult {\n const markdown = fs.readFileSync(filePath, \"utf8\");\n const schemaRaw = fs.readFileSync(schemaPath, \"utf8\");\n const schema = JSON.parse(schemaRaw) as object;\n\n const structure = extractStructure(markdown);\n return validateStructure(structure, schema);\n}\n"],"mappings":";AAAA,OAAO,YAAY;AACnB,SAAS,eAAe;AACxB,OAAO,iBAAiB;AAUxB,SAAS,cAAc,MAAsB;AAC3C,SAAO,KAAK,KAAK,EAAE,QAAQ,QAAQ,GAAG;AACxC;AAEA,SAAS,kBAAkB,MAAyB;AAClD,MAAI,CAAC,QAAQ,OAAO,SAAS,UAAU;AACrC,WAAO;AAAA,EACT;AAEA,MAAI,KAAK,SAAS,UAAU,KAAK,SAAS,cAAc;AACtD,WAAO,KAAK,SAAS;AAAA,EACvB;AAEA,MAAI,CAAC,MAAM,QAAQ,KAAK,QAAQ,GAAG;AACjC,WAAO;AAAA,EACT;AAEA,SAAO,KAAK,SAAS,IAAI,iBAAiB,EAAE,KAAK,EAAE;AACrD;AAEA,SAAS,gBAAgB,MAAiB,MAAmB,CAAC,GAAgB;AAC5E,MAAI,CAAC,QAAQ,OAAO,SAAS,UAAU;AACrC,WAAO;AAAA,EACT;AAEA,MAAI,KAAK,SAAS,aAAa,OAAO,KAAK,UAAU,UAAU;AAC7D,QAAI,KAAK,IAAI;AAAA,EACf;AAEA,MAAI,MAAM,QAAQ,KAAK,QAAQ,GAAG;AAChC,eAAW,SAAS,KAAK,UAAU;AACjC,sBAAgB,OAAO,GAAG;AAAA,IAC5B;AAAA,EACF;AAEA,SAAO;AACT;AAEO,SAAS,iBAAiB,UAAiC;AAChE,QAAM,EAAE,QAAQ,IAAI,OAAO,QAAQ;AACnC,QAAM,OAAO,QAAQ,EAAE,IAAI,WAAW,EAAE,MAAM,OAAO;AACrD,QAAM,eAAe,gBAAgB,IAAI;AAEzC,QAAM,WAA0B,CAAC;AACjC,QAAM,QAAuB,CAAC;AAE9B,aAAW,WAAW,cAAc;AAClC,UAAM,QAAQ,QAAQ;AACtB,QAAI,OAAO,UAAU,YAAY,QAAQ,KAAK,QAAQ,GAAG;AACvD;AAAA,IACF;AAEA,UAAM,OAAO,cAAc,kBAAkB,OAAO,CAAC
;AACrD,UAAM,UAAuB,EAAE,OAAO,KAAK;AAE3C,WAAO,MAAM,SAAS,KAAK,MAAM,MAAM,SAAS,CAAC,EAAE,SAAS,OAAO;AACjE,YAAM,IAAI;AAAA,IACZ;AAEA,UAAM,SAAS,MAAM,MAAM,SAAS,CAAC;AACrC,QAAI,QAAQ;AACV,aAAO,aAAa,CAAC;AACrB,aAAO,SAAS,KAAK,OAAO;AAAA,IAC9B,OAAO;AACL,eAAS,KAAK,OAAO;AAAA,IACvB;AAEA,UAAM,KAAK,OAAO;AAAA,EACpB;AAEA,SAAO,EAAE,SAAS;AACpB;;;ACnFA,OAAO,QAAQ;AACf,OAAO,aAAa;AAIb,SAAS,kBAAkB,WAA0B,QAAkC;AAC5F,QAAM,MAAM,IAAI,QAAQ,EAAE,WAAW,MAAM,QAAQ,MAAM,CAAC;AAC1D,QAAM,WAAW,IAAI,QAAQ,MAAM;AACnC,QAAM,KAAK,SAAS,SAAS;AAE7B,MAAI,IAAI;AACN,WAAO,EAAE,IAAI,KAAK;AAAA,EACpB;AAEA,SAAO,EAAE,IAAI,OAAO,QAAQ,SAAS,UAAU,CAAC,EAAE;AACpD;AAEO,SAAS,qBAAqB,UAAkB,YAAsC;AAC3F,QAAM,WAAW,GAAG,aAAa,UAAU,MAAM;AACjD,QAAM,YAAY,GAAG,aAAa,YAAY,MAAM;AACpD,QAAM,SAAS,KAAK,MAAM,SAAS;AAEnC,QAAM,YAAY,iBAAiB,QAAQ;AAC3C,SAAO,kBAAkB,WAAW,MAAM;AAC5C;","names":[]}
|
|
@@ -0,0 +1,92 @@
|
|
|
1
|
+
{
|
|
2
|
+
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
|
3
|
+
"type": "object",
|
|
4
|
+
"required": ["headings"],
|
|
5
|
+
"additionalProperties": false,
|
|
6
|
+
"properties": {
|
|
7
|
+
"headings": {
|
|
8
|
+
"type": "array",
|
|
9
|
+
"prefixItems": [
|
|
10
|
+
{
|
|
11
|
+
"type": "object",
|
|
12
|
+
"required": ["depth", "text"],
|
|
13
|
+
"additionalProperties": false,
|
|
14
|
+
"properties": {
|
|
15
|
+
"depth": { "const": 2 },
|
|
16
|
+
"text": { "const": "Summary" }
|
|
17
|
+
}
|
|
18
|
+
},
|
|
19
|
+
{
|
|
20
|
+
"type": "object",
|
|
21
|
+
"required": ["depth", "text"],
|
|
22
|
+
"additionalProperties": false,
|
|
23
|
+
"properties": {
|
|
24
|
+
"depth": { "const": 2 },
|
|
25
|
+
"text": { "const": "Steps to Reproduce" }
|
|
26
|
+
}
|
|
27
|
+
},
|
|
28
|
+
{
|
|
29
|
+
"type": "object",
|
|
30
|
+
"required": ["depth", "text"],
|
|
31
|
+
"additionalProperties": false,
|
|
32
|
+
"properties": {
|
|
33
|
+
"depth": { "const": 2 },
|
|
34
|
+
"text": { "const": "Expected Behavior" }
|
|
35
|
+
}
|
|
36
|
+
},
|
|
37
|
+
{
|
|
38
|
+
"type": "object",
|
|
39
|
+
"required": ["depth", "text"],
|
|
40
|
+
"additionalProperties": false,
|
|
41
|
+
"properties": {
|
|
42
|
+
"depth": { "const": 2 },
|
|
43
|
+
"text": { "const": "Actual Behavior" }
|
|
44
|
+
}
|
|
45
|
+
},
|
|
46
|
+
{
|
|
47
|
+
"type": "object",
|
|
48
|
+
"required": ["depth", "text"],
|
|
49
|
+
"additionalProperties": false,
|
|
50
|
+
"properties": {
|
|
51
|
+
"depth": { "const": 2 },
|
|
52
|
+
"text": { "const": "Impact" }
|
|
53
|
+
}
|
|
54
|
+
},
|
|
55
|
+
{
|
|
56
|
+
"type": "object",
|
|
57
|
+
"required": ["depth", "text"],
|
|
58
|
+
"additionalProperties": false,
|
|
59
|
+
"properties": {
|
|
60
|
+
"depth": { "const": 2 },
|
|
61
|
+
"text": { "const": "Workarounds" }
|
|
62
|
+
}
|
|
63
|
+
},
|
|
64
|
+
{
|
|
65
|
+
"type": "object",
|
|
66
|
+
"required": ["depth", "text"],
|
|
67
|
+
"additionalProperties": false,
|
|
68
|
+
"properties": {
|
|
69
|
+
"depth": { "const": 2 },
|
|
70
|
+
"text": { "const": "Vendor Response Timeline" },
|
|
71
|
+
"children": {
|
|
72
|
+
"type": "array",
|
|
73
|
+
"items": {
|
|
74
|
+
"type": "object",
|
|
75
|
+
"required": ["depth", "text"],
|
|
76
|
+
"additionalProperties": false,
|
|
77
|
+
"properties": {
|
|
78
|
+
"depth": { "const": 3 },
|
|
79
|
+
"text": {
|
|
80
|
+
"type": "string",
|
|
81
|
+
"pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}$"
|
|
82
|
+
}
|
|
83
|
+
}
|
|
84
|
+
}
|
|
85
|
+
}
|
|
86
|
+
}
|
|
87
|
+
}
|
|
88
|
+
],
|
|
89
|
+
"items": false
|
|
90
|
+
}
|
|
91
|
+
}
|
|
92
|
+
}
|
package/package.json
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "markdown-structure-checker",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "Structural outline checker for Markdown files using JSON Schema",
|
|
5
|
+
"main": "dist/index.js",
|
|
6
|
+
"module": "dist/index.mjs",
|
|
7
|
+
"types": "dist/index.d.ts",
|
|
8
|
+
"bin": {
|
|
9
|
+
"markdown-structure-checker": "dist/cli.js"
|
|
10
|
+
},
|
|
11
|
+
"files": [
|
|
12
|
+
"dist",
|
|
13
|
+
"examples"
|
|
14
|
+
],
|
|
15
|
+
"scripts": {
|
|
16
|
+
"build": "tsup",
|
|
17
|
+
"test": "vitest run",
|
|
18
|
+
"test:watch": "vitest",
|
|
19
|
+
"typecheck": "tsc --noEmit"
|
|
20
|
+
},
|
|
21
|
+
"keywords": [
|
|
22
|
+
"markdown",
|
|
23
|
+
"schema",
|
|
24
|
+
"validation",
|
|
25
|
+
"ajv",
|
|
26
|
+
"cli"
|
|
27
|
+
],
|
|
28
|
+
"license": "MIT",
|
|
29
|
+
"dependencies": {
|
|
30
|
+
"ajv": "^8.17.1",
|
|
31
|
+
"commander": "^14.0.1",
|
|
32
|
+
"gray-matter": "^4.0.3",
|
|
33
|
+
"remark-parse": "^11.0.0",
|
|
34
|
+
"unified": "^11.0.5"
|
|
35
|
+
},
|
|
36
|
+
"devDependencies": {
|
|
37
|
+
"@types/node": "^24.3.0",
|
|
38
|
+
"tsup": "^8.5.0",
|
|
39
|
+
"typescript": "^5.9.2",
|
|
40
|
+
"vitest": "^3.2.4"
|
|
41
|
+
}
|
|
42
|
+
}
|