hdlinter 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +91 -0
- package/dist/cli.d.ts +2 -0
- package/dist/cli.js +154 -0
- package/dist/extension.d.ts +3 -0
- package/dist/extension.js +296 -0
- package/dist/graph.d.ts +6 -0
- package/dist/graph.js +84 -0
- package/dist/header.d.ts +19 -0
- package/dist/header.js +193 -0
- package/dist/linter.d.ts +10 -0
- package/dist/linter.js +210 -0
- package/dist/types.d.ts +77 -0
- package/dist/types.js +19 -0
- package/package.json +148 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 Siem Kleuskens
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1,91 @@
|
|
|
1
|
+
# hdlinter
|
|
2
|
+
|
|
3
|
+
Cursor/VS Code extension and CLI for validating Markdown planning headers.
|
|
4
|
+
|
|
5
|
+
The linter enforces this header shape at the top of every Markdown file:
|
|
6
|
+
|
|
7
|
+
```md
|
|
8
|
+
Status: Draft
|
|
9
|
+
Owner: Project Manager
|
|
10
|
+
Reviewers: Siem, Vansh
|
|
11
|
+
Last updated: 2026-05-10
|
|
12
|
+
Milestone: M1
|
|
13
|
+
Depends on: Markdown links with bracketed dependency dates
|
|
14
|
+
Supersedes: None
|
|
15
|
+
```
|
|
16
|
+
|
|
17
|
+
## Rules
|
|
18
|
+
|
|
19
|
+
- Required fields must appear in order.
|
|
20
|
+
- `Status` must be one of `Draft`, `In Review`, `Approved`, or `Superseded`.
|
|
21
|
+
- `Last updated` must be a valid ISO date.
|
|
22
|
+
- `Depends on` and `Supersedes` must be either `None` or Markdown links.
|
|
23
|
+
- Header links must include the referenced document's `Last updated` date in brackets
|
|
24
|
+
after the link.
|
|
25
|
+
- Referenced files must exist when they point at local Markdown files.
|
|
26
|
+
- A dependency newer than the current document is flagged as possible drift.
|
|
27
|
+
- A superseded document newer than the superseding document is flagged as suspicious.
|
|
28
|
+
|
|
29
|
+
The extension writes hidden graph/hash state to `.hdlinter/state.json`. That file
|
|
30
|
+
is implementation state, not a planning artifact. Older builds used `.hdlint/` for
|
|
31
|
+
the same file; rename that directory to `.hdlinter` or run `--write-state` to recreate it.
|
|
32
|
+
|
|
33
|
+
## Cursor Setup
|
|
34
|
+
|
|
35
|
+
From this directory:
|
|
36
|
+
|
|
37
|
+
```sh
|
|
38
|
+
npm install
|
|
39
|
+
npm run compile
|
|
40
|
+
```
|
|
41
|
+
|
|
42
|
+
Then in Cursor:
|
|
43
|
+
|
|
44
|
+
1. Open this folder.
|
|
45
|
+
2. Press `F5` to launch the extension development host.
|
|
46
|
+
3. In the launched Cursor/VS Code window, open the target Markdown project.
|
|
47
|
+
4. Diagnostics and Quick Fixes appear for Markdown files.
|
|
48
|
+
|
|
49
|
+
For regular use, package/install it as a VS Code-compatible extension later:
|
|
50
|
+
|
|
51
|
+
```sh
|
|
52
|
+
npm install -g @vscode/vsce
|
|
53
|
+
npm run package
|
|
54
|
+
```
|
|
55
|
+
|
|
56
|
+
Then install the generated `.vsix` in Cursor.
|
|
57
|
+
|
|
58
|
+
## CLI
|
|
59
|
+
|
|
60
|
+
Run without installing:
|
|
61
|
+
|
|
62
|
+
```sh
|
|
63
|
+
npx hdlinter .
|
|
64
|
+
```
|
|
65
|
+
|
|
66
|
+
Lint a workspace and rebuild hidden graph state:
|
|
67
|
+
|
|
68
|
+
```sh
|
|
69
|
+
npx hdlinter ../aicompliance --write-state
|
|
70
|
+
```
|
|
71
|
+
|
|
72
|
+
Autofix a workspace and rebuild hidden graph state:
|
|
73
|
+
|
|
74
|
+
```sh
|
|
75
|
+
npx hdlinter ../aicompliance --fix
|
|
76
|
+
```
|
|
77
|
+
|
|
78
|
+
The CLI exits non-zero when errors are found.
|
|
79
|
+
|
|
80
|
+
## Local Development
|
|
81
|
+
|
|
82
|
+
For quick local CLI testing from this repo:
|
|
83
|
+
|
|
84
|
+
```sh
|
|
85
|
+
npm install
|
|
86
|
+
npm run build
|
|
87
|
+
npm exec -- hdlinter samples
|
|
88
|
+
```
|
|
89
|
+
|
|
90
|
+
`npm link` works for local development, but publishing to npm is the intended path for
|
|
91
|
+
normal `npx hdlinter` usage.
|
package/dist/cli.d.ts
ADDED
package/dist/cli.js
ADDED
|
@@ -0,0 +1,154 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
"use strict";
|
|
3
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
4
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
5
|
+
};
|
|
6
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
7
|
+
const promises_1 = __importDefault(require("node:fs/promises"));
|
|
8
|
+
const node_module_1 = require("node:module");
|
|
9
|
+
const node_path_1 = __importDefault(require("node:path"));
|
|
10
|
+
const graph_1 = require("./graph");
|
|
11
|
+
const linter_1 = require("./linter");
|
|
12
|
+
const types_1 = require("./types");
|
|
13
|
+
const DEFAULT_STATE_FILE = ".hdlinter/state.json";
|
|
14
|
+
const EXCLUDED_DIRS = new Set([".git", "node_modules", ".hdlinter", "dist"]);
|
|
15
|
+
const VALID_FLAGS = new Set(["--fix", "--write-state", "--help", "-h", "--version", "-v"]);
|
|
16
|
+
/**
 * CLI entry point: parse flags, lint every Markdown file under the target
 * workspace, optionally autofix and persist graph state, and set the exit
 * code (0 = clean, 1 = lint errors, 2 = usage error).
 */
async function main() {
    const args = process.argv.slice(2);
    if (args.includes("--help") || args.includes("-h")) {
        printHelp();
        return;
    }
    if (args.includes("--version") || args.includes("-v")) {
        console.log(readPackageVersion());
        return;
    }
    // Reject unrecognized flags early so a typo does not get treated as a
    // workspace path below.
    const unknownFlag = args.find((arg) => arg.startsWith("-") && !VALID_FLAGS.has(arg));
    if (unknownFlag) {
        console.error(`Unknown option: ${unknownFlag}`);
        console.error("Run `hdlinter --help` for usage.");
        process.exitCode = 2;
        return;
    }
    const writeState = args.includes("--write-state");
    const fix = args.includes("--fix");
    // First non-flag argument is the workspace; default to the current directory.
    const workspaceArg = args.find((arg) => !arg.startsWith("--")) ?? process.cwd();
    const workspaceRoot = node_path_1.default.resolve(workspaceArg);
    const documents = await collectMarkdownDocuments(workspaceRoot);
    let graph = (0, graph_1.buildDocumentGraph)(workspaceRoot, documents);
    if (fix) {
        const changed = await fixDocuments(workspaceRoot, documents, graph);
        // Fixes rewrite headers the graph was derived from; rebuild before linting.
        graph = (0, graph_1.buildDocumentGraph)(workspaceRoot, documents);
        console.log(`Fixed ${changed} Markdown file${changed === 1 ? "" : "s"}.`);
    }
    if (writeState || fix) {
        await (0, graph_1.writeGraphState)(workspaceRoot, DEFAULT_STATE_FILE, graph);
    }
    let errorCount = 0;
    let warningCount = 0;
    for (const document of documents) {
        const diagnostics = (0, linter_1.lintMarkdownDocument)(document.path, document.text, graph, types_1.DEFAULT_LINT_OPTIONS);
        for (const diagnostic of diagnostics) {
            printDiagnostic(document.path, diagnostic);
            if (diagnostic.severity === "error") {
                errorCount += 1;
            }
            else if (diagnostic.severity === "warning") {
                warningCount += 1;
            }
        }
    }
    // Pluralize the file count consistently with the "Fixed N Markdown file(s)"
    // message above (previously always printed "files", even for 1).
    console.log(`\nChecked ${documents.length} Markdown file${documents.length === 1 ? "" : "s"}: ${errorCount} errors, ${warningCount} warnings.`);
    process.exitCode = errorCount > 0 ? 1 : 0;
}
|
|
64
|
+
/**
 * Autofix all documents in memory, then flush changed ones to disk.
 * Fixes can cascade (correcting one header changes dates that other headers
 * reference), so run up to three passes, rebuilding the graph after each.
 * Returns the number of files whose content changed.
 */
async function fixDocuments(workspaceRoot, documents, initialGraph) {
    const MAX_PASSES = 3;
    const changedPaths = new Set();
    let graph = initialGraph;
    for (let pass = 0; pass < MAX_PASSES; pass += 1) {
        let passChanged = 0;
        for (const document of documents) {
            const fixed = (0, linter_1.fixMarkdownDocument)(document.path, document.text, graph);
            if (fixed === document.text) {
                continue;
            }
            document.text = fixed;
            changedPaths.add(document.path);
            passChanged += 1;
        }
        // Refresh the graph so the next pass sees this pass's corrections.
        graph = (0, graph_1.buildDocumentGraph)(workspaceRoot, documents);
        if (passChanged === 0) {
            break;
        }
    }
    // Persist only the documents that actually changed.
    for (const document of documents) {
        if (changedPaths.has(document.path)) {
            await promises_1.default.writeFile(node_path_1.default.join(workspaceRoot, document.path), document.text, "utf8");
        }
    }
    return changedPaths.size;
}
|
|
91
|
+
/**
 * Read every .md file under the workspace (excluded directories are pruned
 * by walk) and return { path, text } records keyed by workspace-relative
 * POSIX paths, sorted for deterministic output.
 */
async function collectMarkdownDocuments(workspaceRoot) {
    const allPaths = await walk(workspaceRoot);
    const markdownPaths = allPaths.filter((candidate) => candidate.endsWith(".md"));
    const documents = [];
    for (const absolutePath of markdownPaths) {
        const relativePath = node_path_1.default.relative(workspaceRoot, absolutePath);
        documents.push({
            path: (0, graph_1.normalizeWorkspacePath)(relativePath),
            text: await promises_1.default.readFile(absolutePath, "utf8")
        });
    }
    return documents.sort((a, b) => a.path.localeCompare(b.path));
}
|
|
107
|
+
/**
 * Recursively list all files under `directory`, skipping any entry whose
 * name is in EXCLUDED_DIRS (.git, node_modules, .hdlinter, dist).
 * Returns absolute paths; symlinks are neither followed nor listed.
 */
async function walk(directory) {
    const results = [];
    const entries = await promises_1.default.readdir(directory, { withFileTypes: true });
    for (const entry of entries) {
        if (EXCLUDED_DIRS.has(entry.name)) {
            continue;
        }
        const absolutePath = node_path_1.default.join(directory, entry.name);
        if (entry.isDirectory()) {
            const nested = await walk(absolutePath);
            for (const nestedPath of nested) {
                results.push(nestedPath);
            }
        }
        else if (entry.isFile()) {
            results.push(absolutePath);
        }
    }
    return results;
}
|
|
124
|
+
/**
 * Print one diagnostic as a grep-friendly line: "path:line:col SEVERITY CODE message".
 * Diagnostic positions are 0-based, so 1 is added for human-readable output.
 */
function printDiagnostic(documentPath, diagnostic) {
    const line = diagnostic.line + 1;
    const column = diagnostic.character + 1;
    const severity = diagnostic.severity.toUpperCase();
    console.log(`${documentPath}:${line}:${column} ${severity} ${diagnostic.code} ${diagnostic.message}`);
}
|
|
128
|
+
/**
 * Print CLI usage text for `--help`.
 * NOTE: keep the option list in sync with VALID_FLAGS above.
 */
function printHelp() {
    console.log(`hdlinter

Usage:
  hdlinter [workspace] [options]

Options:
  --fix            Autofix Markdown header issues and write .hdlinter/state.json
  --write-state    Rebuild .hdlinter/state.json without modifying documents
  -v, --version    Print version
  -h, --help       Print this help

Examples:
  hdlinter
  hdlinter docs --write-state
  hdlinter ../aicompliance --fix`);
}
|
|
145
|
+
/**
 * Read this package's own version from package.json.
 * dist/ is one level below the package root, hence "../package.json".
 */
function readPackageVersion() {
    // Named to avoid shadowing the ambient CommonJS `require`.
    const requireFromHere = (0, node_module_1.createRequire)(__filename);
    const manifest = requireFromHere("../package.json");
    return manifest.version ?? "0.0.0";
}
|
|
150
|
+
// Kick off the CLI. Any uncaught failure is reported on stderr and mapped to
// exit code 2 (usage/runtime error), distinct from 1 = lint errors found.
main().catch((error) => {
    const message = error instanceof Error ? error.message : String(error);
    console.error(message);
    process.exitCode = 2;
});
|
|
154
|
+
//# sourceMappingURL=cli.js.map
|
|
@@ -0,0 +1,296 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
+
}) : function(o, v) {
|
|
16
|
+
o["default"] = v;
|
|
17
|
+
});
|
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
19
|
+
var ownKeys = function(o) {
|
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
21
|
+
var ar = [];
|
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
23
|
+
return ar;
|
|
24
|
+
};
|
|
25
|
+
return ownKeys(o);
|
|
26
|
+
};
|
|
27
|
+
return function (mod) {
|
|
28
|
+
if (mod && mod.__esModule) return mod;
|
|
29
|
+
var result = {};
|
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
31
|
+
__setModuleDefault(result, mod);
|
|
32
|
+
return result;
|
|
33
|
+
};
|
|
34
|
+
})();
|
|
35
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
36
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
37
|
+
};
|
|
38
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
39
|
+
exports.activate = activate;
|
|
40
|
+
exports.deactivate = deactivate;
|
|
41
|
+
const node_path_1 = __importDefault(require("node:path"));
|
|
42
|
+
const vscode = __importStar(require("vscode"));
|
|
43
|
+
const graph_1 = require("./graph");
|
|
44
|
+
const linter_1 = require("./linter");
|
|
45
|
+
const types_1 = require("./types");
|
|
46
|
+
const SOURCE = "hdlinter";
|
|
47
|
+
/**
 * Extension entry point. A single controller owns the diagnostic collection,
 * output channel, commands, and workspace listeners; it is pushed onto
 * context.subscriptions so VS Code disposes it on deactivation.
 */
function activate(context) {
    const lintController = new HeaderLintController(context);
    context.subscriptions.push(lintController);
    lintController.activate();
}
|
|
52
|
+
/**
 * Nothing to do here: everything was registered via context.subscriptions in
 * activate(), and VS Code disposes those automatically on shutdown.
 */
function deactivate() {
}
|
|
55
|
+
/**
 * Owns all extension state: the diagnostic collection, the output channel,
 * the per-workspace-folder document graphs, and every command/listener
 * registration. Disposed via context.subscriptions.
 */
class HeaderLintController {
    context;
    // Shared diagnostic collection; entries are tagged source === SOURCE by
    // toVsCodeDiagnostic so the code-action provider can recognize them.
    diagnostics = vscode.languages.createDiagnosticCollection(SOURCE);
    output = vscode.window.createOutputChannel("hdlinter");
    // Map of workspace-folder URI string -> DocumentGraph; rebuilt on save,
    // on configuration change, and on the explicit rebuild command.
    graphByRoot = new Map();
    disposed = false;
    constructor(context) {
        this.context = context;
    }
    /** Register the code-action provider, commands, and workspace listeners, then do an initial lint. */
    activate() {
        this.context.subscriptions.push(this.diagnostics, this.output);
        // The provider receives a thunk (not the map itself) so it always reads
        // the latest graphs after a rebuild.
        this.context.subscriptions.push(vscode.languages.registerCodeActionsProvider({ language: "markdown", scheme: "file" }, new HeaderCodeActionProvider(() => this.graphByRoot), { providedCodeActionKinds: [vscode.CodeActionKind.QuickFix, vscode.CodeActionKind.SourceFixAll] }));
        this.context.subscriptions.push(vscode.commands.registerCommand("hdlinter.fixCurrentDocumentHeader", async () => {
            const editor = vscode.window.activeTextEditor;
            if (!editor || editor.document.languageId !== "markdown") {
                return;
            }
            // Rebuild first so the fix is computed against up-to-date dependency dates.
            await this.rebuildGraphsAndValidate();
            const graph = this.graphForDocument(editor.document);
            if (!graph) {
                return;
            }
            await applyHeaderFix(editor.document, graph);
        }));
        this.context.subscriptions.push(vscode.commands.registerCommand("hdlinter.fixWorkspaceHeaders", async () => {
            await this.fixWorkspaceHeaders();
        }));
        this.context.subscriptions.push(vscode.commands.registerCommand("hdlinter.rebuildGraphState", async () => {
            await this.rebuildGraphsAndValidate();
            vscode.window.showInformationMessage("hdlinter graph state rebuilt.");
        }));
        // Full rebuild on save (headers on disk changed) ...
        this.context.subscriptions.push(vscode.workspace.onDidSaveTextDocument(async (document) => {
            if (document.languageId === "markdown") {
                await this.rebuildGraphsAndValidate();
            }
        }));
        // ... but only a cheap single-document re-lint while typing.
        this.context.subscriptions.push(vscode.workspace.onDidChangeTextDocument((event) => {
            if (event.document.languageId === "markdown") {
                this.validateOpenDocument(event.document);
            }
        }));
        this.context.subscriptions.push(vscode.workspace.onDidChangeConfiguration((event) => {
            if (event.affectsConfiguration("hdlinter")) {
                void this.rebuildGraphsAndValidate();
            }
        }));
        // Initial lint; deliberately not awaited (activate must stay synchronous).
        void this.rebuildGraphsAndValidate();
    }
    dispose() {
        this.disposed = true;
        this.diagnostics.dispose();
        this.output.dispose();
    }
    /**
     * Rebuild the graph for every workspace folder from files on disk,
     * persist graph state, re-lint all collected documents, then re-lint any
     * dirty open editors against their in-memory (unsaved) content.
     */
    async rebuildGraphsAndValidate() {
        if (this.disposed) {
            return;
        }
        const settings = getSettings();
        if (!settings.enabled) {
            this.diagnostics.clear();
            return;
        }
        this.graphByRoot.clear();
        const folders = vscode.workspace.workspaceFolders ?? [];
        for (const folder of folders) {
            const documents = await collectWorkspaceDocuments(folder, settings);
            const graph = (0, graph_1.buildDocumentGraph)(folder.uri.fsPath, documents);
            this.graphByRoot.set(folder.uri.toString(), graph);
            try {
                await (0, graph_1.writeGraphState)(folder.uri.fsPath, settings.stateFile, graph);
            }
            catch (error) {
                // State persistence is best-effort; linting still proceeds.
                this.output.appendLine(`Failed to write graph state for ${folder.uri.fsPath}: ${String(error)}`);
            }
            for (const document of documents) {
                const uri = vscode.Uri.file(node_path_1.default.join(folder.uri.fsPath, document.path));
                const diagnostics = (0, linter_1.lintMarkdownDocument)(document.path, document.text, graph, settings.lintOptions);
                this.diagnostics.set(uri, diagnostics.map(toVsCodeDiagnostic));
            }
        }
        // Dirty editors have unsaved text that differs from what was read from
        // disk above; overwrite their diagnostics from the in-memory content.
        for (const document of vscode.workspace.textDocuments) {
            if (document.languageId === "markdown" && document.isDirty) {
                this.validateOpenDocument(document);
            }
        }
    }
    /** Lint a single open document against the cached graph (no graph rebuild). */
    validateOpenDocument(document) {
        const settings = getSettings();
        if (!settings.enabled || document.uri.scheme !== "file") {
            return;
        }
        const graph = this.graphForDocument(document);
        const folder = vscode.workspace.getWorkspaceFolder(document.uri);
        if (!graph || !folder) {
            return;
        }
        const documentPath = (0, graph_1.normalizeWorkspacePath)(node_path_1.default.relative(folder.uri.fsPath, document.uri.fsPath));
        const diagnostics = (0, linter_1.lintMarkdownDocument)(documentPath, document.getText(), graph, settings.lintOptions);
        this.diagnostics.set(document.uri, diagnostics.map(toVsCodeDiagnostic));
    }
    /** Graph for the workspace folder containing this document, if any. */
    graphForDocument(document) {
        const folder = vscode.workspace.getWorkspaceFolder(document.uri);
        if (!folder) {
            return undefined;
        }
        return this.graphByRoot.get(folder.uri.toString());
    }
    /**
     * Autofix every Markdown document in every workspace folder via a single
     * WorkspaceEdit per folder, refresh graph state, and re-lint.
     * Unlike the CLI's fixDocuments, this runs one fix pass per folder.
     */
    async fixWorkspaceHeaders() {
        const settings = getSettings();
        const folders = vscode.workspace.workspaceFolders ?? [];
        let changed = 0;
        for (const folder of folders) {
            const documents = await collectWorkspaceDocuments(folder, settings);
            let graph = (0, graph_1.buildDocumentGraph)(folder.uri.fsPath, documents);
            const edit = new vscode.WorkspaceEdit();
            for (const document of documents) {
                const fixed = (0, linter_1.fixMarkdownDocument)(document.path, document.text, graph);
                if (fixed !== document.text) {
                    const uri = vscode.Uri.file(node_path_1.default.join(folder.uri.fsPath, document.path));
                    // Range must be computed from the PRE-fix text, so replace
                    // before updating document.text below.
                    edit.replace(uri, fullDocumentRangeFromText(document.text), fixed);
                    document.text = fixed;
                    changed += 1;
                }
            }
            // NOTE(review): `changed` accumulates across folders, so a folder with
            // no fixes still applies its (empty) edit when an earlier folder
            // changed something — harmless, but a per-folder counter would be clearer.
            if (changed > 0) {
                await vscode.workspace.applyEdit(edit);
            }
            graph = (0, graph_1.buildDocumentGraph)(folder.uri.fsPath, documents);
            await (0, graph_1.writeGraphState)(folder.uri.fsPath, settings.stateFile, graph);
        }
        await this.rebuildGraphsAndValidate();
        vscode.window.showInformationMessage(`hdlinter fixed ${changed} Markdown file${changed === 1 ? "" : "s"}.`);
    }
}
|
|
189
|
+
/**
 * Offers a single "fix whole document" Quick Fix whenever any hdlinter
 * diagnostic is present in the requested context.
 */
class HeaderCodeActionProvider {
    graphByRoot;
    /**
     * @param graphByRoot Thunk returning the controller's live map of
     *   workspace-folder URI -> DocumentGraph, read lazily so the provider
     *   always sees the most recent rebuild.
     */
    constructor(graphByRoot) {
        this.graphByRoot = graphByRoot;
    }
    provideCodeActions(document, _range, context) {
        const folder = vscode.workspace.getWorkspaceFolder(document.uri);
        if (!folder) {
            return [];
        }
        const graph = this.graphByRoot().get(folder.uri.toString());
        if (!graph) {
            return [];
        }
        // Only act on diagnostics this extension produced.
        const ours = context.diagnostics.filter((diagnostic) => diagnostic.source === SOURCE);
        if (ours.length === 0) {
            return [];
        }
        return [createFixDocumentAction(document, graph)];
    }
}
|
|
210
|
+
/**
 * Resolve the configured include globs against one workspace folder, dedupe
 * files matched by overlapping patterns, read each file, and return
 * workspace-relative { path, text } records in a stable sorted order.
 */
async function collectWorkspaceDocuments(folder, settings) {
    const exclude = toGlobUnion(settings.exclude);
    // Keyed by absolute fsPath so a file matched by two include globs is read once.
    const documentsByFsPath = new Map();
    for (const include of settings.include) {
        const pattern = new vscode.RelativePattern(folder, include);
        const uris = await vscode.workspace.findFiles(pattern, exclude);
        for (const uri of uris) {
            if (documentsByFsPath.has(uri.fsPath)) {
                continue;
            }
            const bytes = await vscode.workspace.fs.readFile(uri);
            documentsByFsPath.set(uri.fsPath, {
                path: (0, graph_1.normalizeWorkspacePath)(node_path_1.default.relative(folder.uri.fsPath, uri.fsPath)),
                text: Buffer.from(bytes).toString("utf8")
            });
        }
    }
    const documents = [...documentsByFsPath.values()];
    return documents.sort((a, b) => a.path.localeCompare(b.path));
}
|
|
231
|
+
/**
 * Snapshot the extension's configuration, falling back to
 * DEFAULT_LINT_OPTIONS and the built-in include/exclude/state-file defaults
 * for any setting the user has not provided.
 */
function getSettings() {
    const config = vscode.workspace.getConfiguration("hdlinter");
    const defaults = types_1.DEFAULT_LINT_OPTIONS;
    const lintOptions = {
        allowedStatuses: config.get("allowedStatuses", defaults.allowedStatuses),
        requireReferenceDates: config.get("requireReferenceDates", defaults.requireReferenceDates),
        warnWhenDependencyNewer: config.get("warnWhenDependencyNewer", defaults.warnWhenDependencyNewer),
        validateSupersedesNotNewer: config.get("validateSupersedesNotNewer", defaults.validateSupersedesNotNewer)
    };
    return {
        enabled: config.get("enabled", true),
        include: config.get("include", ["**/*.md"]),
        exclude: config.get("exclude", ["**/.git/**", "**/node_modules/**", "**/.hdlinter/**"]),
        stateFile: config.get("stateFile", ".hdlinter/state.json"),
        lintOptions
    };
}
|
|
246
|
+
/**
 * findFiles accepts only a single exclude glob, so collapse multiple
 * patterns into one brace group: ["a", "b"] -> "{a,b}".
 * An empty list yields "" (exclude nothing).
 */
function toGlobUnion(patterns) {
    switch (patterns.length) {
        case 0:
            return "";
        case 1:
            return patterns[0];
        default:
            return `{${patterns.join(",")}}`;
    }
}
|
|
255
|
+
/**
 * Translate a linter diagnostic (plain object, 0-based positions) into a
 * vscode.Diagnostic tagged with our source so the code-action provider can
 * recognize it later.
 */
function toVsCodeDiagnostic(diagnostic) {
    const start = new vscode.Position(diagnostic.line, diagnostic.character);
    const end = new vscode.Position(diagnostic.endLine, diagnostic.endCharacter);
    let severity;
    if (diagnostic.severity === "error") {
        severity = vscode.DiagnosticSeverity.Error;
    }
    else if (diagnostic.severity === "warning") {
        severity = vscode.DiagnosticSeverity.Warning;
    }
    else {
        // Anything else (e.g. "info") is surfaced as Information.
        severity = vscode.DiagnosticSeverity.Information;
    }
    const result = new vscode.Diagnostic(new vscode.Range(start, end), diagnostic.message, severity);
    result.source = SOURCE;
    result.code = diagnostic.code;
    return result;
}
|
|
267
|
+
/**
 * Build the preferred Quick Fix: replace the entire document with the
 * autofixed text produced by the linter.
 */
function createFixDocumentAction(document, graph) {
    const fixedText = (0, linter_1.fixMarkdownDocument)(documentPath(document), document.getText(), graph);
    const action = new vscode.CodeAction("Fix document header lint issues", vscode.CodeActionKind.QuickFix);
    const edit = new vscode.WorkspaceEdit();
    edit.replace(document.uri, fullDocumentRange(document), fixedText);
    action.edit = edit;
    action.diagnostics = [];
    action.isPreferred = true;
    return action;
}
|
|
275
|
+
/** Replace the whole document with the linter's autofixed text via a WorkspaceEdit. */
async function applyHeaderFix(document, graph) {
    const fixedText = (0, linter_1.fixMarkdownDocument)(documentPath(document), document.getText(), graph);
    const edit = new vscode.WorkspaceEdit();
    edit.replace(document.uri, fullDocumentRange(document), fixedText);
    await vscode.workspace.applyEdit(edit);
}
|
|
280
|
+
/** Range covering an open document: start of line 0 through the end of its last line. */
function fullDocumentRange(document) {
    const lastLineIndex = Math.max(0, document.lineCount - 1);
    const documentEnd = document.lineAt(lastLineIndex).range.end;
    return new vscode.Range(new vscode.Position(0, 0), documentEnd);
}
|
|
284
|
+
/**
 * Like fullDocumentRange, but for text that is not backed by an open editor
 * (handles both \n and \r\n line endings).
 */
function fullDocumentRangeFromText(text) {
    const lines = text.split(/\r?\n/);
    const lastIndex = Math.max(0, lines.length - 1);
    const textEnd = new vscode.Position(lastIndex, lines[lastIndex].length);
    return new vscode.Range(new vscode.Position(0, 0), textEnd);
}
|
|
289
|
+
/**
 * Workspace-relative POSIX path for a document; falls back to the bare file
 * name when the document lives outside every workspace folder.
 */
function documentPath(document) {
    const folder = vscode.workspace.getWorkspaceFolder(document.uri);
    const relative = folder
        ? node_path_1.default.relative(folder.uri.fsPath, document.uri.fsPath)
        : node_path_1.default.basename(document.uri.fsPath);
    return (0, graph_1.normalizeWorkspacePath)(relative);
}
|
|
296
|
+
//# sourceMappingURL=extension.js.map
|
package/dist/graph.d.ts
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
import { DocumentGraph, DocumentInput } from "./types";
|
|
2
|
+
export declare function normalizeWorkspacePath(filePath: string): string;
|
|
3
|
+
export declare function hashSha256(text: string): string;
|
|
4
|
+
export declare function buildDocumentGraph(workspaceRoot: string, documents: DocumentInput[]): DocumentGraph;
|
|
5
|
+
export declare function writeGraphState(workspaceRoot: string, stateFile: string, graph: DocumentGraph): Promise<void>;
|
|
6
|
+
export declare function readGraphState(workspaceRoot: string, stateFile: string): Promise<DocumentGraph | undefined>;
|
package/dist/graph.js
ADDED
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.normalizeWorkspacePath = normalizeWorkspacePath;
|
|
7
|
+
exports.hashSha256 = hashSha256;
|
|
8
|
+
exports.buildDocumentGraph = buildDocumentGraph;
|
|
9
|
+
exports.writeGraphState = writeGraphState;
|
|
10
|
+
exports.readGraphState = readGraphState;
|
|
11
|
+
const node_crypto_1 = __importDefault(require("node:crypto"));
|
|
12
|
+
const promises_1 = __importDefault(require("node:fs/promises"));
|
|
13
|
+
const node_path_1 = __importDefault(require("node:path"));
|
|
14
|
+
const header_1 = require("./header");
|
|
15
|
+
/**
 * Convert OS-specific separators to POSIX and drop a leading "./" so the
 * same document always yields the same graph key on every platform.
 */
function normalizeWorkspacePath(filePath) {
    const posixPath = filePath.split(node_path_1.default.sep).join(node_path_1.default.posix.sep);
    return posixPath.replace(/^\.\//, "");
}
|
|
18
|
+
/** Hex SHA-256 of the document text; used to detect content drift between graph rebuilds. */
function hashSha256(text) {
    const hash = node_crypto_1.default.createHash("sha256");
    hash.update(text, "utf8");
    return hash.digest("hex");
}
|
|
21
|
+
/**
 * Build the document graph: one node per Markdown document (header fields,
 * content hash, title) and one edge per "Depends on" / "Supersedes"
 * reference. Each document's header is parsed exactly once and reused for
 * both the node table and the edge scan (the original parsed every header
 * twice — once per loop).
 */
function buildDocumentGraph(workspaceRoot, documents) {
    const normalizedRoot = node_path_1.default.resolve(workspaceRoot);
    const parsed = documents.map((document) => ({
        path: normalizeWorkspacePath(document.path),
        text: document.text,
        header: (0, header_1.parseHeader)(document.text)
    }));
    const nodes = {};
    for (const entry of parsed) {
        nodes[entry.path] = {
            path: entry.path,
            hashSha256: hashSha256(entry.text),
            status: entry.header.values.Status,
            owner: entry.header.values.Owner,
            reviewers: entry.header.values.Reviewers,
            lastUpdated: entry.header.values["Last updated"],
            milestone: entry.header.values.Milestone,
            title: (0, header_1.titleFromMarkdown)(entry.text)
        };
    }
    // Edges are collected after ALL nodes exist so targetExists/actualLastUpdated
    // can be resolved regardless of document order.
    const edges = [];
    for (const entry of parsed) {
        collectEdges("dependsOn", entry.path, entry.header.values["Depends on"], nodes, edges);
        collectEdges("supersedes", entry.path, entry.header.values.Supersedes, nodes, edges);
    }
    return {
        version: 1,
        generatedAt: new Date().toISOString(),
        workspaceRoot: normalizedRoot,
        documents: nodes,
        edges
    };
}
|
|
53
|
+
/**
 * Append one edge per reference found in a header value ("Depends on" /
 * "Supersedes"), recording whether the target document exists in `nodes`
 * and what its actual Last updated date is (for drift detection).
 * Unresolvable targets keep the raw reference string as `to`.
 */
function collectEdges(kind, from, value, nodes, edges) {
    for (const reference of (0, header_1.extractReferenceCandidates)(value)) {
        const resolved = (0, header_1.resolveReferenceTarget)(from, reference.target);
        const to = resolved ?? reference.target;
        const targetNode = nodes[to];
        const edge = {
            from,
            to,
            kind,
            label: reference.label,
            declaredLastUpdated: reference.declaredLastUpdated,
            actualLastUpdated: targetNode?.lastUpdated,
            targetExists: Boolean(targetNode)
        };
        edges.push(edge);
    }
}
|
|
69
|
+
/**
 * Persist the graph as pretty-printed JSON (with trailing newline),
 * creating the state directory if it does not exist.
 */
async function writeGraphState(workspaceRoot, stateFile, graph) {
    const absolutePath = node_path_1.default.resolve(workspaceRoot, stateFile);
    const directory = node_path_1.default.dirname(absolutePath);
    await promises_1.default.mkdir(directory, { recursive: true });
    const payload = `${JSON.stringify(graph, null, 2)}\n`;
    await promises_1.default.writeFile(absolutePath, payload, "utf8");
}
|
|
74
|
+
/**
 * Best-effort load of previously written graph state; a missing or
 * unparsable state file simply yields undefined (callers rebuild instead).
 */
async function readGraphState(workspaceRoot, stateFile) {
    try {
        const absolutePath = node_path_1.default.resolve(workspaceRoot, stateFile);
        const raw = await promises_1.default.readFile(absolutePath, "utf8");
        return JSON.parse(raw);
    }
    catch {
        return undefined;
    }
}
|
|
84
|
+
//# sourceMappingURL=graph.js.map
|
package/dist/header.d.ts
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import { HeaderValues, ParsedHeader, ReferenceParseResult } from "./types";
/** Split text into lines, tolerating CRLF endings and a leading BOM. */
export declare function splitLines(text: string): string[];
/** Strictly parse the fixed-order metadata header at the top of a document. */
export declare function parseHeader(text: string): ParsedHeader;
/** Collect any recognizable header fields from the first lines of a document. */
export declare function extractHeaderValuesLoose(text: string): HeaderValues;
/** True when the value is a real calendar date in strict YYYY-MM-DD form. */
export declare function isValidIsoDate(value: string | undefined): boolean;
/** Today's date (UTC) as YYYY-MM-DD. */
export declare function todayIso(): string;
/** Compare two ISO dates: -1/0/1, or undefined when either is invalid. */
export declare function compareIsoDate(a: string | undefined, b: string | undefined): number | undefined;
/** Strictly parse a `Depends on` / `Supersedes` header value. */
export declare function parseReferenceValue(value: string | undefined): ReferenceParseResult;
/** A loosely-extracted reference, prior to validation. */
export interface ReferenceCandidate {
    label: string;
    target: string;
    declaredLastUpdated?: string;
}
/** Best-effort extraction of reference candidates from a header value. */
export declare function extractReferenceCandidates(value: string | undefined): ReferenceCandidate[];
/** True for http(s)/mailto targets and pure in-page anchors. */
export declare function isExternalTarget(target: string): boolean;
/** Drop a `#fragment` suffix from a target. */
export declare function stripAnchor(target: string): string;
/** Resolve a target to a workspace-relative POSIX path; undefined for external targets. */
export declare function resolveReferenceTarget(currentDocumentPath: string, target: string): string | undefined;
/** Render a reference as `[label](target) [lastUpdated]`. */
export declare function formatReference(label: string, target: string, lastUpdated: string): string;
/** Text of the first `# `-level heading in a Markdown document, if any. */
export declare function titleFromMarkdown(text: string): string | undefined;
|
package/dist/header.js
ADDED
|
@@ -0,0 +1,193 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.splitLines = splitLines;
|
|
7
|
+
exports.parseHeader = parseHeader;
|
|
8
|
+
exports.extractHeaderValuesLoose = extractHeaderValuesLoose;
|
|
9
|
+
exports.isValidIsoDate = isValidIsoDate;
|
|
10
|
+
exports.todayIso = todayIso;
|
|
11
|
+
exports.compareIsoDate = compareIsoDate;
|
|
12
|
+
exports.parseReferenceValue = parseReferenceValue;
|
|
13
|
+
exports.extractReferenceCandidates = extractReferenceCandidates;
|
|
14
|
+
exports.isExternalTarget = isExternalTarget;
|
|
15
|
+
exports.stripAnchor = stripAnchor;
|
|
16
|
+
exports.resolveReferenceTarget = resolveReferenceTarget;
|
|
17
|
+
exports.formatReference = formatReference;
|
|
18
|
+
exports.titleFromMarkdown = titleFromMarkdown;
|
|
19
|
+
const node_path_1 = __importDefault(require("node:path"));
|
|
20
|
+
const types_1 = require("./types");
|
|
21
|
+
const REFERENCE_TOKEN = /^\[([^\]]+)\]\(([^)]+)\)(?:\s+\[(\d{4}-\d{2}-\d{2})\])?/;
|
|
22
|
+
const MARKDOWN_LINK_CANDIDATE = /^\[([^\]]+)\]\(([^)]+)\)(?:\s+\[([^\]]+)\])?$/;
|
|
23
|
+
/**
 * Split text into lines, accepting both LF and CRLF endings and stripping a
 * single leading UTF-8 BOM if present.
 *
 * @param {string} text Raw document text.
 * @returns {string[]} Lines without terminators.
 */
function splitLines(text) {
    const withoutBom = text.startsWith("\uFEFF") ? text.slice(1) : text;
    return withoutBom.split(/\r?\n/);
}
|
|
26
|
+
/**
 * Strictly parse the fixed-position metadata header of a document.
 *
 * Line i is expected to hold HEADER_FIELDS[i]; each line is recorded with
 * the field actually found there. Recognized field values are also collected
 * into a name -> value map regardless of position.
 *
 * @param {string} text Full document text.
 * @returns {{values: object, lines: Array, hasHeaderStart: boolean, headerLineCount: number}}
 */
function parseHeader(text) {
    const lines = splitLines(text);
    const values = {};
    const parsedLines = [];
    for (let index = 0; index < types_1.HEADER_FIELDS.length; index += 1) {
        const expectedField = types_1.HEADER_FIELDS[index];
        const raw = lines[index] ?? "";
        // Defensive: strip a BOM that survived on the very first line.
        const cleaned = index === 0 ? raw.replace(/^\uFEFF/, "") : raw;
        const match = /^([^:]+):\s*(.*)$/.exec(cleaned);
        const actualField = match?.[1]?.trim();
        const value = match?.[2]?.trim();
        if (actualField && types_1.HEADER_FIELDS.includes(actualField)) {
            values[actualField] = value ?? "";
        }
        parsedLines.push({ expectedField, actualField, value, line: index, raw });
    }
    return {
        values,
        lines: parsedLines,
        // A header is only considered present when line 1 is the Status field.
        hasHeaderStart: parsedLines[0]?.actualField === "Status",
        headerLineCount: types_1.HEADER_FIELDS.length
    };
}
|
|
53
|
+
/**
 * Collect any recognizable `Field: value` header lines from the first 20
 * lines of a document, regardless of order or position.
 *
 * @param {string} text Full document text.
 * @returns {object} Map of recognized field names to trimmed values.
 */
function extractHeaderValuesLoose(text) {
    const values = {};
    const candidates = splitLines(text).slice(0, 20);
    for (const candidate of candidates) {
        const match = /^([^:]+):\s*(.*)$/.exec(candidate);
        if (!match) {
            continue;
        }
        const [, rawField, rawValue] = match;
        const field = rawField.trim();
        if (types_1.HEADER_FIELDS.includes(field)) {
            values[field] = rawValue.trim();
        }
    }
    return values;
}
|
|
68
|
+
/**
 * True when `value` is a real calendar date in strict YYYY-MM-DD form.
 * Round-trips through the Date constructor so normalized-but-impossible
 * dates (e.g. 2023-02-29) are rejected, not silently rolled forward.
 *
 * @param {string|undefined} value Candidate date string.
 * @returns {boolean}
 */
function isValidIsoDate(value) {
    if (!value) {
        return false;
    }
    if (!/^\d{4}-\d{2}-\d{2}$/.test(value)) {
        return false;
    }
    const parsed = new Date(`${value}T00:00:00.000Z`);
    return !Number.isNaN(parsed.getTime()) && parsed.toISOString().startsWith(value);
}
|
|
78
|
+
/**
 * Today's date in YYYY-MM-DD form, based on the UTC clock
 * (toISOString always reports UTC).
 *
 * @returns {string}
 */
function todayIso() {
    const [datePart] = new Date().toISOString().split("T");
    return datePart;
}
|
|
81
|
+
/**
 * Compare two ISO YYYY-MM-DD dates.
 * Lexicographic comparison is chronological for this fixed-width format.
 *
 * @param {string|undefined} a Left date.
 * @param {string|undefined} b Right date.
 * @returns {number|undefined} -1, 0 or 1; undefined when either side is
 *   missing or not a valid ISO date.
 */
function compareIsoDate(a, b) {
    if (!isValidIsoDate(a) || !isValidIsoDate(b)) {
        return undefined;
    }
    if (a === b) {
        return 0;
    }
    return a < b ? -1 : 1;
}
|
|
89
|
+
/**
 * Strictly parse a `Depends on` / `Supersedes` header value.
 *
 * Scans left to right: each token matching REFERENCE_TOKEN
 * (`[label](target)` with an optional bracketed YYYY-MM-DD date) is
 * collected together with its character offsets in the trimmed value;
 * any other text up to the next comma is recorded as an invalid fragment.
 *
 * @param {string|undefined} value Raw header value.
 * @returns {{none: boolean, references: Array, invalidFragments: string[]}}
 *   `none` is true only for the literal value `None`. An empty or missing
 *   value yields a single empty-string invalid fragment so callers can
 *   distinguish "empty" from "clean".
 */
function parseReferenceValue(value) {
    const rawValue = (value ?? "").trim();
    if (!rawValue || rawValue === "None") {
        return { none: rawValue === "None", references: [], invalidFragments: rawValue ? [] : [""] };
    }
    const references = [];
    const invalidFragments = [];
    let cursor = 0;
    while (cursor < rawValue.length) {
        // Skip separator whitespace and commas between tokens.
        while (cursor < rawValue.length && /[\s,]/.test(rawValue[cursor])) {
            cursor += 1;
        }
        if (cursor >= rawValue.length) {
            break;
        }
        const remainder = rawValue.slice(cursor);
        const match = REFERENCE_TOKEN.exec(remainder);
        if (match) {
            const raw = match[0];
            references.push({
                label: match[1],
                target: match[2],
                declaredLastUpdated: match[3],
                raw,
                start: cursor,
                end: cursor + raw.length
            });
            cursor += raw.length;
            continue;
        }
        // Not a valid reference token: consume up to the next comma and
        // keep the fragment for diagnostics.
        const nextComma = rawValue.indexOf(",", cursor);
        const end = nextComma === -1 ? rawValue.length : nextComma;
        const fragment = rawValue.slice(cursor, end).trim();
        if (fragment) {
            invalidFragments.push(fragment);
        }
        cursor = end + 1;
    }
    return { none: false, references, invalidFragments };
}
|
|
129
|
+
/**
 * Best-effort extraction of reference candidates from a header value.
 *
 * Prefers the strict parse: when the whole value parses cleanly into
 * references, those are returned. Otherwise the value is split on commas
 * and each fragment (backticks stripped) is matched loosely against a
 * Markdown link; a bare path becomes a candidate labelled by its basename.
 *
 * @param {string|undefined} value Raw header value.
 * @returns {Array<{label: string, target: string, declaredLastUpdated?: string}>}
 */
function extractReferenceCandidates(value) {
    const rawValue = (value ?? "").trim();
    if (!rawValue || rawValue === "None") {
        return [];
    }
    const parsed = parseReferenceValue(rawValue);
    if (parsed.invalidFragments.length === 0 && parsed.references.length > 0) {
        return parsed.references.map((reference) => ({
            label: reference.label,
            target: reference.target,
            declaredLastUpdated: reference.declaredLastUpdated
        }));
    }
    return rawValue
        .split(",")
        .map((part) => part.trim())
        .filter(Boolean)
        .map((part) => {
        // Tolerate fragments wrapped in backticks.
        const cleaned = part.replace(/^`|`$/g, "");
        const linkMatch = MARKDOWN_LINK_CANDIDATE.exec(cleaned);
        if (linkMatch) {
            return {
                label: linkMatch[1],
                target: linkMatch[2],
                // Only keep the bracketed suffix when it is an ISO date.
                declaredLastUpdated: /^\d{4}-\d{2}-\d{2}$/.test(linkMatch[3] ?? "") ? linkMatch[3] : undefined
            };
        }
        // Bare path: label it with its basename (or the path itself).
        return {
            label: node_path_1.default.posix.basename(cleaned) || cleaned,
            target: cleaned
        };
    });
}
|
|
162
|
+
/**
 * True for targets the linter does not resolve within the workspace:
 * http/https URLs, mailto links, and pure in-page anchors.
 *
 * @param {string} target Reference target string.
 * @returns {boolean}
 */
function isExternalTarget(target) {
    const externalPrefix = /^(https?:|mailto:|#)/i;
    return externalPrefix.test(target);
}
|
|
165
|
+
/**
 * Drop a `#fragment` suffix from a target path.
 *
 * @param {string} target Reference target string.
 * @returns {string} The portion before the first `#` (may be empty).
 */
function stripAnchor(target) {
    const hashIndex = target.indexOf("#");
    return hashIndex === -1 ? target : target.slice(0, hashIndex);
}
|
|
168
|
+
/**
 * Resolve a reference target to a normalized workspace-relative POSIX path.
 *
 * External targets (http/https/mailto/pure anchors) and anchor-only targets
 * resolve to undefined. A leading `/` is treated as workspace-root-relative;
 * anything else is resolved against the directory of the referencing
 * document. POSIX path semantics are used throughout so results are
 * platform-independent.
 *
 * @param {string} currentDocumentPath Workspace-relative path of the
 *   referencing document.
 * @param {string} target Raw reference target.
 * @returns {string|undefined} Normalized workspace-relative path, or
 *   undefined when the target is not a workspace document.
 */
function resolveReferenceTarget(currentDocumentPath, target) {
    if (isExternalTarget(target)) {
        return undefined;
    }
    const targetWithoutAnchor = stripAnchor(target);
    if (!targetWithoutAnchor) {
        return undefined;
    }
    if (targetWithoutAnchor.startsWith("/")) {
        // Workspace-root-relative: drop the leading slash and normalize.
        return node_path_1.default.posix.normalize(targetWithoutAnchor.slice(1));
    }
    return node_path_1.default.posix.normalize(node_path_1.default.posix.join(node_path_1.default.posix.dirname(currentDocumentPath), targetWithoutAnchor));
}
|
|
181
|
+
/**
 * Render a reference in canonical `[label](target) [lastUpdated]` form.
 *
 * @param {string} label Link text.
 * @param {string} target Link target.
 * @param {string} lastUpdated ISO date shown in brackets.
 * @returns {string}
 */
function formatReference(label, target, lastUpdated) {
    const link = `[${label}](${target})`;
    return `${link} [${lastUpdated}]`;
}
|
|
184
|
+
/**
 * Extract the text of the first level-1 heading (`# Title`) in a Markdown
 * document, with surrounding whitespace trimmed.
 *
 * @param {string} text Full Markdown text.
 * @returns {string|undefined} Heading text, or undefined when none exists.
 */
function titleFromMarkdown(text) {
    const heading = /^#\s+(.+?)\s*$/;
    for (const line of text.replace(/^\uFEFF/, "").split(/\r?\n/)) {
        const found = heading.exec(line);
        if (found) {
            return found[1];
        }
    }
    return undefined;
}
|
|
193
|
+
//# sourceMappingURL=header.js.map
|
package/dist/linter.d.ts
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
import { DocumentGraph, LintDiagnostic, LintOptions } from "./types";
/** Lint one Markdown document's metadata header against the rules in `options`. */
export declare function lintMarkdownDocument(documentPath: string, text: string, graph: DocumentGraph, options: LintOptions): LintDiagnostic[];
/** Build the canonical header text (one line per header field) for a document. */
export declare function buildNormalizedHeaderText(documentPath: string, text: string, graph: DocumentGraph, today?: string, forceLastUpdated?: boolean): string;
/** Return the full document text with its header rewritten in canonical form. */
export declare function fixMarkdownDocument(documentPath: string, text: string, graph: DocumentGraph, today?: string, forceLastUpdated?: boolean): string;
/** Normalize a reference field value to `[label](target) [date]` form, or `None`. */
export declare function normalizeReferenceValue(documentPath: string, value: string, graph: DocumentGraph, today?: string): string;
/** Zero-based line range occupied by the existing header (end is exclusive). */
export declare function headerReplacementRange(text: string): {
    startLine: number;
    endLineExclusive: number;
};
/** Latest Last updated among the document's outgoing edges, or the fallback date. */
export declare function recommendedReviewDate(documentPath: string, graph: DocumentGraph, fallbackToday?: string): string;
|
package/dist/linter.js
ADDED
|
@@ -0,0 +1,210 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.lintMarkdownDocument = lintMarkdownDocument;
|
|
4
|
+
exports.buildNormalizedHeaderText = buildNormalizedHeaderText;
|
|
5
|
+
exports.fixMarkdownDocument = fixMarkdownDocument;
|
|
6
|
+
exports.normalizeReferenceValue = normalizeReferenceValue;
|
|
7
|
+
exports.headerReplacementRange = headerReplacementRange;
|
|
8
|
+
exports.recommendedReviewDate = recommendedReviewDate;
|
|
9
|
+
const header_1 = require("./header");
|
|
10
|
+
const types_1 = require("./types");
|
|
11
|
+
const EMPTY_RANGE = { character: 0, endCharacter: 1 };
|
|
12
|
+
/**
 * Lint one Markdown document's metadata header.
 *
 * Checks, in order: header presence (line 1 must be `Status:`), field
 * order and non-emptiness, a blank separator line after the header, the
 * Status value against the allowed list, the `Last updated` date, and both
 * reference fields (`Depends on`, `Supersedes`).
 *
 * @param {string} documentPath Workspace-relative path of the document.
 * @param {string} text Full document text.
 * @param {object} graph Document graph used to resolve reference targets.
 * @param {object} options Lint rule toggles (see LintOptions).
 * @returns {Array} Diagnostics with zero-based line/character ranges.
 */
function lintMarkdownDocument(documentPath, text, graph, options) {
    const diagnostics = [];
    const lines = (0, header_1.splitLines)(text);
    const header = (0, header_1.parseHeader)(text);
    if (!header.hasHeaderStart) {
        diagnostics.push(diagnostic("missing-header", "error", "Markdown file must start with a document metadata header.", 0, 0, 1));
    }
    for (const headerLine of header.lines) {
        const expected = headerLine.expectedField;
        const rawLength = headerLine.raw.length || 1;
        // Wrong field on this line: report the order error and skip the
        // emptiness check for it.
        if (headerLine.actualField !== expected) {
            diagnostics.push(diagnostic("invalid-field-order", "error", `Expected header field \`${expected}:\` on line ${headerLine.line + 1}.`, headerLine.line, 0, rawLength));
            continue;
        }
        if ((headerLine.value ?? "").trim() === "") {
            diagnostics.push(diagnostic("empty-header-field", "error", `Header field \`${expected}\` must not be empty.`, headerLine.line, 0, rawLength));
        }
    }
    // The line immediately after the header block should be blank.
    if (lines.length > types_1.HEADER_FIELDS.length && lines[types_1.HEADER_FIELDS.length].trim() !== "") {
        diagnostics.push(diagnostic("missing-header-separator", "warning", "Header should be followed by one blank line before document content.", types_1.HEADER_FIELDS.length, 0, lines[types_1.HEADER_FIELDS.length].length || 1));
    }
    const status = header.values.Status;
    if (status && !options.allowedStatuses.includes(status)) {
        diagnostics.push(diagnostic("invalid-status", "error", `Status must be one of: ${options.allowedStatuses.join(", ")}.`, fieldLine("Status"), 0, lines[fieldLine("Status")]?.length || 1));
    }
    const lastUpdated = header.values["Last updated"];
    if (lastUpdated && !(0, header_1.isValidIsoDate)(lastUpdated)) {
        diagnostics.push(diagnostic("invalid-last-updated", "error", "`Last updated` must be a valid YYYY-MM-DD date.", fieldLine("Last updated"), 0, lines[fieldLine("Last updated")]?.length || 1));
    }
    validateReferences("Depends on", "dependsOn", documentPath, header.values["Depends on"], lastUpdated, graph, options, diagnostics);
    validateReferences("Supersedes", "supersedes", documentPath, header.values.Supersedes, lastUpdated, graph, options, diagnostics);
    return diagnostics;
    // Line index of a header field (fields appear in HEADER_FIELDS order).
    function fieldLine(field) {
        return types_1.HEADER_FIELDS.indexOf(field);
    }
}
|
|
48
|
+
/**
 * Validate one reference header field (`Depends on` or `Supersedes`) and
 * append diagnostics in place.
 *
 * Per reference: requires a bracketed date (when configured), validates the
 * date format, checks that the resolved target exists in the graph, flags
 * declared dates that disagree with the target's actual `Last updated`, and
 * warns when a dependency (or superseded document) is newer than this one.
 *
 * @param {string} field Header field name being validated.
 * @param {"dependsOn"|"supersedes"} kind Edge kind, selects the drift rule.
 * @param {string} documentPath Workspace-relative path of this document.
 * @param {string|undefined} value Raw header value.
 * @param {string|undefined} currentLastUpdated This document's Last updated.
 * @param {object} graph Graph with `documents` keyed by resolved path.
 * @param {object} options Lint rule toggles.
 * @param {Array} diagnostics Output array, mutated in place.
 */
function validateReferences(field, kind, documentPath, value, currentLastUpdated, graph, options, diagnostics) {
    const line = types_1.HEADER_FIELDS.indexOf(field);
    const parsed = (0, header_1.parseReferenceValue)(value);
    if (!value || value.trim() === "") {
        diagnostics.push(diagnostic("empty-reference-field", "error", `\`${field}\` must be \`None\` or one or more dated Markdown links.`, line, 0, 1));
        return;
    }
    // The literal value `None` is explicitly allowed.
    if (parsed.none) {
        return;
    }
    // Anything that did not parse as a strict reference token is an error,
    // but the valid references found are still checked individually below.
    if (parsed.invalidFragments.length > 0 || parsed.references.length === 0) {
        diagnostics.push(diagnostic("raw-header-reference", "error", `\`${field}\` references must be clickable Markdown links with bracketed Last updated dates.`, line, 0, value.length || 1));
    }
    for (const reference of parsed.references) {
        if (options.requireReferenceDates && !reference.declaredLastUpdated) {
            diagnostics.push(diagnostic("missing-reference-date", "error", `Reference \`${reference.target}\` must include the target document's Last updated date in brackets.`, line, reference.start, reference.end));
        }
        if (reference.declaredLastUpdated && !(0, header_1.isValidIsoDate)(reference.declaredLastUpdated)) {
            diagnostics.push(diagnostic("invalid-reference-date", "error", `Reference date for \`${reference.target}\` must be YYYY-MM-DD.`, line, reference.start, reference.end));
        }
        // External targets resolve to undefined and are not checked further.
        const resolvedTarget = (0, header_1.resolveReferenceTarget)(documentPath, reference.target);
        if (!resolvedTarget) {
            continue;
        }
        const targetNode = graph.documents[resolvedTarget];
        if (!targetNode) {
            diagnostics.push(diagnostic("missing-reference-target", "error", `Referenced Markdown document does not exist in the workspace graph: ${reference.target}`, line, reference.start, reference.end));
            continue;
        }
        if (targetNode.lastUpdated && reference.declaredLastUpdated && targetNode.lastUpdated !== reference.declaredLastUpdated) {
            diagnostics.push(diagnostic("stale-reference-date", "error", `Reference date for \`${reference.target}\` is ${reference.declaredLastUpdated}, but target Last updated is ${targetNode.lastUpdated}.`, line, reference.start, reference.end));
        }
        if (kind === "dependsOn" && options.warnWhenDependencyNewer) {
            const comparison = (0, header_1.compareIsoDate)(currentLastUpdated, targetNode.lastUpdated);
            if (comparison !== undefined && comparison < 0) {
                diagnostics.push(diagnostic("dependency-newer-than-document", "warning", `Dependency \`${reference.target}\` was updated after this document; review for drift.`, line, reference.start, reference.end));
            }
        }
        if (kind === "supersedes" && options.validateSupersedesNotNewer) {
            const comparison = (0, header_1.compareIsoDate)(currentLastUpdated, targetNode.lastUpdated);
            if (comparison !== undefined && comparison < 0) {
                diagnostics.push(diagnostic("superseded-document-newer", "warning", `Superseded document \`${reference.target}\` is newer than this document.`, line, reference.start, reference.end));
            }
        }
    }
}
|
|
94
|
+
/**
 * Build the canonical header text (one `Field: value` line per header
 * field, joined with newlines, no trailing newline).
 *
 * Values are salvaged loosely from the existing text; missing ones fall
 * back to defaults ("TODO" owner/reviewers, "M0" milestone, "None"
 * references). `Last updated` is recomputed from the graph when
 * forceLastUpdated is set, kept when it is already a valid ISO date, and
 * otherwise set to `today`.
 *
 * @param {string} documentPath Workspace-relative path of the document.
 * @param {string} text Current document text.
 * @param {object} graph Document graph (for reference dates).
 * @param {string} [today] ISO date to use as the fallback, defaults to now.
 * @param {boolean} [forceLastUpdated] Recompute `Last updated` when true.
 * @returns {string}
 */
function buildNormalizedHeaderText(documentPath, text, graph, today = (0, header_1.todayIso)(), forceLastUpdated = false) {
    const loose = (0, header_1.extractHeaderValuesLoose)(text);
    // forceLastUpdated -> recompute; else keep a valid existing date; else today.
    const lastUpdated = forceLastUpdated
        ? recommendedReviewDate(documentPath, graph, today)
        : (0, header_1.isValidIsoDate)(loose["Last updated"])
            ? loose["Last updated"]
            : today;
    const values = {
        Status: normalizeStatus(loose.Status),
        Owner: loose.Owner || "TODO",
        Reviewers: loose.Reviewers || "TODO",
        "Last updated": lastUpdated,
        Milestone: loose.Milestone || "M0",
        "Depends on": normalizeReferenceValue(documentPath, loose["Depends on"] || "None", graph, today),
        Supersedes: normalizeReferenceValue(documentPath, loose.Supersedes || "None", graph, today)
    };
    return types_1.HEADER_FIELDS.map((field) => `${field}: ${values[field]}`).join("\n");
}
|
|
112
|
+
/**
 * Rewrite a document so it starts with a canonical metadata header,
 * followed by a blank line and the original body (header lines stripped).
 *
 * @param {string} documentPath Workspace-relative path of the document.
 * @param {string} text Current document text.
 * @param {object} graph Document graph used for reference dates.
 * @param {string} [today] Fallback ISO date, defaults to now.
 * @param {boolean} [forceLastUpdated] Recompute `Last updated` (default true).
 * @returns {string} The fixed document text.
 */
function fixMarkdownDocument(documentPath, text, graph, today = (0, header_1.todayIso)(), forceLastUpdated = true) {
    const normalizedHeader = buildNormalizedHeaderText(documentPath, text, graph, today, forceLastUpdated);
    const remainingBody = stripExistingHeader(text);
    if (!remainingBody) {
        // Header-only document: end with a single newline.
        return `${normalizedHeader}\n`;
    }
    return `${normalizedHeader}\n\n${remainingBody}`;
}
|
|
117
|
+
/**
 * Map a loose Status value onto its canonical spelling
 * (case-insensitive match against DEFAULT_STATUSES), defaulting to "Draft"
 * for empty or unrecognized input.
 *
 * @param {string|undefined} value Raw Status value.
 * @returns {string} Canonical status.
 */
function normalizeStatus(value) {
    const requested = (value ?? "").trim().toLowerCase();
    if (requested === "") {
        return "Draft";
    }
    for (const status of DEFAULT_STATUSES) {
        if (status.toLowerCase() === requested) {
            return status;
        }
    }
    return "Draft";
}
|
|
125
|
+
/**
 * Normalize a reference field value into canonical
 * `[label](target) [date]` entries joined by ", ", or `None`.
 *
 * Candidates whose resolved workspace target is absent from the graph are
 * dropped. The bracketed date is chosen in priority order: the target's
 * actual Last updated, then the candidate's declared date, then `today`
 * for external targets or known targets without a date; candidates with
 * no usable date are dropped.
 *
 * @param {string} documentPath Workspace-relative path of this document.
 * @param {string} value Raw header value.
 * @param {object} graph Document graph with `documents` keyed by path.
 * @param {string} [today] Fallback ISO date, defaults to now.
 * @returns {string}
 */
function normalizeReferenceValue(documentPath, value, graph, today = (0, header_1.todayIso)()) {
    const trimmed = value.trim();
    if (!trimmed || trimmed === "None") {
        return "None";
    }
    const candidates = (0, header_1.extractReferenceCandidates)(trimmed);
    if (candidates.length === 0) {
        return "None";
    }
    const references = candidates
        .map((candidate) => {
        const resolvedTarget = (0, header_1.resolveReferenceTarget)(documentPath, candidate.target);
        const targetNode = resolvedTarget ? graph.documents[resolvedTarget] : undefined;
        // Workspace target that doesn't exist: drop the candidate.
        if (resolvedTarget && !targetNode) {
            return undefined;
        }
        const targetLastUpdated = targetNode?.lastUpdated;
        // Date priority: target's actual date > declared date > today
        // (external or known target) > drop.
        const date = (0, header_1.isValidIsoDate)(targetLastUpdated)
            ? targetLastUpdated
            : (0, header_1.isValidIsoDate)(candidate.declaredLastUpdated)
                ? candidate.declaredLastUpdated
                : (0, header_1.isExternalTarget)(candidate.target)
                    ? today
                    : targetNode
                        ? today
                        : undefined;
        if (!date) {
            return undefined;
        }
        return (0, header_1.formatReference)(candidate.label, candidate.target, date);
    })
        .filter((reference) => Boolean(reference));
    return references.length > 0 ? references.join(", ") : "None";
}
|
|
159
|
+
/**
 * Compute the zero-based line range the canonical header should replace.
 *
 * When no recognizable header fields exist, returns an empty range at the
 * top of the file (pure insertion). Otherwise the range covers the header
 * lines, plus the following line when it is blank (the separator).
 *
 * @param {string} text Full document text.
 * @returns {{startLine: number, endLineExclusive: number}}
 */
function headerReplacementRange(text) {
    const looseValues = (0, header_1.extractHeaderValuesLoose)(text);
    if (Object.keys(looseValues).length === 0) {
        return { startLine: 0, endLineExclusive: 0 };
    }
    const lines = (0, header_1.splitLines)(text);
    const separator = lines[types_1.HEADER_FIELDS.length];
    const endLineExclusive = separator?.trim() === ""
        ? types_1.HEADER_FIELDS.length + 1
        : types_1.HEADER_FIELDS.length;
    return { startLine: 0, endLineExclusive };
}
|
|
172
|
+
/**
 * Latest `Last updated` date among the document's outgoing edges, never
 * earlier than the fallback date. ISO dates compare chronologically as
 * strings, so a plain `>` comparison suffices.
 *
 * @param {string} documentPath Workspace-relative path of the document.
 * @param {object} graph Document graph with an `edges` array.
 * @param {string} [fallbackToday] Minimum/fallback ISO date, defaults to now.
 * @returns {string}
 */
function recommendedReviewDate(documentPath, graph, fallbackToday = (0, header_1.todayIso)()) {
    let latest = fallbackToday;
    for (const edge of graph.edges) {
        if (edge.from !== documentPath) {
            continue;
        }
        const candidate = edge.actualLastUpdated;
        if ((0, header_1.isValidIsoDate)(candidate) && candidate > latest) {
            latest = candidate;
        }
    }
    return latest;
}
|
|
179
|
+
/**
 * Remove recognized `Field: value` header lines from the first 30 lines of
 * a document and return the remaining body with leading blank lines and
 * trailing whitespace stripped.
 *
 * @param {string} text Full document text.
 * @returns {string} Body text, possibly empty.
 */
function stripExistingHeader(text) {
    const lines = (0, header_1.splitLines)(text);
    const removable = new Set();
    const scanLimit = Math.min(lines.length, 30);
    for (let index = 0; index < scanLimit; index += 1) {
        const match = /^([^:]+):\s*(.*)$/.exec(lines[index]);
        if (match && types_1.HEADER_FIELDS.includes(match[1].trim())) {
            removable.add(index);
        }
    }
    const remaining = lines.filter((_line, index) => !removable.has(index));
    // Drop leading blank lines that separated the header from the body.
    while (remaining.length > 0 && remaining[0].trim() === "") {
        remaining.shift();
    }
    // NOTE: a second loop that collapsed consecutive leading blanks was
    // removed here — the loop above already guarantees the first remaining
    // line is non-blank (or the array is empty), so it could never run.
    return remaining.join("\n").replace(/^\s*\n/, "").replace(/\s+$/, "");
}
|
|
198
|
+
/**
 * Construct a single-line LintDiagnostic. The end column is widened to at
 * least one character past the start so ranges are never zero-width.
 *
 * @param {string} code Stable diagnostic code.
 * @param {"error"|"warning"|"info"} severity Severity level.
 * @param {string} message Human-readable message.
 * @param {number} line Zero-based line index (start and end).
 * @param {number} character Zero-based start column.
 * @param {number} endCharacter Desired end column.
 * @returns {object}
 */
function diagnostic(code, severity, message, line, character, endCharacter) {
    const minimumEnd = character + EMPTY_RANGE.endCharacter;
    return {
        code,
        severity,
        message,
        line,
        character,
        endLine: line,
        endCharacter: Math.max(endCharacter, minimumEnd)
    };
}
|
|
209
|
+
// Canonical Status spellings used by normalizeStatus for case-insensitive matching.
const DEFAULT_STATUSES = ["Draft", "In Review", "Approved", "Superseded"];
|
|
210
|
+
//# sourceMappingURL=linter.js.map
|
package/dist/types.d.ts
ADDED
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
/** Required header fields, in the exact line order they must appear. */
export declare const HEADER_FIELDS: readonly ["Status", "Owner", "Reviewers", "Last updated", "Milestone", "Depends on", "Supersedes"];
export type HeaderField = (typeof HEADER_FIELDS)[number];
/** Field-to-trimmed-value map; fields may be absent. */
export type HeaderValues = Partial<Record<HeaderField, string>>;
export type Severity = "error" | "warning" | "info";
/** One physical header line paired with the field expected at its position. */
export interface HeaderLine {
    expectedField: HeaderField;
    actualField?: string;
    value?: string;
    line: number;
    raw: string;
}
/** Result of strictly parsing the fixed-position header. */
export interface ParsedHeader {
    values: HeaderValues;
    lines: HeaderLine[];
    hasHeaderStart: boolean;
    headerLineCount: number;
}
/** A strictly-parsed reference token with its character span in the value. */
export interface HeaderReference {
    label: string;
    target: string;
    declaredLastUpdated?: string;
    raw: string;
    start: number;
    end: number;
}
/** Outcome of parseReferenceValue. */
export interface ReferenceParseResult {
    none: boolean;
    references: HeaderReference[];
    invalidFragments: string[];
}
/** A document's path and full text, as fed to the graph builder. */
export interface DocumentInput {
    path: string;
    text: string;
}
/** Per-document metadata captured in the graph state. */
export interface DocumentNode {
    path: string;
    hashSha256: string;
    status?: string;
    owner?: string;
    reviewers?: string;
    lastUpdated?: string;
    milestone?: string;
    title?: string;
}
export type GraphEdgeKind = "dependsOn" | "supersedes";
/** One reference edge between two documents. */
export interface GraphEdge {
    from: string;
    to: string;
    kind: GraphEdgeKind;
    label: string;
    declaredLastUpdated?: string;
    actualLastUpdated?: string;
    targetExists: boolean;
}
/** Serialized cross-document reference graph (persisted via the state file). */
export interface DocumentGraph {
    version: 1;
    generatedAt: string;
    workspaceRoot: string;
    documents: Record<string, DocumentNode>;
    edges: GraphEdge[];
}
/** Rule toggles consumed by the linter. */
export interface LintOptions {
    allowedStatuses: string[];
    requireReferenceDates: boolean;
    warnWhenDependencyNewer: boolean;
    validateSupersedesNotNewer: boolean;
}
/** A single lint finding with a zero-based line/character range. */
export interface LintDiagnostic {
    code: string;
    severity: Severity;
    message: string;
    line: number;
    character: number;
    endLine: number;
    endCharacter: number;
}
export declare const DEFAULT_LINT_OPTIONS: LintOptions;
|
package/dist/types.js
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.DEFAULT_LINT_OPTIONS = exports.HEADER_FIELDS = void 0;
|
|
4
|
+
exports.HEADER_FIELDS = [
|
|
5
|
+
"Status",
|
|
6
|
+
"Owner",
|
|
7
|
+
"Reviewers",
|
|
8
|
+
"Last updated",
|
|
9
|
+
"Milestone",
|
|
10
|
+
"Depends on",
|
|
11
|
+
"Supersedes"
|
|
12
|
+
];
|
|
13
|
+
exports.DEFAULT_LINT_OPTIONS = {
|
|
14
|
+
allowedStatuses: ["Draft", "In Review", "Approved", "Superseded"],
|
|
15
|
+
requireReferenceDates: true,
|
|
16
|
+
warnWhenDependencyNewer: true,
|
|
17
|
+
validateSupersedesNotNewer: true
|
|
18
|
+
};
|
|
19
|
+
//# sourceMappingURL=types.js.map
|
package/package.json
ADDED
|
@@ -0,0 +1,148 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "hdlinter",
|
|
3
|
+
"publisher": "swekkiekekkie",
|
|
4
|
+
"displayName": "hdlinter",
|
|
5
|
+
"description": "Cursor/VS Code extension and CLI for validating project document metadata headers.",
|
|
6
|
+
"version": "0.1.0",
|
|
7
|
+
"license": "MIT",
|
|
8
|
+
"keywords": [
|
|
9
|
+
"markdown",
|
|
10
|
+
"lint",
|
|
11
|
+
"metadata",
|
|
12
|
+
"headers",
|
|
13
|
+
"cursor",
|
|
14
|
+
"vscode",
|
|
15
|
+
"documentation"
|
|
16
|
+
],
|
|
17
|
+
"files": [
|
|
18
|
+
"dist/*.js",
|
|
19
|
+
"dist/*.d.ts",
|
|
20
|
+
"README.md"
|
|
21
|
+
],
|
|
22
|
+
"repository": {
|
|
23
|
+
"type": "git",
|
|
24
|
+
"url": "git+https://github.com/swekkiekekkie/hdlinter.git"
|
|
25
|
+
},
|
|
26
|
+
"bugs": {
|
|
27
|
+
"url": "https://github.com/swekkiekekkie/hdlinter/issues"
|
|
28
|
+
},
|
|
29
|
+
"homepage": "https://github.com/swekkiekekkie/hdlinter#readme",
|
|
30
|
+
"engines": {
|
|
31
|
+
"vscode": "^1.85.0"
|
|
32
|
+
},
|
|
33
|
+
"categories": [
|
|
34
|
+
"Linters",
|
|
35
|
+
"Other"
|
|
36
|
+
],
|
|
37
|
+
"main": "./dist/extension.js",
|
|
38
|
+
"bin": {
|
|
39
|
+
"hdlinter": "dist/cli.js"
|
|
40
|
+
},
|
|
41
|
+
"activationEvents": [
|
|
42
|
+
"onLanguage:markdown",
|
|
43
|
+
"workspaceContains:**/*.md",
|
|
44
|
+
"onCommand:hdlinter.fixCurrentDocumentHeader",
|
|
45
|
+
"onCommand:hdlinter.rebuildGraphState",
|
|
46
|
+
"onCommand:hdlinter.fixWorkspaceHeaders"
|
|
47
|
+
],
|
|
48
|
+
"contributes": {
|
|
49
|
+
"commands": [
|
|
50
|
+
{
|
|
51
|
+
"command": "hdlinter.fixCurrentDocumentHeader",
|
|
52
|
+
"title": "hdlinter: Fix Current Document Header"
|
|
53
|
+
},
|
|
54
|
+
{
|
|
55
|
+
"command": "hdlinter.rebuildGraphState",
|
|
56
|
+
"title": "hdlinter: Rebuild Hidden Graph State"
|
|
57
|
+
},
|
|
58
|
+
{
|
|
59
|
+
"command": "hdlinter.fixWorkspaceHeaders",
|
|
60
|
+
"title": "hdlinter: Fix All Workspace Headers"
|
|
61
|
+
}
|
|
62
|
+
],
|
|
63
|
+
"configuration": {
|
|
64
|
+
"title": "hdlinter",
|
|
65
|
+
"properties": {
|
|
66
|
+
"hdlinter.enabled": {
|
|
67
|
+
"type": "boolean",
|
|
68
|
+
"default": true,
|
|
69
|
+
"description": "Enable document header lint diagnostics."
|
|
70
|
+
},
|
|
71
|
+
"hdlinter.include": {
|
|
72
|
+
"type": "array",
|
|
73
|
+
"items": {
|
|
74
|
+
"type": "string"
|
|
75
|
+
},
|
|
76
|
+
"default": [
|
|
77
|
+
"**/*.md"
|
|
78
|
+
],
|
|
79
|
+
"description": "Markdown file globs to lint."
|
|
80
|
+
},
|
|
81
|
+
"hdlinter.exclude": {
|
|
82
|
+
"type": "array",
|
|
83
|
+
"items": {
|
|
84
|
+
"type": "string"
|
|
85
|
+
},
|
|
86
|
+
"default": [
|
|
87
|
+
"**/.git/**",
|
|
88
|
+
"**/node_modules/**",
|
|
89
|
+
"**/.hdlinter/**"
|
|
90
|
+
],
|
|
91
|
+
"description": "Globs to exclude from linting."
|
|
92
|
+
},
|
|
93
|
+
"hdlinter.stateFile": {
|
|
94
|
+
"type": "string",
|
|
95
|
+
"default": ".hdlinter/state.json",
|
|
96
|
+
"description": "Workspace-relative hidden graph/hash state file."
|
|
97
|
+
},
|
|
98
|
+
"hdlinter.allowedStatuses": {
|
|
99
|
+
"type": "array",
|
|
100
|
+
"items": {
|
|
101
|
+
"type": "string"
|
|
102
|
+
},
|
|
103
|
+
"default": [
|
|
104
|
+
"Draft",
|
|
105
|
+
"In Review",
|
|
106
|
+
"Approved",
|
|
107
|
+
"Superseded"
|
|
108
|
+
],
|
|
109
|
+
"description": "Allowed Status header values."
|
|
110
|
+
},
|
|
111
|
+
"hdlinter.requireReferenceDates": {
|
|
112
|
+
"type": "boolean",
|
|
113
|
+
"default": true,
|
|
114
|
+
"description": "Require header references to include a bracketed target Last updated date."
|
|
115
|
+
},
|
|
116
|
+
"hdlinter.warnWhenDependencyNewer": {
|
|
117
|
+
"type": "boolean",
|
|
118
|
+
"default": true,
|
|
119
|
+
"description": "Warn when a dependency has a later Last updated date than the current document."
|
|
120
|
+
},
|
|
121
|
+
"hdlinter.validateSupersedesNotNewer": {
|
|
122
|
+
"type": "boolean",
|
|
123
|
+
"default": true,
|
|
124
|
+
"description": "Warn when a superseded document has a later Last updated date than the superseding document."
|
|
125
|
+
}
|
|
126
|
+
}
|
|
127
|
+
}
|
|
128
|
+
},
|
|
129
|
+
"scripts": {
|
|
130
|
+
"build": "npm run compile",
|
|
131
|
+
"compile": "tsc -p ./",
|
|
132
|
+
"watch": "tsc -watch -p ./",
|
|
133
|
+
"test": "npm run compile && node --test dist/test/*.test.js",
|
|
134
|
+
"vscode:prepublish": "npm run compile",
|
|
135
|
+
"prepack": "npm run compile",
|
|
136
|
+
"prepublishOnly": "npm test",
|
|
137
|
+
"package": "vsce package --allow-missing-repository",
|
|
138
|
+
"package:vsix": "vsce package --allow-missing-repository",
|
|
139
|
+
"publish:dry-run": "npm publish --dry-run",
|
|
140
|
+
"lint:aicompliance": "npm run compile && node dist/cli.js ../aicompliance --write-state"
|
|
141
|
+
},
|
|
142
|
+
"devDependencies": {
|
|
143
|
+
"@types/node": "^20.17.0",
|
|
144
|
+
"@types/vscode": "^1.85.0",
|
|
145
|
+
"@vscode/vsce": "^2.32.0",
|
|
146
|
+
"typescript": "^5.6.0"
|
|
147
|
+
}
|
|
148
|
+
}
|