hook-o-gnese 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,15 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 hook-o-gnese contributors
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND.
package/README.md ADDED
@@ -0,0 +1,182 @@
1
+ # hook-o-gnese
2
+
3
+ > Score React component complexity from hook usage. Catch fat effects, scattered state, and coupled hooks before they ship.
4
+
5
+ [![npm](https://img.shields.io/npm/v/hook-o-gnese.svg)](https://www.npmjs.com/package/hook-o-gnese)
6
+ [![JSR](https://jsr.io/badges/hook-o-gnese)](https://jsr.io/hook-o-gnese)
7
+ [![CI](https://github.com/rehoutm/spaghetti-hook-o-gnese/actions/workflows/ci.yml/badge.svg)](https://github.com/rehoutm/spaghetti-hook-o-gnese/actions/workflows/ci.yml)
8
+ [![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](LICENSE)
9
+
10
+ Most lint rules check syntax. **hook-o-gnese checks complexity.** It scores how dense your React hooks are — useEffect blocks bloated with branches and `setState` calls, components with too many `useState`s that should be a `useReducer`, effects that read and write the same state (loop bait), and custom hooks stacked too deep.
11
+
12
+ ```bash
13
+ npx hook-o-gnese ./src
14
+ ```
15
+
16
+ ```
17
+ src/components/Banner.tsx
18
+ 1:1 warn useEffect entropy 23.5 ≥ 10 (deps=4 branches=9 setStates=1 nested=0) hook-o-gnese/no-fat-effects
19
+ 1:1 error useEffect reads + writes same state 'open' (loop risk) hook-o-gnese/hook-coupling
20
+
21
+ src/screens/Settings.tsx
22
+ 1:1 warn state scatter 8 ≥ 5 (useStates=3, correlated setters=10). Consider useReducer. hook-o-gnese/state-scatter
23
+
24
+ 3 problems (1 error, 2 warnings) in 470 files, 415ms
25
+ ```
26
+
27
+ ## Why
28
+
29
+ You've seen the file. 800 lines of component, fifteen `useState` calls, a `useEffect` whose dependency array reads like a phone book, and a comment that says `// TODO: refactor`. By the time anyone notices, it's already in production and nobody wants to touch it.
30
+
31
+ `hook-o-gnese` is your early warning system. It measures the smells objectively, surfaces them in CI, and gives you concrete numbers to argue with in code review.
32
+
33
+ ## What it catches
34
+
35
+ | Rule | Smell | Default |
36
+ |---|---|---|
37
+ | `no-fat-effects` | useEffect blocks dense with branches, setState calls, missing cleanup | warn at score ≥ 10 |
38
+ | `state-scatter` | Components with too many `useState` calls (probably want `useReducer`) | warn at score ≥ 5 |
39
+ | `hook-coupling` | useEffect that reads state it also writes (re-render loop bait) | error |
40
+ | `custom-hook-depth` | Custom hooks calling custom hooks calling custom hooks (type-aware) | warn at depth ≥ 3 |
41
+
42
+ Full scoring formulas in [docs/thresholds.md](docs/thresholds.md). Per-rule reference in [docs/rule-reference.md](docs/rule-reference.md).
43
+
44
+ ## Two ways to run
45
+
46
+ ### 1. Standalone CLI — recommended for most
47
+
48
+ No linter setup required. Works in any project. Outputs stylish, JSON, SARIF (for GitHub code scanning), or GitHub Actions annotations.
49
+
50
+ ```bash
51
+ npx hook-o-gnese ./src
52
+ npx hook-o-gnese ./src --format=sarif > report.sarif
53
+ npx hook-o-gnese ./src --type-aware # enables custom-hook-depth
54
+ ```
55
+
56
+ Add a `.hookogneserc.json` if you want to tune thresholds:
57
+
58
+ ```jsonc
59
+ {
60
+ "rules": {
61
+ "hook-o-gnese/no-fat-effects": ["warn", { "threshold": 12 }],
62
+ "hook-o-gnese/state-scatter": "warn",
63
+ "hook-o-gnese/hook-coupling": "error",
64
+ "hook-o-gnese/custom-hook-depth": ["warn", { "maxDepth": 3 }]
65
+ },
66
+ "ignore": ["**/legacy/**"],
67
+ "typeAware": true
68
+ }
69
+ ```
70
+
71
+ Full CLI reference: [docs/cli.md](docs/cli.md).
72
+
73
+ ### 2. Oxlint plugin — if you're already on oxlint
74
+
75
+ ```bash
76
+ npm install -D hook-o-gnese oxlint
77
+ ```
78
+
79
+ ```jsonc
80
+ // .oxlintrc.json
81
+ {
82
+ "jsPlugins": ["./node_modules/hook-o-gnese/dist/index.mjs"],
83
+ "rules": {
84
+ "hook-o-gnese/no-fat-effects": "warn",
85
+ "hook-o-gnese/state-scatter": "warn",
86
+ "hook-o-gnese/hook-coupling": "error",
87
+ "hook-o-gnese/custom-hook-depth": ["warn", { "maxDepth": 3 }]
88
+ }
89
+ }
90
+ ```
91
+
92
+ Or import the recommended preset, which bundles tsgolint type-aware rules (`no-floating-promises`, `no-misused-promises`):
93
+
94
+ ```ts
95
+ import { recommended } from "hook-o-gnese";
96
+ ```
97
+
98
+ ## Output formats
99
+
100
+ ```bash
101
+ # Human-readable (default)
102
+ hook-o-gnese ./src
103
+
104
+ # Machine-readable for tooling / agentic loops
105
+ hook-o-gnese ./src --format=json
106
+
107
+ # SARIF for GitHub code scanning
108
+ hook-o-gnese ./src --format=sarif > report.sarif
109
+
110
+ # GitHub Actions inline annotations
111
+ hook-o-gnese ./src --format=github
112
+ ```
113
+
114
+ ## CI: GitHub Actions
115
+
116
+ ```yaml
117
+ - name: Lint hook complexity
118
+ run: npx hook-o-gnese ./src --format=github
119
+ ```
120
+
121
+ For PR-level code-scanning UI:
122
+
123
+ ```yaml
124
+ - run: npx hook-o-gnese ./src --format=sarif > hook-o-gnese.sarif
125
+ - uses: github/codeql-action/upload-sarif@v3
126
+ with:
127
+ sarif_file: hook-o-gnese.sarif
128
+ ```
129
+
130
+ ## Programmatic API
131
+
132
+ ```ts
133
+ import { lintFile, lintFiles } from "hook-o-gnese/engine";
134
+
135
+ const diagnostics = await lintFile("Component.tsx", source, {
136
+ rules: { "hook-o-gnese/no-fat-effects": { severity: "warn" } },
137
+ cwd: process.cwd(),
138
+ typeAware: false,
139
+ });
140
+ ```
141
+
142
+ ## Performance
143
+
144
+ Sequential per-file scan, but each file is cheap:
145
+
146
+ | Path | Cold start | Per-file warm |
147
+ |---|---|---|
148
+ | Node CLI (`npx`) | ~80ms | ~3–5ms |
149
+ | Standalone binary (`deno compile`) | ~30ms | ~3–5ms |
150
+ | Type-aware rule (first run) | +50–150ms | TS Program load |
151
+
152
+ Linear scan on a single core; throughput depends on file size — the Rouvy companion app's 470 files scan in 415ms (~1,100 files/sec).
153
+
154
+ ## Standalone binary
155
+
156
+ The CLI also ships as a single static binary built with `deno compile` — no Node, no Deno, no install required:
157
+
158
+ ```bash
159
+ git clone https://github.com/rehoutm/spaghetti-hook-o-gnese
160
+ cd spaghetti-hook-o-gnese
161
+ deno task build:bin
162
+ ./bin/hook-o-gnese ./src
163
+ ```
164
+
165
+ ## Honest limitations
166
+
167
+ - **`custom-hook-depth` uses the TypeScript Compiler API**, not tsgolint's Go backend. Oxlint's JS plugin API doesn't expose tsgolint type info to custom rules, so we lazily build a `ts.Program` for that one rule. ~50–150ms first-run cost, then cached.
168
+ - **Sequential file scan.** Worker-thread parallelism is on the v1.5 list. Current per-file cost (~3–5ms) means linear scanning is fine through ~thousands of files.
169
+ - **No daemon mode yet.** Each invocation is a fresh process. Also v1.5.
170
+
171
+ ## Compatibility
172
+
173
+ - **Node:** ≥ 20.18
174
+ - **Deno:** ≥ 2.x
175
+ - **TypeScript:** ≥ 6.0 (peer)
176
+ - **Oxlint:** ≥ 1.63 (peer, optional — only needed for plugin path)
177
+
178
+ ESM only. No CJS build.
179
+
180
+ ## License
181
+
182
+ MIT
package/dist/cli.d.mts ADDED
@@ -0,0 +1 @@
1
+ export { };
package/dist/cli.mjs ADDED
@@ -0,0 +1,237 @@
1
+ #!/usr/bin/env node
2
+ import { n as lintFiles } from "./engine-DJFFKwTZ.mjs";
3
+ import { parseArgs } from "node:util";
4
+ import { readFile } from "node:fs/promises";
5
+ import { globby } from "globby";
6
+ //#region src/config.ts
7
+ const DEFAULT_RULES = {
8
+ "hook-o-gnese/no-fat-effects": { severity: "warn" },
9
+ "hook-o-gnese/state-scatter": { severity: "warn" },
10
+ "hook-o-gnese/hook-coupling": { severity: "error" },
11
+ "hook-o-gnese/custom-hook-depth": {
12
+ severity: "warn",
13
+ options: { maxDepth: 3 }
14
+ }
15
+ };
16
+ const DEFAULT_IGNORE = [
17
+ "**/node_modules/**",
18
+ "**/dist/**",
19
+ "**/build/**",
20
+ "**/.next/**",
21
+ "**/.cache/**"
22
+ ];
23
+ async function loadConfig(cwd, configPath, readTextFile) {
24
+ const candidates = configPath ? [configPath] : [`${cwd.replace(/\/$/, "")}/.hookogneserc.json`];
25
+ let fileCfg = {};
26
+ for (const c of candidates) try {
27
+ const text = await readTextFile(c);
28
+ fileCfg = JSON.parse(text);
29
+ break;
30
+ } catch {}
31
+ const rules = { ...DEFAULT_RULES };
32
+ if (fileCfg.rules) for (const [id, spec] of Object.entries(fileCfg.rules)) if (Array.isArray(spec)) rules[id] = {
33
+ severity: spec[0],
34
+ options: spec[1]
35
+ };
36
+ else rules[id] = { severity: spec };
37
+ return {
38
+ engine: {
39
+ rules,
40
+ cwd,
41
+ typeAware: fileCfg.typeAware ?? false
42
+ },
43
+ ignore: fileCfg.ignore ?? DEFAULT_IGNORE
44
+ };
45
+ }
46
+ function applyCliRuleOverrides(cfg, overrides) {
47
+ const rules = { ...cfg.rules };
48
+ for (const o of overrides) rules[o.id] = {
49
+ ...rules[o.id] ?? { severity: "off" },
50
+ severity: o.severity
51
+ };
52
+ return {
53
+ ...cfg,
54
+ rules
55
+ };
56
+ }
57
+ //#endregion
58
+ //#region src/formatters/stylish.ts
59
/**
 * Default human-readable formatter (eslint-"stylish"-like): a heading per
 * file, one indented row per diagnostic, then a totals line.
 */
const stylish = ({ diagnostics, filesScanned, durationMs }) => {
  if (diagnostics.length === 0) {
    return `✓ no problems found (${filesScanned} files, ${durationMs}ms)\n`;
  }
  // Group diagnostics by file, preserving first-seen file order.
  const perFile = new Map();
  for (const diag of diagnostics) {
    const bucket = perFile.get(diag.file);
    if (bucket) bucket.push(diag);
    else perFile.set(diag.file, [diag]);
  }
  const rows = [];
  for (const [file, fileDiags] of perFile) {
    rows.push(`\n${file}`);
    for (const diag of fileDiags) {
      const sev = diag.severity === "error" ? "error" : "warn ";
      const loc = `${diag.line}:${diag.column}`.padEnd(7);
      rows.push(` ${loc} ${sev} ${diag.message} ${diag.rule}`);
    }
  }
  // Single pass for the summary counts.
  let errors = 0;
  let warnings = 0;
  for (const diag of diagnostics) {
    if (diag.severity === "error") errors += 1;
    else if (diag.severity === "warn") warnings += 1;
  }
  rows.push(`\n${diagnostics.length} problems (${errors} error${errors === 1 ? "" : "s"}, ${warnings} warning${warnings === 1 ? "" : "s"}) in ${filesScanned} files, ${durationMs}ms`);
  return rows.join("\n") + "\n";
};
80
+ //#endregion
81
+ //#region src/formatters/json.ts
82
// Machine-readable formatter: the whole result context, pretty-printed.
const json = (ctx) => {
  return JSON.stringify(ctx, null, 2);
};
83
+ //#endregion
84
+ //#region src/formatters/sarif.ts
85
/**
 * SARIF 2.1.0 formatter, suitable for GitHub code-scanning upload. Emits a
 * single run whose driver lists every rule id that produced a result, in
 * first-seen order.
 */
const sarif = ({ diagnostics }) => {
  const seen = new Set();
  for (const diag of diagnostics) seen.add(diag.rule);
  const driver = {
    name: "hook-o-gnese",
    informationUri: "https://github.com/rehoutm/spaghetti-hook-o-gnese",
    rules: [...seen].map((id) => ({ id }))
  };
  const results = diagnostics.map((diag) => ({
    ruleId: diag.rule,
    level: diag.severity === "error" ? "error" : "warning",
    message: { text: diag.message },
    locations: [{
      physicalLocation: {
        artifactLocation: { uri: diag.file },
        region: {
          startLine: diag.line,
          startColumn: diag.column,
          endLine: diag.endLine,
          endColumn: diag.endColumn
        }
      }
    }]
  }));
  const document = {
    version: "2.1.0",
    $schema: "https://raw.githubusercontent.com/oasis-tcs/sarif-spec/main/sarif-2.1/schema/sarif-schema-2.1.0.json",
    runs: [{
      tool: { driver },
      results
    }]
  };
  return JSON.stringify(document, null, 2);
};
113
+ //#endregion
114
+ //#region src/formatters/github.ts
115
/**
 * GitHub Actions workflow-command formatter: one ::error/::warning line per
 * diagnostic so GitHub renders inline annotations on the PR diff.
 *
 * Data and property values are percent-escaped per the workflow-command
 * syntax ('%' -> %25, CR -> %0D, LF -> %0A; property values additionally
 * escape ':' and ','). The previous version flattened newlines to spaces and
 * rewrote '::' to ':', which corrupted messages, and left property values
 * (file, title) unescaped, so a ',' or ':' in them broke the command.
 */
const github = ({ diagnostics }) => {
  // Escape for the free-text message after '::'.
  const escapeData = (s) => String(s).replace(/%/g, "%25").replace(/\r/g, "%0D").replace(/\n/g, "%0A");
  // Escape for property values (file=..., title=...).
  const escapeProp = (s) => escapeData(s).replace(/:/g, "%3A").replace(/,/g, "%2C");
  return diagnostics.map((d) => {
    const cmd = d.severity === "error" ? "::error" : "::warning";
    return `${cmd} file=${escapeProp(d.file)},line=${d.line},col=${d.column},title=${escapeProp(d.rule)}::${escapeData(d.message)}`;
  }).join("\n") + "\n";
};
120
+ //#endregion
121
+ //#region src/cli-core.ts
122
// Formatter registry keyed by the --format CLI value.
const FORMATTERS = {
  stylish,
  json,
  sarif,
  github
};
// Usage text printed for --help. Runtime output — kept verbatim.
const HELP = `
hook-o-gnese — score React hook complexity

Usage:
hook-o-gnese [options] <paths...>

Options:
--format=<fmt> stylish (default) | json | sarif | github
--config=<path> path to .hookogneserc.json
--type-aware enable custom-hook-depth (slower, uses TS Compiler API)
--rule=<id>=<sev> override rule severity (off|warn|error). Repeatable.
--help, -h show this message

Examples:
hook-o-gnese ./src
hook-o-gnese ./src --format=sarif > report.sarif
hook-o-gnese ./src --type-aware --rule=hook-o-gnese/state-scatter=error
`.trim();
146
/**
 * CLI entry logic, decoupled from the host runtime via the injected `io`.
 * Exit codes: 0 = clean, 1 = at least one error-severity diagnostic,
 * 2 = usage/configuration problem (bad args, unknown format, no files).
 */
async function runCli(opts, io) {
  if (opts.paths.length === 0) {
    io.writeStderr("Error: no paths provided. Use --help for usage.\n");
    return 2;
  }
  const render = FORMATTERS[opts.format];
  if (!render) {
    io.writeStderr(`Error: unknown format '${opts.format}'\n`);
    return 2;
  }
  // Config file first; CLI flags win over it.
  const loaded = await loadConfig(opts.cwd, opts.config, io.readTextFile);
  const engineCfg = loaded.engine;
  if (opts.typeAware) engineCfg.typeAware = true;
  const effectiveCfg = applyCliRuleOverrides(engineCfg, opts.ruleOverrides);
  const targets = await globby(opts.paths, {
    ignore: [...DEFAULT_IGNORE, ...loaded.ignore],
    expandDirectories: { extensions: ["ts", "tsx", "js", "jsx"] },
    absolute: false
  });
  if (targets.length === 0) {
    io.writeStderr("Error: no matching files found\n");
    return 2;
  }
  const startedAt = performance.now();
  const diagnostics = await lintFiles(targets, effectiveCfg, io.readTextFile);
  const durationMs = Math.round(performance.now() - startedAt);
  io.writeStdout(render({
    diagnostics,
    filesScanned: targets.length,
    durationMs
  }));
  const hasErrors = diagnostics.some((d) => d.severity === "error");
  return hasErrors ? 1 : 0;
}
184
+ //#endregion
185
+ //#region src/cli.node.ts
186
// Node host shim: parse argv, then delegate to the runtime-agnostic runCli.
const parsed = parseArgs({
  args: process.argv.slice(2),
  options: {
    help: { type: "boolean", short: "h" },
    "type-aware": { type: "boolean" },
    format: { type: "string", default: "stylish" },
    config: { type: "string" },
    rule: { type: "string", multiple: true }
  },
  allowPositionals: true
});
const flags = parsed.values;
if (flags.help) {
  console.log(HELP);
  process.exit(0);
}
// Each --rule flag is "<rule-id>=<severity>".
const ruleOverrides = (flags.rule ?? []).map((pair) => {
  const [id, sev] = pair.split("=");
  return { id, severity: sev };
});
const exitCode = await runCli({
  paths: parsed.positionals,
  format: flags.format,
  config: flags.config,
  typeAware: !!flags["type-aware"],
  ruleOverrides,
  cwd: process.cwd()
}, {
  readTextFile: (p) => readFile(p, "utf-8"),
  writeStdout: (s) => {
    process.stdout.write(s);
  },
  writeStderr: (s) => {
    process.stderr.write(s);
  }
});
process.exit(exitCode);
234
+ //#endregion
235
+ export {};
236
+
237
+ //# sourceMappingURL=cli.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"cli.mjs","names":["jsonFmt"],"sources":["../src/config.ts","../src/formatters/stylish.ts","../src/formatters/json.ts","../src/formatters/sarif.ts","../src/formatters/github.ts","../src/cli-core.ts","../src/cli.node.ts"],"sourcesContent":["import type { EngineConfig, Severity } from \"./engine.ts\";\n\nconst DEFAULT_RULES: Record<string, { severity: Severity; options?: unknown }> = {\n \"hook-o-gnese/no-fat-effects\": { severity: \"warn\" },\n \"hook-o-gnese/state-scatter\": { severity: \"warn\" },\n \"hook-o-gnese/hook-coupling\": { severity: \"error\" },\n \"hook-o-gnese/custom-hook-depth\": { severity: \"warn\", options: { maxDepth: 3 } },\n};\n\nexport const DEFAULT_IGNORE = [\n \"**/node_modules/**\",\n \"**/dist/**\",\n \"**/build/**\",\n \"**/.next/**\",\n \"**/.cache/**\",\n];\n\ninterface FileConfig {\n rules?: Record<string, Severity | [Severity, unknown]>;\n ignore?: string[];\n typeAware?: boolean;\n}\n\nexport type ReadTextFile = (path: string) => Promise<string>;\n\nexport async function loadConfig(\n cwd: string,\n configPath: string | undefined,\n readTextFile: ReadTextFile,\n): Promise<{ engine: EngineConfig; ignore: string[] }> {\n const candidates = configPath\n ? [configPath]\n : [`${cwd.replace(/\\/$/, \"\")}/.hookogneserc.json`];\n\n let fileCfg: FileConfig = {};\n for (const c of candidates) {\n try {\n const text = await readTextFile(c);\n fileCfg = JSON.parse(text);\n break;\n } catch {\n // not found — fine, use defaults\n }\n }\n\n const rules: EngineConfig[\"rules\"] = { ...DEFAULT_RULES };\n if (fileCfg.rules) {\n for (const [id, spec] of Object.entries(fileCfg.rules)) {\n if (Array.isArray(spec)) {\n rules[id] = { severity: spec[0], options: spec[1] };\n } else {\n rules[id] = { severity: spec };\n }\n }\n }\n\n return {\n engine: {\n rules,\n cwd,\n typeAware: fileCfg.typeAware ?? false,\n },\n ignore: fileCfg.ignore ?? 
DEFAULT_IGNORE,\n };\n}\n\nexport function applyCliRuleOverrides(\n cfg: EngineConfig,\n overrides: Array<{ id: string; severity: Severity }>,\n): EngineConfig {\n const rules = { ...cfg.rules };\n for (const o of overrides) {\n rules[o.id] = { ...(rules[o.id] ?? { severity: \"off\" }), severity: o.severity };\n }\n return { ...cfg, rules };\n}\n","import type { Formatter } from \"./types.ts\";\n\nexport const stylish: Formatter = ({ diagnostics, filesScanned, durationMs }) => {\n if (diagnostics.length === 0) {\n return `✓ no problems found (${filesScanned} files, ${durationMs}ms)\\n`;\n }\n const byFile = new Map<string, typeof diagnostics>();\n for (const d of diagnostics) {\n if (!byFile.has(d.file)) byFile.set(d.file, []);\n byFile.get(d.file)!.push(d);\n }\n const lines: string[] = [];\n for (const [file, ds] of byFile) {\n lines.push(`\\n${file}`);\n for (const d of ds) {\n const sev = d.severity === \"error\" ? \"error\" : \"warn \";\n const loc = `${d.line}:${d.column}`.padEnd(7);\n lines.push(` ${loc} ${sev} ${d.message} ${d.rule}`);\n }\n }\n const errors = diagnostics.filter((d) => d.severity === \"error\").length;\n const warnings = diagnostics.filter((d) => d.severity === \"warn\").length;\n lines.push(\n `\\n${diagnostics.length} problems (${errors} error${errors === 1 ? \"\" : \"s\"}, ${warnings} warning${warnings === 1 ? 
\"\" : \"s\"}) in ${filesScanned} files, ${durationMs}ms`,\n );\n return lines.join(\"\\n\") + \"\\n\";\n};\n","import type { Formatter } from \"./types.ts\";\n\nexport const json: Formatter = (ctx) => JSON.stringify(ctx, null, 2);\n","import type { Formatter } from \"./types.ts\";\n\nexport const sarif: Formatter = ({ diagnostics }) => {\n const ruleIds = [...new Set(diagnostics.map((d) => d.rule))];\n return JSON.stringify(\n {\n version: \"2.1.0\",\n $schema:\n \"https://raw.githubusercontent.com/oasis-tcs/sarif-spec/main/sarif-2.1/schema/sarif-schema-2.1.0.json\",\n runs: [{\n tool: {\n driver: {\n name: \"hook-o-gnese\",\n informationUri: \"https://github.com/rehoutm/spaghetti-hook-o-gnese\",\n rules: ruleIds.map((id) => ({ id })),\n },\n },\n results: diagnostics.map((d) => ({\n ruleId: d.rule,\n level: d.severity === \"error\" ? \"error\" : \"warning\",\n message: { text: d.message },\n locations: [{\n physicalLocation: {\n artifactLocation: { uri: d.file },\n region: {\n startLine: d.line,\n startColumn: d.column,\n endLine: d.endLine,\n endColumn: d.endColumn,\n },\n },\n }],\n })),\n }],\n },\n null,\n 2,\n );\n};\n","import type { Formatter } from \"./types.ts\";\n\nexport const github: Formatter = ({ diagnostics }) =>\n diagnostics.map((d) => {\n const cmd = d.severity === \"error\" ? 
\"::error\" : \"::warning\";\n const safe = d.message.replace(/\\r?\\n/g, \" \").replace(/::/g, \":\");\n return `${cmd} file=${d.file},line=${d.line},col=${d.column},title=${d.rule}::${safe}`;\n }).join(\"\\n\") + \"\\n\";\n","import { globby } from \"globby\";\nimport { lintFiles } from \"./engine.ts\";\nimport type { Severity } from \"./engine.ts\";\nimport { applyCliRuleOverrides, DEFAULT_IGNORE, loadConfig } from \"./config.ts\";\nimport { stylish } from \"./formatters/stylish.ts\";\nimport { json as jsonFmt } from \"./formatters/json.ts\";\nimport { sarif } from \"./formatters/sarif.ts\";\nimport { github } from \"./formatters/github.ts\";\nimport type { Formatter } from \"./formatters/types.ts\";\n\nconst FORMATTERS: Record<string, Formatter> = {\n stylish,\n json: jsonFmt,\n sarif,\n github,\n};\n\nexport const HELP = `\nhook-o-gnese — score React hook complexity\n\nUsage:\n hook-o-gnese [options] <paths...>\n\nOptions:\n --format=<fmt> stylish (default) | json | sarif | github\n --config=<path> path to .hookogneserc.json\n --type-aware enable custom-hook-depth (slower, uses TS Compiler API)\n --rule=<id>=<sev> override rule severity (off|warn|error). Repeatable.\n --help, -h show this message\n\nExamples:\n hook-o-gnese ./src\n hook-o-gnese ./src --format=sarif > report.sarif\n hook-o-gnese ./src --type-aware --rule=hook-o-gnese/state-scatter=error\n`.trim();\n\nexport interface CliOptions {\n paths: string[];\n format: string;\n config?: string;\n typeAware: boolean;\n ruleOverrides: Array<{ id: string; severity: Severity }>;\n cwd: string;\n}\n\nexport interface RuntimeIO {\n readTextFile(path: string): Promise<string>;\n writeStdout(s: string): void;\n writeStderr(s: string): void;\n}\n\nexport async function runCli(opts: CliOptions, io: RuntimeIO): Promise<number> {\n if (opts.paths.length === 0) {\n io.writeStderr(\"Error: no paths provided. 
Use --help for usage.\\n\");\n return 2;\n }\n\n const formatter = FORMATTERS[opts.format];\n if (!formatter) {\n io.writeStderr(`Error: unknown format '${opts.format}'\\n`);\n return 2;\n }\n\n const { engine, ignore } = await loadConfig(opts.cwd, opts.config, io.readTextFile);\n if (opts.typeAware) engine.typeAware = true;\n const finalEngine = applyCliRuleOverrides(engine, opts.ruleOverrides);\n\n const files = await globby(opts.paths, {\n ignore: [...DEFAULT_IGNORE, ...ignore],\n expandDirectories: { extensions: [\"ts\", \"tsx\", \"js\", \"jsx\"] },\n absolute: false,\n });\n\n if (files.length === 0) {\n io.writeStderr(\"Error: no matching files found\\n\");\n return 2;\n }\n\n const start = performance.now();\n const diagnostics = await lintFiles(files, finalEngine, io.readTextFile);\n const durationMs = Math.round(performance.now() - start);\n\n io.writeStdout(formatter({\n diagnostics,\n filesScanned: files.length,\n durationMs,\n }));\n\n if (diagnostics.some((d) => d.severity === \"error\")) return 1;\n return 0;\n}\n","#!/usr/bin/env node\nimport { parseArgs } from \"node:util\";\nimport { readFile } from \"node:fs/promises\";\nimport { HELP, runCli } from \"./cli-core.ts\";\nimport type { Severity } from \"./engine.ts\";\n\nconst { values, positionals } = parseArgs({\n args: process.argv.slice(2),\n options: {\n help: { type: \"boolean\", short: \"h\" },\n \"type-aware\": { type: \"boolean\" },\n format: { type: \"string\", default: \"stylish\" },\n config: { type: \"string\" },\n rule: { type: \"string\", multiple: true },\n },\n allowPositionals: true,\n});\n\nif (values.help) {\n console.log(HELP);\n process.exit(0);\n}\n\nconst overrides = (values.rule ?? 
[]).map((spec) => {\n const [id, sev] = spec.split(\"=\");\n return { id, severity: sev as Severity };\n});\n\nconst code = await runCli(\n {\n paths: positionals,\n format: values.format as string,\n config: values.config,\n typeAware: !!values[\"type-aware\"],\n ruleOverrides: overrides,\n cwd: process.cwd(),\n },\n {\n readTextFile: (p) => readFile(p, \"utf-8\"),\n writeStdout: (s) => {\n process.stdout.write(s);\n },\n writeStderr: (s) => {\n process.stderr.write(s);\n },\n },\n);\nprocess.exit(code);\n"],"mappings":";;;;;;AAEA,MAAM,gBAA2E;CAC/E,+BAA+B,EAAE,UAAU,QAAQ;CACnD,8BAA8B,EAAE,UAAU,QAAQ;CAClD,8BAA8B,EAAE,UAAU,SAAS;CACnD,kCAAkC;EAAE,UAAU;EAAQ,SAAS,EAAE,UAAU,GAAG;EAAE;CACjF;AAED,MAAa,iBAAiB;CAC5B;CACA;CACA;CACA;CACA;CACD;AAUD,eAAsB,WACpB,KACA,YACA,cACqD;CACrD,MAAM,aAAa,aACf,CAAC,WAAW,GACZ,CAAC,GAAG,IAAI,QAAQ,OAAO,GAAG,CAAC,qBAAqB;CAEpD,IAAI,UAAsB,EAAE;AAC5B,MAAK,MAAM,KAAK,WACd,KAAI;EACF,MAAM,OAAO,MAAM,aAAa,EAAE;AAClC,YAAU,KAAK,MAAM,KAAK;AAC1B;SACM;CAKV,MAAM,QAA+B,EAAE,GAAG,eAAe;AACzD,KAAI,QAAQ,MACV,MAAK,MAAM,CAAC,IAAI,SAAS,OAAO,QAAQ,QAAQ,MAAM,CACpD,KAAI,MAAM,QAAQ,KAAK,CACrB,OAAM,MAAM;EAAE,UAAU,KAAK;EAAI,SAAS,KAAK;EAAI;KAEnD,OAAM,MAAM,EAAE,UAAU,MAAM;AAKpC,QAAO;EACL,QAAQ;GACN;GACA;GACA,WAAW,QAAQ,aAAa;GACjC;EACD,QAAQ,QAAQ,UAAU;EAC3B;;AAGH,SAAgB,sBACd,KACA,WACc;CACd,MAAM,QAAQ,EAAE,GAAG,IAAI,OAAO;AAC9B,MAAK,MAAM,KAAK,UACd,OAAM,EAAE,MAAM;EAAE,GAAI,MAAM,EAAE,OAAO,EAAE,UAAU,OAAO;EAAG,UAAU,EAAE;EAAU;AAEjF,QAAO;EAAE,GAAG;EAAK;EAAO;;;;ACxE1B,MAAa,WAAsB,EAAE,aAAa,cAAc,iBAAiB;AAC/E,KAAI,YAAY,WAAW,EACzB,QAAO,wBAAwB,aAAa,UAAU,WAAW;CAEnE,MAAM,yBAAS,IAAI,KAAiC;AACpD,MAAK,MAAM,KAAK,aAAa;AAC3B,MAAI,CAAC,OAAO,IAAI,EAAE,KAAK,CAAE,QAAO,IAAI,EAAE,MAAM,EAAE,CAAC;AAC/C,SAAO,IAAI,EAAE,KAAK,CAAE,KAAK,EAAE;;CAE7B,MAAM,QAAkB,EAAE;AAC1B,MAAK,MAAM,CAAC,MAAM,OAAO,QAAQ;AAC/B,QAAM,KAAK,KAAK,OAAO;AACvB,OAAK,MAAM,KAAK,IAAI;GAClB,MAAM,MAAM,EAAE,aAAa,UAAU,UAAU;GAC/C,MAAM,MAAM,GAAG,EAAE,KAAK,GAAG,EAAE,SAAS,OAAO,EAAE;AAC7C,SAAM,KAAK,KAAK,IAAI,GAAG,IAAI,IAAI,EAAE,QAAQ,IAAI,EAAE,OAAO;;;CAG1D,MAAM,
SAAS,YAAY,QAAQ,MAAM,EAAE,aAAa,QAAQ,CAAC;CACjE,MAAM,WAAW,YAAY,QAAQ,MAAM,EAAE,aAAa,OAAO,CAAC;AAClE,OAAM,KACJ,KAAK,YAAY,OAAO,aAAa,OAAO,QAAQ,WAAW,IAAI,KAAK,IAAI,IAAI,SAAS,UAAU,aAAa,IAAI,KAAK,IAAI,OAAO,aAAa,UAAU,WAAW,IACvK;AACD,QAAO,MAAM,KAAK,KAAK,GAAG;;;;ACvB5B,MAAa,QAAmB,QAAQ,KAAK,UAAU,KAAK,MAAM,EAAE;;;ACApE,MAAa,SAAoB,EAAE,kBAAkB;CACnD,MAAM,UAAU,CAAC,GAAG,IAAI,IAAI,YAAY,KAAK,MAAM,EAAE,KAAK,CAAC,CAAC;AAC5D,QAAO,KAAK,UACV;EACE,SAAS;EACT,SACE;EACF,MAAM,CAAC;GACL,MAAM,EACJ,QAAQ;IACN,MAAM;IACN,gBAAgB;IAChB,OAAO,QAAQ,KAAK,QAAQ,EAAE,IAAI,EAAE;IACrC,EACF;GACD,SAAS,YAAY,KAAK,OAAO;IAC/B,QAAQ,EAAE;IACV,OAAO,EAAE,aAAa,UAAU,UAAU;IAC1C,SAAS,EAAE,MAAM,EAAE,SAAS;IAC5B,WAAW,CAAC,EACV,kBAAkB;KAChB,kBAAkB,EAAE,KAAK,EAAE,MAAM;KACjC,QAAQ;MACN,WAAW,EAAE;MACb,aAAa,EAAE;MACf,SAAS,EAAE;MACX,WAAW,EAAE;MACd;KACF,EACF,CAAC;IACH,EAAE;GACJ,CAAC;EACH,EACD,MACA,EACD;;;;ACnCH,MAAa,UAAqB,EAAE,kBAClC,YAAY,KAAK,MAAM;CACrB,MAAM,MAAM,EAAE,aAAa,UAAU,YAAY;CACjD,MAAM,OAAO,EAAE,QAAQ,QAAQ,UAAU,IAAI,CAAC,QAAQ,OAAO,IAAI;AACjE,QAAO,GAAG,IAAI,QAAQ,EAAE,KAAK,QAAQ,EAAE,KAAK,OAAO,EAAE,OAAO,SAAS,EAAE,KAAK,IAAI;EAChF,CAAC,KAAK,KAAK,GAAG;;;ACGlB,MAAM,aAAwC;CAC5C;CACMA;CACN;CACA;CACD;AAED,MAAa,OAAO;;;;;;;;;;;;;;;;;EAiBlB,MAAM;AAiBR,eAAsB,OAAO,MAAkB,IAAgC;AAC7E,KAAI,KAAK,MAAM,WAAW,GAAG;AAC3B,KAAG,YAAY,oDAAoD;AACnE,SAAO;;CAGT,MAAM,YAAY,WAAW,KAAK;AAClC,KAAI,CAAC,WAAW;AACd,KAAG,YAAY,0BAA0B,KAAK,OAAO,KAAK;AAC1D,SAAO;;CAGT,MAAM,EAAE,QAAQ,WAAW,MAAM,WAAW,KAAK,KAAK,KAAK,QAAQ,GAAG,aAAa;AACnF,KAAI,KAAK,UAAW,QAAO,YAAY;CACvC,MAAM,cAAc,sBAAsB,QAAQ,KAAK,cAAc;CAErE,MAAM,QAAQ,MAAM,OAAO,KAAK,OAAO;EACrC,QAAQ,CAAC,GAAG,gBAAgB,GAAG,OAAO;EACtC,mBAAmB,EAAE,YAAY;GAAC;GAAM;GAAO;GAAM;GAAM,EAAE;EAC7D,UAAU;EACX,CAAC;AAEF,KAAI,MAAM,WAAW,GAAG;AACtB,KAAG,YAAY,mCAAmC;AAClD,SAAO;;CAGT,MAAM,QAAQ,YAAY,KAAK;CAC/B,MAAM,cAAc,MAAM,UAAU,OAAO,aAAa,GAAG,aAAa;CACxE,MAAM,aAAa,KAAK,MAAM,YAAY,KAAK,GAAG,MAAM;AAExD,IAAG,YAAY,UAAU;EACvB;EACA,cAAc,MAAM;EACpB;EACD,CAAC,CAAC;AAEH,KAAI,YAAY,MAAM,MAAM,EAAE,aAAa,QAAQ,CAAE,QAAO;AAC5D,QAAO;;;;ACnFT,MAAM,EAAE,QAAQ,gBAAgB,UAAU;C
ACxC,MAAM,QAAQ,KAAK,MAAM,EAAE;CAC3B,SAAS;EACP,MAAM;GAAE,MAAM;GAAW,OAAO;GAAK;EACrC,cAAc,EAAE,MAAM,WAAW;EACjC,QAAQ;GAAE,MAAM;GAAU,SAAS;GAAW;EAC9C,QAAQ,EAAE,MAAM,UAAU;EAC1B,MAAM;GAAE,MAAM;GAAU,UAAU;GAAM;EACzC;CACD,kBAAkB;CACnB,CAAC;AAEF,IAAI,OAAO,MAAM;AACf,SAAQ,IAAI,KAAK;AACjB,SAAQ,KAAK,EAAE;;AAGjB,MAAM,aAAa,OAAO,QAAQ,EAAE,EAAE,KAAK,SAAS;CAClD,MAAM,CAAC,IAAI,OAAO,KAAK,MAAM,IAAI;AACjC,QAAO;EAAE;EAAI,UAAU;EAAiB;EACxC;AAEF,MAAM,OAAO,MAAM,OACjB;CACE,OAAO;CACP,QAAQ,OAAO;CACf,QAAQ,OAAO;CACf,WAAW,CAAC,CAAC,OAAO;CACpB,eAAe;CACf,KAAK,QAAQ,KAAK;CACnB,EACD;CACE,eAAe,MAAM,SAAS,GAAG,QAAQ;CACzC,cAAc,MAAM;AAClB,UAAQ,OAAO,MAAM,EAAE;;CAEzB,cAAc,MAAM;AAClB,UAAQ,OAAO,MAAM,EAAE;;CAE1B,CACF;AACD,QAAQ,KAAK,KAAK"}
@@ -0,0 +1,114 @@
1
+ import { t as ALL_RULES } from "./registry-iRG6wil9.mjs";
2
+ import { parseSync } from "oxc-parser";
3
+ //#region src/engine.ts
4
// Rules that need type information; skipped unless config.typeAware is set.
const TYPE_AWARE_RULES = new Set(["hook-o-gnese/custom-hook-depth"]);
/** Strip a leading "hook-o-gnese/" plugin prefix to get the registry key. */
function ruleNamespace(id) {
  const prefix = "hook-o-gnese/";
  return id.startsWith(prefix) ? id.slice(prefix.length) : id;
}
8
/**
 * Offsets of each line start in `source` (offset 0 is always line 1).
 * Used to translate parser byte offsets into 1-based line/column pairs.
 */
function buildLineOffsets(source) {
  const starts = [0];
  let nl = source.indexOf("\n");
  while (nl !== -1) {
    starts.push(nl + 1);
    nl = source.indexOf("\n", nl + 1);
  }
  return starts;
}
13
/**
 * Map a 0-based offset to a 1-based { line, column } using precomputed line
 * starts. Binary search for the greatest line start that is <= offset.
 */
function offsetToLineCol(offset, lineOffsets) {
  let low = 0;
  let high = lineOffsets.length - 1;
  while (low < high) {
    const probe = (low + high + 1) >>> 1;
    if (lineOffsets[probe] > offset) high = probe - 1;
    else low = probe;
  }
  return {
    line: low + 1,
    column: offset - lineOffsets[low] + 1
  };
}
/**
 * Best-effort source location for an AST node. Falls back to 1:1 when the
 * node carries no numeric `start`; end coordinates stay undefined when
 * `end` is absent.
 */
function getLoc(node, lineOffsets) {
  if (typeof node?.start !== "number") {
    return { line: 1, column: 1 };
  }
  const begin = offsetToLineCol(node.start, lineOffsets);
  const finish = typeof node?.end === "number" ? offsetToLineCol(node.end, lineOffsets) : void 0;
  return {
    line: begin.line,
    column: begin.column,
    endLine: finish?.line,
    endColumn: finish?.column
  };
}
42
+ function walkAST(node, handlers) {
43
+ if (!node || typeof node !== "object") return;
44
+ const enter = handlers[node.type];
45
+ if (enter) enter(node);
46
+ for (const key in node) {
47
+ const v = node[key];
48
+ if (Array.isArray(v)) for (const c of v) walkAST(c, handlers);
49
+ else if (v && typeof v === "object") walkAST(v, handlers);
50
+ }
51
+ const exit = handlers[`${node.type}:exit`];
52
+ if (exit) exit(node);
53
+ }
54
/**
 * Lint a single file: parse with oxc, bail out for non-React files, then run
 * every enabled rule over the AST and collect its diagnostics.
 *
 * @param filePath  path used for parser language detection and in reports
 * @param source    file contents
 * @param config    engine config ({ rules, cwd, typeAware })
 * @returns array of diagnostic objects ({ file, rule, severity, message, line, column, ... })
 */
async function lintFile(filePath, source, config) {
  // Parser language is chosen from the file extension.
  const parsed = parseSync(filePath, source, {
    lang: filePath.endsWith(".tsx") ? "tsx" : filePath.endsWith(".ts") ? "ts" : filePath.endsWith(".jsx") ? "jsx" : "js",
    sourceType: "module"
  });
  const lineOffsets = buildLineOffsets(source);
  // Syntax errors: surface each as a "parse-error" diagnostic instead of
  // running rules on a broken tree.
  if (parsed.errors?.length) return parsed.errors.map((e) => {
    const offset = typeof e.labels?.[0]?.start === "number" ? e.labels[0].start : void 0;
    const loc = offset !== void 0 ? offsetToLineCol(offset, lineOffsets) : {
      line: 1,
      column: 1
    };
    return {
      file: filePath,
      rule: "parse-error",
      severity: "error",
      message: e.message ?? "parse error",
      line: loc.line,
      column: loc.column
    };
  });
  // Fast bail: only files with a static import from "react" are linted.
  // NOTE(review): files that obtain hooks indirectly (re-exports, aliased
  // packages, "preact/hooks") are skipped — confirm this scope is intended.
  if (!(parsed.module?.staticImports ?? []).some((i) => (i.moduleRequest?.value ?? i.source?.value) === "react")) return [];
  const out = [];
  for (const [ruleId, ruleCfg] of Object.entries(config.rules)) {
    if (ruleCfg.severity === "off") continue;
    // Type-aware rules only run in the (slower) typeAware mode.
    if (!config.typeAware && TYPE_AWARE_RULES.has(ruleId)) continue;
    const rule = ALL_RULES[ruleNamespace(ruleId)];
    if (!rule) continue;
    const localDiags = [];
    // Minimal eslint-like rule context; report() resolves node offsets into
    // 1-based line/column positions.
    const context = {
      options: ruleCfg.options ? [ruleCfg.options] : [],
      filename: filePath,
      cwd: config.cwd,
      report(d) {
        const loc = getLoc(d.node, lineOffsets);
        const cfgSev = ruleCfg.severity;
        // A rule-reported "error" always stays an error; otherwise the
        // configured severity wins.
        const severity = d.severity === "error" ? "error" : cfgSev;
        localDiags.push({
          file: filePath,
          rule: ruleId,
          severity,
          message: d.message,
          ...loc
        });
      }
    };
    const handlers = rule.create(context);
    walkAST(parsed.program, handlers);
    out.push(...localDiags);
  }
  return out;
}
106
/**
 * Lint many files. File reads are kicked off concurrently via Promise.all
 * (parsing still happens on this thread); per-file diagnostic arrays are
 * flattened in input order.
 */
async function lintFiles(filePaths, config, readTextFile) {
  const pending = filePaths.map(async (path) => lintFile(path, await readTextFile(path), config));
  const perFile = await Promise.all(pending);
  return perFile.flat();
}
111
+ //#endregion
112
+ export { lintFiles as n, lintFile as t };
113
+
114
+ //# sourceMappingURL=engine-DJFFKwTZ.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"engine-DJFFKwTZ.mjs","names":[],"sources":["../src/engine.ts"],"sourcesContent":["import { parseSync } from \"oxc-parser\";\nimport { ALL_RULES } from \"./rules/registry.ts\";\n\nexport type Severity = \"off\" | \"warn\" | \"error\";\n\nexport interface Diagnostic {\n file: string;\n rule: string;\n severity: Exclude<Severity, \"off\">;\n message: string;\n line: number;\n column: number;\n endLine?: number;\n endColumn?: number;\n}\n\nexport interface RuleConfig {\n severity: Severity;\n options?: unknown;\n}\n\nexport interface EngineConfig {\n rules: Record<string, RuleConfig>;\n cwd: string;\n typeAware: boolean;\n}\n\nconst TYPE_AWARE_RULES = new Set([\"hook-o-gnese/custom-hook-depth\"]);\n\nfunction ruleNamespace(id: string): string {\n return id.replace(/^hook-o-gnese\\//, \"\");\n}\n\nfunction buildLineOffsets(source: string): number[] {\n const offsets = [0];\n for (let i = 0; i < source.length; i++) {\n if (source.charCodeAt(i) === 10) offsets.push(i + 1);\n }\n return offsets;\n}\n\nfunction offsetToLineCol(\n offset: number,\n lineOffsets: number[],\n): { line: number; column: number } {\n let lo = 0;\n let hi = lineOffsets.length - 1;\n while (lo < hi) {\n const mid = (lo + hi + 1) >>> 1;\n if (lineOffsets[mid] <= offset) lo = mid;\n else hi = mid - 1;\n }\n return { line: lo + 1, column: offset - lineOffsets[lo] + 1 };\n}\n\nfunction getLoc(\n node: any,\n lineOffsets: number[],\n): { line: number; column: number; endLine?: number; endColumn?: number } {\n const startOffset = typeof node?.start === \"number\" ? node.start : undefined;\n const endOffset = typeof node?.end === \"number\" ? node.end : undefined;\n if (startOffset === undefined) {\n return { line: 1, column: 1 };\n }\n const start = offsetToLineCol(startOffset, lineOffsets);\n const end = endOffset !== undefined\n ? 
offsetToLineCol(endOffset, lineOffsets)\n : undefined;\n return {\n line: start.line,\n column: start.column,\n endLine: end?.line,\n endColumn: end?.column,\n };\n}\n\nfunction walkAST(node: any, handlers: Record<string, any>) {\n if (!node || typeof node !== \"object\") return;\n const enter = handlers[node.type];\n if (enter) enter(node);\n for (const key in node) {\n const v = node[key];\n if (Array.isArray(v)) {\n for (const c of v) walkAST(c, handlers);\n } else if (v && typeof v === \"object\") {\n walkAST(v, handlers);\n }\n }\n const exit = handlers[`${node.type}:exit`];\n if (exit) exit(node);\n}\n\nexport async function lintFile(\n filePath: string,\n source: string,\n config: EngineConfig,\n): Promise<Diagnostic[]> {\n const lang = filePath.endsWith(\".tsx\")\n ? \"tsx\"\n : filePath.endsWith(\".ts\")\n ? \"ts\"\n : filePath.endsWith(\".jsx\")\n ? \"jsx\"\n : \"js\";\n\n const parsed = parseSync(filePath, source, {\n lang,\n sourceType: \"module\",\n });\n\n const lineOffsets = buildLineOffsets(source);\n\n if (parsed.errors?.length) {\n return parsed.errors.map((e: any) => {\n const offset = typeof e.labels?.[0]?.start === \"number\"\n ? e.labels[0].start\n : undefined;\n const loc = offset !== undefined\n ? offsetToLineCol(offset, lineOffsets)\n : { line: 1, column: 1 };\n return {\n file: filePath,\n rule: \"parse-error\",\n severity: \"error\" as const,\n message: e.message ?? \"parse error\",\n line: loc.line,\n column: loc.column,\n };\n });\n }\n\n // Bail early on non-React files\n const imports = parsed.module?.staticImports ?? [];\n const hasReact = imports.some((i: any) =>\n (i.moduleRequest?.value ?? 
i.source?.value) === \"react\"\n );\n if (!hasReact) return [];\n\n const out: Diagnostic[] = [];\n\n for (const [ruleId, ruleCfg] of Object.entries(config.rules)) {\n if (ruleCfg.severity === \"off\") continue;\n if (!config.typeAware && TYPE_AWARE_RULES.has(ruleId)) continue;\n\n const rule = (ALL_RULES as any)[ruleNamespace(ruleId)];\n if (!rule) continue;\n\n const localDiags: Diagnostic[] = [];\n const context = {\n options: ruleCfg.options ? [ruleCfg.options] : [],\n filename: filePath,\n cwd: config.cwd,\n report(d: { message: string; node: any; severity?: \"warn\" | \"error\" }) {\n const loc = getLoc(d.node, lineOffsets);\n const cfgSev = ruleCfg.severity as \"warn\" | \"error\";\n // Rule-emitted severity only escalates (warn → error); never downgrades.\n const severity = d.severity === \"error\" ? \"error\" : cfgSev;\n localDiags.push({\n file: filePath,\n rule: ruleId,\n severity,\n message: d.message,\n ...loc,\n });\n },\n };\n\n const handlers = rule.create(context);\n walkAST(parsed.program, handlers);\n out.push(...localDiags);\n }\n\n return out;\n}\n\nexport type ReadTextFile = (path: string) => Promise<string>;\n\nexport async function lintFiles(\n filePaths: string[],\n config: EngineConfig,\n readTextFile: ReadTextFile,\n): Promise<Diagnostic[]> {\n const results = await Promise.all(\n filePaths.map(async (p) => {\n const src = await readTextFile(p);\n return lintFile(p, src, config);\n }),\n );\n return 
results.flat();\n}\n"],"mappings":";;;AA2BA,MAAM,mBAAmB,IAAI,IAAI,CAAC,iCAAiC,CAAC;AAEpE,SAAS,cAAc,IAAoB;AACzC,QAAO,GAAG,QAAQ,mBAAmB,GAAG;;AAG1C,SAAS,iBAAiB,QAA0B;CAClD,MAAM,UAAU,CAAC,EAAE;AACnB,MAAK,IAAI,IAAI,GAAG,IAAI,OAAO,QAAQ,IACjC,KAAI,OAAO,WAAW,EAAE,KAAK,GAAI,SAAQ,KAAK,IAAI,EAAE;AAEtD,QAAO;;AAGT,SAAS,gBACP,QACA,aACkC;CAClC,IAAI,KAAK;CACT,IAAI,KAAK,YAAY,SAAS;AAC9B,QAAO,KAAK,IAAI;EACd,MAAM,MAAO,KAAK,KAAK,MAAO;AAC9B,MAAI,YAAY,QAAQ,OAAQ,MAAK;MAChC,MAAK,MAAM;;AAElB,QAAO;EAAE,MAAM,KAAK;EAAG,QAAQ,SAAS,YAAY,MAAM;EAAG;;AAG/D,SAAS,OACP,MACA,aACwE;CACxE,MAAM,cAAc,OAAO,MAAM,UAAU,WAAW,KAAK,QAAQ,KAAA;CACnE,MAAM,YAAY,OAAO,MAAM,QAAQ,WAAW,KAAK,MAAM,KAAA;AAC7D,KAAI,gBAAgB,KAAA,EAClB,QAAO;EAAE,MAAM;EAAG,QAAQ;EAAG;CAE/B,MAAM,QAAQ,gBAAgB,aAAa,YAAY;CACvD,MAAM,MAAM,cAAc,KAAA,IACtB,gBAAgB,WAAW,YAAY,GACvC,KAAA;AACJ,QAAO;EACL,MAAM,MAAM;EACZ,QAAQ,MAAM;EACd,SAAS,KAAK;EACd,WAAW,KAAK;EACjB;;AAGH,SAAS,QAAQ,MAAW,UAA+B;AACzD,KAAI,CAAC,QAAQ,OAAO,SAAS,SAAU;CACvC,MAAM,QAAQ,SAAS,KAAK;AAC5B,KAAI,MAAO,OAAM,KAAK;AACtB,MAAK,MAAM,OAAO,MAAM;EACtB,MAAM,IAAI,KAAK;AACf,MAAI,MAAM,QAAQ,EAAE,CAClB,MAAK,MAAM,KAAK,EAAG,SAAQ,GAAG,SAAS;WAC9B,KAAK,OAAO,MAAM,SAC3B,SAAQ,GAAG,SAAS;;CAGxB,MAAM,OAAO,SAAS,GAAG,KAAK,KAAK;AACnC,KAAI,KAAM,MAAK,KAAK;;AAGtB,eAAsB,SACpB,UACA,QACA,QACuB;CASvB,MAAM,SAAS,UAAU,UAAU,QAAQ;EACzC,MATW,SAAS,SAAS,OAAO,GAClC,QACA,SAAS,SAAS,MAAM,GACxB,OACA,SAAS,SAAS,OAAO,GACzB,QACA;EAIF,YAAY;EACb,CAAC;CAEF,MAAM,cAAc,iBAAiB,OAAO;AAE5C,KAAI,OAAO,QAAQ,OACjB,QAAO,OAAO,OAAO,KAAK,MAAW;EACnC,MAAM,SAAS,OAAO,EAAE,SAAS,IAAI,UAAU,WAC3C,EAAE,OAAO,GAAG,QACZ,KAAA;EACJ,MAAM,MAAM,WAAW,KAAA,IACnB,gBAAgB,QAAQ,YAAY,GACpC;GAAE,MAAM;GAAG,QAAQ;GAAG;AAC1B,SAAO;GACL,MAAM;GACN,MAAM;GACN,UAAU;GACV,SAAS,EAAE,WAAW;GACtB,MAAM,IAAI;GACV,QAAQ,IAAI;GACb;GACD;AAQJ,KAAI,EAJY,OAAO,QAAQ,iBAAiB,EAAE,EACzB,MAAM,OAC5B,EAAE,eAAe,SAAS,EAAE,QAAQ,WAAW,QAErC,CAAE,QAAO,EAAE;CAExB,MAAM,MAAoB,EAAE;AAE5B,MAAK,MAAM,CAAC,QAAQ,YAAY,OAAO,QAAQ,OAAO,MAAM,EAAE;AAC5D,MAAI,QAAQ,aAAa,MAAO;AAChC,MAAI,CAAC,OAAO,aAAa,iBAAiB,IAAI,OAAO,CAAE;EAEvD,MAAM,OAAQ,UAAkB,cA
Ac,OAAO;AACrD,MAAI,CAAC,KAAM;EAEX,MAAM,aAA2B,EAAE;EACnC,MAAM,UAAU;GACd,SAAS,QAAQ,UAAU,CAAC,QAAQ,QAAQ,GAAG,EAAE;GACjD,UAAU;GACV,KAAK,OAAO;GACZ,OAAO,GAAgE;IACrE,MAAM,MAAM,OAAO,EAAE,MAAM,YAAY;IACvC,MAAM,SAAS,QAAQ;IAEvB,MAAM,WAAW,EAAE,aAAa,UAAU,UAAU;AACpD,eAAW,KAAK;KACd,MAAM;KACN,MAAM;KACN;KACA,SAAS,EAAE;KACX,GAAG;KACJ,CAAC;;GAEL;EAED,MAAM,WAAW,KAAK,OAAO,QAAQ;AACrC,UAAQ,OAAO,SAAS,SAAS;AACjC,MAAI,KAAK,GAAG,WAAW;;AAGzB,QAAO;;AAKT,eAAsB,UACpB,WACA,QACA,cACuB;AAOvB,SAAO,MANe,QAAQ,IAC5B,UAAU,IAAI,OAAO,MAAM;AAEzB,SAAO,SAAS,GAAG,MADD,aAAa,EAAE,EACT,OAAO;GAC/B,CACH,EACc,MAAM"}
@@ -0,0 +1,27 @@
1
+ //#region src/engine.d.ts
2
/** Rule severity: "off" disables a rule entirely; "warn"/"error" set the report level. */
type Severity = "off" | "warn" | "error";
3
/**
 * A single finding produced by a rule, or by the parser itself under the
 * synthetic "parse-error" rule id.
 */
interface Diagnostic {
	/** Path of the file the diagnostic was reported against. */
	file: string;
	/** Rule id (e.g. "hook-o-gnese/<rule>"), or "parse-error" for parser failures. */
	rule: string;
	/** Effective severity; rules configured "off" never emit, so it is excluded. */
	severity: Exclude<Severity, "off">;
	message: string;
	/** 1-based start line of the reported node. */
	line: number;
	/** 1-based start column of the reported node. */
	column: number;
	/** 1-based end position, present when the reported node carries an end offset. */
	endLine?: number;
	endColumn?: number;
}
13
/** Per-rule configuration: a severity plus optional rule-specific options. */
interface RuleConfig {
	/** "off" disables the rule; otherwise the default severity of its reports. */
	severity: Severity;
	/** Opaque options payload forwarded to the rule implementation. */
	options?: unknown;
}
17
/** Top-level lint-engine configuration. */
interface EngineConfig {
	/** Map of rule id -> configuration; ids with no registered rule are silently skipped. */
	rules: Record<string, RuleConfig>;
	/** Working directory, exposed to rules via their context. */
	cwd: string;
	/** When false, rules that require type information are skipped. */
	typeAware: boolean;
}
22
/**
 * Lint a single file's source text. Parse failures are returned as
 * "parse-error" diagnostics; files with no static `import ... from "react"`
 * yield an empty array.
 */
declare function lintFile(filePath: string, source: string, config: EngineConfig): Promise<Diagnostic[]>;
23
/** Caller-supplied async file reader (path -> file contents). */
type ReadTextFile = (path: string) => Promise<string>;
/**
 * Lint many files concurrently; each file is read via `readTextFile` and the
 * per-file diagnostics are flattened into one list.
 */
declare function lintFiles(filePaths: string[], config: EngineConfig, readTextFile: ReadTextFile): Promise<Diagnostic[]>;
25
+ //#endregion
26
+ export { Diagnostic, EngineConfig, ReadTextFile, RuleConfig, Severity, lintFile, lintFiles };
27
+ //# sourceMappingURL=engine.d.mts.map
@@ -0,0 +1,2 @@
1
// Re-export the bundler-mangled names ("n"/"t") from the engine chunk under
// their public API names.
import { n as lintFiles, t as lintFile } from "./engine-DJFFKwTZ.mjs";
export { lintFile, lintFiles };