@wolfcola/dead-export-finder 0.0.0-beta-20260513233134
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +104 -0
- package/dist/index.d.ts +11 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +158 -0
- package/dist/lib/errors.d.ts +25 -0
- package/dist/lib/errors.d.ts.map +1 -0
- package/dist/lib/errors.js +7 -0
- package/dist/lib/export-graph.d.ts +11 -0
- package/dist/lib/export-graph.d.ts.map +1 -0
- package/dist/lib/export-graph.js +125 -0
- package/dist/lib/export-parser.d.ts +12 -0
- package/dist/lib/export-parser.d.ts.map +1 -0
- package/dist/lib/export-parser.js +170 -0
- package/dist/lib/file-scanner.d.ts +18 -0
- package/dist/lib/file-scanner.d.ts.map +1 -0
- package/dist/lib/file-scanner.js +42 -0
- package/dist/lib/import-parser.d.ts +12 -0
- package/dist/lib/import-parser.d.ts.map +1 -0
- package/dist/lib/import-parser.js +129 -0
- package/dist/lib/reporter.d.ts +11 -0
- package/dist/lib/reporter.d.ts.map +1 -0
- package/dist/lib/reporter.js +50 -0
- package/dist/lib/schemas.d.ts +59 -0
- package/dist/lib/schemas.d.ts.map +1 -0
- package/dist/lib/schemas.js +33 -0
- package/dist/lib/workspace-detector.d.ts +18 -0
- package/dist/lib/workspace-detector.d.ts.map +1 -0
- package/dist/lib/workspace-detector.js +118 -0
- package/dist/test-setup.d.ts +2 -0
- package/dist/test-setup.d.ts.map +1 -0
- package/dist/test-setup.js +2 -0
- package/package.json +53 -0
package/README.md
ADDED
|
@@ -0,0 +1,104 @@
|
|
|
1
|
+
# @wolfcola/dead-export-finder
|
|
2
|
+
|
|
3
|
+
Find unused exports across monorepo package boundaries. Scans all workspace packages, parses exports and imports with [oxc-parser](https://oxc.rs/), and reports exports that are never imported by any other package.
|
|
4
|
+
|
|
5
|
+
## Installation
|
|
6
|
+
|
|
7
|
+
```bash
|
|
8
|
+
npm install -D @wolfcola/dead-export-finder
|
|
9
|
+
```
|
|
10
|
+
|
|
11
|
+
## CLI Usage
|
|
12
|
+
|
|
13
|
+
```bash
|
|
14
|
+
dead-export-finder [options]
|
|
15
|
+
```
|
|
16
|
+
|
|
17
|
+
Run from the monorepo root. The tool auto-detects workspace packages from `pnpm-workspace.yaml` or `package.json` workspaces.
|
|
18
|
+
|
|
19
|
+
### Options
|
|
20
|
+
|
|
21
|
+
| Option | Alias | Description |
|
|
22
|
+
| ------------------- | ----- | ----------------------------------------------------- |
|
|
23
|
+
| `--packages <name>` | `-p` | Scope analysis to specific package names (repeatable) |
|
|
24
|
+
| `--ignore <glob>` | `-i` | Glob patterns to exclude from scanning (repeatable) |
|
|
25
|
+
| `--verbose` | `-v` | Print timing, file counts, and parse warnings |
|
|
26
|
+
|
|
27
|
+
### Examples
|
|
28
|
+
|
|
29
|
+
```bash
|
|
30
|
+
# Scan all packages
|
|
31
|
+
dead-export-finder
|
|
32
|
+
|
|
33
|
+
# Scope to specific packages
|
|
34
|
+
dead-export-finder -p @wolfcola/devtools-core -p @wolfcola/devtools-types
|
|
35
|
+
|
|
36
|
+
# Ignore test files
|
|
37
|
+
dead-export-finder -i "**/*.test.ts" -i "**/*.spec.ts"
|
|
38
|
+
|
|
39
|
+
# Verbose output
|
|
40
|
+
dead-export-finder --verbose
|
|
41
|
+
```
|
|
42
|
+
|
|
43
|
+
Exit code is `1` when dead exports are found, `0` when clean.
|
|
44
|
+
|
|
45
|
+
## Programmatic API
|
|
46
|
+
|
|
47
|
+
All services are exported for use in custom tooling:
|
|
48
|
+
|
|
49
|
+
```typescript
|
|
50
|
+
import {
|
|
51
|
+
WorkspaceDetector,
|
|
52
|
+
WorkspaceDetectorLive,
|
|
53
|
+
FileScanner,
|
|
54
|
+
FileScannerLive,
|
|
55
|
+
ExportParser,
|
|
56
|
+
ExportParserLive,
|
|
57
|
+
ImportParser,
|
|
58
|
+
ImportParserLive,
|
|
59
|
+
ExportGraph,
|
|
60
|
+
ExportGraphLive,
|
|
61
|
+
Reporter,
|
|
62
|
+
ReporterLive,
|
|
63
|
+
} from '@wolfcola/dead-export-finder';
|
|
64
|
+
```
|
|
65
|
+
|
|
66
|
+
Each service follows the `Context.Service` + `Live` layer pattern from Effect.
|
|
67
|
+
|
|
68
|
+
### Types
|
|
69
|
+
|
|
70
|
+
```typescript
|
|
71
|
+
import type {
|
|
72
|
+
PackageInfo,
|
|
73
|
+
ExportedSymbol,
|
|
74
|
+
ImportedSymbol,
|
|
75
|
+
DeadExport,
|
|
76
|
+
AnalysisResult,
|
|
77
|
+
WorkspaceResult,
|
|
78
|
+
} from '@wolfcola/dead-export-finder';
|
|
79
|
+
```
|
|
80
|
+
|
|
81
|
+
### Errors
|
|
82
|
+
|
|
83
|
+
```typescript
|
|
84
|
+
import {
|
|
85
|
+
WorkspaceNotFoundError,
|
|
86
|
+
ParseError,
|
|
87
|
+
EntryPointResolutionError,
|
|
88
|
+
} from '@wolfcola/dead-export-finder';
|
|
89
|
+
```
|
|
90
|
+
|
|
91
|
+
## How It Works
|
|
92
|
+
|
|
93
|
+
1. **Workspace detection** — reads `pnpm-workspace.yaml` or `package.json` workspaces to find all packages
|
|
94
|
+
2. **File scanning** — finds all `.ts`, `.tsx`, `.js`, `.jsx` files in each package, respecting `.gitignore` and `--ignore` globs
|
|
95
|
+
3. **Export parsing** — uses `oxc-parser` to extract all exported symbols from each file
|
|
96
|
+
4. **Import parsing** — uses `oxc-parser` to extract all imported symbols from each file
|
|
97
|
+
5. **Graph analysis** — builds a cross-package dependency graph and identifies exports with zero imports
|
|
98
|
+
6. **Reporting** — formats results grouped by package with file paths relative to package root
|
|
99
|
+
|
|
100
|
+
Parse errors are handled gracefully as warnings — the tool continues with remaining files.
|
|
101
|
+
|
|
102
|
+
## License
|
|
103
|
+
|
|
104
|
+
MIT
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
export { WorkspaceDetector, WorkspaceDetectorLive } from './lib/workspace-detector.js';
|
|
3
|
+
export type { WorkspaceResult } from './lib/workspace-detector.js';
|
|
4
|
+
export { FileScanner, FileScannerLive } from './lib/file-scanner.js';
|
|
5
|
+
export { ExportParser, ExportParserLive } from './lib/export-parser.js';
|
|
6
|
+
export { ImportParser, ImportParserLive } from './lib/import-parser.js';
|
|
7
|
+
export { ExportGraph, ExportGraphLive } from './lib/export-graph.js';
|
|
8
|
+
export { Reporter, ReporterLive } from './lib/reporter.js';
|
|
9
|
+
export { WorkspaceNotFoundError, ParseError, EntryPointResolutionError } from './lib/errors.js';
|
|
10
|
+
export type { PackageInfo, ExportedSymbol, ImportedSymbol, DeadExport, AnalysisResult, } from './lib/schemas.js';
|
|
11
|
+
//# sourceMappingURL=index.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";AAGA,OAAO,EAAE,iBAAiB,EAAE,qBAAqB,EAAE,MAAM,6BAA6B,CAAC;AACvF,YAAY,EAAE,eAAe,EAAE,MAAM,6BAA6B,CAAC;AACnE,OAAO,EAAE,WAAW,EAAE,eAAe,EAAE,MAAM,uBAAuB,CAAC;AACrE,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,wBAAwB,CAAC;AACxE,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,wBAAwB,CAAC;AACxE,OAAO,EAAE,WAAW,EAAE,eAAe,EAAE,MAAM,uBAAuB,CAAC;AACrE,OAAO,EAAE,QAAQ,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAC;AAC3D,OAAO,EAAE,sBAAsB,EAAE,UAAU,EAAE,yBAAyB,EAAE,MAAM,iBAAiB,CAAC;AAChG,YAAY,EACV,WAAW,EACX,cAAc,EACd,cAAc,EACd,UAAU,EACV,cAAc,GACf,MAAM,kBAAkB,CAAC"}
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,158 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
// Public API
|
|
3
|
+
export { WorkspaceDetector, WorkspaceDetectorLive } from './lib/workspace-detector.js';
|
|
4
|
+
export { FileScanner, FileScannerLive } from './lib/file-scanner.js';
|
|
5
|
+
export { ExportParser, ExportParserLive } from './lib/export-parser.js';
|
|
6
|
+
export { ImportParser, ImportParserLive } from './lib/import-parser.js';
|
|
7
|
+
export { ExportGraph, ExportGraphLive } from './lib/export-graph.js';
|
|
8
|
+
export { Reporter, ReporterLive } from './lib/reporter.js';
|
|
9
|
+
export { WorkspaceNotFoundError, ParseError, EntryPointResolutionError } from './lib/errors.js';
|
|
10
|
+
import { Command, Options } from '@effect/cli';
|
|
11
|
+
import { NodeContext, NodeRuntime } from '@effect/platform-node';
|
|
12
|
+
import { FileSystem } from '@effect/platform';
|
|
13
|
+
import { Console, Data, Effect, Layer, Array as Arr, Option, pipe } from 'effect';
|
|
14
|
+
import { WorkspaceDetector, WorkspaceDetectorLive } from './lib/workspace-detector.js';
|
|
15
|
+
import { FileScanner, FileScannerLive } from './lib/file-scanner.js';
|
|
16
|
+
import { ExportParser, ExportParserLive } from './lib/export-parser.js';
|
|
17
|
+
import { ImportParser, ImportParserLive } from './lib/import-parser.js';
|
|
18
|
+
import { ExportGraph, ExportGraphLive } from './lib/export-graph.js';
|
|
19
|
+
import { Reporter, ReporterLive } from './lib/reporter.js';
|
|
20
|
+
// ─── Exit code error ──────────────────────────────────────────────────────────
|
|
21
|
+
class ExitWithCode extends Data.TaggedError('ExitWithCode') {
|
|
22
|
+
}
|
|
23
|
+
// ─── Options ──────────────────────────────────────────────────────────────────
|
|
24
|
+
const packages = Options.text('packages').pipe(Options.withAlias('p'), Options.withDescription('Scope analysis to specific package names (repeat for multiple).'), Options.repeated, Options.optional);
|
|
25
|
+
const ignore = Options.text('ignore').pipe(Options.withAlias('i'), Options.withDescription('Glob patterns to exclude from scanning (repeat for multiple).'), Options.repeated, Options.optional);
|
|
26
|
+
const verbose = Options.boolean('verbose').pipe(Options.withAlias('v'), Options.withDescription('Print verbose output including timing and parse warnings.'), Options.withDefault(false));
|
|
27
|
+
// ─── Layer ────────────────────────────────────────────────────────────────────
|
|
28
|
+
const AppLayer = Layer.mergeAll(ExportParserLive, ImportParserLive, ExportGraphLive, ReporterLive, WorkspaceDetectorLive, FileScannerLive).pipe(Layer.provideMerge(NodeContext.layer));
|
|
29
|
+
const scanWorkspace = (workspace, ignoreGlobs, isVerbose) => Effect.gen(function* () {
|
|
30
|
+
const scanner = yield* FileScanner;
|
|
31
|
+
const results = yield* Effect.all(pipe(workspace.packages, Arr.map((pkg) => pipe(scanner.scan(pkg.root, ignoreGlobs), Effect.catchTag('GlobError', (e) => Effect.gen(function* () {
|
|
32
|
+
const msg = `failed to scan files in ${pkg.root}: ${String(e.cause)}`;
|
|
33
|
+
if (isVerbose)
|
|
34
|
+
yield* Console.log(`Warning: ${msg}`);
|
|
35
|
+
return { files: [], warning: msg };
|
|
36
|
+
})), Effect.map((result) => 'warning' in result
|
|
37
|
+
? result
|
|
38
|
+
: { files: result, warning: null }), Effect.map((r) => ({ pkg, files: r.files, warning: r.warning }))))));
|
|
39
|
+
return {
|
|
40
|
+
filesByPackage: pipe(results, Arr.map((r) => [r.pkg, r.files])),
|
|
41
|
+
warnings: pipe(results, Arr.filterMap((r) => (r.warning !== null ? Option.some(r.warning) : Option.none()))),
|
|
42
|
+
};
|
|
43
|
+
});
|
|
44
|
+
const parseAllFiles = (filesByPackage, isVerbose) => Effect.gen(function* () {
|
|
45
|
+
const fs = yield* FileSystem.FileSystem;
|
|
46
|
+
const exportParser = yield* ExportParser;
|
|
47
|
+
const importParser = yield* ImportParser;
|
|
48
|
+
const allFiles = pipe(filesByPackage, Arr.flatMap(([, files]) => files));
|
|
49
|
+
const fileResults = yield* Effect.all(pipe(allFiles, Arr.map((filePath) => pipe(fs.readFileString(filePath, 'utf-8'), Effect.either, Effect.flatMap((sourceResult) => {
|
|
50
|
+
if (sourceResult._tag === 'Left') {
|
|
51
|
+
const msg = `could not read ${filePath}: ${String(sourceResult.left)}`;
|
|
52
|
+
return isVerbose
|
|
53
|
+
? pipe(Console.log(`Warning: ${msg}`), Effect.map(() => ({
|
|
54
|
+
filePath,
|
|
55
|
+
exports: [],
|
|
56
|
+
imports: [],
|
|
57
|
+
warning: msg,
|
|
58
|
+
})))
|
|
59
|
+
: Effect.succeed({
|
|
60
|
+
filePath,
|
|
61
|
+
exports: [],
|
|
62
|
+
imports: [],
|
|
63
|
+
warning: msg,
|
|
64
|
+
});
|
|
65
|
+
}
|
|
66
|
+
const source = sourceResult.right;
|
|
67
|
+
const parseExports = pipe(exportParser.parse(filePath, source), Effect.map((symbols) => ({
|
|
68
|
+
symbols,
|
|
69
|
+
warning: null,
|
|
70
|
+
})), Effect.catchTag('ParseError', (e) => {
|
|
71
|
+
const msg = `failed to parse exports in ${e.filePath}: ${e.message}`;
|
|
72
|
+
const result = { symbols: [], warning: msg };
|
|
73
|
+
return isVerbose
|
|
74
|
+
? pipe(Console.log(`Warning: ${msg}`), Effect.map(() => result))
|
|
75
|
+
: Effect.succeed(result);
|
|
76
|
+
}));
|
|
77
|
+
const parseImports = pipe(importParser.parse(filePath, source), Effect.map((symbols) => ({
|
|
78
|
+
symbols,
|
|
79
|
+
warning: null,
|
|
80
|
+
})), Effect.catchTag('ParseError', (e) => {
|
|
81
|
+
const msg = `failed to parse imports in ${e.filePath}: ${e.message}`;
|
|
82
|
+
const result = { symbols: [], warning: msg };
|
|
83
|
+
return isVerbose
|
|
84
|
+
? pipe(Console.log(`Warning: ${msg}`), Effect.map(() => result))
|
|
85
|
+
: Effect.succeed(result);
|
|
86
|
+
}));
|
|
87
|
+
return pipe(Effect.all({ exports: parseExports, imports: parseImports }), Effect.map(({ exports: exp, imports: imp }) => ({
|
|
88
|
+
filePath,
|
|
89
|
+
exports: exp.symbols,
|
|
90
|
+
imports: imp.symbols,
|
|
91
|
+
warning: exp.warning ?? imp.warning,
|
|
92
|
+
})));
|
|
93
|
+
})))));
|
|
94
|
+
const exportEntries = pipe(fileResults, Arr.filter((r) => r.exports.length > 0), Arr.map((r) => [r.filePath, r.exports]));
|
|
95
|
+
const importEntries = pipe(fileResults, Arr.filter((r) => r.imports.length > 0), Arr.map((r) => [r.filePath, r.imports]));
|
|
96
|
+
const warnings = pipe(fileResults, Arr.filterMap((r) => (r.warning !== null ? Option.some(r.warning) : Option.none())));
|
|
97
|
+
return {
|
|
98
|
+
allExports: new Map(exportEntries),
|
|
99
|
+
allImports: new Map(importEntries),
|
|
100
|
+
warnings,
|
|
101
|
+
};
|
|
102
|
+
});
|
|
103
|
+
const analyzeAndReport = (targetPackages, allPackages, allExports, allImports) => Effect.gen(function* () {
|
|
104
|
+
const graph = yield* ExportGraph;
|
|
105
|
+
const reporter = yield* Reporter;
|
|
106
|
+
const result = yield* graph.analyze(targetPackages, allExports, allImports);
|
|
107
|
+
const packageRoots = new Map(pipe(allPackages, Arr.map((p) => [p.name, p.root])));
|
|
108
|
+
const report = reporter.format(result, packageRoots);
|
|
109
|
+
yield* Console.log(report);
|
|
110
|
+
return { deadCount: result.deadExports.length, warnings: result.warnings };
|
|
111
|
+
});
|
|
112
|
+
// ─── Command ──────────────────────────────────────────────────────────────────
|
|
113
|
+
const command = Command.make('dead-export-finder', { packages, ignore, verbose }, ({ packages: packagesOpt, ignore: ignoreOpt, verbose: isVerbose }) => Effect.gen(function* () {
|
|
114
|
+
const startTime = Date.now();
|
|
115
|
+
const detector = yield* WorkspaceDetector;
|
|
116
|
+
const cwd = process.cwd();
|
|
117
|
+
const workspace = yield* detector.detect(cwd);
|
|
118
|
+
if (isVerbose) {
|
|
119
|
+
yield* Console.log(`Detected workspace type: ${workspace.type}`);
|
|
120
|
+
yield* Console.log(`Found ${workspace.packages.length} packages`);
|
|
121
|
+
}
|
|
122
|
+
const packageFilter = packagesOpt._tag === 'Some' ? new Set(packagesOpt.value) : null;
|
|
123
|
+
const targetPackages = packageFilter !== null
|
|
124
|
+
? pipe(workspace.packages, Arr.filter((p) => packageFilter.has(p.name)))
|
|
125
|
+
: [...workspace.packages];
|
|
126
|
+
const ignoreGlobs = ignoreOpt._tag === 'Some' ? ignoreOpt.value : [];
|
|
127
|
+
if (isVerbose && packageFilter !== null && targetPackages.length > 0) {
|
|
128
|
+
yield* Console.log(`Scoping to packages: ${pipe(targetPackages, Arr.map((p) => p.name), Arr.join(', '))}`);
|
|
129
|
+
}
|
|
130
|
+
const scanResult = yield* scanWorkspace(workspace, ignoreGlobs, isVerbose);
|
|
131
|
+
const parseResult = yield* parseAllFiles(scanResult.filesByPackage, isVerbose);
|
|
132
|
+
if (isVerbose) {
|
|
133
|
+
yield* Console.log(`Scanned ${parseResult.allExports.size} files with exports, ${parseResult.allImports.size} files with imports`);
|
|
134
|
+
}
|
|
135
|
+
const { deadCount, warnings: analysisWarnings } = yield* analyzeAndReport(targetPackages, [...workspace.packages], parseResult.allExports, parseResult.allImports);
|
|
136
|
+
const allWarnings = pipe(scanResult.warnings, Arr.appendAll(parseResult.warnings), Arr.appendAll(analysisWarnings));
|
|
137
|
+
if (allWarnings.length > 0 && !isVerbose) {
|
|
138
|
+
yield* Console.log(`\nWarning: ${allWarnings.length} issue(s) during analysis — results may be incomplete. Run with --verbose for details.`);
|
|
139
|
+
}
|
|
140
|
+
if (isVerbose) {
|
|
141
|
+
const elapsed = Date.now() - startTime;
|
|
142
|
+
yield* Console.log(`\nCompleted in ${elapsed}ms`);
|
|
143
|
+
}
|
|
144
|
+
if (deadCount > 0) {
|
|
145
|
+
return yield* new ExitWithCode({ code: 1 });
|
|
146
|
+
}
|
|
147
|
+
})).pipe(Command.withDescription('Find dead exports across monorepo package boundaries.'));
|
|
148
|
+
// ─── Runner ───────────────────────────────────────────────────────────────────
|
|
149
|
+
const cli = Command.run(command, {
|
|
150
|
+
name: 'Dead Export Finder',
|
|
151
|
+
version: '0.0.0',
|
|
152
|
+
});
|
|
153
|
+
cli(process.argv).pipe(Effect.catchTags({
|
|
154
|
+
ExitWithCode: (e) => Effect.sync(() => (process.exitCode = e.code)),
|
|
155
|
+
WorkspaceNotFoundError: (e) => Console.error(`error: workspace not found at ${e.cwd}`).pipe(Effect.zipRight(Effect.sync(() => {
|
|
156
|
+
process.exitCode = 1;
|
|
157
|
+
}))),
|
|
158
|
+
}), Effect.provide(AppLayer), NodeRuntime.runMain);
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
declare const WorkspaceNotFoundError_base: new <A extends Record<string, any> = {}>(args: import("effect/Types").VoidIfEmpty<{ readonly [P in keyof A as P extends "_tag" ? never : P]: A[P]; }>) => import("effect/Cause").YieldableError & {
|
|
2
|
+
readonly _tag: "WorkspaceNotFoundError";
|
|
3
|
+
} & Readonly<A>;
|
|
4
|
+
export declare class WorkspaceNotFoundError extends WorkspaceNotFoundError_base<{
|
|
5
|
+
readonly cwd: string;
|
|
6
|
+
}> {
|
|
7
|
+
}
|
|
8
|
+
declare const ParseError_base: new <A extends Record<string, any> = {}>(args: import("effect/Types").VoidIfEmpty<{ readonly [P in keyof A as P extends "_tag" ? never : P]: A[P]; }>) => import("effect/Cause").YieldableError & {
|
|
9
|
+
readonly _tag: "ParseError";
|
|
10
|
+
} & Readonly<A>;
|
|
11
|
+
export declare class ParseError extends ParseError_base<{
|
|
12
|
+
readonly filePath: string;
|
|
13
|
+
readonly message: string;
|
|
14
|
+
}> {
|
|
15
|
+
}
|
|
16
|
+
declare const EntryPointResolutionError_base: new <A extends Record<string, any> = {}>(args: import("effect/Types").VoidIfEmpty<{ readonly [P in keyof A as P extends "_tag" ? never : P]: A[P]; }>) => import("effect/Cause").YieldableError & {
|
|
17
|
+
readonly _tag: "EntryPointResolutionError";
|
|
18
|
+
} & Readonly<A>;
|
|
19
|
+
export declare class EntryPointResolutionError extends EntryPointResolutionError_base<{
|
|
20
|
+
readonly packageName: string;
|
|
21
|
+
readonly entryPoint: string;
|
|
22
|
+
}> {
|
|
23
|
+
}
|
|
24
|
+
export {};
|
|
25
|
+
//# sourceMappingURL=errors.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"errors.d.ts","sourceRoot":"","sources":["../../src/lib/errors.ts"],"names":[],"mappings":";;;AAEA,qBAAa,sBAAuB,SAAQ,4BAA2C;IACrF,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC;CACtB,CAAC;CAAG;;;;AAEL,qBAAa,UAAW,SAAQ,gBAA+B;IAC7D,QAAQ,CAAC,QAAQ,EAAE,MAAM,CAAC;IAC1B,QAAQ,CAAC,OAAO,EAAE,MAAM,CAAC;CAC1B,CAAC;CAAG;;;;AAEL,qBAAa,yBAA0B,SAAQ,+BAA8C;IAC3F,QAAQ,CAAC,WAAW,EAAE,MAAM,CAAC;IAC7B,QAAQ,CAAC,UAAU,EAAE,MAAM,CAAC;CAC7B,CAAC;CAAG"}
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
import { Data } from 'effect';
|
|
2
|
+
export class WorkspaceNotFoundError extends Data.TaggedError('WorkspaceNotFoundError') {
|
|
3
|
+
}
|
|
4
|
+
export class ParseError extends Data.TaggedError('ParseError') {
|
|
5
|
+
}
|
|
6
|
+
export class EntryPointResolutionError extends Data.TaggedError('EntryPointResolutionError') {
|
|
7
|
+
}
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
import { Context, Effect, Layer } from 'effect';
|
|
2
|
+
import type { PackageInfo, ExportedSymbol, ImportedSymbol, AnalysisResult } from './schemas.js';
|
|
3
|
+
export interface ExportGraphShape {
|
|
4
|
+
readonly analyze: (packages: readonly PackageInfo[], allExports: ReadonlyMap<string, readonly ExportedSymbol[]>, allImports: ReadonlyMap<string, readonly ImportedSymbol[]>) => Effect.Effect<AnalysisResult>;
|
|
5
|
+
}
|
|
6
|
+
declare const ExportGraph_base: Context.TagClass<ExportGraph, "ExportGraph", ExportGraphShape>;
|
|
7
|
+
export declare class ExportGraph extends ExportGraph_base {
|
|
8
|
+
}
|
|
9
|
+
export declare const ExportGraphLive: Layer.Layer<ExportGraph, never, never>;
|
|
10
|
+
export {};
|
|
11
|
+
//# sourceMappingURL=export-graph.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"export-graph.d.ts","sourceRoot":"","sources":["../../src/lib/export-graph.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,EAAE,KAAK,EAAuC,MAAM,QAAQ,CAAC;AAErF,OAAO,KAAK,EACV,WAAW,EACX,cAAc,EACd,cAAc,EAEd,cAAc,EACf,MAAM,cAAc,CAAC;AAkQtB,MAAM,WAAW,gBAAgB;IAC/B,QAAQ,CAAC,OAAO,EAAE,CAChB,QAAQ,EAAE,SAAS,WAAW,EAAE,EAChC,UAAU,EAAE,WAAW,CAAC,MAAM,EAAE,SAAS,cAAc,EAAE,CAAC,EAC1D,UAAU,EAAE,WAAW,CAAC,MAAM,EAAE,SAAS,cAAc,EAAE,CAAC,KACvD,MAAM,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;CACpC;;AAID,qBAAa,WAAY,SAAQ,gBAA2D;CAAG;AA6B/F,eAAO,MAAM,eAAe,wCAG1B,CAAC"}
|
|
@@ -0,0 +1,125 @@
|
|
|
1
|
+
import { Context, Effect, Layer, Array as Arr, HashSet, Option, pipe } from 'effect';
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
// ─── Extension stripping ───────────────────────────────────────────────────────
|
|
4
|
+
const EXTENSIONS = ['.ts', '.tsx', '.js', '.jsx', '.mjs', '.cjs'];
|
|
5
|
+
const stripExtension = (filePath) => pipe(EXTENSIONS, Arr.findFirst((ext) => filePath.endsWith(ext)), (opt) => (opt._tag === 'Some' ? filePath.slice(0, -opt.value.length) : filePath));
|
|
6
|
+
const BUILD_DIR_MAPPINGS = [
|
|
7
|
+
['/dist/', '/src/'],
|
|
8
|
+
['/build/', '/src/'],
|
|
9
|
+
['/out/', '/src/'],
|
|
10
|
+
];
|
|
11
|
+
const resolveEntryPointToSource = (entryPoint, scannedFiles, scannedStripped) => {
|
|
12
|
+
if (HashSet.has(scannedFiles, entryPoint))
|
|
13
|
+
return entryPoint;
|
|
14
|
+
const stripped = stripExtension(entryPoint);
|
|
15
|
+
if (HashSet.has(scannedStripped, stripped))
|
|
16
|
+
return stripped;
|
|
17
|
+
return pipe(BUILD_DIR_MAPPINGS, Arr.findFirst(([buildDir]) => entryPoint.includes(buildDir)), (opt) => {
|
|
18
|
+
if (opt._tag === 'None')
|
|
19
|
+
return null;
|
|
20
|
+
const [buildDir, sourceDir] = opt.value;
|
|
21
|
+
const sourcePath = entryPoint.replace(buildDir, sourceDir);
|
|
22
|
+
if (HashSet.has(scannedFiles, sourcePath))
|
|
23
|
+
return sourcePath;
|
|
24
|
+
const sourceStripped = stripExtension(sourcePath);
|
|
25
|
+
if (HashSet.has(scannedStripped, sourceStripped))
|
|
26
|
+
return sourceStripped;
|
|
27
|
+
return null;
|
|
28
|
+
});
|
|
29
|
+
};
|
|
30
|
+
// ─── Pure pipeline stages ────────────────────────────────────────────────────
|
|
31
|
+
const resolveEntryPoints = (packages, scannedFiles, scannedStripped) => pipe(packages, Arr.flatMap((pkg) => pipe(pkg.entryPoints, Arr.filterMap((ep) => {
|
|
32
|
+
const resolved = path.resolve(pkg.root, ep);
|
|
33
|
+
const sourcePath = resolveEntryPointToSource(resolved, scannedFiles, scannedStripped);
|
|
34
|
+
return sourcePath !== null ? Option.some(sourcePath) : Option.none();
|
|
35
|
+
}))), HashSet.fromIterable);
|
|
36
|
+
const buildFileToPackageMap = (packages, exportedFilePaths) => new Map(pipe(exportedFilePaths, Arr.filterMap((filePath) => {
|
|
37
|
+
const pkg = pipe(packages, Arr.findFirst((p) => filePath.startsWith(p.root + path.sep) || filePath === p.root));
|
|
38
|
+
return pkg._tag === 'Some' ? Option.some([filePath, pkg.value]) : Option.none();
|
|
39
|
+
})));
|
|
40
|
+
const isRelativePath = (src) => src.startsWith('./') || src.startsWith('../') || src.startsWith('/');
|
|
41
|
+
const collectImportEdges = (allImports) => {
|
|
42
|
+
const entries = pipe([...allImports.entries()], Arr.flatMap(([importerFile, imports]) => pipe(imports, Arr.map((imp) => ({ importerFile, imp })))));
|
|
43
|
+
const relativeEntries = pipe(entries, Arr.filter(({ imp }) => isRelativePath(imp.source)));
|
|
44
|
+
const packageEntries = pipe(entries, Arr.filter(({ imp }) => !isRelativePath(imp.source)));
|
|
45
|
+
const byRelative = pipe(relativeEntries, Arr.filter(({ imp }) => !imp.isNamespace && imp.name !== '*'), Arr.map(({ importerFile, imp }) => {
|
|
46
|
+
const importerDir = path.dirname(importerFile);
|
|
47
|
+
const resolved = stripExtension(path.resolve(importerDir, imp.source));
|
|
48
|
+
return `${resolved}:${imp.name}`;
|
|
49
|
+
}), HashSet.fromIterable);
|
|
50
|
+
const byPackage = pipe(packageEntries, Arr.filter(({ imp }) => !imp.isNamespace && imp.name !== '*'), Arr.map(({ imp }) => `${imp.source}:${imp.name}`), HashSet.fromIterable);
|
|
51
|
+
const relativeNamespaces = pipe(relativeEntries, Arr.filter(({ imp }) => imp.isNamespace || imp.name === '*'), Arr.map(({ importerFile, imp }) => {
|
|
52
|
+
const importerDir = path.dirname(importerFile);
|
|
53
|
+
return stripExtension(path.resolve(importerDir, imp.source));
|
|
54
|
+
}));
|
|
55
|
+
const packageNamespaces = pipe(packageEntries, Arr.filter(({ imp }) => imp.isNamespace || imp.name === '*'), Arr.map(({ imp }) => imp.source));
|
|
56
|
+
const byNamespace = pipe([...relativeNamespaces, ...packageNamespaces], HashSet.fromIterable);
|
|
57
|
+
return { byRelative, byPackage, byNamespace };
|
|
58
|
+
};
|
|
59
|
+
const collectReExportEdges = (allExports) => {
|
|
60
|
+
const reExports = pipe([...allExports.entries()], Arr.flatMap(([filePath, exports]) => pipe(exports, Arr.filter((exp) => exp.isReExport && exp.reExportSource !== undefined), Arr.filter((exp) => isRelativePath(exp.reExportSource)), Arr.map((exp) => ({ filePath, exp })))));
|
|
61
|
+
const byNamespace = pipe(reExports, Arr.filter(({ exp }) => exp.name === '*'), Arr.map(({ filePath, exp }) => {
|
|
62
|
+
const dir = path.dirname(filePath);
|
|
63
|
+
return stripExtension(path.resolve(dir, exp.reExportSource));
|
|
64
|
+
}), HashSet.fromIterable);
|
|
65
|
+
const byRelative = pipe(reExports, Arr.filter(({ exp }) => exp.name !== '*'), Arr.map(({ filePath, exp }) => {
|
|
66
|
+
const dir = path.dirname(filePath);
|
|
67
|
+
const resolved = stripExtension(path.resolve(dir, exp.reExportSource));
|
|
68
|
+
const consumedName = exp.reExportLocalName ?? exp.name;
|
|
69
|
+
return `${resolved}:${consumedName}`;
|
|
70
|
+
}), HashSet.fromIterable);
|
|
71
|
+
return {
|
|
72
|
+
byRelative,
|
|
73
|
+
byPackage: HashSet.empty(),
|
|
74
|
+
byNamespace,
|
|
75
|
+
};
|
|
76
|
+
};
|
|
77
|
+
const mergeConsumedSets = (a, b) => ({
|
|
78
|
+
byRelative: HashSet.union(a.byRelative, b.byRelative),
|
|
79
|
+
byPackage: HashSet.union(a.byPackage, b.byPackage),
|
|
80
|
+
byNamespace: HashSet.union(a.byNamespace, b.byNamespace),
|
|
81
|
+
});
|
|
82
|
+
const buildConsumedSets = (allImports, allExports) => mergeConsumedSets(collectImportEdges(allImports), collectReExportEdges(allExports));
|
|
83
|
+
// ─── Dead export detection ──────────────────────────────────────────────────
|
|
84
|
+
const isConsumed = (exp, strippedFilePath, pkg, consumed) => {
|
|
85
|
+
if (exp.name === '*')
|
|
86
|
+
return true;
|
|
87
|
+
if (HashSet.has(consumed.byRelative, `${strippedFilePath}:${exp.name}`))
|
|
88
|
+
return true;
|
|
89
|
+
if (HashSet.has(consumed.byPackage, `${pkg.name}:${exp.name}`))
|
|
90
|
+
return true;
|
|
91
|
+
if (HashSet.has(consumed.byNamespace, strippedFilePath))
|
|
92
|
+
return true;
|
|
93
|
+
if (HashSet.has(consumed.byNamespace, pkg.name))
|
|
94
|
+
return true;
|
|
95
|
+
return false;
|
|
96
|
+
};
|
|
97
|
+
const findDeadExports = (allExports, entryPoints, fileToPackage, consumed) => pipe([...allExports.entries()], Arr.filter(([filePath]) => !HashSet.has(entryPoints, filePath)), Arr.flatMap(([filePath, exports]) => {
|
|
98
|
+
const pkg = fileToPackage.get(filePath);
|
|
99
|
+
if (pkg === undefined)
|
|
100
|
+
return [];
|
|
101
|
+
const strippedFilePath = stripExtension(filePath);
|
|
102
|
+
return pipe(exports, Arr.filter((exp) => !isConsumed(exp, strippedFilePath, pkg, consumed)), Arr.map((exp) => ({ symbol: exp, packageName: pkg.name })));
|
|
103
|
+
}));
|
|
104
|
+
const countTotalExports = (allExports) => pipe([...allExports.values()], Arr.map((exports) => exports.length), Arr.reduce(0, (acc, n) => acc + n));
|
|
105
|
+
// ─── Tag ─────────────────────────────────────────────────────────────────────
|
|
106
|
+
export class ExportGraph extends Context.Tag('ExportGraph')() {
|
|
107
|
+
}
|
|
108
|
+
// ─── Live implementation ──────────────────────────────────────────────────────
|
|
109
|
+
const analyze = (packages, allExports, allImports) => {
|
|
110
|
+
const scannedFiles = HashSet.fromIterable(allExports.keys());
|
|
111
|
+
const scannedStripped = pipe([...allExports.keys()], Arr.map(stripExtension), HashSet.fromIterable);
|
|
112
|
+
const entryPoints = resolveEntryPoints(packages, scannedFiles, scannedStripped);
|
|
113
|
+
const fileToPackage = buildFileToPackageMap(packages, [...allExports.keys()]);
|
|
114
|
+
const consumed = buildConsumedSets(allImports, allExports);
|
|
115
|
+
const deadExports = findDeadExports(allExports, entryPoints, fileToPackage, consumed);
|
|
116
|
+
return {
|
|
117
|
+
deadExports: [...deadExports],
|
|
118
|
+
totalExports: countTotalExports(allExports),
|
|
119
|
+
totalFiles: allExports.size,
|
|
120
|
+
warnings: [],
|
|
121
|
+
};
|
|
122
|
+
};
|
|
123
|
+
export const ExportGraphLive = Layer.succeed(ExportGraph, {
|
|
124
|
+
analyze: (packages, allExports, allImports) => Effect.sync(() => analyze(packages, allExports, allImports)),
|
|
125
|
+
});
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
import { Context, Effect, Layer } from 'effect';
|
|
2
|
+
import type { ExportedSymbol } from './schemas.js';
|
|
3
|
+
import { ParseError } from './errors.js';
|
|
4
|
+
export interface ExportParserShape {
|
|
5
|
+
readonly parse: (filePath: string, source: string) => Effect.Effect<readonly ExportedSymbol[], ParseError>;
|
|
6
|
+
}
|
|
7
|
+
declare const ExportParser_base: Context.TagClass<ExportParser, "ExportParser", ExportParserShape>;
|
|
8
|
+
export declare class ExportParser extends ExportParser_base {
|
|
9
|
+
}
|
|
10
|
+
export declare const ExportParserLive: Layer.Layer<ExportParser, never, never>;
|
|
11
|
+
export {};
|
|
12
|
+
//# sourceMappingURL=export-parser.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"export-parser.d.ts","sourceRoot":"","sources":["../../src/lib/export-parser.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,EAAE,KAAK,EAAsB,MAAM,QAAQ,CAAC;AAEpE,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,cAAc,CAAC;AACnD,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAC;AAwUzC,MAAM,WAAW,iBAAiB;IAChC,QAAQ,CAAC,KAAK,EAAE,CACd,QAAQ,EAAE,MAAM,EAChB,MAAM,EAAE,MAAM,KACX,MAAM,CAAC,MAAM,CAAC,SAAS,cAAc,EAAE,EAAE,UAAU,CAAC,CAAC;CAC3D;;AAID,qBAAa,YAAa,SAAQ,iBAA8D;CAAG;AA+BnG,eAAO,MAAM,gBAAgB,yCAE3B,CAAC"}
|
|
@@ -0,0 +1,170 @@
|
|
|
1
|
+
import { Context, Effect, Layer, Array as Arr, pipe } from 'effect';
|
|
2
|
+
import oxc from 'oxc-parser';
|
|
3
|
+
import { ParseError } from './errors.js';
|
|
4
|
+
// ─── Pure helpers ────────────────────────────────────────────────────────────
|
|
5
|
+
const lineFromOffset = (source, offset) => pipe(source.slice(0, offset).split('\n'), Arr.length);
|
|
6
|
+
// Collect the identifier names bound by an exported declaration node.
// Returns [] for declaration kinds this tool does not track and for
// destructuring patterns in `export const` (only plain Identifiers count).
const extractDeclarationNames = (decl) => {
    if (decl.type === 'FunctionDeclaration' || decl.type === 'ClassDeclaration') {
        // Anonymous default-exported functions/classes have no `id`.
        return decl.id ? [decl.id.name] : [];
    }
    if (decl.type === 'VariableDeclaration') {
        return decl.declarations.flatMap((v) => (v.id.type === 'Identifier' ? [v.id.name] : []));
    }
    if (decl.type === 'TSTypeAliasDeclaration' || decl.type === 'TSInterfaceDeclaration') {
        return [decl.id.name];
    }
    return [];
};
|
|
26
|
+
// Extract symbols from an ExportNamedDeclaration node. Two shapes exist:
//   1. `export const x = …` / `export function f() {}` — a declaration form;
//      `node.source` is null, so these are never re-exports.
//   2. `export { a, b as c }` (optionally `from './mod'`) — a specifier form;
//      `node.source` is non-null exactly when it is a re-export.
const extractNamedDeclaration = (node, filePath, source) => {
    const isReExport = node.source !== null;
    const reExportSource = node.source ? String(node.source.value) : undefined;
    if (node.declaration !== null) {
        // Declaration form: one symbol per bound name, all at the export's line.
        return pipe(extractDeclarationNames(node.declaration), Arr.map((name) => ({
            name,
            filePath,
            line: lineFromOffset(source, node.start),
            isDefault: false,
            isReExport: false,
        })));
    }
    // Specifier form: `export { local as exported }`.
    // NOTE(review): `spec.exported.name` assumes an Identifier; a string-literal
    // export name (`export { x as "y" }`) would yield undefined — confirm against
    // oxc's AST before relying on that case.
    return pipe(node.specifiers, Arr.map((spec) => {
        const localName = spec.local?.name;
        const isRenamed = localName !== undefined && localName !== spec.exported.name;
        return {
            name: spec.exported.name,
            filePath,
            line: lineFromOffset(source, spec.start),
            isDefault: false,
            isReExport,
            // Optional fields are spread in only when present so the record
            // matches the Schema.optional fields in schemas.js.
            ...(reExportSource !== undefined ? { reExportSource } : {}),
            ...(isRenamed ? { reExportLocalName: localName } : {}),
        };
    }));
};
|
|
52
|
+
// An `export default …` contributes exactly one symbol, named "default".
const extractDefaultDeclaration = (node, filePath, source) => {
    const symbol = {
        name: 'default',
        filePath,
        line: lineFromOffset(source, node.start),
        isDefault: true,
        isReExport: false,
    };
    return [symbol];
};
|
|
61
|
+
// `export * from 'mod'` / `export * as ns from 'mod'` — always a re-export.
// The star form has no exported name, so it is recorded as '*'.
const extractAllDeclaration = (node, filePath, source) => {
    const exportedName = node.exported ? node.exported.name : '*';
    return [{
        name: exportedName,
        filePath,
        line: lineFromOffset(source, node.start),
        isDefault: false,
        isReExport: true,
        reExportSource: String(node.source.value),
    }];
};
|
|
71
|
+
// Resolve the static name of a member access / object key.
//   computed === false + Identifier        -> `obj.foo`
//   computed === true  + string Literal    -> `obj['foo']`
// Dynamic keys (`obj[expr]`) and non-string keys have no statically-known
// name, so null is returned and the caller skips them.
const staticKeyName = (key, computed) => {
    if (!computed && key.type === 'Identifier')
        return key.name;
    if (computed && key.type === 'Literal' && typeof key.value === 'string')
        return key.value;
    return null;
};
// Build a plain (non-default, non-re-export) ExportedSymbol record.
const cjsSymbol = (name, filePath, source, start) => ({
    name,
    filePath,
    line: lineFromOffset(source, start),
    isDefault: false,
    isReExport: false,
});
// Detect CommonJS export assignments inside an ExpressionStatement:
//   exports.foo = …            (and exports['foo'] = …)
//   module.exports.foo = …     (and module.exports['foo'] = …)
//   module.exports = { foo, bar }
// Fix: the previous version read `member.property.name` unconditionally, so
// computed or non-Identifier keys produced symbols with `name: undefined`.
// Keys are now resolved via staticKeyName (string literals supported) or the
// assignment is skipped entirely.
const extractCjsExports = (node, filePath, source) => {
    const expr = node.expression;
    if (expr.type !== 'AssignmentExpression')
        return [];
    const left = expr.left;
    if (left.type !== 'MemberExpression')
        return [];
    const propName = staticKeyName(left.property, left.computed === true);
    const objName = left.object.type === 'Identifier' ? left.object.name : '';
    // exports.foo = ...
    if (objName === 'exports') {
        return propName === null ? [] : [cjsSymbol(propName, filePath, source, expr.start)];
    }
    // module.exports.foo = ... (nested MemberExpression)
    if (left.object.type === 'MemberExpression') {
        const inner = left.object;
        const innerObj = inner.object.type === 'Identifier' ? inner.object.name : '';
        const innerProp = staticKeyName(inner.property, inner.computed === true);
        if (innerObj === 'module' && innerProp === 'exports' && propName !== null) {
            return [cjsSymbol(propName, filePath, source, expr.start)];
        }
        return [];
    }
    // module.exports = { foo, bar }
    if (objName === 'module' && propName === 'exports') {
        const right = expr.right;
        if (right.type !== 'ObjectExpression')
            return [];
        return pipe(right.properties, Arr.flatMap((prop) => {
            // Spread elements and getters/setters with unusual keys are skipped.
            if (prop.type !== 'Property')
                return [];
            const key = staticKeyName(prop.key, prop.computed === true);
            return key === null ? [] : [cjsSymbol(key, filePath, source, prop.start)];
        }));
    }
    return [];
};
|
|
135
|
+
// Route one top-level AST node to the matching export extractor.
// Unknown node types contribute no symbols.
const extractExportsFromNode = (node, filePath, source) => {
    const handlers = {
        ExportNamedDeclaration: extractNamedDeclaration,
        ExportDefaultDeclaration: extractDefaultDeclaration,
        ExportAllDeclaration: extractAllDeclaration,
        ExpressionStatement: extractCjsExports,
    };
    // Object.hasOwn guards against inherited keys (e.g. node.type === 'constructor').
    if (!Object.hasOwn(handlers, node.type))
        return [];
    return handlers[node.type](node, filePath, source);
};
|
|
149
|
+
// ─── Tag ─────────────────────────────────────────────────────────────────────
|
|
150
|
+
// Effect service tag exposing `parse(filePath, source)` which yields the
// exported symbols of one file (see ExportParserShape in the declarations).
export class ExportParser extends Context.Tag('ExportParser')() {
}
|
|
152
|
+
// ─── Live implementation ──────────────────────────────────────────────────────
|
|
153
|
+
// Parse `source` with oxc and extract every exported symbol from the
// top-level program body. Any oxc diagnostic or thrown exception is
// surfaced as a ParseError carrying the file path.
const parseSource = (filePath, source) => {
    const toParseError = (e) => new ParseError({
        filePath,
        message: e instanceof Error ? e.message : String(e),
    });
    return Effect.try({
        try: () => {
            const result = oxc.parseSync(filePath, source);
            const [firstError] = result.errors;
            if (firstError !== undefined) {
                // Only the first diagnostic is reported.
                throw new Error(firstError.message ?? 'parse error');
            }
            return result.program.body.flatMap((node) => extractExportsFromNode(node, filePath, source));
        },
        catch: toParseError,
    });
};
|
|
168
|
+
// Production layer: parsing is synchronous and needs no other services,
// so a plain Layer.succeed is sufficient.
export const ExportParserLive = Layer.succeed(ExportParser, {
    parse: parseSource,
});
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
// Compiler-emitted declarations for ../../src/lib/file-scanner.ts
// (see sourceMappingURL) — regenerate from source rather than hand-editing.
import { Context, Effect, Layer } from 'effect';
import { FileSystem, Path } from '@effect/platform';
declare const GlobError_base: new <A extends Record<string, any> = {}>(args: import("effect/Types").VoidIfEmpty<{ readonly [P in keyof A as P extends "_tag" ? never : P]: A[P]; }>) => import("effect/Cause").YieldableError & {
    readonly _tag: "GlobError";
} & Readonly<A>;
declare class GlobError extends GlobError_base<{
    readonly cause: unknown;
}> {
}
export interface FileScannerShape {
    readonly scan: (root: string, ignoreGlobs: readonly string[]) => Effect.Effect<readonly string[], GlobError>;
}
declare const FileScanner_base: Context.TagClass<FileScanner, "FileScanner", FileScannerShape>;
export declare class FileScanner extends FileScanner_base {
}
export declare const FileScannerLive: Layer.Layer<FileScanner, never, FileSystem.FileSystem | Path.Path>;
export {};
//# sourceMappingURL=file-scanner.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"file-scanner.d.ts","sourceRoot":"","sources":["../../src/lib/file-scanner.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAQ,MAAM,EAAE,KAAK,EAAsB,MAAM,QAAQ,CAAC;AAC1E,OAAO,EAAE,UAAU,EAAE,IAAI,EAAE,MAAM,kBAAkB,CAAC;;;;AAMpD,cAAM,SAAU,SAAQ,eAA8B;IACpD,QAAQ,CAAC,KAAK,EAAE,OAAO,CAAC;CACzB,CAAC;CAAG;AAmEL,MAAM,WAAW,gBAAgB;IAC/B,QAAQ,CAAC,IAAI,EAAE,CACb,IAAI,EAAE,MAAM,EACZ,WAAW,EAAE,SAAS,MAAM,EAAE,KAC3B,MAAM,CAAC,MAAM,CAAC,SAAS,MAAM,EAAE,EAAE,SAAS,CAAC,CAAC;CAClD;;AAID,qBAAa,WAAY,SAAQ,gBAA2D;CAAG;AAI/F,eAAO,MAAM,eAAe,oEAuB3B,CAAC"}
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
import { Context, Data, Effect, Layer, Array as Arr, pipe } from 'effect';
|
|
2
|
+
import { FileSystem, Path } from '@effect/platform';
|
|
3
|
+
import fg from 'fast-glob';
|
|
4
|
+
import ignore from 'ignore';
|
|
5
|
+
// ─── Internal errors ──────────────────────────────────────────────────────────
|
|
6
|
+
// Tagged error wrapping fast-glob failures; carries the raw cause.
class GlobError extends Data.TaggedError('GlobError') {
}
|
|
8
|
+
// ─── Pure helpers ────────────────────────────────────────────────────────────
|
|
9
|
+
// Read the root .gitignore as a list of raw pattern lines (the `ignore`
// package tolerates blank/comment lines). A missing file or any read
// error degrades to an empty pattern list rather than failing the scan.
const loadGitignorePatterns = (fs, pathSvc, root) => {
    const gitignorePath = pathSvc.join(root, '.gitignore');
    const readPatterns = pipe(fs.readFileString(gitignorePath, 'utf-8'), Effect.orElseSucceed(() => ''), Effect.map((content) => content.split('\n')));
    return pipe(fs.exists(gitignorePath), Effect.orElseSucceed(() => false), Effect.flatMap((exists) => (exists ? readPatterns : Effect.succeed([]))));
};
|
|
15
|
+
// node_modules is always ignored; then .gitignore patterns, then user globs.
const buildIgnorePatterns = (gitignorePatterns, customGlobs) => ['node_modules', ...gitignorePatterns, ...customGlobs];
|
|
16
|
+
// Enumerate every JS/TS source file under `root` with fast-glob.
// node_modules is excluded up front for speed; .gitignore / custom-glob
// filtering happens afterwards in filterWithIgnore.
// Fails with GlobError wrapping whatever fast-glob rejected with.
const discoverFiles = (root) => Effect.tryPromise({
    try: () => fg('**/*.{ts,tsx,js,jsx,mjs,cjs}', {
        cwd: root,
        absolute: true,
        onlyFiles: true,
        ignore: ['**/node_modules/**'],
    }),
    catch: (cause) => new GlobError({ cause }),
});
|
|
25
|
+
// Drop files matched by the combined ignore patterns, matching on
// root-relative paths as the `ignore` package expects.
// NOTE(review): `ignore` requires posix-style relative paths; on Windows,
// pathSvc.relative may emit backslashes — confirm before relying on it there.
const filterWithIgnore = (files, patterns, pathSvc, root) => {
    const matcher = ignore();
    matcher.add([...patterns]);
    const isKept = (absPath) => !matcher.ignores(pathSvc.relative(root, absPath));
    return pipe(files, Arr.filter(isKept));
};
|
|
33
|
+
// ─── Tag ─────────────────────────────────────────────────────────────────────
|
|
34
|
+
// Effect service tag exposing `scan(root, ignoreGlobs)` -> absolute file paths.
export class FileScanner extends Context.Tag('FileScanner')() {
}
|
|
36
|
+
// ─── Live implementation ──────────────────────────────────────────────────────
|
|
37
|
+
// Production layer: wires the platform FileSystem/Path services into the
// scan pipeline: .gitignore patterns -> combined ignore list -> glob -> filter.
export const FileScannerLive = Layer.effect(FileScanner, Effect.gen(function* () {
    const fs = yield* FileSystem.FileSystem;
    const pathSvc = yield* Path.Path;
    const scan = (root, ignoreGlobs) => pipe(loadGitignorePatterns(fs, pathSvc, root), Effect.map((gitignorePatterns) => buildIgnorePatterns(gitignorePatterns, ignoreGlobs)), Effect.flatMap((patterns) => pipe(discoverFiles(root), Effect.map((files) => filterWithIgnore(files, patterns, pathSvc, root)))));
    return { scan };
}));
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
// Compiler-emitted declarations for ../../src/lib/import-parser.ts
// (see sourceMappingURL) — regenerate from source rather than hand-editing.
import { Context, Effect, Layer } from 'effect';
import type { ImportedSymbol } from './schemas.js';
import { ParseError } from './errors.js';
export interface ImportParserShape {
    readonly parse: (filePath: string, source: string) => Effect.Effect<readonly ImportedSymbol[], ParseError>;
}
declare const ImportParser_base: Context.TagClass<ImportParser, "ImportParser", ImportParserShape>;
export declare class ImportParser extends ImportParser_base {
}
export declare const ImportParserLive: Layer.Layer<ImportParser, never, never>;
export {};
//# sourceMappingURL=import-parser.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"import-parser.d.ts","sourceRoot":"","sources":["../../src/lib/import-parser.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,EAAE,KAAK,EAAsB,MAAM,QAAQ,CAAC;AAEpE,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,cAAc,CAAC;AACnD,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAC;AA4LzC,MAAM,WAAW,iBAAiB;IAChC,QAAQ,CAAC,KAAK,EAAE,CACd,QAAQ,EAAE,MAAM,EAChB,MAAM,EAAE,MAAM,KACX,MAAM,CAAC,MAAM,CAAC,SAAS,cAAc,EAAE,EAAE,UAAU,CAAC,CAAC;CAC3D;;AAID,qBAAa,YAAa,SAAQ,iBAA8D;CAAG;AAsCnG,eAAO,MAAM,gBAAgB,yCAE3B,CAAC"}
|
|
@@ -0,0 +1,129 @@
|
|
|
1
|
+
import { Context, Effect, Layer, Array as Arr, pipe } from 'effect';
|
|
2
|
+
import oxc from 'oxc-parser';
|
|
3
|
+
import { ParseError } from './errors.js';
|
|
4
|
+
// ─── Pure extractors ─────────────────────────────────────────────────────────
|
|
5
|
+
// Map one static import specifier to zero-or-one ImportedSymbol records.
//   import { x }    -> named symbol
//   import x        -> "default"
//   import * as ns  -> "*" with isNamespace
const extractSpecifier = (spec, filePath, importSource) => {
    const make = (name, isNamespace) => ({
        name,
        filePath,
        source: importSource,
        isNamespace,
        isDynamic: false,
    });
    if (spec.type === 'ImportSpecifier')
        return [make(spec.imported.name, false)];
    if (spec.type === 'ImportDefaultSpecifier')
        return [make('default', false)];
    if (spec.type === 'ImportNamespaceSpecifier')
        return [make('*', true)];
    return [];
};
|
|
43
|
+
// Extract symbols from a static `import ... from '...'` declaration.
// Bare side-effect imports (`import './x'`) have no specifiers and
// contribute nothing.
const extractStaticImports = (node, filePath) => {
    if (node.type !== 'ImportDeclaration' || node.specifiers.length === 0)
        return [];
    return node.specifiers.flatMap((spec) => extractSpecifier(spec, filePath, node.source.value));
};
|
|
51
|
+
// Recursively walk an AST subtree collecting dynamic-import symbols:
//   import('<literal>')  -> "*" with isDynamic
//   require('<literal>') -> via extractRequireCall (at CallExpression nodes)
// Dynamic imports with non-literal sources are deliberately dropped — their
// target cannot be resolved statically.
const collectSymbols = (node, filePath) => {
    if (node === null || typeof node !== 'object')
        return [];
    if (node.type === 'ImportExpression') {
        const n = node;
        if (n.source.type === 'Literal') {
            const lit = n.source;
            return [
                {
                    name: '*',
                    filePath,
                    source: lit.value,
                    isNamespace: false,
                    isDynamic: true,
                },
            ];
        }
        // Non-literal import() argument: target unknown, nothing recorded.
        return [];
    }
    // A CallExpression may itself be a require(); everything else only
    // contributes through its children.
    const currentSymbols = node.type === 'CallExpression'
        ? extractRequireCall(node, filePath)
        : [];
    // Recurse into every child property that looks like an AST node
    // (object with a string `type`) or an array of such nodes.
    const childSymbols = pipe(Object.keys(node), Arr.flatMap((key) => {
        const child = node[key];
        if (Array.isArray(child)) {
            return pipe(child, Arr.filter((item) => item !== null && typeof item === 'object' && typeof item.type === 'string'), Arr.flatMap((item) => collectSymbols(item, filePath)));
        }
        if (child !== null &&
            typeof child === 'object' &&
            typeof child.type === 'string') {
            return collectSymbols(child, filePath);
        }
        return [];
    }));
    return pipe(currentSymbols, Arr.appendAll(childSymbols));
};
|
|
87
|
+
// Recognize `require('<literal>')` and record it as a namespace import
// ("*" with isNamespace) since the whole module object is obtained.
// Non-literal arguments are skipped — the target is not statically known.
const extractRequireCall = (node, filePath) => {
    const { callee } = node;
    if (callee.type !== 'Identifier' || callee.name !== 'require')
        return [];
    const [arg] = node.arguments;
    if (arg === undefined || arg.type !== 'Literal')
        return [];
    return [{
        name: '*',
        filePath,
        source: arg.value,
        isNamespace: true,
        isDynamic: false,
    }];
};
|
|
106
|
+
// ─── Tag ─────────────────────────────────────────────────────────────────────
|
|
107
|
+
// Effect service tag exposing `parse(filePath, source)` which yields the
// imported symbols of one file (static, dynamic import(), and require()).
export class ImportParser extends Context.Tag('ImportParser')() {
}
|
|
109
|
+
// ─── Live implementation ──────────────────────────────────────────────────────
|
|
110
|
+
// Parse `source` with oxc and gather all imports. Static declarations are
// read from top-level nodes only; dynamic import()/require() calls are found
// by a full AST walk. oxc diagnostics and exceptions become ParseError.
const parseSource = (filePath, source) => Effect.try({
    try: () => {
        const result = oxc.parseSync(filePath, source);
        const [firstError] = result.errors;
        if (firstError !== undefined)
            throw new Error(firstError.message ?? 'parse error');
        const body = result.program.body;
        const staticImports = body.flatMap((node) => extractStaticImports(node, filePath));
        const dynamicImports = body.flatMap((node) => collectSymbols(node, filePath));
        return [...staticImports, ...dynamicImports];
    },
    catch: (e) => new ParseError({
        filePath,
        message: e instanceof Error ? e.message : String(e),
    }),
});
|
|
127
|
+
// Production layer: parsing is synchronous with no dependencies.
export const ImportParserLive = Layer.succeed(ImportParser, {
    parse: parseSource,
});
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
// Compiler-emitted declarations for ../../src/lib/reporter.ts
// (see sourceMappingURL) — regenerate from source rather than hand-editing.
import { Context, Layer } from 'effect';
import type { AnalysisResult } from './schemas.js';
export interface ReporterShape {
    readonly format: (result: AnalysisResult, packageRoots: ReadonlyMap<string, string>) => string;
}
declare const Reporter_base: Context.TagClass<Reporter, "Reporter", ReporterShape>;
export declare class Reporter extends Reporter_base {
}
export declare const ReporterLive: Layer.Layer<Reporter, never, never>;
export {};
//# sourceMappingURL=reporter.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"reporter.d.ts","sourceRoot":"","sources":["../../src/lib/reporter.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,KAAK,EAA6B,MAAM,QAAQ,CAAC;AAEnE,OAAO,KAAK,EAAE,cAAc,EAAc,MAAM,cAAc,CAAC;AA4F/D,MAAM,WAAW,aAAa;IAC5B,QAAQ,CAAC,MAAM,EAAE,CAAC,MAAM,EAAE,cAAc,EAAE,YAAY,EAAE,WAAW,CAAC,MAAM,EAAE,MAAM,CAAC,KAAK,MAAM,CAAC;CAChG;;AAID,qBAAa,QAAS,SAAQ,aAAkD;CAAG;AAInF,eAAO,MAAM,YAAY,qCAEvB,CAAC"}
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
import { Context, Layer, Array as Arr, Order, pipe } from 'effect';
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
// ─── Pure formatting helpers ─────────────────────────────────────────────────
|
|
4
|
+
// Choose the singular or plural word form for a count (singular iff count === 1).
const pluralize = (count, singular, plural) => {
    if (count === 1)
        return singular;
    return plural;
};
|
|
5
|
+
// Render one file heading plus its dead exports, sorted by line number.
// Paths are shown relative to the package root when it is known.
const formatFileSection = (filePath, fileExports, pkgRoot) => {
    const shownPath = pkgRoot ? path.relative(pkgRoot, filePath) : filePath;
    const byLine = Order.mapInput(Order.number, (d) => d.symbol.line);
    const rows = pipe(fileExports, Arr.sort(byLine), Arr.map((dead) => `    :${String(dead.symbol.line).padEnd(4)} ${dead.symbol.name}`));
    return [`  ${shownPath}`, ...rows];
};
|
|
13
|
+
// Render one package section: a header with the dead-export count, each
// file's group of symbols (files sorted alphabetically), and a trailing
// blank line to separate packages.
const formatPackageSection = (pkgName, pkgDeadExports, packageRoots) => {
    const pkgRoot = packageRoots.get(pkgName) ?? '';
    const count = pkgDeadExports.length;
    const header = `${pkgName} (${count} ${pluralize(count, 'dead export', 'dead exports')})`;
    const byFile = pipe(pkgDeadExports, Arr.groupBy((dead) => dead.symbol.filePath));
    const fileLines = pipe(Object.keys(byFile), Arr.sort(Order.string), Arr.flatMap((filePath) => formatFileSection(filePath, byFile[filePath], pkgRoot)));
    return [header, ...fileLines, ''];
};
|
|
25
|
+
// Assemble the full text report: a short success message when nothing is
// dead, otherwise header + per-package sections (sorted by package name)
// + a summary footer, joined with newlines.
const formatReport = (result, packageRoots) => {
    const { deadExports, totalExports, totalFiles } = result;
    if (deadExports.length === 0) {
        return `No dead exports found. Scanned ${totalExports} exports across ${totalFiles} files.`;
    }
    const byPackage = pipe(deadExports, Arr.groupBy((dead) => dead.packageName));
    const sortedPackages = pipe(Object.keys(byPackage), Arr.sort(Order.string));
    const header = ['Dead Export Report', '══════════════════', ''];
    const packageSections = pipe(sortedPackages, Arr.flatMap((pkgName) => formatPackageSection(pkgName, byPackage[pkgName], packageRoots)));
    const totalDead = deadExports.length;
    const pkgCount = sortedPackages.length;
    const deadWord = pluralize(totalDead, 'dead export', 'dead exports');
    const pkgWord = pluralize(pkgCount, 'package', 'packages');
    const summary = [
        '────────────────────────────',
        `Summary: ${totalDead} ${deadWord} across ${pkgCount} ${pkgWord}`,
    ];
    return pipe(header, Arr.appendAll(packageSections), Arr.appendAll(summary), Arr.join('\n'));
};
|
|
44
|
+
// ─── Tag ─────────────────────────────────────────────────────────────────────
|
|
45
|
+
// Effect service tag exposing `format(result, packageRoots)` -> report string.
export class Reporter extends Context.Tag('Reporter')() {
}
|
|
47
|
+
// ─── Live implementation ──────────────────────────────────────────────────────
|
|
48
|
+
// Production layer: formatting is pure, so a plain Layer.succeed suffices.
export const ReporterLive = Layer.succeed(Reporter, {
    format: formatReport,
});
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
// Compiler-emitted declarations for ../../src/lib/schemas.ts
// (see sourceMappingURL) — regenerate from source rather than hand-editing.
import { Schema } from 'effect';
export declare const WorkspaceType: Schema.Literal<["pnpm", "npm", "yarn", "nx", "turborepo", "single"]>;
export type WorkspaceType = typeof WorkspaceType.Type;
export declare const PackageInfo: Schema.Struct<{
    name: typeof Schema.String;
    root: typeof Schema.String;
    entryPoints: Schema.Array$<typeof Schema.String>;
}>;
export type PackageInfo = Schema.Schema.Type<typeof PackageInfo>;
export declare const ExportedSymbol: Schema.Struct<{
    name: typeof Schema.String;
    filePath: typeof Schema.String;
    line: typeof Schema.Number;
    isDefault: typeof Schema.Boolean;
    isReExport: typeof Schema.Boolean;
    reExportSource: Schema.optional<typeof Schema.String>;
    reExportLocalName: Schema.optional<typeof Schema.String>;
}>;
export type ExportedSymbol = Schema.Schema.Type<typeof ExportedSymbol>;
export declare const ImportedSymbol: Schema.Struct<{
    name: typeof Schema.String;
    filePath: typeof Schema.String;
    source: typeof Schema.String;
    isNamespace: typeof Schema.Boolean;
    isDynamic: typeof Schema.Boolean;
}>;
export type ImportedSymbol = Schema.Schema.Type<typeof ImportedSymbol>;
export declare const DeadExport: Schema.Struct<{
    symbol: Schema.Struct<{
        name: typeof Schema.String;
        filePath: typeof Schema.String;
        line: typeof Schema.Number;
        isDefault: typeof Schema.Boolean;
        isReExport: typeof Schema.Boolean;
        reExportSource: Schema.optional<typeof Schema.String>;
        reExportLocalName: Schema.optional<typeof Schema.String>;
    }>;
    packageName: typeof Schema.String;
}>;
export type DeadExport = Schema.Schema.Type<typeof DeadExport>;
export declare const AnalysisResult: Schema.Struct<{
    deadExports: Schema.Array$<Schema.Struct<{
        symbol: Schema.Struct<{
            name: typeof Schema.String;
            filePath: typeof Schema.String;
            line: typeof Schema.Number;
            isDefault: typeof Schema.Boolean;
            isReExport: typeof Schema.Boolean;
            reExportSource: Schema.optional<typeof Schema.String>;
            reExportLocalName: Schema.optional<typeof Schema.String>;
        }>;
        packageName: typeof Schema.String;
    }>>;
    totalExports: typeof Schema.Number;
    totalFiles: typeof Schema.Number;
    warnings: Schema.Array$<typeof Schema.String>;
}>;
export type AnalysisResult = Schema.Schema.Type<typeof AnalysisResult>;
//# sourceMappingURL=schemas.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"schemas.d.ts","sourceRoot":"","sources":["../../src/lib/schemas.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,MAAM,EAAE,MAAM,QAAQ,CAAC;AAEhC,eAAO,MAAM,aAAa,sEAAqE,CAAC;AAEhG,MAAM,MAAM,aAAa,GAAG,OAAO,aAAa,CAAC,IAAI,CAAC;AAEtD,eAAO,MAAM,WAAW;;;;EAItB,CAAC;AACH,MAAM,MAAM,WAAW,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO,WAAW,CAAC,CAAC;AAEjE,eAAO,MAAM,cAAc;;;;;;;;EAQzB,CAAC;AACH,MAAM,MAAM,cAAc,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO,cAAc,CAAC,CAAC;AAEvE,eAAO,MAAM,cAAc;;;;;;EAMzB,CAAC;AACH,MAAM,MAAM,cAAc,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO,cAAc,CAAC,CAAC;AAEvE,eAAO,MAAM,UAAU;;;;;;;;;;;EAGrB,CAAC;AACH,MAAM,MAAM,UAAU,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO,UAAU,CAAC,CAAC;AAE/D,eAAO,MAAM,cAAc;;;;;;;;;;;;;;;;EAKzB,CAAC;AACH,MAAM,MAAM,cAAc,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO,cAAc,CAAC,CAAC"}
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
import { Schema } from 'effect';
|
|
2
|
+
// Supported workspace/monorepo layouts (plus 'single' for a lone package).
export const WorkspaceType = Schema.Literal('pnpm', 'npm', 'yarn', 'nx', 'turborepo', 'single');
// One workspace package: its name, root directory, and declared entry points.
export const PackageInfo = Schema.Struct({
    name: Schema.String,
    root: Schema.String,
    entryPoints: Schema.Array(Schema.String),
});
// A symbol exported from a file. Re-exports carry the source specifier and,
// when renamed (`export { a as b }`), the local name.
export const ExportedSymbol = Schema.Struct({
    name: Schema.String,
    filePath: Schema.String,
    line: Schema.Number,
    isDefault: Schema.Boolean,
    isReExport: Schema.Boolean,
    reExportSource: Schema.optional(Schema.String),
    reExportLocalName: Schema.optional(Schema.String),
});
// A symbol imported by a file; `source` is the import specifier as written.
export const ImportedSymbol = Schema.Struct({
    name: Schema.String,
    filePath: Schema.String,
    source: Schema.String,
    isNamespace: Schema.Boolean,
    isDynamic: Schema.Boolean,
});
// An export that no other package imports, tagged with its owning package.
export const DeadExport = Schema.Struct({
    symbol: ExportedSymbol,
    packageName: Schema.String,
});
// Final analysis output consumed by the reporter.
export const AnalysisResult = Schema.Struct({
    deadExports: Schema.Array(DeadExport),
    totalExports: Schema.Number,
    totalFiles: Schema.Number,
    warnings: Schema.Array(Schema.String),
});
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
// Compiler-emitted declarations for ../../src/lib/workspace-detector.ts
// (see sourceMappingURL) — regenerate from source rather than hand-editing.
import { Context, Effect, Layer } from 'effect';
import { FileSystem, Path } from '@effect/platform';
import type { PackageInfo, WorkspaceType } from './schemas.js';
import { WorkspaceNotFoundError } from './errors.js';
export interface WorkspaceResult {
    readonly type: WorkspaceType;
    readonly root: string;
    readonly packages: readonly PackageInfo[];
}
export interface WorkspaceDetectorShape {
    readonly detect: (cwd: string) => Effect.Effect<WorkspaceResult, WorkspaceNotFoundError>;
}
declare const WorkspaceDetector_base: Context.TagClass<WorkspaceDetector, "WorkspaceDetector", WorkspaceDetectorShape>;
export declare class WorkspaceDetector extends WorkspaceDetector_base {
}
export declare const WorkspaceDetectorLive: Layer.Layer<WorkspaceDetector, never, FileSystem.FileSystem | Path.Path>;
export {};
//# sourceMappingURL=workspace-detector.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"workspace-detector.d.ts","sourceRoot":"","sources":["../../src/lib/workspace-detector.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAQ,MAAM,EAAE,KAAK,EAAsC,MAAM,QAAQ,CAAC;AAC1F,OAAO,EAAE,UAAU,EAAE,IAAI,EAAE,MAAM,kBAAkB,CAAC;AAGpD,OAAO,KAAK,EAAE,WAAW,EAAE,aAAa,EAAE,MAAM,cAAc,CAAC;AAC/D,OAAO,EAAE,sBAAsB,EAAE,MAAM,aAAa,CAAC;AAMrD,MAAM,WAAW,eAAe;IAC9B,QAAQ,CAAC,IAAI,EAAE,aAAa,CAAC;IAC7B,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC;IACtB,QAAQ,CAAC,QAAQ,EAAE,SAAS,WAAW,EAAE,CAAC;CAC3C;AAID,MAAM,WAAW,sBAAsB;IACrC,QAAQ,CAAC,MAAM,EAAE,CAAC,GAAG,EAAE,MAAM,KAAK,MAAM,CAAC,MAAM,CAAC,eAAe,EAAE,sBAAsB,CAAC,CAAC;CAC1F;;AAID,qBAAa,iBAAkB,SAAQ,sBAGpC;CAAG;AAkQN,eAAO,MAAM,qBAAqB,0EAyCjC,CAAC"}
|
|
@@ -0,0 +1,118 @@
|
|
|
1
|
+
import { Context, Data, Effect, Layer, Array as Arr, Option, pipe, Schema } from 'effect';
|
|
2
|
+
import { FileSystem, Path } from '@effect/platform';
|
|
3
|
+
import fg from 'fast-glob';
|
|
4
|
+
import YAML from 'yaml';
|
|
5
|
+
import { WorkspaceNotFoundError } from './errors.js';
|
|
6
|
+
// Decoder: JSON text -> Record<string, unknown> (used to parse package.json).
const JsonRecord = Schema.parseJson(Schema.Record({ key: Schema.String, value: Schema.Unknown }));
|
|
7
|
+
// ─── Tag ─────────────────────────────────────────────────────────────────────
|
|
8
|
+
// Effect service tag exposing `detect(cwd)` -> WorkspaceResult.
export class WorkspaceDetector extends Context.Tag('WorkspaceDetector')() {
}
|
|
10
|
+
// ─── Internal errors ──────────────────────────────────────────────────────────
|
|
11
|
+
// Tagged error wrapping fast-glob failures; carries the raw cause.
// Module-local: callers downstream catch it via catchTag('GlobError').
class GlobError extends Data.TaggedError('GlobError') {
}
|
|
13
|
+
// ─── Pure helpers ────────────────────────────────────────────────────────────
|
|
14
|
+
// Recursively gather every string leaf in a package.json "exports" value —
// a plain string, an array, or nested conditional-export objects.
const collectExportsStrings = (value) => {
    if (typeof value === 'string')
        return [value];
    if (Array.isArray(value))
        return value.flatMap(collectExportsStrings);
    if (value !== null && typeof value === 'object')
        return Object.values(value).flatMap(collectExportsStrings);
    return [];
};
|
|
24
|
+
// Determine a package's entry points: prefer the "exports" map (all string
// leaves, deduplicated); otherwise fall back to "main"/"module"/"types".
const extractEntryPoints = (pkg) => {
    if (pkg['exports'] !== undefined) {
        return pipe(collectExportsStrings(pkg['exports']), Arr.dedupe);
    }
    return pipe(['main', 'module', 'types'], Arr.filterMap((field) => {
        const val = pkg[field];
        return typeof val === 'string' ? Option.some(val) : Option.none();
    }), Arr.dedupe);
};
|
|
33
|
+
// Read and decode <pkgDir>/package.json into { name, root, entryPoints }.
// Best-effort: a missing file, read failure, or invalid JSON yields null
// rather than failing the surrounding detection.
const readPackageInfo = (fs, pathSvc, pkgDir) => {
    const pkgPath = pathSvc.join(pkgDir, 'package.json');
    return pipe(fs.exists(pkgPath), Effect.flatMap((exists) => {
        if (!exists)
            return Effect.succeed(null);
        return pipe(fs.readFileString(pkgPath, 'utf-8'), Effect.flatMap((contents) => Schema.decodeUnknown(JsonRecord)(contents)), Effect.map((parsed) => {
            // Fall back to the directory name when "name" is absent/non-string.
            const name = typeof parsed['name'] === 'string' ? parsed['name'] : pathSvc.basename(pkgDir);
            const entryPoints = extractEntryPoints(parsed);
            return { name, root: pkgDir, entryPoints: [...entryPoints] };
        }), Effect.catchAll(() => Effect.succeed(null)));
    }), Effect.catchAll(() => Effect.succeed(null)));
};
|
|
45
|
+
// Expand workspace globs to package directories by globbing for each
// `<glob>/package.json` and taking the containing directory.
// Fails with GlobError wrapping whatever fast-glob rejected with.
const resolveWorkspaceGlobs = (pathSvc, root, globs) => pipe(Effect.tryPromise({
    try: () => fg(pipe(globs, Arr.map((g) => `${g}/package.json`)), { cwd: root, absolute: true, onlyFiles: true }),
    catch: (cause) => new GlobError({ cause }),
}), Effect.map((files) => pipe(files, Arr.map((p) => pathSvc.dirname(p)))));
|
|
49
|
+
// Resolve workspace globs to directories, read each package.json, drop
// unreadable packages (readPackageInfo yields null), and treat glob
// failures as "no packages found".
const readPkgDirs = (fs, pathSvc, root, globs) => pipe(resolveWorkspaceGlobs(pathSvc, root, globs), Effect.flatMap((dirs) => Effect.all(pipe(dirs, Arr.map((d) => readPackageInfo(fs, pathSvc, d))))), Effect.map((infos) => pipe(infos, Arr.filter((p) => p !== null))), Effect.catchTag('GlobError', () => Effect.succeed([])));
|
|
50
|
+
// ─── Workspace detection strategies ─────────────────────────────────────────
|
|
51
|
+
// Normalize a package.json "workspaces" field into a list of glob strings.
// Accepts the array form (["packages/*"]) and the object form
// ({ packages: [...] }); anything else yields [].
const extractWorkspaceGlobs = (workspaces) => {
    if (Array.isArray(workspaces)) {
        return workspaces.filter((g) => typeof g === 'string');
    }
    const packages = workspaces !== null && typeof workspaces === 'object'
        ? workspaces.packages
        : undefined;
    if (Array.isArray(packages)) {
        return packages.filter((g) => typeof g === 'string');
    }
    return [];
};
|
|
63
|
+
// pnpm workspaces: presence of pnpm-workspace.yaml, whose `packages` list
// supplies the globs. Filesystem failures are treated as defects (orDie);
// absence of the file fails with WorkspaceNotFoundError so the caller can
// try the next strategy.
const detectPnpm = (fs, pathSvc, cwd) => pipe(fs.exists(pathSvc.join(cwd, 'pnpm-workspace.yaml')), Effect.orDie, Effect.flatMap((exists) => exists
    ? pipe(fs.readFileString(pathSvc.join(cwd, 'pnpm-workspace.yaml'), 'utf-8'), Effect.orDie, Effect.map((raw) => {
        const parsed = YAML.parse(raw);
        return parsed?.packages ?? [];
    }), Effect.flatMap((globs) => readPkgDirs(fs, pathSvc, cwd, globs)), Effect.map((packages) => ({
        type: 'pnpm',
        root: cwd,
        packages,
    })))
    : Effect.fail(new WorkspaceNotFoundError({ cwd }))));
|
|
73
|
+
// npm/yarn workspaces: driven by the root package.json "workspaces" field.
// Fails with WorkspaceNotFoundError when no workspace globs are declared.
const detectNpmWorkspaces = (fs, pathSvc, cwd, rootPkg) => {
    const globs = extractWorkspaceGlobs(rootPkg['workspaces']);
    if (globs.length === 0) {
        return Effect.fail(new WorkspaceNotFoundError({ cwd }));
    }
    return pipe(readPkgDirs(fs, pathSvc, cwd, globs), Effect.map((packages) => ({
        type: 'npm',
        root: cwd,
        packages,
    })));
};
|
|
83
|
+
/**
 * Detect an Nx workspace: requires an `nx.json` at `cwd` plus at least one
 * package under the conventional Nx layout dirs (packages/, libs/, apps/).
 *
 * Fails with WorkspaceNotFoundError when nx.json is missing or no packages
 * are found; filesystem errors are escalated to defects (orDie).
 */
const detectNx = (fs, pathSvc, cwd) => pipe(
    fs.exists(pathSvc.join(cwd, 'nx.json')),
    Effect.orDie,
    Effect.flatMap((hasNxConfig) => {
        if (!hasNxConfig) {
            return Effect.fail(new WorkspaceNotFoundError({ cwd }));
        }
        return pipe(
            readPkgDirs(fs, pathSvc, cwd, ['packages/*', 'libs/*', 'apps/*']),
            Effect.flatMap((packages) => packages.length === 0
                ? Effect.fail(new WorkspaceNotFoundError({ cwd }))
                : Effect.succeed({
                    type: 'nx',
                    root: cwd,
                    packages,
                }))
        );
    })
);
|
|
92
|
+
/**
 * Detect a Turborepo workspace: requires a `turbo.json` at `cwd` plus at
 * least one package under the conventional layout dirs (packages/, apps/).
 *
 * Fails with WorkspaceNotFoundError when turbo.json is missing or no
 * packages are found; filesystem errors are escalated to defects (orDie).
 */
const detectTurbo = (fs, pathSvc, cwd) => pipe(
    fs.exists(pathSvc.join(cwd, 'turbo.json')),
    Effect.orDie,
    Effect.flatMap((hasTurboConfig) => {
        if (!hasTurboConfig) {
            return Effect.fail(new WorkspaceNotFoundError({ cwd }));
        }
        return pipe(
            readPkgDirs(fs, pathSvc, cwd, ['packages/*', 'apps/*']),
            Effect.flatMap((packages) => packages.length === 0
                ? Effect.fail(new WorkspaceNotFoundError({ cwd }))
                : Effect.succeed({
                    type: 'turborepo',
                    root: cwd,
                    packages,
                }))
        );
    })
);
|
|
101
|
+
/**
 * Last-resort strategy: treat `cwd` itself as a single-package "workspace"
 * when it contains a readable package.json.
 *
 * Fails with WorkspaceNotFoundError when no package info can be read.
 */
const detectSingle = (fs, pathSvc, cwd) => pipe(
    readPackageInfo(fs, pathSvc, cwd),
    Effect.flatMap((pkg) => {
        if (pkg === null) {
            return Effect.fail(new WorkspaceNotFoundError({ cwd }));
        }
        return Effect.succeed({
            type: 'single',
            root: cwd,
            packages: [pkg],
        });
    })
);
|
|
108
|
+
// ─── Live implementation ──────────────────────────────────────────────────────
|
|
109
|
+
// Live WorkspaceDetector layer. `detect(cwd)` tries the strategies in
// priority order — pnpm → npm/yarn workspaces → Nx → Turborepo → single
// package — where each WorkspaceNotFoundError falls through to the next
// strategy via catchTag. The catchTag ordering IS the priority; do not
// reorder.
export const WorkspaceDetectorLive = Layer.effect(WorkspaceDetector, Effect.gen(function* () {
    const fs = yield* FileSystem.FileSystem;
    const pathSvc = yield* Path.Path;
    const detect = (cwd) => pipe(detectPnpm(fs, pathSvc, cwd), Effect.catchTag('WorkspaceNotFoundError', () => pipe(fs.exists(pathSvc.join(cwd, 'package.json')), Effect.orDie, Effect.flatMap((hasRootPkg) => {
        // Every remaining strategy needs a root package.json; without one
        // there is nothing left to detect.
        if (!hasRootPkg)
            return Effect.fail(new WorkspaceNotFoundError({ cwd }));
        // Decode the raw package.json text via the JsonRecord schema
        // (defined elsewhere — presumably a parse-JSON record schema).
        // A decode failure is reported as "no workspace" rather than as a
        // distinct parse error.
        return pipe(fs.readFileString(pathSvc.join(cwd, 'package.json'), 'utf-8'), Effect.orDie, Effect.flatMap((raw) => pipe(Schema.decodeUnknown(JsonRecord)(raw), Effect.mapError(() => new WorkspaceNotFoundError({ cwd })))), Effect.flatMap((rootPkg) => pipe(detectNpmWorkspaces(fs, pathSvc, cwd, rootPkg), Effect.catchTag('WorkspaceNotFoundError', () => detectNx(fs, pathSvc, cwd)), Effect.catchTag('WorkspaceNotFoundError', () => detectTurbo(fs, pathSvc, cwd)), Effect.catchTag('WorkspaceNotFoundError', () => detectSingle(fs, pathSvc, cwd)))));
    }))));
    return { detect };
}));
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"test-setup.d.ts","sourceRoot":"","sources":["../src/test-setup.ts"],"names":[],"mappings":""}
|
package/package.json
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@wolfcola/dead-export-finder",
|
|
3
|
+
"version": "0.0.0-beta-20260513233134",
|
|
4
|
+
"type": "module",
|
|
5
|
+
"private": false,
|
|
6
|
+
"description": "Find dead exports across monorepo package boundaries",
|
|
7
|
+
"license": "MIT",
|
|
8
|
+
"repository": {
|
|
9
|
+
"type": "git",
|
|
10
|
+
"url": "https://github.com/ryanbas21/devtools.git",
|
|
11
|
+
"directory": "packages/dead-export-finder"
|
|
12
|
+
},
|
|
13
|
+
"main": "./dist/index.js",
|
|
14
|
+
"types": "./dist/index.d.ts",
|
|
15
|
+
"bin": {
|
|
16
|
+
"dead-export-finder": "./dist/index.js"
|
|
17
|
+
},
|
|
18
|
+
"exports": {
|
|
19
|
+
".": {
|
|
20
|
+
"types": "./dist/index.d.ts",
|
|
21
|
+
"import": "./dist/index.js",
|
|
22
|
+
"default": "./dist/index.js"
|
|
23
|
+
}
|
|
24
|
+
},
|
|
25
|
+
"files": [
|
|
26
|
+
"dist",
|
|
27
|
+
"!dist/*.tsbuildinfo"
|
|
28
|
+
],
|
|
29
|
+
"publishConfig": {
|
|
30
|
+
"access": "public"
|
|
31
|
+
},
|
|
32
|
+
"dependencies": {
|
|
33
|
+
"effect": "^3.21.1",
|
|
34
|
+
"@effect/cli": "^0.75.1",
|
|
35
|
+
"@effect/platform": "^0.96.0",
|
|
36
|
+
"@effect/platform-node": "^0.106.0",
|
|
37
|
+
"oxc-parser": "^0.72.0",
|
|
38
|
+
"fast-glob": "^3.3.3",
|
|
39
|
+
"ignore": "^7.0.4",
|
|
40
|
+
"yaml": "^2.7.1"
|
|
41
|
+
},
|
|
42
|
+
"devDependencies": {
|
|
43
|
+
"@effect/vitest": "^0.29.0",
|
|
44
|
+
"vitest": "^3.2.0",
|
|
45
|
+
"vite": "^7.3.2",
|
|
46
|
+
"typescript": "5.8.3"
|
|
47
|
+
},
|
|
48
|
+
"scripts": {
|
|
49
|
+
"build": "tsc -p tsconfig.lib.json",
|
|
50
|
+
"lint": "eslint .",
|
|
51
|
+
"test": "vitest run"
|
|
52
|
+
}
|
|
53
|
+
}
|