@blocksdiy/eslint-plugin-auto-import 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -0
- package/dist/cjs/index.d.ts +28 -0
- package/dist/cjs/index.js +304 -0
- package/dist/index.d.ts +28 -0
- package/dist/index.js +268 -0
- package/package.json +59 -0
package/README.md
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
# @blocksdiy/eslint-plugin-auto-import
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
import { TSESLint } from "@typescript-eslint/utils";
/** Explicit identifier -> import-source mappings, keyed by identifier name. */
type ImportMapping = Record<string, {
    path: string;
    isDefault?: boolean;
}>;
/** One directory (or single file) to scan for exported names. */
interface ScanDir {
    /** Root directory to scan. */
    dir: string;
    /** When set, scan only this file (relative to `dir`) instead of walking the directory. */
    file?: string;
    /** Persist scan results to exportCache.json and reuse them on later runs. */
    cacheable?: boolean;
    /** Import from the prefix itself rather than `prefix/relative-path`. */
    barrelPattern?: boolean;
    /** Descend into subdirectories (defaults to true). */
    recursive?: boolean;
}
/** Scan configurations keyed by the import prefix they resolve to. */
type ScanDirs = Record<string, ScanDir>;
/** Rule options tuple: a single options object. */
type Options = [
    {
        scanDirs?: ScanDirs;
        mappings?: ImportMapping;
    }
];
export declare const rules: {
    "auto-import": TSESLint.RuleModule<"missingImport", Options, unknown, TSESLint.RuleListener>;
};
declare const _default: {
    rules: {
        "auto-import": TSESLint.RuleModule<"missingImport", Options, unknown, TSESLint.RuleListener>;
    };
};
export default _default;
|
|
@@ -0,0 +1,304 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
+
}) : function(o, v) {
|
|
16
|
+
o["default"] = v;
|
|
17
|
+
});
|
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
19
|
+
var ownKeys = function(o) {
|
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
21
|
+
var ar = [];
|
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
23
|
+
return ar;
|
|
24
|
+
};
|
|
25
|
+
return ownKeys(o);
|
|
26
|
+
};
|
|
27
|
+
return function (mod) {
|
|
28
|
+
if (mod && mod.__esModule) return mod;
|
|
29
|
+
var result = {};
|
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
31
|
+
__setModuleDefault(result, mod);
|
|
32
|
+
return result;
|
|
33
|
+
};
|
|
34
|
+
})();
|
|
35
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
36
|
+
exports.rules = void 0;
|
|
37
|
+
const fs = __importStar(require("fs"));
|
|
38
|
+
const path = __importStar(require("path"));
|
|
39
|
+
const ts = __importStar(require("typescript"));
|
|
40
|
+
// Cache for scanned exports
|
|
41
|
+
const exportCache = new Map();
|
|
42
|
+
/**
 * Collect the paths of all .ts/.tsx files under `dir`.
 * @param {string} dir - Directory to scan.
 * @param {boolean} recursive - Whether to descend into subdirectories.
 * @returns {string[]} Matching file paths (joined onto `dir`).
 */
function scanDirectory(dir, recursive) {
    const entries = fs.readdirSync(dir, { withFileTypes: true });
    return entries.flatMap((entry) => {
        const fullPath = path.join(dir, entry.name);
        if (entry.isDirectory()) {
            return recursive ? scanDirectory(fullPath, recursive) : [];
        }
        return entry.isFile() && /\.(ts|tsx)$/.test(entry.name) ? [fullPath] : [];
    });
}
|
|
57
|
+
/**
 * Collect the names exported by a TypeScript file.
 *
 * The original built a full ts.Program (which loads the default lib files and
 * the whole checker machinery) for EVERY scanned file; a bare parse via
 * ts.createSourceFile yields the same AST for export extraction at a small
 * fraction of the cost. An unreadable/missing file degrades to an empty map,
 * exactly like the original's undefined-sourceFile path. Renaming the local
 * accumulator also avoids shadowing the CJS module `exports` object.
 *
 * @param {string} filePath - File to parse.
 * @returns {Map<string, string>} export name -> filePath.
 */
function getExportsFromFile(filePath) {
    let sourceFile;
    try {
        const sourceText = fs.readFileSync(filePath, "utf8");
        sourceFile = ts.createSourceFile(filePath, sourceText, ts.ScriptTarget.ES2020, true);
    }
    catch {
        return new Map();
    }
    const found = new Map();
    const isExported = (node) => node.modifiers?.some((m) => m.kind === ts.SyntaxKind.ExportKeyword) === true;
    function visit(node) {
        if (ts.isExportDeclaration(node)) {
            // `export { a, b }` lists.
            if (node.exportClause && ts.isNamedExports(node.exportClause)) {
                for (const element of node.exportClause.elements) {
                    found.set(element.name.text, filePath);
                }
            }
        }
        else if (ts.isVariableStatement(node) && isExported(node)) {
            // `export const a = ..., b = ...`
            for (const decl of node.declarationList.declarations) {
                if (ts.isIdentifier(decl.name)) {
                    found.set(decl.name.text, filePath);
                }
            }
        }
        else if ((ts.isFunctionDeclaration(node) ||
            ts.isClassDeclaration(node) ||
            ts.isInterfaceDeclaration(node) ||
            ts.isTypeAliasDeclaration(node)) &&
            isExported(node) &&
            node.name) {
            // `export function/class/interface/type Name ...`
            found.set(node.name.text, filePath);
        }
        ts.forEachChild(node, visit);
    }
    visit(sourceFile);
    return found;
}
|
|
108
|
+
/**
 * Scan the configured directories, populate the in-memory exportCache, and
 * persist cacheable results to exportCache.json in the working directory.
 *
 * Fixes over the original:
 *  - It skipped EVERY cacheable prefix whenever any cache file loaded, so a
 *    cacheable dir added after the first run was never scanned; now only
 *    prefixes actually satisfied by the loaded cache are skipped.
 *  - It wrote the cache only when no cache file existed, so the persisted
 *    cache could never be extended; now it rewrites whenever fresh cacheable
 *    results were produced, merging previously loaded entries.
 *
 * @param {Record<string, {dir: string, file?: string, cacheable?: boolean, barrelPattern?: boolean, recursive?: boolean}>} scanDirs
 */
function scanAndCacheExports(scanDirs) {
    const cachePath = path.join(process.cwd(), "exportCache.json");
    // Best-effort load of a previously persisted cache.
    if (fs.existsSync(cachePath)) {
        try {
            const content = JSON.parse(fs.readFileSync(cachePath, "utf8"));
            for (const [importPrefix, cached] of Object.entries(content)) {
                exportCache.set(importPrefix, new Map(Object.entries(cached)));
            }
        }
        catch (error) {
            // A corrupt cache is not fatal; fall through to scanning.
            console.error("Error loading cache:", error);
        }
    }
    const newPersistedCache = new Map();
    for (const [importPrefix, scanDir] of Object.entries(scanDirs)) {
        // Skip only prefixes actually satisfied by the loaded cache.
        if (scanDir.cacheable && exportCache.has(importPrefix)) {
            continue;
        }
        console.log("scanning dir", scanDir.dir);
        const dir = scanDir.dir;
        // A single-file config scans just that file; otherwise walk the dir.
        const files = scanDir.file
            ? [path.join(dir, scanDir.file)]
            : scanDirectory(dir, scanDir.recursive ?? true);
        const collected = new Map();
        for (const file of files) {
            console.log("scanning file", file);
            const fileExports = getExportsFromFile(file);
            const relativePath = path
                .relative(dir, file)
                .replace(/\\/g, "/")
                .replace(/\.(ts|tsx)$/, "");
            // Barrel dirs import from the prefix itself; otherwise from the
            // prefix plus the file's extension-less relative path.
            const importPath = scanDir.barrelPattern ? `${importPrefix}` : `${importPrefix}/${relativePath}`;
            fileExports.forEach((_, name) => {
                collected.set(name, importPath);
            });
        }
        if (scanDir.cacheable) {
            newPersistedCache.set(importPrefix, collected);
        }
        exportCache.set(importPrefix, collected);
    }
    // Persist whenever fresh cacheable results exist, merging previously
    // loaded cacheable entries so the file stays complete.
    if (newPersistedCache.size > 0) {
        try {
            const persistable = new Map();
            for (const [importPrefix, scanDir] of Object.entries(scanDirs)) {
                if (scanDir.cacheable && exportCache.has(importPrefix)) {
                    persistable.set(importPrefix, exportCache.get(importPrefix));
                }
            }
            const cacheContent = Object.fromEntries(Array.from(persistable.entries()).map(([key, value]) => [key, Object.fromEntries(value.entries())]));
            fs.writeFileSync(cachePath, JSON.stringify(cacheContent, null, 2));
        }
        catch (error) {
            console.error("Error writing cache:", error);
        }
    }
}
|
|
170
|
+
// ESLint rule: reports identifiers that resolve to nothing in the file and
// auto-fixes them by inserting the matching import, taken from the explicit
// `mappings` option or from the exports discovered by scanning `scanDirs`.
const rule = {
    // NOTE: top-level `defaultOptions` is a @typescript-eslint convention;
    // core ESLint does not apply it, so create() must tolerate no options.
    defaultOptions: [{ scanDirs: {}, mappings: {} }],
    meta: {
        type: "problem",
        docs: {
            description: "Automatically fix missing imports",
        },
        fixable: "code",
        messages: {
            missingImport: "Missing import for {{ identifier }}",
        },
        schema: [
            {
                type: "object",
                properties: {
                    scanDirs: {
                        type: "object",
                        additionalProperties: {
                            type: "object",
                            properties: {
                                dir: { type: "string" },
                                file: { type: "string" },
                                cacheable: { type: "boolean" },
                                barrelPattern: { type: "boolean" },
                                // Honored by the scanner but previously
                                // missing from the schema.
                                recursive: { type: "boolean" },
                            },
                        },
                    },
                    mappings: {
                        type: "object",
                        additionalProperties: {
                            type: "object",
                            properties: {
                                path: { type: "string" },
                                isDefault: { type: "boolean" },
                            },
                            required: ["path"],
                        },
                    },
                },
            },
        ],
    },
    create(context) {
        // Guard: with no rule options configured, context.options[0] is
        // undefined and the original destructuring would throw.
        const options = context.options[0] ?? {};
        const { scanDirs = {}, mappings = {} } = options;
        // Scan directories once per lint process (module-level cache).
        if (exportCache.size === 0 && Object.keys(scanDirs).length > 0) {
            scanAndCacheExports(scanDirs);
        }
        // Names already bound by an import in this file.
        const existing = new Map();
        // Import path -> its ImportDeclaration node, for merging specifiers.
        const importByPath = new Map();
        // Record an import declaration so later fixes can extend it.
        function rememberImport(node) {
            importByPath.set(node.source.value, node);
            for (const spec of node.specifiers) {
                if (spec.type === "ImportSpecifier" || spec.type === "ImportDefaultSpecifier") {
                    existing.set(spec.local.name, true);
                }
            }
        }
        // Build the fixer operation that makes `name` importable from
        // `importPath`, preferring to extend an existing import declaration.
        function buildNamedInsert(name, importPath, fixer, isDefault = false) {
            const target = importByPath.get(importPath);
            const firstNode = context.sourceCode.ast.body[0];
            // Guard: an empty program has no first node to anchor on.
            const insertTop = (text) => (firstNode
                ? fixer.insertTextBefore(firstNode, text)
                : fixer.insertTextBeforeRange([0, 0], text));
            // Extend an existing named-import list when possible. Guard
            // against side-effect imports (`import "p"`) with no specifiers,
            // which would have crashed the original.
            if (target && !isDefault && target.specifiers.length > 0) {
                const lastSpecifier = target.specifiers[target.specifiers.length - 1];
                return fixer.insertTextAfter(lastSpecifier, `, ${name}`);
            }
            if (isDefault) {
                return insertTop(`import ${name} from "${importPath}";\n`);
            }
            return insertTop(`import { ${name} } from "${importPath}";\n`);
        }
        return {
            ImportDeclaration(node) {
                rememberImport(node);
            },
            "Program:exit"(node) {
                const globalScope = context.sourceCode.getScope(node);
                // Report each missing identifier only once even when it is
                // referenced many times; duplicate reports would queue the
                // same insertion repeatedly.
                const reported = new Set();
                for (const ref of globalScope.through) {
                    const identifier = ref.identifier.name;
                    if (existing.has(identifier) || reported.has(identifier)) {
                        continue;
                    }
                    // Explicit mappings win over scanned exports.
                    let mapping = mappings[identifier];
                    let importPath;
                    if (mapping) {
                        importPath = mapping.path;
                    }
                    else {
                        for (const scanned of exportCache.values()) {
                            const foundPath = scanned.get(identifier);
                            if (foundPath) {
                                importPath = foundPath;
                                mapping = { path: foundPath, isDefault: false };
                                break;
                            }
                        }
                    }
                    if (!mapping || !importPath) {
                        continue;
                    }
                    reported.add(identifier);
                    context.report({
                        node: ref.identifier,
                        messageId: "missingImport",
                        data: {
                            identifier,
                        },
                        fix(fixer) {
                            return buildNamedInsert(identifier, importPath, fixer, mapping.isDefault);
                        },
                    });
                }
            },
        };
    },
};
|
|
299
|
+
exports.rules = {
|
|
300
|
+
"auto-import": rule,
|
|
301
|
+
};
|
|
302
|
+
exports.default = {
|
|
303
|
+
rules: exports.rules,
|
|
304
|
+
};
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
import { TSESLint } from "@typescript-eslint/utils";
/** Explicit identifier -> import-source mappings, keyed by identifier name. */
type ImportMapping = Record<string, {
    path: string;
    isDefault?: boolean;
}>;
/** One directory (or single file) to scan for exported names. */
interface ScanDir {
    /** Root directory to scan. */
    dir: string;
    /** When set, scan only this file (relative to `dir`) instead of walking the directory. */
    file?: string;
    /** Persist scan results to exportCache.json and reuse them on later runs. */
    cacheable?: boolean;
    /** Import from the prefix itself rather than `prefix/relative-path`. */
    barrelPattern?: boolean;
    /** Descend into subdirectories (defaults to true). */
    recursive?: boolean;
}
/** Scan configurations keyed by the import prefix they resolve to. */
type ScanDirs = Record<string, ScanDir>;
/** Rule options tuple: a single options object. */
type Options = [
    {
        scanDirs?: ScanDirs;
        mappings?: ImportMapping;
    }
];
export declare const rules: {
    "auto-import": TSESLint.RuleModule<"missingImport", Options, unknown, TSESLint.RuleListener>;
};
declare const _default: {
    rules: {
        "auto-import": TSESLint.RuleModule<"missingImport", Options, unknown, TSESLint.RuleListener>;
    };
};
export default _default;
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,268 @@
|
|
|
1
|
+
import * as fs from "fs";
|
|
2
|
+
import * as path from "path";
|
|
3
|
+
import * as ts from "typescript";
|
|
4
|
+
// Cache for scanned exports
|
|
5
|
+
const exportCache = new Map();
|
|
6
|
+
/**
 * Collect the paths of all .ts/.tsx files under `dir`.
 * @param {string} dir - Directory to scan.
 * @param {boolean} recursive - Whether to descend into subdirectories.
 * @returns {string[]} Matching file paths (joined onto `dir`).
 */
function scanDirectory(dir, recursive) {
    const entries = fs.readdirSync(dir, { withFileTypes: true });
    return entries.flatMap((entry) => {
        const fullPath = path.join(dir, entry.name);
        if (entry.isDirectory()) {
            return recursive ? scanDirectory(fullPath, recursive) : [];
        }
        return entry.isFile() && /\.(ts|tsx)$/.test(entry.name) ? [fullPath] : [];
    });
}
|
|
21
|
+
/**
 * Collect the names exported by a TypeScript file.
 *
 * The original built a full ts.Program (which loads the default lib files and
 * the whole checker machinery) for EVERY scanned file; a bare parse via
 * ts.createSourceFile yields the same AST for export extraction at a small
 * fraction of the cost. An unreadable/missing file degrades to an empty map,
 * exactly like the original's undefined-sourceFile path.
 *
 * @param {string} filePath - File to parse.
 * @returns {Map<string, string>} export name -> filePath.
 */
function getExportsFromFile(filePath) {
    let sourceFile;
    try {
        const sourceText = fs.readFileSync(filePath, "utf8");
        sourceFile = ts.createSourceFile(filePath, sourceText, ts.ScriptTarget.ES2020, true);
    }
    catch {
        return new Map();
    }
    const found = new Map();
    const isExported = (node) => node.modifiers?.some((m) => m.kind === ts.SyntaxKind.ExportKeyword) === true;
    function visit(node) {
        if (ts.isExportDeclaration(node)) {
            // `export { a, b }` lists.
            if (node.exportClause && ts.isNamedExports(node.exportClause)) {
                for (const element of node.exportClause.elements) {
                    found.set(element.name.text, filePath);
                }
            }
        }
        else if (ts.isVariableStatement(node) && isExported(node)) {
            // `export const a = ..., b = ...`
            for (const decl of node.declarationList.declarations) {
                if (ts.isIdentifier(decl.name)) {
                    found.set(decl.name.text, filePath);
                }
            }
        }
        else if ((ts.isFunctionDeclaration(node) ||
            ts.isClassDeclaration(node) ||
            ts.isInterfaceDeclaration(node) ||
            ts.isTypeAliasDeclaration(node)) &&
            isExported(node) &&
            node.name) {
            // `export function/class/interface/type Name ...`
            found.set(node.name.text, filePath);
        }
        ts.forEachChild(node, visit);
    }
    visit(sourceFile);
    return found;
}
|
|
72
|
+
/**
 * Scan the configured directories, populate the in-memory exportCache, and
 * persist cacheable results to exportCache.json in the working directory.
 *
 * Fixes over the original:
 *  - It skipped EVERY cacheable prefix whenever any cache file loaded, so a
 *    cacheable dir added after the first run was never scanned; now only
 *    prefixes actually satisfied by the loaded cache are skipped.
 *  - It wrote the cache only when no cache file existed, so the persisted
 *    cache could never be extended; now it rewrites whenever fresh cacheable
 *    results were produced, merging previously loaded entries.
 *
 * @param {Record<string, {dir: string, file?: string, cacheable?: boolean, barrelPattern?: boolean, recursive?: boolean}>} scanDirs
 */
function scanAndCacheExports(scanDirs) {
    const cachePath = path.join(process.cwd(), "exportCache.json");
    // Best-effort load of a previously persisted cache.
    if (fs.existsSync(cachePath)) {
        try {
            const content = JSON.parse(fs.readFileSync(cachePath, "utf8"));
            for (const [importPrefix, cached] of Object.entries(content)) {
                exportCache.set(importPrefix, new Map(Object.entries(cached)));
            }
        }
        catch (error) {
            // A corrupt cache is not fatal; fall through to scanning.
            console.error("Error loading cache:", error);
        }
    }
    const newPersistedCache = new Map();
    for (const [importPrefix, scanDir] of Object.entries(scanDirs)) {
        // Skip only prefixes actually satisfied by the loaded cache.
        if (scanDir.cacheable && exportCache.has(importPrefix)) {
            continue;
        }
        console.log("scanning dir", scanDir.dir);
        const dir = scanDir.dir;
        // A single-file config scans just that file; otherwise walk the dir.
        const files = scanDir.file
            ? [path.join(dir, scanDir.file)]
            : scanDirectory(dir, scanDir.recursive ?? true);
        const collected = new Map();
        for (const file of files) {
            console.log("scanning file", file);
            const fileExports = getExportsFromFile(file);
            const relativePath = path
                .relative(dir, file)
                .replace(/\\/g, "/")
                .replace(/\.(ts|tsx)$/, "");
            // Barrel dirs import from the prefix itself; otherwise from the
            // prefix plus the file's extension-less relative path.
            const importPath = scanDir.barrelPattern ? `${importPrefix}` : `${importPrefix}/${relativePath}`;
            fileExports.forEach((_, name) => {
                collected.set(name, importPath);
            });
        }
        if (scanDir.cacheable) {
            newPersistedCache.set(importPrefix, collected);
        }
        exportCache.set(importPrefix, collected);
    }
    // Persist whenever fresh cacheable results exist, merging previously
    // loaded cacheable entries so the file stays complete.
    if (newPersistedCache.size > 0) {
        try {
            const persistable = new Map();
            for (const [importPrefix, scanDir] of Object.entries(scanDirs)) {
                if (scanDir.cacheable && exportCache.has(importPrefix)) {
                    persistable.set(importPrefix, exportCache.get(importPrefix));
                }
            }
            const cacheContent = Object.fromEntries(Array.from(persistable.entries()).map(([key, value]) => [key, Object.fromEntries(value.entries())]));
            fs.writeFileSync(cachePath, JSON.stringify(cacheContent, null, 2));
        }
        catch (error) {
            console.error("Error writing cache:", error);
        }
    }
}
|
|
134
|
+
// ESLint rule: reports identifiers that resolve to nothing in the file and
// auto-fixes them by inserting the matching import, taken from the explicit
// `mappings` option or from the exports discovered by scanning `scanDirs`.
const rule = {
    // NOTE: top-level `defaultOptions` is a @typescript-eslint convention;
    // core ESLint does not apply it, so create() must tolerate no options.
    defaultOptions: [{ scanDirs: {}, mappings: {} }],
    meta: {
        type: "problem",
        docs: {
            description: "Automatically fix missing imports",
        },
        fixable: "code",
        messages: {
            missingImport: "Missing import for {{ identifier }}",
        },
        schema: [
            {
                type: "object",
                properties: {
                    scanDirs: {
                        type: "object",
                        additionalProperties: {
                            type: "object",
                            properties: {
                                dir: { type: "string" },
                                file: { type: "string" },
                                cacheable: { type: "boolean" },
                                barrelPattern: { type: "boolean" },
                                // Honored by the scanner but previously
                                // missing from the schema.
                                recursive: { type: "boolean" },
                            },
                        },
                    },
                    mappings: {
                        type: "object",
                        additionalProperties: {
                            type: "object",
                            properties: {
                                path: { type: "string" },
                                isDefault: { type: "boolean" },
                            },
                            required: ["path"],
                        },
                    },
                },
            },
        ],
    },
    create(context) {
        // Guard: with no rule options configured, context.options[0] is
        // undefined and the original destructuring would throw.
        const options = context.options[0] ?? {};
        const { scanDirs = {}, mappings = {} } = options;
        // Scan directories once per lint process (module-level cache).
        if (exportCache.size === 0 && Object.keys(scanDirs).length > 0) {
            scanAndCacheExports(scanDirs);
        }
        // Names already bound by an import in this file.
        const existing = new Map();
        // Import path -> its ImportDeclaration node, for merging specifiers.
        const importByPath = new Map();
        // Record an import declaration so later fixes can extend it.
        function rememberImport(node) {
            importByPath.set(node.source.value, node);
            for (const spec of node.specifiers) {
                if (spec.type === "ImportSpecifier" || spec.type === "ImportDefaultSpecifier") {
                    existing.set(spec.local.name, true);
                }
            }
        }
        // Build the fixer operation that makes `name` importable from
        // `importPath`, preferring to extend an existing import declaration.
        function buildNamedInsert(name, importPath, fixer, isDefault = false) {
            const target = importByPath.get(importPath);
            const firstNode = context.sourceCode.ast.body[0];
            // Guard: an empty program has no first node to anchor on.
            const insertTop = (text) => (firstNode
                ? fixer.insertTextBefore(firstNode, text)
                : fixer.insertTextBeforeRange([0, 0], text));
            // Extend an existing named-import list when possible. Guard
            // against side-effect imports (`import "p"`) with no specifiers,
            // which would have crashed the original.
            if (target && !isDefault && target.specifiers.length > 0) {
                const lastSpecifier = target.specifiers[target.specifiers.length - 1];
                return fixer.insertTextAfter(lastSpecifier, `, ${name}`);
            }
            if (isDefault) {
                return insertTop(`import ${name} from "${importPath}";\n`);
            }
            return insertTop(`import { ${name} } from "${importPath}";\n`);
        }
        return {
            ImportDeclaration(node) {
                rememberImport(node);
            },
            "Program:exit"(node) {
                const globalScope = context.sourceCode.getScope(node);
                // Report each missing identifier only once even when it is
                // referenced many times; duplicate reports would queue the
                // same insertion repeatedly.
                const reported = new Set();
                for (const ref of globalScope.through) {
                    const identifier = ref.identifier.name;
                    if (existing.has(identifier) || reported.has(identifier)) {
                        continue;
                    }
                    // Explicit mappings win over scanned exports.
                    let mapping = mappings[identifier];
                    let importPath;
                    if (mapping) {
                        importPath = mapping.path;
                    }
                    else {
                        for (const scanned of exportCache.values()) {
                            const foundPath = scanned.get(identifier);
                            if (foundPath) {
                                importPath = foundPath;
                                mapping = { path: foundPath, isDefault: false };
                                break;
                            }
                        }
                    }
                    if (!mapping || !importPath) {
                        continue;
                    }
                    reported.add(identifier);
                    context.report({
                        node: ref.identifier,
                        messageId: "missingImport",
                        data: {
                            identifier,
                        },
                        fix(fixer) {
                            return buildNamedInsert(identifier, importPath, fixer, mapping.isDefault);
                        },
                    });
                }
            },
        };
    },
};
|
|
263
|
+
// Public plugin surface: the rules table, exposed both as a named export and
// on the default-exported plugin object (same object identity).
const pluginRules = {
    "auto-import": rule,
};
export const rules = pluginRules;
export default {
    rules: pluginRules,
};
|
package/package.json
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@blocksdiy/eslint-plugin-auto-import",
|
|
3
|
+
"blocksLinkIgnore": false,
|
|
4
|
+
"version": "1.0.0",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"description": "ESLint plugin that detects missing imports and auto-fixes them from configured mappings or scanned directories.",
|
|
7
|
+
"keywords": [],
|
|
8
|
+
"author": "Dan Ofir <dan@blocks.diy>",
|
|
9
|
+
"license": "ISC",
|
|
10
|
+
"main": "dist/index.js",
|
|
11
|
+
"types": "dist/index.d.ts",
|
|
12
|
+
"directories": {
|
|
13
|
+
"lib": "src"
|
|
14
|
+
},
|
|
15
|
+
"files": [
|
|
16
|
+
"dist"
|
|
17
|
+
],
|
|
18
|
+
"publishConfig": {
|
|
19
|
+
"registry": "https://registry.npmjs.org/",
|
|
20
|
+
"access": "public"
|
|
21
|
+
},
|
|
22
|
+
"nx": {
|
|
23
|
+
"tags": [
|
|
24
|
+
"type:package"
|
|
25
|
+
]
|
|
26
|
+
},
|
|
27
|
+
"scripts": {
|
|
28
|
+
"clean": "rm -rf ./dist/ && rm -rf tsconfig.tsbuildinfo && rm -rf node_modules",
|
|
29
|
+
"build": "tsc && tsc --module commonjs --outDir dist/cjs",
|
|
30
|
+
"test": "jest --passWithNoTests",
|
|
31
|
+
"lint": "eslint .",
|
|
32
|
+
"lint-fix": "pnpm eslint src/ --fix"
|
|
33
|
+
},
|
|
34
|
+
"dependencies": {
|
|
35
|
+
"@typescript-eslint/parser": "^8.52.0",
|
|
36
|
+
"@typescript-eslint/utils": "^8.52.0",
|
|
37
|
+
"typescript": "^5.9.3"
|
|
38
|
+
},
|
|
39
|
+
"devDependencies": {
|
|
40
|
+
"@blockscom/eslint-config": "4.6.2",
|
|
41
|
+
"@types/node": "^24.9.2",
|
|
42
|
+
"jest": "^29.7.0",
|
|
43
|
+
"tsc-alias": "^1.8.15"
|
|
44
|
+
},
|
|
45
|
+
"engines": {
|
|
46
|
+
"node": ">=24.11.0"
|
|
47
|
+
},
|
|
48
|
+
"packageManager": "pnpm@10.20.0",
|
|
49
|
+
"exports": {
|
|
50
|
+
".": {
|
|
51
|
+
"types": "./dist/index.d.ts",
|
|
52
|
+
"import": "./dist/index.js",
|
|
53
|
+
"require": "./dist/cjs/index.js"
|
|
54
|
+
}
|
|
55
|
+
},
|
|
56
|
+
"peerDependencies": {
|
|
57
|
+
"eslint": ">=9.0.0"
|
|
58
|
+
}
|
|
59
|
+
}
|