newpr 1.0.23 → 1.0.24
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json (NOTE: label appears wrong — the diff content below is a bun:test suite importing "./import-deps.ts", so the real path is presumably package/src/stack/import-deps.test.ts)
CHANGED
|
@@ -0,0 +1,254 @@
|
|
|
1
|
+
import { describe, test, expect, afterAll } from "bun:test";
import { mkdtempSync, rmSync, writeFileSync, mkdirSync } from "node:fs";
import { dirname, join } from "node:path";
import { tmpdir } from "node:os";
import { analyzeImportDependencies } from "./import-deps.ts";
|
|
6
|
+
|
|
7
|
+
const tmpDirs: string[] = [];
|
|
8
|
+
|
|
9
|
+
function makeTmpRepo(): string {
|
|
10
|
+
const dir = mkdtempSync(join(tmpdir(), "import-deps-"));
|
|
11
|
+
tmpDirs.push(dir);
|
|
12
|
+
return dir;
|
|
13
|
+
}
|
|
14
|
+
|
|
15
|
+
afterAll(() => {
|
|
16
|
+
for (const dir of tmpDirs) {
|
|
17
|
+
rmSync(dir, { recursive: true, force: true });
|
|
18
|
+
}
|
|
19
|
+
});
|
|
20
|
+
|
|
21
|
+
async function initRepo(path: string): Promise<void> {
|
|
22
|
+
await Bun.$`git init ${path}`.quiet();
|
|
23
|
+
await Bun.$`git -C ${path} config user.name "Test"`.quiet();
|
|
24
|
+
await Bun.$`git -C ${path} config user.email "test@test.com"`.quiet();
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
async function commitFiles(repoPath: string, files: Record<string, string>, msg: string): Promise<string> {
|
|
28
|
+
for (const [filePath, content] of Object.entries(files)) {
|
|
29
|
+
const fullPath = join(repoPath, filePath);
|
|
30
|
+
const dir = fullPath.substring(0, fullPath.lastIndexOf("/"));
|
|
31
|
+
mkdirSync(dir, { recursive: true });
|
|
32
|
+
writeFileSync(fullPath, content);
|
|
33
|
+
}
|
|
34
|
+
await Bun.$`git -C ${repoPath} add -A`.quiet();
|
|
35
|
+
await Bun.$`git -C ${repoPath} commit -m ${msg}`.quiet();
|
|
36
|
+
return (await Bun.$`git -C ${repoPath} rev-parse HEAD`.quiet()).stdout.toString().trim();
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
describe("analyzeImportDependencies", () => {
|
|
40
|
+
test("detects single-line imports between changed files", async () => {
|
|
41
|
+
const repo = makeTmpRepo();
|
|
42
|
+
await initRepo(repo);
|
|
43
|
+
await commitFiles(repo, { "dummy.txt": "init" }, "init");
|
|
44
|
+
|
|
45
|
+
const headSha = await commitFiles(repo, {
|
|
46
|
+
"src/utils/helper.ts": "export function greet() { return 'hi'; }",
|
|
47
|
+
"src/components/button.ts": "import { greet } from '../utils/helper';\nexport const btn = greet();",
|
|
48
|
+
}, "add files");
|
|
49
|
+
|
|
50
|
+
const ownership = new Map([
|
|
51
|
+
["src/utils/helper.ts", "Utils"],
|
|
52
|
+
["src/components/button.ts", "UI"],
|
|
53
|
+
]);
|
|
54
|
+
|
|
55
|
+
const result = await analyzeImportDependencies(repo, headSha, [...ownership.keys()], ownership);
|
|
56
|
+
|
|
57
|
+
expect(result.fileDeps.get("src/components/button.ts")).toContain("src/utils/helper.ts");
|
|
58
|
+
expect(result.groupDeps.get("UI")).toContain("Utils");
|
|
59
|
+
});
|
|
60
|
+
|
|
61
|
+
test("detects multiline named imports", async () => {
|
|
62
|
+
const repo = makeTmpRepo();
|
|
63
|
+
await initRepo(repo);
|
|
64
|
+
await commitFiles(repo, { "dummy.txt": "init" }, "init");
|
|
65
|
+
|
|
66
|
+
const headSha = await commitFiles(repo, {
|
|
67
|
+
"src/types.ts": "export interface Foo {}\nexport interface Bar {}\nexport interface Baz {}",
|
|
68
|
+
"src/consumer.ts": [
|
|
69
|
+
"import {",
|
|
70
|
+
" Foo,",
|
|
71
|
+
" Bar,",
|
|
72
|
+
" Baz,",
|
|
73
|
+
"} from './types';",
|
|
74
|
+
"",
|
|
75
|
+
"const x: Foo = {} as Foo;",
|
|
76
|
+
].join("\n"),
|
|
77
|
+
}, "multiline import");
|
|
78
|
+
|
|
79
|
+
const ownership = new Map([
|
|
80
|
+
["src/types.ts", "Types"],
|
|
81
|
+
["src/consumer.ts", "Feature"],
|
|
82
|
+
]);
|
|
83
|
+
|
|
84
|
+
const result = await analyzeImportDependencies(repo, headSha, [...ownership.keys()], ownership);
|
|
85
|
+
|
|
86
|
+
expect(result.fileDeps.get("src/consumer.ts")).toContain("src/types.ts");
|
|
87
|
+
expect(result.groupDeps.get("Feature")).toContain("Types");
|
|
88
|
+
});
|
|
89
|
+
|
|
90
|
+
test("detects side-effect imports", async () => {
|
|
91
|
+
const repo = makeTmpRepo();
|
|
92
|
+
await initRepo(repo);
|
|
93
|
+
await commitFiles(repo, { "dummy.txt": "init" }, "init");
|
|
94
|
+
|
|
95
|
+
const headSha = await commitFiles(repo, {
|
|
96
|
+
"src/setup.ts": "globalThis.__initialized = true;",
|
|
97
|
+
"src/app.ts": "import './setup';\nconsole.log('app');",
|
|
98
|
+
}, "side-effect");
|
|
99
|
+
|
|
100
|
+
const ownership = new Map([
|
|
101
|
+
["src/setup.ts", "Setup"],
|
|
102
|
+
["src/app.ts", "App"],
|
|
103
|
+
]);
|
|
104
|
+
|
|
105
|
+
const result = await analyzeImportDependencies(repo, headSha, [...ownership.keys()], ownership);
|
|
106
|
+
|
|
107
|
+
expect(result.fileDeps.get("src/app.ts")).toContain("src/setup.ts");
|
|
108
|
+
expect(result.groupDeps.get("App")).toContain("Setup");
|
|
109
|
+
});
|
|
110
|
+
|
|
111
|
+
test("detects dynamic imports", async () => {
|
|
112
|
+
const repo = makeTmpRepo();
|
|
113
|
+
await initRepo(repo);
|
|
114
|
+
await commitFiles(repo, { "dummy.txt": "init" }, "init");
|
|
115
|
+
|
|
116
|
+
const headSha = await commitFiles(repo, {
|
|
117
|
+
"src/lazy.ts": "export const LazyComponent = () => 'lazy';",
|
|
118
|
+
"src/loader.ts": "export async function load() { const mod = await import('./lazy'); return mod; }",
|
|
119
|
+
}, "dynamic import");
|
|
120
|
+
|
|
121
|
+
const ownership = new Map([
|
|
122
|
+
["src/lazy.ts", "Lazy"],
|
|
123
|
+
["src/loader.ts", "Loader"],
|
|
124
|
+
]);
|
|
125
|
+
|
|
126
|
+
const result = await analyzeImportDependencies(repo, headSha, [...ownership.keys()], ownership);
|
|
127
|
+
|
|
128
|
+
expect(result.fileDeps.get("src/loader.ts")).toContain("src/lazy.ts");
|
|
129
|
+
expect(result.groupDeps.get("Loader")).toContain("Lazy");
|
|
130
|
+
});
|
|
131
|
+
|
|
132
|
+
test("detects re-exports (export from)", async () => {
|
|
133
|
+
const repo = makeTmpRepo();
|
|
134
|
+
await initRepo(repo);
|
|
135
|
+
await commitFiles(repo, { "dummy.txt": "init" }, "init");
|
|
136
|
+
|
|
137
|
+
const headSha = await commitFiles(repo, {
|
|
138
|
+
"src/internal.ts": "export const SECRET = 42;",
|
|
139
|
+
"src/index.ts": "export { SECRET } from './internal';",
|
|
140
|
+
}, "re-export");
|
|
141
|
+
|
|
142
|
+
const ownership = new Map([
|
|
143
|
+
["src/internal.ts", "Core"],
|
|
144
|
+
["src/index.ts", "Public"],
|
|
145
|
+
]);
|
|
146
|
+
|
|
147
|
+
const result = await analyzeImportDependencies(repo, headSha, [...ownership.keys()], ownership);
|
|
148
|
+
|
|
149
|
+
expect(result.fileDeps.get("src/index.ts")).toContain("src/internal.ts");
|
|
150
|
+
expect(result.groupDeps.get("Public")).toContain("Core");
|
|
151
|
+
});
|
|
152
|
+
|
|
153
|
+
test("detects type-only imports", async () => {
|
|
154
|
+
const repo = makeTmpRepo();
|
|
155
|
+
await initRepo(repo);
|
|
156
|
+
await commitFiles(repo, { "dummy.txt": "init" }, "init");
|
|
157
|
+
|
|
158
|
+
const headSha = await commitFiles(repo, {
|
|
159
|
+
"src/types.ts": "export interface Config { key: string; }",
|
|
160
|
+
"src/use-config.ts": "import type { Config } from './types';\nconst c: Config = { key: 'x' };",
|
|
161
|
+
}, "type import");
|
|
162
|
+
|
|
163
|
+
const ownership = new Map([
|
|
164
|
+
["src/types.ts", "Types"],
|
|
165
|
+
["src/use-config.ts", "Feature"],
|
|
166
|
+
]);
|
|
167
|
+
|
|
168
|
+
const result = await analyzeImportDependencies(repo, headSha, [...ownership.keys()], ownership);
|
|
169
|
+
|
|
170
|
+
expect(result.fileDeps.get("src/use-config.ts")).toContain("src/types.ts");
|
|
171
|
+
expect(result.groupDeps.get("Feature")).toContain("Types");
|
|
172
|
+
});
|
|
173
|
+
|
|
174
|
+
test("resolves extensionless imports to .ts files", async () => {
|
|
175
|
+
const repo = makeTmpRepo();
|
|
176
|
+
await initRepo(repo);
|
|
177
|
+
await commitFiles(repo, { "dummy.txt": "init" }, "init");
|
|
178
|
+
|
|
179
|
+
const headSha = await commitFiles(repo, {
|
|
180
|
+
"src/utils.ts": "export const add = (a: number, b: number) => a + b;",
|
|
181
|
+
"src/main.ts": "import { add } from './utils';\nconsole.log(add(1, 2));",
|
|
182
|
+
}, "extensionless");
|
|
183
|
+
|
|
184
|
+
const ownership = new Map([
|
|
185
|
+
["src/utils.ts", "Utils"],
|
|
186
|
+
["src/main.ts", "Main"],
|
|
187
|
+
]);
|
|
188
|
+
|
|
189
|
+
const result = await analyzeImportDependencies(repo, headSha, [...ownership.keys()], ownership);
|
|
190
|
+
|
|
191
|
+
expect(result.fileDeps.get("src/main.ts")).toContain("src/utils.ts");
|
|
192
|
+
});
|
|
193
|
+
|
|
194
|
+
test("resolves index file imports", async () => {
|
|
195
|
+
const repo = makeTmpRepo();
|
|
196
|
+
await initRepo(repo);
|
|
197
|
+
await commitFiles(repo, { "dummy.txt": "init" }, "init");
|
|
198
|
+
|
|
199
|
+
const headSha = await commitFiles(repo, {
|
|
200
|
+
"src/lib/index.ts": "export { default as Btn } from './button';",
|
|
201
|
+
"src/lib/button.ts": "export default function Btn() { return 'btn'; }",
|
|
202
|
+
"src/app.ts": "import { Btn } from './lib';\nconsole.log(Btn());",
|
|
203
|
+
}, "index import");
|
|
204
|
+
|
|
205
|
+
const ownership = new Map([
|
|
206
|
+
["src/lib/index.ts", "Lib"],
|
|
207
|
+
["src/lib/button.ts", "Lib"],
|
|
208
|
+
["src/app.ts", "App"],
|
|
209
|
+
]);
|
|
210
|
+
|
|
211
|
+
const result = await analyzeImportDependencies(repo, headSha, [...ownership.keys()], ownership);
|
|
212
|
+
|
|
213
|
+
expect(result.fileDeps.get("src/app.ts")).toContain("src/lib/index.ts");
|
|
214
|
+
expect(result.groupDeps.get("App")).toContain("Lib");
|
|
215
|
+
});
|
|
216
|
+
|
|
217
|
+
test("ignores non-relative imports", async () => {
|
|
218
|
+
const repo = makeTmpRepo();
|
|
219
|
+
await initRepo(repo);
|
|
220
|
+
await commitFiles(repo, { "dummy.txt": "init" }, "init");
|
|
221
|
+
|
|
222
|
+
const headSha = await commitFiles(repo, {
|
|
223
|
+
"src/app.ts": "import React from 'react';\nimport { useState } from 'react';\nconsole.log(React);",
|
|
224
|
+
}, "npm imports");
|
|
225
|
+
|
|
226
|
+
const ownership = new Map([["src/app.ts", "App"]]);
|
|
227
|
+
|
|
228
|
+
const result = await analyzeImportDependencies(repo, headSha, [...ownership.keys()], ownership);
|
|
229
|
+
|
|
230
|
+
expect(result.fileDeps.size).toBe(0);
|
|
231
|
+
expect(result.groupDeps.size).toBe(0);
|
|
232
|
+
});
|
|
233
|
+
|
|
234
|
+
test("does not create self-group dependencies", async () => {
|
|
235
|
+
const repo = makeTmpRepo();
|
|
236
|
+
await initRepo(repo);
|
|
237
|
+
await commitFiles(repo, { "dummy.txt": "init" }, "init");
|
|
238
|
+
|
|
239
|
+
const headSha = await commitFiles(repo, {
|
|
240
|
+
"src/a.ts": "export const A = 1;",
|
|
241
|
+
"src/b.ts": "import { A } from './a';\nexport const B = A + 1;",
|
|
242
|
+
}, "same group");
|
|
243
|
+
|
|
244
|
+
const ownership = new Map([
|
|
245
|
+
["src/a.ts", "Feature"],
|
|
246
|
+
["src/b.ts", "Feature"],
|
|
247
|
+
]);
|
|
248
|
+
|
|
249
|
+
const result = await analyzeImportDependencies(repo, headSha, [...ownership.keys()], ownership);
|
|
250
|
+
|
|
251
|
+
expect(result.fileDeps.get("src/b.ts")).toContain("src/a.ts");
|
|
252
|
+
expect(result.groupDeps.size).toBe(0);
|
|
253
|
+
});
|
|
254
|
+
});
|
package/src/stack/import-deps.ts
CHANGED
|
@@ -1,21 +1,77 @@
|
|
|
1
|
-
const IMPORT_PATTERNS: RegExp[] = [
|
|
2
|
-
/^\s*import\s+.*?\s+from\s+['"]([^'"]+)['"]/gm,
|
|
3
|
-
/^\s*export\s+.*?\s+from\s+['"]([^'"]+)['"]/gm,
|
|
4
|
-
/^\s*import\s*\(\s*['"]([^'"]+)['"]\s*\)/gm,
|
|
5
|
-
/^\s*require\s*\(\s*['"]([^'"]+)['"]\s*\)/gm,
|
|
6
|
-
];
|
|
7
|
-
|
|
8
1
|
const ANALYZABLE_EXTENSIONS = new Set([
|
|
9
2
|
".ts", ".tsx", ".js", ".jsx", ".mjs", ".cjs",
|
|
10
3
|
]);
|
|
11
4
|
|
|
5
|
+
const EXT_TO_LOADER: Record<string, "ts" | "tsx" | "js" | "jsx"> = {
|
|
6
|
+
".ts": "ts",
|
|
7
|
+
".tsx": "tsx",
|
|
8
|
+
".js": "js",
|
|
9
|
+
".jsx": "jsx",
|
|
10
|
+
".mjs": "js",
|
|
11
|
+
".cjs": "js",
|
|
12
|
+
};
|
|
13
|
+
|
|
14
|
+
const transpilerCache = new Map<string, InstanceType<typeof Bun.Transpiler>>();
|
|
15
|
+
|
|
16
|
+
function getTranspiler(loader: "ts" | "tsx" | "js" | "jsx"): InstanceType<typeof Bun.Transpiler> {
|
|
17
|
+
let t = transpilerCache.get(loader);
|
|
18
|
+
if (!t) {
|
|
19
|
+
t = new Bun.Transpiler({ loader });
|
|
20
|
+
transpilerCache.set(loader, t);
|
|
21
|
+
}
|
|
22
|
+
return t;
|
|
23
|
+
}
|
|
24
|
+
|
|
12
25
|
function isRelativeImport(specifier: string): boolean {
|
|
13
26
|
return specifier.startsWith("./") || specifier.startsWith("../");
|
|
14
27
|
}
|
|
15
28
|
|
|
16
|
-
function
|
|
29
|
+
function fileExt(f: string): string {
|
|
30
|
+
const dot = f.lastIndexOf(".");
|
|
31
|
+
return dot >= 0 ? f.slice(dot) : "";
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
// scanImports skips `import type` — this catches them for dependency tracking
|
|
35
|
+
const TYPE_IMPORT_RE = /import\s+type\s+(?:\{[^}]*\}|\*\s+as\s+\w+|\w+)\s+from\s+['"]([^'"]+)['"]/gs;
|
|
36
|
+
|
|
37
|
+
function extractImports(source: string, filePath: string): string[] {
|
|
38
|
+
const ext = fileExt(filePath);
|
|
39
|
+
const loader = EXT_TO_LOADER[ext];
|
|
40
|
+
if (!loader) return [];
|
|
41
|
+
|
|
42
|
+
try {
|
|
43
|
+
const transpiler = getTranspiler(loader);
|
|
44
|
+
const scanned = transpiler.scanImports(source);
|
|
45
|
+
const results = new Set(
|
|
46
|
+
scanned.map((imp) => imp.path).filter(isRelativeImport),
|
|
47
|
+
);
|
|
48
|
+
|
|
49
|
+
TYPE_IMPORT_RE.lastIndex = 0;
|
|
50
|
+
let m: RegExpExecArray | null;
|
|
51
|
+
while ((m = TYPE_IMPORT_RE.exec(source)) !== null) {
|
|
52
|
+
if (m[1] && isRelativeImport(m[1])) results.add(m[1]);
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
return Array.from(results);
|
|
56
|
+
} catch {
|
|
57
|
+
return extractImportsFallback(source);
|
|
58
|
+
}
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
const FALLBACK_PATTERNS: RegExp[] = [
|
|
62
|
+
// import/export ... from '...' (multiline-safe via [\s\S])
|
|
63
|
+
/(?:import|export)\s+[\s\S]*?\s+from\s+['"]([^'"]+)['"]/g,
|
|
64
|
+
// side-effect: import './foo'
|
|
65
|
+
/import\s+['"]([^'"]+)['"]/g,
|
|
66
|
+
// dynamic: import('./foo')
|
|
67
|
+
/import\s*\(\s*['"]([^'"]+)['"]\s*\)/g,
|
|
68
|
+
// require('./foo')
|
|
69
|
+
/require\s*\(\s*['"]([^'"]+)['"]\s*\)/g,
|
|
70
|
+
];
|
|
71
|
+
|
|
72
|
+
function extractImportsFallback(source: string): string[] {
|
|
17
73
|
const imports = new Set<string>();
|
|
18
|
-
for (const re of
|
|
74
|
+
for (const re of FALLBACK_PATTERNS) {
|
|
19
75
|
re.lastIndex = 0;
|
|
20
76
|
let match: RegExpExecArray | null;
|
|
21
77
|
while ((match = re.exec(source)) !== null) {
|
|
@@ -81,18 +137,13 @@ export async function analyzeImportDependencies(
|
|
|
81
137
|
const fileSet = new Set(changedFiles);
|
|
82
138
|
const fileDeps = new Map<string, string[]>();
|
|
83
139
|
|
|
84
|
-
const
|
|
85
|
-
const dot = f.lastIndexOf(".");
|
|
86
|
-
return dot >= 0 ? f.slice(dot) : "";
|
|
87
|
-
};
|
|
88
|
-
|
|
89
|
-
const analyzable = changedFiles.filter((f) => ANALYZABLE_EXTENSIONS.has(ext(f)));
|
|
140
|
+
const analyzable = changedFiles.filter((f) => ANALYZABLE_EXTENSIONS.has(fileExt(f)));
|
|
90
141
|
|
|
91
142
|
await Promise.all(analyzable.map(async (filePath) => {
|
|
92
143
|
const source = await readFileFromGit(repoPath, headSha, filePath);
|
|
93
144
|
if (!source) return;
|
|
94
145
|
|
|
95
|
-
const rawImports = extractImports(source);
|
|
146
|
+
const rawImports = extractImports(source, filePath);
|
|
96
147
|
const resolved: string[] = [];
|
|
97
148
|
|
|
98
149
|
for (const specifier of rawImports) {
|
|
@@ -124,7 +175,9 @@ export async function analyzeImportDependencies(
|
|
|
124
175
|
|
|
125
176
|
const groupDepsMap = new Map<string, string[]>();
|
|
126
177
|
for (const [group, deps] of groupDeps) {
|
|
127
|
-
|
|
178
|
+
if (deps.size > 0) {
|
|
179
|
+
groupDepsMap.set(group, Array.from(deps));
|
|
180
|
+
}
|
|
128
181
|
}
|
|
129
182
|
|
|
130
183
|
return { fileDeps, groupDeps: groupDepsMap };
|
|
@@ -134,8 +134,9 @@ function isLineLabelCandidate(text: string): boolean {
|
|
|
134
134
|
if (trimmed.length > MAX_LINE_LABEL_LENGTH) return false;
|
|
135
135
|
if (trimmed.includes("\n")) return false;
|
|
136
136
|
if (trimmed.includes("[[")) return false;
|
|
137
|
-
|
|
138
|
-
|
|
137
|
+
// No backtick parity check — inlineMarkdownToHtml handles unpaired
|
|
138
|
+
// backticks gracefully (regex simply won't match them, they render as
|
|
139
|
+
// literal characters after HTML-escaping).
|
|
139
140
|
return true;
|
|
140
141
|
}
|
|
141
142
|
|
|
@@ -171,13 +172,13 @@ function replaceLineAnchors(text: string): string {
|
|
|
171
172
|
else if (text[end] === ")") depth--;
|
|
172
173
|
end++;
|
|
173
174
|
}
|
|
174
|
-
|
|
175
|
-
|
|
176
|
-
|
|
177
|
-
|
|
178
|
-
afterClose = end;
|
|
179
|
-
}
|
|
175
|
+
if (depth === 0) {
|
|
176
|
+
const candidate = text.slice(afterClose + 1, end - 1);
|
|
177
|
+
if (isLineLabelCandidate(candidate)) {
|
|
178
|
+
label = candidate;
|
|
180
179
|
}
|
|
180
|
+
afterClose = end;
|
|
181
|
+
}
|
|
181
182
|
}
|
|
182
183
|
|
|
183
184
|
const encoded = encodeURIComponent(id);
|