@reactgraph/cli 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +319 -0
- package/bun.lock +527 -0
- package/dist/cli/components/IndexProgress.d.ts +18 -0
- package/dist/cli/components/IndexProgress.d.ts.map +1 -0
- package/dist/cli/components/IndexProgress.js +26 -0
- package/dist/cli/components/IndexProgress.js.map +1 -0
- package/dist/cli/components/InitResult.d.ts +7 -0
- package/dist/cli/components/InitResult.d.ts.map +1 -0
- package/dist/cli/components/InitResult.js +6 -0
- package/dist/cli/components/InitResult.js.map +1 -0
- package/dist/cli/index-cmd.d.ts +7 -0
- package/dist/cli/index-cmd.d.ts.map +1 -0
- package/dist/cli/index-cmd.js +28 -0
- package/dist/cli/index-cmd.js.map +1 -0
- package/dist/cli/index.d.ts +3 -0
- package/dist/cli/index.d.ts.map +1 -0
- package/dist/cli/index.js +81 -0
- package/dist/cli/index.js.map +1 -0
- package/dist/cli/init.d.ts +8 -0
- package/dist/cli/init.d.ts.map +1 -0
- package/dist/cli/init.js +77 -0
- package/dist/cli/init.js.map +1 -0
- package/dist/cli/serve.d.ts +2 -0
- package/dist/cli/serve.d.ts.map +1 -0
- package/dist/cli/serve.js +28 -0
- package/dist/cli/serve.js.map +1 -0
- package/dist/cli/unused.d.ts +2 -0
- package/dist/cli/unused.d.ts.map +1 -0
- package/dist/cli/unused.js +56 -0
- package/dist/cli/unused.js.map +1 -0
- package/dist/graph/graph.d.ts +30 -0
- package/dist/graph/graph.d.ts.map +1 -0
- package/dist/graph/graph.js +166 -0
- package/dist/graph/graph.js.map +1 -0
- package/dist/graph/index.d.ts +5 -0
- package/dist/graph/index.d.ts.map +1 -0
- package/dist/graph/index.js +5 -0
- package/dist/graph/index.js.map +1 -0
- package/dist/graph/schema.d.ts +33 -0
- package/dist/graph/schema.d.ts.map +1 -0
- package/dist/graph/schema.js +3 -0
- package/dist/graph/schema.js.map +1 -0
- package/dist/graph/serialize.d.ts +7 -0
- package/dist/graph/serialize.d.ts.map +1 -0
- package/dist/graph/serialize.js +39 -0
- package/dist/graph/serialize.js.map +1 -0
- package/dist/graph/traverse.d.ts +14 -0
- package/dist/graph/traverse.d.ts.map +1 -0
- package/dist/graph/traverse.js +50 -0
- package/dist/graph/traverse.js.map +1 -0
- package/dist/mcp/formatter.d.ts +26 -0
- package/dist/mcp/formatter.d.ts.map +1 -0
- package/dist/mcp/formatter.js +691 -0
- package/dist/mcp/formatter.js.map +1 -0
- package/dist/mcp/server.d.ts +2 -0
- package/dist/mcp/server.d.ts.map +1 -0
- package/dist/mcp/server.js +45 -0
- package/dist/mcp/server.js.map +1 -0
- package/dist/mcp/tools.d.ts +9 -0
- package/dist/mcp/tools.d.ts.map +1 -0
- package/dist/mcp/tools.js +136 -0
- package/dist/mcp/tools.js.map +1 -0
- package/dist/output/ai-context.d.ts +7 -0
- package/dist/output/ai-context.d.ts.map +1 -0
- package/dist/output/ai-context.js +26 -0
- package/dist/output/ai-context.js.map +1 -0
- package/dist/parser/extractors/api-calls.d.ts +15 -0
- package/dist/parser/extractors/api-calls.d.ts.map +1 -0
- package/dist/parser/extractors/api-calls.js +168 -0
- package/dist/parser/extractors/api-calls.js.map +1 -0
- package/dist/parser/extractors/components.d.ts +5 -0
- package/dist/parser/extractors/components.d.ts.map +1 -0
- package/dist/parser/extractors/components.js +236 -0
- package/dist/parser/extractors/components.js.map +1 -0
- package/dist/parser/extractors/context.d.ts +14 -0
- package/dist/parser/extractors/context.d.ts.map +1 -0
- package/dist/parser/extractors/context.js +196 -0
- package/dist/parser/extractors/context.js.map +1 -0
- package/dist/parser/extractors/effects.d.ts +14 -0
- package/dist/parser/extractors/effects.d.ts.map +1 -0
- package/dist/parser/extractors/effects.js +175 -0
- package/dist/parser/extractors/effects.js.map +1 -0
- package/dist/parser/extractors/hooks.d.ts +5 -0
- package/dist/parser/extractors/hooks.d.ts.map +1 -0
- package/dist/parser/extractors/hooks.js +242 -0
- package/dist/parser/extractors/hooks.js.map +1 -0
- package/dist/parser/extractors/imports.d.ts +6 -0
- package/dist/parser/extractors/imports.d.ts.map +1 -0
- package/dist/parser/extractors/imports.js +148 -0
- package/dist/parser/extractors/imports.js.map +1 -0
- package/dist/parser/extractors/index.d.ts +12 -0
- package/dist/parser/extractors/index.d.ts.map +1 -0
- package/dist/parser/extractors/index.js +11 -0
- package/dist/parser/extractors/index.js.map +1 -0
- package/dist/parser/extractors/jsx-tree.d.ts +5 -0
- package/dist/parser/extractors/jsx-tree.d.ts.map +1 -0
- package/dist/parser/extractors/jsx-tree.js +226 -0
- package/dist/parser/extractors/jsx-tree.js.map +1 -0
- package/dist/parser/extractors/routes.d.ts +13 -0
- package/dist/parser/extractors/routes.d.ts.map +1 -0
- package/dist/parser/extractors/routes.js +275 -0
- package/dist/parser/extractors/routes.js.map +1 -0
- package/dist/parser/extractors/state.d.ts +14 -0
- package/dist/parser/extractors/state.d.ts.map +1 -0
- package/dist/parser/extractors/state.js +368 -0
- package/dist/parser/extractors/state.js.map +1 -0
- package/dist/parser/extractors/types.d.ts +22 -0
- package/dist/parser/extractors/types.d.ts.map +1 -0
- package/dist/parser/extractors/types.js +51 -0
- package/dist/parser/extractors/types.js.map +1 -0
- package/dist/parser/indexer.d.ts +14 -0
- package/dist/parser/indexer.d.ts.map +1 -0
- package/dist/parser/indexer.js +167 -0
- package/dist/parser/indexer.js.map +1 -0
- package/dist/parser/pipeline.d.ts +16 -0
- package/dist/parser/pipeline.d.ts.map +1 -0
- package/dist/parser/pipeline.js +63 -0
- package/dist/parser/pipeline.js.map +1 -0
- package/dist/parser/setup.d.ts +4 -0
- package/dist/parser/setup.d.ts.map +1 -0
- package/dist/parser/setup.js +29 -0
- package/dist/parser/setup.js.map +1 -0
- package/dist/parser/walker.d.ts +6 -0
- package/dist/parser/walker.d.ts.map +1 -0
- package/dist/parser/walker.js +45 -0
- package/dist/parser/walker.js.map +1 -0
- package/dist/watcher.d.ts +12 -0
- package/dist/watcher.d.ts.map +1 -0
- package/dist/watcher.js +72 -0
- package/dist/watcher.js.map +1 -0
- package/package.json +51 -0
- package/src/cli/components/IndexProgress.tsx +79 -0
- package/src/cli/components/InitResult.tsx +28 -0
- package/src/cli/index-cmd.ts +41 -0
- package/src/cli/index.ts +92 -0
- package/src/cli/init.ts +97 -0
- package/src/cli/serve.ts +29 -0
- package/src/cli/unused.ts +88 -0
- package/src/graph/graph.ts +179 -0
- package/src/graph/index.ts +4 -0
- package/src/graph/schema.ts +68 -0
- package/src/graph/serialize.ts +40 -0
- package/src/graph/traverse.ts +66 -0
- package/src/mcp/formatter.ts +757 -0
- package/src/mcp/server.ts +59 -0
- package/src/mcp/tools.ts +154 -0
- package/src/output/ai-context.ts +29 -0
- package/src/parser/extractors/api-calls.ts +192 -0
- package/src/parser/extractors/components.ts +273 -0
- package/src/parser/extractors/context.ts +216 -0
- package/src/parser/extractors/effects.ts +205 -0
- package/src/parser/extractors/hooks.ts +268 -0
- package/src/parser/extractors/imports.ts +192 -0
- package/src/parser/extractors/index.ts +11 -0
- package/src/parser/extractors/jsx-tree.ts +271 -0
- package/src/parser/extractors/routes.ts +331 -0
- package/src/parser/extractors/state.ts +392 -0
- package/src/parser/extractors/types.ts +71 -0
- package/src/parser/indexer.ts +197 -0
- package/src/parser/pipeline.ts +89 -0
- package/src/parser/setup.ts +33 -0
- package/src/parser/walker.ts +61 -0
- package/src/watcher.ts +91 -0
- package/templates/CLAUDE.md +7 -0
- package/tests/extractors.test.ts +164 -0
- package/tests/fixtures/basic/src/App.tsx +12 -0
- package/tests/fixtures/basic/src/components/Dashboard.tsx +24 -0
- package/tests/fixtures/basic/src/components/MetricsCard.tsx +15 -0
- package/tests/fixtures/basic/src/components/Sidebar.tsx +20 -0
- package/tests/fixtures/basic/src/contexts/ThemeContext.tsx +16 -0
- package/tests/fixtures/basic/src/hooks/useAuth.ts +25 -0
- package/tests/fixtures/basic/src/stores/authStore.ts +15 -0
- package/tests/fixtures/basic/src/utils.ts +7 -0
- package/tests/graph.test.ts +91 -0
- package/tests/phase2.test.ts +309 -0
- package/tests/smoke.test.ts +77 -0
- package/tsconfig.json +20 -0
- package/vitest.config.ts +8 -0
|
@@ -0,0 +1,392 @@
|
|
|
1
|
+
import type Parser from 'tree-sitter';
|
|
2
|
+
import type { GraphNode, GraphEdge } from '../../graph/schema.js';
|
|
3
|
+
import { nodeId, findAll, findEnclosingFunction } from './types.js';
|
|
4
|
+
import type { ExtractionResult } from './types.js';
|
|
5
|
+
|
|
6
|
+
/**
|
|
7
|
+
* State management extractor — detects Zustand, Redux Toolkit, Jotai, and Recoil stores.
|
|
8
|
+
*
|
|
9
|
+
* Creation patterns:
|
|
10
|
+
* Zustand: const useStore = create((set) => ({ ... }))
|
|
11
|
+
* Redux: const slice = createSlice({ name, initialState, reducers })
|
|
12
|
+
* Jotai: const atom = atom(defaultValue)
|
|
13
|
+
* Recoil: const state = atom({ key, default })
|
|
14
|
+
*/
|
|
15
|
+
export function extractState(
|
|
16
|
+
tree: Parser.Tree,
|
|
17
|
+
filePath: string,
|
|
18
|
+
sourceCode: string,
|
|
19
|
+
existingNodes: GraphNode[],
|
|
20
|
+
): ExtractionResult {
|
|
21
|
+
const nodes: GraphNode[] = [];
|
|
22
|
+
const edges: GraphEdge[] = [];
|
|
23
|
+
const root = tree.rootNode;
|
|
24
|
+
|
|
25
|
+
// Build function map for edge sources
|
|
26
|
+
const functionNodes = new Map<string, GraphNode>();
|
|
27
|
+
for (const n of existingNodes) {
|
|
28
|
+
if (n.file === filePath && (n.kind === 'Component' || n.kind === 'Hook')) {
|
|
29
|
+
functionNodes.set(n.name, n);
|
|
30
|
+
}
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
// Pass 1: Detect store definitions
|
|
34
|
+
const storeNames = new Map<string, { id: string; library: string }>();
|
|
35
|
+
|
|
36
|
+
for (let i = 0; i < root.childCount; i++) {
|
|
37
|
+
const child = root.child(i)!;
|
|
38
|
+
const store = detectStoreCreation(child, filePath);
|
|
39
|
+
if (store) {
|
|
40
|
+
nodes.push(store);
|
|
41
|
+
storeNames.set(store.name, { id: store.id, library: store.meta.library as string });
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
if (child.type === 'export_statement') {
|
|
45
|
+
for (let j = 0; j < child.childCount; j++) {
|
|
46
|
+
const store2 = detectStoreCreation(child.child(j)!, filePath);
|
|
47
|
+
if (store2) {
|
|
48
|
+
store2.exportType = child.text.startsWith('export default') ? 'default' : 'named';
|
|
49
|
+
nodes.push(store2);
|
|
50
|
+
storeNames.set(store2.name, { id: store2.id, library: store2.meta.library as string });
|
|
51
|
+
}
|
|
52
|
+
}
|
|
53
|
+
}
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
// Also build a map of stores from other files (via existingNodes)
|
|
57
|
+
for (const n of existingNodes) {
|
|
58
|
+
if (n.kind === 'Store' && !storeNames.has(n.name)) {
|
|
59
|
+
storeNames.set(n.name, { id: n.id, library: (n.meta.library as string) ?? 'unknown' });
|
|
60
|
+
}
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
// Pass 2: Detect reads/writes in components and hooks
|
|
64
|
+
const callExprs = findAll(root, 'call_expression');
|
|
65
|
+
for (const call of callExprs) {
|
|
66
|
+
const callee = call.childForFieldName('function');
|
|
67
|
+
if (!callee) continue;
|
|
68
|
+
|
|
69
|
+
const enclosingFn = findEnclosingFunction(call);
|
|
70
|
+
if (!enclosingFn) continue;
|
|
71
|
+
const enclosingName = getFunctionName(enclosingFn);
|
|
72
|
+
if (!enclosingName) continue;
|
|
73
|
+
const enclosingNode = functionNodes.get(enclosingName);
|
|
74
|
+
if (!enclosingNode) continue;
|
|
75
|
+
|
|
76
|
+
// Zustand: useAuthStore() or useAuthStore(selector)
|
|
77
|
+
const calleeName = callee.type === 'identifier' ? callee.text : null;
|
|
78
|
+
if (calleeName && storeNames.has(calleeName)) {
|
|
79
|
+
const store = storeNames.get(calleeName)!;
|
|
80
|
+
edges.push({
|
|
81
|
+
source: enclosingNode.id,
|
|
82
|
+
target: store.id,
|
|
83
|
+
kind: 'reads_store',
|
|
84
|
+
meta: { library: store.library },
|
|
85
|
+
});
|
|
86
|
+
continue;
|
|
87
|
+
}
|
|
88
|
+
|
|
89
|
+
// Zustand: useAuthStore.getState() — reads
|
|
90
|
+
if (callee.type === 'member_expression') {
|
|
91
|
+
const obj = callee.childForFieldName('object');
|
|
92
|
+
const prop = callee.childForFieldName('property');
|
|
93
|
+
if (obj && prop) {
|
|
94
|
+
const objName = obj.text;
|
|
95
|
+
if (storeNames.has(objName)) {
|
|
96
|
+
const store = storeNames.get(objName)!;
|
|
97
|
+
if (prop.text === 'getState') {
|
|
98
|
+
edges.push({
|
|
99
|
+
source: enclosingNode.id,
|
|
100
|
+
target: store.id,
|
|
101
|
+
kind: 'reads_store',
|
|
102
|
+
meta: { library: store.library, method: 'getState' },
|
|
103
|
+
});
|
|
104
|
+
} else if (prop.text === 'setState') {
|
|
105
|
+
edges.push({
|
|
106
|
+
source: enclosingNode.id,
|
|
107
|
+
target: store.id,
|
|
108
|
+
kind: 'writes_store',
|
|
109
|
+
meta: { library: store.library, method: 'setState' },
|
|
110
|
+
});
|
|
111
|
+
}
|
|
112
|
+
continue;
|
|
113
|
+
}
|
|
114
|
+
}
|
|
115
|
+
}
|
|
116
|
+
|
|
117
|
+
// Redux: useSelector(selector)
|
|
118
|
+
if (calleeName === 'useSelector') {
|
|
119
|
+
// Try to find which slice via the selector argument
|
|
120
|
+
const args = call.childForFieldName('arguments');
|
|
121
|
+
const selectorText = args?.child(1)?.text ?? '';
|
|
122
|
+
|
|
123
|
+
// Find matching store by checking if selector references store name
|
|
124
|
+
for (const [name, store] of storeNames) {
|
|
125
|
+
if (store.library === 'redux' && selectorText.includes(name.replace('Slice', ''))) {
|
|
126
|
+
edges.push({
|
|
127
|
+
source: enclosingNode.id,
|
|
128
|
+
target: store.id,
|
|
129
|
+
kind: 'reads_store',
|
|
130
|
+
meta: { library: 'redux', selector: selectorText },
|
|
131
|
+
});
|
|
132
|
+
}
|
|
133
|
+
}
|
|
134
|
+
// If no specific store found, create a generic edge
|
|
135
|
+
if (!edges.some(e => e.source === enclosingNode.id && e.kind === 'reads_store')) {
|
|
136
|
+
edges.push({
|
|
137
|
+
source: enclosingNode.id,
|
|
138
|
+
target: 'unresolved:redux-store',
|
|
139
|
+
kind: 'reads_store',
|
|
140
|
+
meta: { library: 'redux', selector: selectorText },
|
|
141
|
+
});
|
|
142
|
+
}
|
|
143
|
+
continue;
|
|
144
|
+
}
|
|
145
|
+
|
|
146
|
+
// Redux: useDispatch() — we track dispatch calls separately
|
|
147
|
+
// The actual writes happen when dispatch(action()) is called
|
|
148
|
+
|
|
149
|
+
// Jotai: useAtom(atom), useAtomValue(atom), useSetAtom(atom)
|
|
150
|
+
if (calleeName === 'useAtom' || calleeName === 'useAtomValue' || calleeName === 'useSetAtom') {
|
|
151
|
+
const args = call.childForFieldName('arguments');
|
|
152
|
+
const atomArg = args?.child(1);
|
|
153
|
+
if (atomArg) {
|
|
154
|
+
const atomName = atomArg.text;
|
|
155
|
+
const store = storeNames.get(atomName);
|
|
156
|
+
const targetId = store?.id ?? `unresolved:${atomName}`;
|
|
157
|
+
|
|
158
|
+
if (calleeName === 'useSetAtom') {
|
|
159
|
+
edges.push({
|
|
160
|
+
source: enclosingNode.id,
|
|
161
|
+
target: targetId,
|
|
162
|
+
kind: 'writes_store',
|
|
163
|
+
meta: { library: 'jotai' },
|
|
164
|
+
});
|
|
165
|
+
} else if (calleeName === 'useAtomValue') {
|
|
166
|
+
edges.push({
|
|
167
|
+
source: enclosingNode.id,
|
|
168
|
+
target: targetId,
|
|
169
|
+
kind: 'reads_store',
|
|
170
|
+
meta: { library: 'jotai' },
|
|
171
|
+
});
|
|
172
|
+
} else {
|
|
173
|
+
// useAtom — both read and write
|
|
174
|
+
edges.push({
|
|
175
|
+
source: enclosingNode.id,
|
|
176
|
+
target: targetId,
|
|
177
|
+
kind: 'reads_store',
|
|
178
|
+
meta: { library: 'jotai' },
|
|
179
|
+
});
|
|
180
|
+
edges.push({
|
|
181
|
+
source: enclosingNode.id,
|
|
182
|
+
target: targetId,
|
|
183
|
+
kind: 'writes_store',
|
|
184
|
+
meta: { library: 'jotai' },
|
|
185
|
+
});
|
|
186
|
+
}
|
|
187
|
+
}
|
|
188
|
+
continue;
|
|
189
|
+
}
|
|
190
|
+
|
|
191
|
+
// Recoil: useRecoilState, useRecoilValue, useSetRecoilState
|
|
192
|
+
if (calleeName === 'useRecoilState' || calleeName === 'useRecoilValue' || calleeName === 'useSetRecoilState') {
|
|
193
|
+
const args = call.childForFieldName('arguments');
|
|
194
|
+
const atomArg = args?.child(1);
|
|
195
|
+
if (atomArg) {
|
|
196
|
+
const atomName = atomArg.text;
|
|
197
|
+
const store = storeNames.get(atomName);
|
|
198
|
+
const targetId = store?.id ?? `unresolved:${atomName}`;
|
|
199
|
+
|
|
200
|
+
if (calleeName === 'useSetRecoilState') {
|
|
201
|
+
edges.push({
|
|
202
|
+
source: enclosingNode.id,
|
|
203
|
+
target: targetId,
|
|
204
|
+
kind: 'writes_store',
|
|
205
|
+
meta: { library: 'recoil' },
|
|
206
|
+
});
|
|
207
|
+
} else if (calleeName === 'useRecoilValue') {
|
|
208
|
+
edges.push({
|
|
209
|
+
source: enclosingNode.id,
|
|
210
|
+
target: targetId,
|
|
211
|
+
kind: 'reads_store',
|
|
212
|
+
meta: { library: 'recoil' },
|
|
213
|
+
});
|
|
214
|
+
} else {
|
|
215
|
+
edges.push({
|
|
216
|
+
source: enclosingNode.id,
|
|
217
|
+
target: targetId,
|
|
218
|
+
kind: 'reads_store',
|
|
219
|
+
meta: { library: 'recoil' },
|
|
220
|
+
});
|
|
221
|
+
edges.push({
|
|
222
|
+
source: enclosingNode.id,
|
|
223
|
+
target: targetId,
|
|
224
|
+
kind: 'writes_store',
|
|
225
|
+
meta: { library: 'recoil' },
|
|
226
|
+
});
|
|
227
|
+
}
|
|
228
|
+
}
|
|
229
|
+
continue;
|
|
230
|
+
}
|
|
231
|
+
}
|
|
232
|
+
|
|
233
|
+
return { nodes, edges };
|
|
234
|
+
}
|
|
235
|
+
|
|
236
|
+
function detectStoreCreation(node: Parser.SyntaxNode, filePath: string): GraphNode | null {
|
|
237
|
+
if (node.type !== 'lexical_declaration') return null;
|
|
238
|
+
|
|
239
|
+
const declarator = findDirectChild(node, 'variable_declarator');
|
|
240
|
+
if (!declarator) return null;
|
|
241
|
+
|
|
242
|
+
const nameNode = declarator.childForFieldName('name');
|
|
243
|
+
const value = declarator.childForFieldName('value');
|
|
244
|
+
if (!nameNode || !value) return null;
|
|
245
|
+
|
|
246
|
+
if (value.type !== 'call_expression') return null;
|
|
247
|
+
|
|
248
|
+
const callee = value.childForFieldName('function');
|
|
249
|
+
if (!callee) return null;
|
|
250
|
+
|
|
251
|
+
const calleeName = callee.text;
|
|
252
|
+
|
|
253
|
+
// Zustand: create(...) or create<Type>(...)
|
|
254
|
+
if (calleeName === 'create') {
|
|
255
|
+
const shape = extractObjectShape(value);
|
|
256
|
+
return {
|
|
257
|
+
id: nodeId(filePath, nameNode.text),
|
|
258
|
+
kind: 'Store',
|
|
259
|
+
name: nameNode.text,
|
|
260
|
+
file: filePath,
|
|
261
|
+
line: node.startPosition.row + 1,
|
|
262
|
+
exportType: 'none',
|
|
263
|
+
meta: { library: 'zustand', shape },
|
|
264
|
+
};
|
|
265
|
+
}
|
|
266
|
+
|
|
267
|
+
// Redux: createSlice({ name, initialState, reducers })
|
|
268
|
+
if (calleeName === 'createSlice') {
|
|
269
|
+
const shape = extractSliceShape(value);
|
|
270
|
+
return {
|
|
271
|
+
id: nodeId(filePath, nameNode.text),
|
|
272
|
+
kind: 'Store',
|
|
273
|
+
name: nameNode.text,
|
|
274
|
+
file: filePath,
|
|
275
|
+
line: node.startPosition.row + 1,
|
|
276
|
+
exportType: 'none',
|
|
277
|
+
meta: { library: 'redux', shape },
|
|
278
|
+
};
|
|
279
|
+
}
|
|
280
|
+
|
|
281
|
+
// Jotai: atom(defaultValue)
|
|
282
|
+
if (calleeName === 'atom') {
|
|
283
|
+
// Distinguish Jotai from Recoil: Recoil's atom takes { key, default }
|
|
284
|
+
const args = value.childForFieldName('arguments');
|
|
285
|
+
const firstArg = args?.child(1);
|
|
286
|
+
const isRecoil = firstArg?.type === 'object' && firstArg.text.includes('key');
|
|
287
|
+
|
|
288
|
+
if (isRecoil) {
|
|
289
|
+
return {
|
|
290
|
+
id: nodeId(filePath, nameNode.text),
|
|
291
|
+
kind: 'Store',
|
|
292
|
+
name: nameNode.text,
|
|
293
|
+
file: filePath,
|
|
294
|
+
line: node.startPosition.row + 1,
|
|
295
|
+
exportType: 'none',
|
|
296
|
+
meta: { library: 'recoil' },
|
|
297
|
+
};
|
|
298
|
+
}
|
|
299
|
+
|
|
300
|
+
return {
|
|
301
|
+
id: nodeId(filePath, nameNode.text),
|
|
302
|
+
kind: 'Store',
|
|
303
|
+
name: nameNode.text,
|
|
304
|
+
file: filePath,
|
|
305
|
+
line: node.startPosition.row + 1,
|
|
306
|
+
exportType: 'none',
|
|
307
|
+
meta: { library: 'jotai' },
|
|
308
|
+
};
|
|
309
|
+
}
|
|
310
|
+
|
|
311
|
+
return null;
|
|
312
|
+
}
|
|
313
|
+
|
|
314
|
+
function extractObjectShape(callNode: Parser.SyntaxNode): string[] {
|
|
315
|
+
// Look for the object returned by the zustand creator function
|
|
316
|
+
const args = callNode.childForFieldName('arguments');
|
|
317
|
+
if (!args) return [];
|
|
318
|
+
|
|
319
|
+
// The first arg to create() is (set) => ({ key1, key2, ... })
|
|
320
|
+
const creator = args.child(1);
|
|
321
|
+
if (!creator) return [];
|
|
322
|
+
|
|
323
|
+
// Find object or parenthesized_expression containing object
|
|
324
|
+
const objects = findAll(creator, 'object');
|
|
325
|
+
if (objects.length === 0) return [];
|
|
326
|
+
|
|
327
|
+
const obj = objects[0]!;
|
|
328
|
+
const keys: string[] = [];
|
|
329
|
+
for (let i = 0; i < obj.childCount; i++) {
|
|
330
|
+
const child = obj.child(i)!;
|
|
331
|
+
if (child.type === 'pair' || child.type === 'method_definition') {
|
|
332
|
+
const key = child.childForFieldName('key') ?? child.childForFieldName('name');
|
|
333
|
+
if (key) keys.push(key.text);
|
|
334
|
+
} else if (child.type === 'shorthand_property') {
|
|
335
|
+
keys.push(child.text);
|
|
336
|
+
}
|
|
337
|
+
}
|
|
338
|
+
|
|
339
|
+
return keys;
|
|
340
|
+
}
|
|
341
|
+
|
|
342
|
+
function extractSliceShape(callNode: Parser.SyntaxNode): string[] {
|
|
343
|
+
// createSlice({ initialState: { key1, key2 }, reducers: { ... } })
|
|
344
|
+
const args = callNode.childForFieldName('arguments');
|
|
345
|
+
if (!args) return [];
|
|
346
|
+
|
|
347
|
+
const configObj = args.child(1);
|
|
348
|
+
if (!configObj || configObj.type !== 'object') return [];
|
|
349
|
+
|
|
350
|
+
// Find initialState property
|
|
351
|
+
for (let i = 0; i < configObj.childCount; i++) {
|
|
352
|
+
const child = configObj.child(i)!;
|
|
353
|
+
if (child.type === 'pair') {
|
|
354
|
+
const key = child.childForFieldName('key');
|
|
355
|
+
if (key?.text === 'initialState') {
|
|
356
|
+
const val = child.childForFieldName('value');
|
|
357
|
+
if (val?.type === 'object') {
|
|
358
|
+
const keys: string[] = [];
|
|
359
|
+
for (let j = 0; j < val.childCount; j++) {
|
|
360
|
+
const prop = val.child(j)!;
|
|
361
|
+
if (prop.type === 'pair') {
|
|
362
|
+
const k = prop.childForFieldName('key');
|
|
363
|
+
if (k) keys.push(k.text);
|
|
364
|
+
}
|
|
365
|
+
}
|
|
366
|
+
return keys;
|
|
367
|
+
}
|
|
368
|
+
}
|
|
369
|
+
}
|
|
370
|
+
}
|
|
371
|
+
|
|
372
|
+
return [];
|
|
373
|
+
}
|
|
374
|
+
|
|
375
|
+
function getFunctionName(node: Parser.SyntaxNode): string | null {
|
|
376
|
+
const nameField = node.childForFieldName('name');
|
|
377
|
+
if (nameField) return nameField.text;
|
|
378
|
+
if (node.type === 'arrow_function' || node.type === 'function_expression') {
|
|
379
|
+
const parent = node.parent;
|
|
380
|
+
if (parent?.type === 'variable_declarator') {
|
|
381
|
+
return parent.childForFieldName('name')?.text ?? null;
|
|
382
|
+
}
|
|
383
|
+
}
|
|
384
|
+
return null;
|
|
385
|
+
}
|
|
386
|
+
|
|
387
|
+
function findDirectChild(node: Parser.SyntaxNode, type: string): Parser.SyntaxNode | null {
|
|
388
|
+
for (let i = 0; i < node.childCount; i++) {
|
|
389
|
+
if (node.child(i)!.type === type) return node.child(i)!;
|
|
390
|
+
}
|
|
391
|
+
return null;
|
|
392
|
+
}
|
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
import type Parser from 'tree-sitter';
|
|
2
|
+
import type { GraphNode, GraphEdge } from '../../graph/schema.js';
|
|
3
|
+
|
|
4
|
+
/** Nodes and edges produced by a single extractor run over one file. */
export interface ExtractionResult {
  nodes: GraphNode[];
  edges: GraphEdge[];
}

/**
 * Common signature shared by all per-file extractors.
 * `existingNodes` carries nodes discovered so far across the project so an
 * extractor can reference entities (stores, contexts, …) from other files.
 */
export type Extractor = (
  tree: Parser.Tree,
  filePath: string,
  sourceCode: string,
  existingNodes: GraphNode[],
) => ExtractionResult;
|
|
15
|
+
|
|
16
|
+
/** Generate a stable node ID from file path and name */
|
|
17
|
+
export function nodeId(filePath: string, name: string): string {
|
|
18
|
+
return `${filePath}:${name}`;
|
|
19
|
+
}
|
|
20
|
+
|
|
21
|
+
/** Check if a name starts with uppercase (React component convention) */
|
|
22
|
+
export function isPascalCase(name: string): boolean {
|
|
23
|
+
return /^[A-Z]/.test(name);
|
|
24
|
+
}
|
|
25
|
+
|
|
26
|
+
/** Check if a name matches hook convention: use + uppercase char */
|
|
27
|
+
export function isHookName(name: string): boolean {
|
|
28
|
+
return /^use[A-Z]/.test(name);
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
/** Walk all descendants of a tree-sitter node */
|
|
32
|
+
export function* walkTree(node: Parser.SyntaxNode): Generator<Parser.SyntaxNode> {
|
|
33
|
+
yield node;
|
|
34
|
+
for (let i = 0; i < node.childCount; i++) {
|
|
35
|
+
yield* walkTree(node.child(i)!);
|
|
36
|
+
}
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
/** Find all descendants matching a type */
|
|
40
|
+
export function findAll(node: Parser.SyntaxNode, type: string): Parser.SyntaxNode[] {
|
|
41
|
+
const results: Parser.SyntaxNode[] = [];
|
|
42
|
+
for (const n of walkTree(node)) {
|
|
43
|
+
if (n.type === type) results.push(n);
|
|
44
|
+
}
|
|
45
|
+
return results;
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
/** Find first descendant matching a type */
|
|
49
|
+
export function findFirst(node: Parser.SyntaxNode, type: string): Parser.SyntaxNode | null {
|
|
50
|
+
for (const n of walkTree(node)) {
|
|
51
|
+
if (n.type === type) return n;
|
|
52
|
+
}
|
|
53
|
+
return null;
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
/** Walk up to find the nearest enclosing function/arrow */
|
|
57
|
+
export function findEnclosingFunction(node: Parser.SyntaxNode): Parser.SyntaxNode | null {
|
|
58
|
+
let current = node.parent;
|
|
59
|
+
while (current) {
|
|
60
|
+
if (
|
|
61
|
+
current.type === 'function_declaration' ||
|
|
62
|
+
current.type === 'arrow_function' ||
|
|
63
|
+
current.type === 'function_expression' ||
|
|
64
|
+
current.type === 'method_definition'
|
|
65
|
+
) {
|
|
66
|
+
return current;
|
|
67
|
+
}
|
|
68
|
+
current = current.parent;
|
|
69
|
+
}
|
|
70
|
+
return null;
|
|
71
|
+
}
|
|
@@ -0,0 +1,197 @@
|
|
|
1
|
+
import { readFile, writeFile, mkdir } from 'node:fs/promises';
|
|
2
|
+
import { createHash } from 'node:crypto';
|
|
3
|
+
import { join } from 'node:path';
|
|
4
|
+
import { ReactGraph } from '../graph/graph.js';
|
|
5
|
+
import { saveGraph, loadGraph } from '../graph/serialize.js';
|
|
6
|
+
import { walkProject } from './walker.js';
|
|
7
|
+
import { processFile } from './pipeline.js';
|
|
8
|
+
import type { GraphNode } from '../graph/schema.js';
|
|
9
|
+
|
|
10
|
+
/** Summary of one indexing run, returned by indexProject alongside the graph. */
export interface IndexStats {
  totalFiles: number;   // files discovered by the project walker
  parsedFiles: number;  // files (re-)processed this run (includes files whose parse failed — see indexProject)
  skippedFiles: number; // files reused from the cache because their content hash was unchanged
  nodes: number;        // total nodes in the resulting graph
  edges: number;        // total edges in the resulting graph
  durationMs: number;   // wall-clock duration of the run
}

// Persisted cache: project-relative file path -> short content hash
// (see computeHash; stored in .reactgraph/file-hashes.json).
interface FileHashes {
  [relativePath: string]: string;
}
|
|
22
|
+
|
|
23
|
+
/**
 * Index a React project into a ReactGraph, with incremental caching.
 *
 * Flow:
 *   1. Walk the project for source files.
 *   2. Pass 1: for each file, reuse cached nodes/edges when its content hash
 *      matches the previous run; otherwise parse it via processFile.
 *   3. Pass 2: re-process files parsed before cross-file targets (stores,
 *      contexts, hooks, components from later files) were known.
 *   4. Resolve remaining `unresolved:` edge placeholders, then persist the
 *      graph and the new hash cache.
 *
 * @param projectDir Absolute (or cwd-relative) root of the project to index.
 * @param exclude    Glob-like patterns forwarded to the walker.
 * @param onProgress Optional callback invoked once per file with (file, current, total).
 * @returns The populated graph plus run statistics.
 */
export async function indexProject(
  projectDir: string,
  exclude: string[] = [],
  onProgress?: (file: string, current: number, total: number) => void,
): Promise<{ graph: ReactGraph; stats: IndexStats }> {
  const start = Date.now();

  const files = await walkProject(projectDir, exclude);

  // Previous run's artifacts; both degrade gracefully to "cold start".
  const oldHashes = await loadHashes(projectDir);
  const existingGraph = await loadGraph(projectDir);
  const newHashes: FileHashes = {};

  // --- Pass 1: Extract all nodes + intra-file edges ---
  // Accumulate globalNodes so later files can see stores/contexts from earlier ones
  const globalNodes: GraphNode[] = [];
  const graph = new ReactGraph();
  let parsedFiles = 0;
  let skippedFiles = 0;
  const parsedFilePaths: string[] = []; // track which files we actually parsed

  for (let i = 0; i < files.length; i++) {
    const { absolutePath, relativePath } = files[i]!;

    onProgress?.(relativePath, i + 1, files.length);

    const content = await readFile(absolutePath, 'utf-8');
    const hash = computeHash(content);
    newHashes[relativePath] = hash;

    // Reuse from cache if unchanged: copy the file's nodes and every edge
    // touching them from the previously saved graph.
    if (oldHashes[relativePath] === hash && existingGraph) {
      const existingNodes = existingGraph.getNodesByFile(relativePath);
      for (const node of existingNodes) {
        graph.addNode(node);
        globalNodes.push(node);
      }
      const existingEdges = existingNodes.flatMap(n => [
        ...existingGraph.getEdgesFrom(n.id),
        ...existingGraph.getEdgesTo(n.id),
      ]);
      // Dedupe: an intra-file edge appears in both getEdgesFrom and getEdgesTo.
      const seen = new Set<string>();
      for (const edge of existingEdges) {
        const key = `${edge.source}|${edge.target}|${edge.kind}`;
        if (!seen.has(key)) {
          seen.add(key);
          graph.addEdge(edge);
        }
      }
      skippedFiles++;
      continue;
    }

    try {
      const result = await processFile(relativePath, projectDir, globalNodes);
      for (const node of result.nodes) {
        graph.addNode(node);
        globalNodes.push(node);
      }
      for (const edge of result.edges) graph.addEdge(edge);
      parsedFiles++;
      parsedFilePaths.push(relativePath);
    } catch (err) {
      console.error(`Warning: failed to parse ${relativePath}: ${err}`);
      // NOTE(review): failed files are counted as parsed (but not added to
      // parsedFilePaths), so IndexStats.parsedFiles includes failures — confirm intended.
      parsedFiles++;
    }
  }

  // --- Pass 2: Re-process early files that missed cross-file references ---
  // Files processed before stores/contexts were discovered may lack edges.
  // Re-run only files that were parsed in pass 1 AND processed before a
  // store/context/hook was discovered in a later file.
  const crossFileNodeKinds = new Set(['Store', 'Context', 'Hook', 'Component']);
  const allCrossFileNodes = globalNodes.filter(n => crossFileNodeKinds.has(n.kind));

  if (allCrossFileNodes.length > 0 && parsedFilePaths.length > 1) {
    // Find files that were processed before cross-file targets existed
    const crossFileNodeFiles = new Set(allCrossFileNodes.map(n => n.file));

    for (const filePath of parsedFilePaths) {
      // Skip files that define the cross-file nodes themselves
      if (crossFileNodeFiles.has(filePath)) continue;

      // Re-process with full global node visibility
      try {
        // Remove old edges from this file's nodes
        // NOTE(review): globalNodes still holds this file's pass-1 node
        // objects after removal; presumably processFile regenerates nodes
        // with identical IDs so re-adding is harmless — verify.
        graph.removeNodesByFile(filePath);

        const result = await processFile(filePath, projectDir, globalNodes);
        for (const node of result.nodes) graph.addNode(node);
        for (const edge of result.edges) graph.addEdge(edge);
      } catch {
        // Already processed in pass 1, skip errors
      }
    }
  }

  // Resolution pass: replace remaining unresolved targets
  resolveUnresolvedEdges(graph);

  // Persist graph and hash cache for the next incremental run.
  await saveGraph(graph, projectDir);
  await saveHashes(projectDir, newHashes);

  const stats: IndexStats = {
    totalFiles: files.length,
    parsedFiles,
    skippedFiles,
    nodes: graph.stats().totalNodes,
    edges: graph.stats().totalEdges,
    durationMs: Date.now() - start,
  };

  return { graph, stats };
}
|
|
137
|
+
|
|
138
|
+
function computeHash(content: string): string {
|
|
139
|
+
return createHash('sha256').update(content).digest('hex').slice(0, 16);
|
|
140
|
+
}
|
|
141
|
+
|
|
142
|
+
async function loadHashes(projectDir: string): Promise<FileHashes> {
|
|
143
|
+
try {
|
|
144
|
+
const raw = await readFile(join(projectDir, '.reactgraph', 'file-hashes.json'), 'utf-8');
|
|
145
|
+
return JSON.parse(raw);
|
|
146
|
+
} catch {
|
|
147
|
+
return {};
|
|
148
|
+
}
|
|
149
|
+
}
|
|
150
|
+
|
|
151
|
+
async function saveHashes(projectDir: string, hashes: FileHashes): Promise<void> {
|
|
152
|
+
const dir = join(projectDir, '.reactgraph');
|
|
153
|
+
await mkdir(dir, { recursive: true });
|
|
154
|
+
await writeFile(join(dir, 'file-hashes.json'), JSON.stringify(hashes, null, 2));
|
|
155
|
+
}
|
|
156
|
+
|
|
157
|
+
/**
|
|
158
|
+
* Replace unresolved edge targets with actual node IDs.
|
|
159
|
+
*/
|
|
160
|
+
function resolveUnresolvedEdges(graph: ReactGraph): void {
|
|
161
|
+
const allNodes = graph.getAllNodes();
|
|
162
|
+
const nameToId = new Map<string, string>();
|
|
163
|
+
|
|
164
|
+
for (const node of allNodes) {
|
|
165
|
+
if (node.kind === 'Component' || node.kind === 'Hook' || node.kind === 'Store' || node.kind === 'Context') {
|
|
166
|
+
if (!nameToId.has(node.name)) {
|
|
167
|
+
nameToId.set(node.name, node.id);
|
|
168
|
+
}
|
|
169
|
+
}
|
|
170
|
+
}
|
|
171
|
+
|
|
172
|
+
const oldEdges = graph.getAllEdges();
|
|
173
|
+
const resolvedEdges = oldEdges.map(edge => {
|
|
174
|
+
let { source, target } = edge;
|
|
175
|
+
let changed = false;
|
|
176
|
+
|
|
177
|
+
if (source.startsWith('unresolved:')) {
|
|
178
|
+
const name = source.slice(11);
|
|
179
|
+
const resolved = nameToId.get(name);
|
|
180
|
+
if (resolved) { source = resolved; changed = true; }
|
|
181
|
+
}
|
|
182
|
+
|
|
183
|
+
if (target.startsWith('unresolved:')) {
|
|
184
|
+
const name = target.slice(11);
|
|
185
|
+
const resolved = nameToId.get(name);
|
|
186
|
+
if (resolved) { target = resolved; changed = true; }
|
|
187
|
+
}
|
|
188
|
+
|
|
189
|
+
return changed ? { ...edge, source, target } : edge;
|
|
190
|
+
});
|
|
191
|
+
|
|
192
|
+
if (resolvedEdges.some((e, i) => e !== oldEdges[i])) {
|
|
193
|
+
graph.clear();
|
|
194
|
+
for (const node of allNodes) graph.addNode(node);
|
|
195
|
+
for (const edge of resolvedEdges) graph.addEdge(edge);
|
|
196
|
+
}
|
|
197
|
+
}
|