cortexcode 0.9.1__tar.gz → 0.10.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {cortexcode-0.9.1 → cortexcode-0.10.0}/PKG-INFO +1 -1
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/diagrams/architecture.py +15 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/indexer.py +18 -3
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/indexing/build.py +10 -3
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/indexing/defaults.py +1 -1
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/indexing/filtering.py +1 -1
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/indexing/imports_exports.py +7 -0
- cortexcode-0.10.0/cortexcode/indexing/nx_projects.py +228 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/indexing/pipeline.py +1 -1
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/indexing/profile.py +48 -3
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/indexing/resolution.py +75 -10
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/performance/performance_config.py +20 -0
- cortexcode-0.10.0/cortexcode/reports/markdown/structure.py +46 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/reports/markdown/tech.py +12 -1
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode.egg-info/PKG-INFO +1 -1
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode.egg-info/SOURCES.txt +5 -1
- {cortexcode-0.9.1 → cortexcode-0.10.0}/pyproject.toml +1 -1
- cortexcode-0.10.0/tests/test_nx_indexer.py +75 -0
- cortexcode-0.10.0/tests/test_nx_projects.py +178 -0
- cortexcode-0.10.0/tests/test_nx_resolution.py +110 -0
- cortexcode-0.9.1/cortexcode/reports/markdown/structure.py +0 -19
- {cortexcode-0.9.1 → cortexcode-0.10.0}/LICENSE +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/README.md +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/__init__.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/advanced_analysis/__init__.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/advanced_analysis/advanced_analysis.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/advanced_analysis/advanced_analysis_cycles.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/advanced_analysis/advanced_analysis_docs.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/advanced_analysis/advanced_analysis_duplicates.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/advanced_analysis/advanced_analysis_endpoints.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/advanced_analysis/advanced_analysis_search.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/advanced_analysis/advanced_analysis_security.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/advanced_analysis.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/ai_docs/__init__.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/ai_docs/config.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/ai_docs/doc_cache.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/ai_docs/doc_generator.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/ai_docs/doc_lookup.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/ai_docs/doc_models.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/ai_docs/explainer.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/ai_docs/llm_client.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/ai_docs/page_generator.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/ai_docs/prompts.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/ai_docs/report_runner.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/analysis/__init__.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/analysis/analysis_complexity.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/analysis/analysis_dead_code.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/analysis/analysis_impact.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/analysis.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/cli/__init__.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/cli/cli_ai_docs.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/cli/cli_bundle.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/cli/cli_complexity.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/cli/cli_config.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/cli/cli_context.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/cli/cli_dashboard.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/cli/cli_dead_code.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/cli/cli_diagrams.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/cli/cli_diff.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/cli/cli_docs.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/cli/cli_explain.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/cli/cli_find.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/cli/cli_githook.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/cli/cli_impact.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/cli/cli_index.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/cli/cli_jobs.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/cli/cli_package.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/cli/cli_report.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/cli/cli_scan.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/cli/cli_search.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/cli/cli_servers.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/cli/cli_shell.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/cli/cli_stats.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/cli/cli_support.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/cli/cli_trace.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/cli/cli_watch.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/cli/cli_wiki.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/cli/cli_workspace.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/config.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/context/__init__.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/context/context_format.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/context/context_query.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/context/context_tokens.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/context.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/dashboard.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/diagrams/__init__.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/diagrams/call_graph.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/diagrams/class_diagram.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/diagrams/dependencies.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/diagrams/directory_tree.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/diagrams/entities.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/diagrams/file_tree.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/diagrams/imports.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/diagrams/save.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/diagrams/sequence.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/diagrams/state.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/diagrams/utils.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/docs/__init__.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/docs/diagrams.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/docs/generator.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/docs/html_generators.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/docs/javascript.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/docs/javascript_sections.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/docs/templates.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/docs.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/git_diff.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/indexing/__init__.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/indexing/calls.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/indexing/config.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/indexing/dispatch.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/indexing/entities.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/indexing/extensions.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/indexing/extractor_mixin.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/indexing/extractors/__init__.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/indexing/extractors/csharp.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/indexing/extractors/dart.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/indexing/extractors/generic.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/indexing/extractors/java.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/indexing/extractors/javascript.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/indexing/extractors/kotlin.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/indexing/extractors/swift.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/indexing/frameworks.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/indexing/gitignore.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/indexing/incremental.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/indexing/languages.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/indexing/metadata.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/indexing/nodes.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/indexing/output.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/indexing/params.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/indexing/parsers.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/indexing/routes.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/indexing/session.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/indexing/storage.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/indexing/walk.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/knowledge/__init__.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/knowledge/build.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/knowledge/citations.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/knowledge/concepts.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/knowledge/models.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/knowledge/snippets.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/knowledge/usage.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/lsp_server.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/main.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/mcp/__init__.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/mcp/mcp_protocol.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/mcp/mcp_registry.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/mcp/mcp_server.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/mcp/mcp_tool_handlers.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/mcp/mcp_transport.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/performance/__init__.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/performance/performance_index_storage.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/performance/performance_preview.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/performance.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/plugins.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/reports/__init__.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/reports/html/__init__.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/reports/html/dashboard.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/reports/html/dashboard_fragments.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/reports/html/view_model.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/reports/markdown/__init__.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/reports/markdown/api.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/reports/markdown/flows.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/reports/markdown/insights.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/reports/markdown/readme.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/reports/site/__init__.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/reports/site/generator.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/reports/site/viz.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/semantic_search.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/terminal/__init__.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/terminal/analysis.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/terminal/completion.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/terminal/headers.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/terminal/prompts.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/terminal/reports.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/terminal/stats.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/vuln_scan.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/watcher.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode/workspace.py +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode.egg-info/dependency_links.txt +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode.egg-info/entry_points.txt +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode.egg-info/requires.txt +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/cortexcode.egg-info/top_level.txt +0 -0
- {cortexcode-0.9.1 → cortexcode-0.10.0}/setup.cfg +0 -0
|
@@ -35,6 +35,21 @@ def generate_architecture_diagram(index_data: dict[str, Any]) -> str:
|
|
|
35
35
|
lines.append(f" {framework_id}[\"{framework.get('name', 'unknown')} ({framework.get('count', 0)})\"]")
|
|
36
36
|
lines.append(" end")
|
|
37
37
|
|
|
38
|
+
# Nx project graph overlay
|
|
39
|
+
nx_graph = project_profile.get("nx_project_graph", {})
|
|
40
|
+
nx_projects = project_profile.get("nx_projects", [])
|
|
41
|
+
if nx_graph and nx_projects:
|
|
42
|
+
lines.append(" subgraph nx_workspace [Nx Workspace]")
|
|
43
|
+
for proj_name in nx_projects:
|
|
44
|
+
proj_id = sanitize_id(f"nx_{proj_name}")
|
|
45
|
+
lines.append(f" {proj_id}[\"{proj_name}\"]")
|
|
46
|
+
lines.append(" end")
|
|
47
|
+
for proj_name, deps in nx_graph.items():
|
|
48
|
+
proj_id = sanitize_id(f"nx_{proj_name}")
|
|
49
|
+
for dep in deps[:10]:
|
|
50
|
+
dep_id = sanitize_id(f"nx_{dep}")
|
|
51
|
+
lines.append(f" {proj_id} --> {dep_id}")
|
|
52
|
+
|
|
38
53
|
return "\n".join(lines)
|
|
39
54
|
|
|
40
55
|
files = index_data.get("files", {})
|
|
@@ -27,6 +27,7 @@ from cortexcode.indexing.resolution import (
|
|
|
27
27
|
build_file_dependencies,
|
|
28
28
|
build_type_map,
|
|
29
29
|
)
|
|
30
|
+
from cortexcode.indexing.nx_projects import parse_nx_workspace, build_nx_project_graph
|
|
30
31
|
from cortexcode.plugins import plugin_registry
|
|
31
32
|
|
|
32
33
|
|
|
@@ -223,18 +224,32 @@ class CodeIndexer(IndexerExtractorMixin):
|
|
|
223
224
|
|
|
224
225
|
def _build_index(self, root: Path) -> dict[str, Any]:
|
|
225
226
|
"""Build the final index structure."""
|
|
227
|
+
nx_workspace = parse_nx_workspace(root)
|
|
228
|
+
tsconfig_paths = nx_workspace.get("tsconfig_paths", {}) if nx_workspace else {}
|
|
229
|
+
|
|
230
|
+
def _build_file_deps(ts_paths=None):
|
|
231
|
+
return build_file_dependencies(self.file_symbols, ts_paths)
|
|
232
|
+
|
|
233
|
+
def _build_type_map(ts_paths=None):
|
|
234
|
+
return build_type_map(self.file_symbols, ts_paths)
|
|
235
|
+
|
|
236
|
+
def _build_project_profile(_root, file_deps, nx=None):
|
|
237
|
+
return build_project_profile(self.file_symbols, self.call_graph, file_deps, nx)
|
|
238
|
+
|
|
226
239
|
result = build_index_result(
|
|
227
240
|
root=root,
|
|
228
241
|
file_symbols=self.file_symbols,
|
|
229
242
|
call_graph=self.call_graph,
|
|
230
243
|
timestamp=timestamp_now(),
|
|
231
244
|
file_hashes=compute_hashes(root, self.file_symbols),
|
|
232
|
-
build_file_dependencies_fn=lambda:
|
|
233
|
-
build_type_map_fn=lambda:
|
|
234
|
-
build_project_profile_fn=lambda _root, file_deps:
|
|
245
|
+
build_file_dependencies_fn=lambda ts_paths=tsconfig_paths: _build_file_deps(ts_paths),
|
|
246
|
+
build_type_map_fn=lambda ts_paths=tsconfig_paths: _build_type_map(ts_paths),
|
|
247
|
+
build_project_profile_fn=lambda _root, file_deps, nx=nx_workspace: _build_project_profile(_root, file_deps, nx),
|
|
235
248
|
language_map=LANGUAGE_MAP,
|
|
236
249
|
regex_languages=REGEX_LANGUAGES,
|
|
237
250
|
plugin_registry=plugin_registry,
|
|
251
|
+
nx_workspace=nx_workspace,
|
|
252
|
+
tsconfig_paths=tsconfig_paths,
|
|
238
253
|
)
|
|
239
254
|
# Add source code for context retrieval
|
|
240
255
|
result["source_code"] = self.source_code
|
|
@@ -18,6 +18,8 @@ def build_index_result(
|
|
|
18
18
|
language_map: LanguageMap,
|
|
19
19
|
regex_languages: RegexLanguages,
|
|
20
20
|
plugin_registry,
|
|
21
|
+
nx_workspace: dict[str, Any] | None = None,
|
|
22
|
+
tsconfig_paths: dict[str, str] | None = None,
|
|
21
23
|
) -> dict[str, Any]:
|
|
22
24
|
languages = set()
|
|
23
25
|
for file_path in file_symbols.keys():
|
|
@@ -30,9 +32,9 @@ def build_index_result(
|
|
|
30
32
|
else:
|
|
31
33
|
languages.add(ext.lstrip("."))
|
|
32
34
|
|
|
33
|
-
file_deps = build_file_dependencies_fn()
|
|
34
|
-
type_map = build_type_map_fn()
|
|
35
|
-
project_profile = build_project_profile_fn(root, file_deps)
|
|
35
|
+
file_deps = build_file_dependencies_fn(tsconfig_paths)
|
|
36
|
+
type_map = build_type_map_fn(tsconfig_paths)
|
|
37
|
+
project_profile = build_project_profile_fn(root, file_deps, nx_workspace)
|
|
36
38
|
|
|
37
39
|
result = {
|
|
38
40
|
"project_root": str(root),
|
|
@@ -48,4 +50,9 @@ def build_index_result(
|
|
|
48
50
|
if type_map:
|
|
49
51
|
result["type_map"] = type_map
|
|
50
52
|
|
|
53
|
+
if nx_workspace:
|
|
54
|
+
result["nx_workspace"] = nx_workspace
|
|
55
|
+
if tsconfig_paths:
|
|
56
|
+
result["nx_workspace"]["tsconfig_paths"] = tsconfig_paths
|
|
57
|
+
|
|
51
58
|
return plugin_registry.run_post_processors(result)
|
|
@@ -4,7 +4,7 @@ DEFAULT_IGNORE_PATTERNS = {
|
|
|
4
4
|
"*.egg-info", ".eggs", "*.pyc", "*.pyo",
|
|
5
5
|
"node_modules", ".npm", ".yarn", ".pnpm-store", "bower_components",
|
|
6
6
|
"dist", "build", "out", "output", "target", "bin", "obj",
|
|
7
|
-
".build", "_build", "public/build",
|
|
7
|
+
".build", "_build", "public/build",
|
|
8
8
|
".next", ".nuxt", ".svelte-kit", ".angular", ".turbo",
|
|
9
9
|
".parcel-cache", ".webpack", ".rollup.cache", ".vite",
|
|
10
10
|
".expo", ".gradle", "Pods", "DerivedData",
|
|
@@ -22,7 +22,7 @@ def should_ignore_file(
|
|
|
22
22
|
rel_posix = rel_path.as_posix()
|
|
23
23
|
|
|
24
24
|
for pattern in default_ignore_patterns:
|
|
25
|
-
if pattern in
|
|
25
|
+
if pattern in rel_str or pattern in rel_posix:
|
|
26
26
|
return True
|
|
27
27
|
|
|
28
28
|
for pattern in exclude_patterns:
|
|
@@ -139,6 +139,13 @@ def find_js_exports(node: Any, exports: list[dict[str, Any]], get_node_name: Get
|
|
|
139
139
|
name = export_clause_child.child_by_field_name("name")
|
|
140
140
|
if name:
|
|
141
141
|
exports.append({"name": name.text.decode("utf-8"), "type": "named"})
|
|
142
|
+
elif child.type == "export_clause":
|
|
143
|
+
# Barrel re-export: export { Button } from './lib/button'
|
|
144
|
+
for export_clause_child in child.children:
|
|
145
|
+
if export_clause_child.type == "export_specifier":
|
|
146
|
+
name = export_clause_child.child_by_field_name("name")
|
|
147
|
+
if name:
|
|
148
|
+
exports.append({"name": name.text.decode("utf-8"), "type": "re-export"})
|
|
142
149
|
elif child.type == "variable_declaration":
|
|
143
150
|
for declaration_child in child.children:
|
|
144
151
|
if declaration_child.type == "variable_declarator":
|
|
@@ -0,0 +1,228 @@
|
|
|
1
|
+
"""Nx monorepo workspace parsing and project graph extraction.
|
|
2
|
+
|
|
3
|
+
Handles modern Nx workspaces (v15+) where project configuration lives in
|
|
4
|
+
individual project.json files rather than a centralized workspace.json.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
|
|
9
|
+
import json
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
from typing import Any
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def _safe_json(path: Path) -> dict[str, Any] | None:
    """Read *path* as UTF-8 JSON, returning None on I/O or parse failure."""
    try:
        raw = path.read_text(encoding="utf-8")
    except OSError:
        return None
    try:
        return json.loads(raw)
    except json.JSONDecodeError:
        return None
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def _find_project_jsons(root: Path) -> list[Path]:
    """Find all project.json files under apps/, libs/ and packages/.

    Skips anything under a ``node_modules`` directory: installed third-party
    packages can ship their own project.json files that do not belong to
    this workspace. Results are sorted for deterministic ordering.
    """
    project_jsons: list[Path] = []
    for candidate_dir in ("apps", "libs", "packages"):
        candidate = root / candidate_dir
        if not candidate.is_dir():
            continue
        for sub in candidate.rglob("project.json"):
            # Ignore vendored/installed packages.
            if "node_modules" in sub.parts:
                continue
            project_jsons.append(sub)
    return sorted(project_jsons)
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
def parse_nx_workspace(root: Path) -> dict[str, Any] | None:
    """Parse an Nx workspace and return project graph + path mappings.

    Returns None if the directory does not look like an Nx workspace
    (no nx.json, or no project.json files found).
    """
    workspace_root = Path(root).resolve()
    nx_json_path = workspace_root / "nx.json"
    if not nx_json_path.exists():
        return None

    nx_config = _safe_json(nx_json_path) or {}

    # Gather per-project configuration from individual project.json files.
    projects: dict[str, dict[str, Any]] = {}
    for pj_path in _find_project_jsons(workspace_root):
        config = _safe_json(pj_path)
        if not config:
            continue
        rel_dir = str(pj_path.parent.relative_to(workspace_root))
        project_name = config.get("name", pj_path.parent.name)
        projects[project_name] = {
            "name": project_name,
            "projectType": config.get("projectType", "library"),
            "sourceRoot": config.get("sourceRoot", rel_dir),
            "tags": config.get("tags", []),
            "implicitDependencies": config.get("implicitDependencies", []),
            "root": rel_dir.replace("\\", "/"),
            "targets": config.get("targets", {}),
            "project_json": str(pj_path.relative_to(workspace_root)).replace("\\", "/"),
        }

    if not projects:
        return None

    return {
        "nx_version": nx_config.get("nxVersion", nx_config.get("installationVersion")),
        "projects": projects,
        # tsconfig.base.json path aliases, used for cross-project import resolution.
        "tsconfig_paths": _parse_tsconfig_paths(workspace_root),
        "namedInputs": nx_config.get("namedInputs", {}),
        "targetDefaults": nx_config.get("targetDefaults", {}),
    }
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
def _parse_tsconfig_paths(root: Path) -> dict[str, str]:
    """Parse tsconfig.base.json (or tsconfig.json) compilerOptions.paths.

    Returns a mapping of alias -> relative directory path.
    """
    index_suffixes = ("/index.ts", "/index.tsx", "/index.js")
    for tsconfig_name in ("tsconfig.base.json", "tsconfig.json"):
        # _safe_json returns None for a missing or unparsable file,
        # in which case we fall through to the next candidate name.
        config = _safe_json(root / tsconfig_name)
        if not config:
            continue
        compiler_paths = config.get("compilerOptions", {}).get("paths", {})
        mapping: dict[str, str] = {}
        for alias, candidates in compiler_paths.items():
            if not candidates:
                continue
            # Use the first target only; normalize separators and drop a
            # trailing /index.* entry file so the alias points at a directory.
            target = candidates[0].rstrip("/").replace("\\", "/")
            if target.endswith(index_suffixes):
                target = str(Path(target).parent).replace("\\", "/")
            mapping[alias] = target
        return mapping
    return {}
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
def nx_framework_from_executor(targets: dict[str, Any]) -> str | None:
|
|
105
|
+
"""Infer framework from Nx target executors / generators."""
|
|
106
|
+
for target_name, target in targets.items():
|
|
107
|
+
executor = target.get("executor", "")
|
|
108
|
+
if not executor:
|
|
109
|
+
continue
|
|
110
|
+
|
|
111
|
+
if "@nx/angular" in executor or "@angular-devkit" in executor:
|
|
112
|
+
return "angular"
|
|
113
|
+
if "@nx/react" in executor or "@nx/webpack" in executor:
|
|
114
|
+
return "react"
|
|
115
|
+
if "@nx/next" in executor:
|
|
116
|
+
return "nextjs"
|
|
117
|
+
if "@nx/vue" in executor or "@nx/vite" in executor and "vue" in executor.lower():
|
|
118
|
+
return "vue"
|
|
119
|
+
if "@nx/nuxt" in executor:
|
|
120
|
+
return "nuxt"
|
|
121
|
+
if "@nx/expo" in executor:
|
|
122
|
+
return "expo"
|
|
123
|
+
if "@nx/react-native" in executor:
|
|
124
|
+
return "react-native"
|
|
125
|
+
if "@nx/nest" in executor or "@nestjs" in executor:
|
|
126
|
+
return "nestjs"
|
|
127
|
+
if "@nx/node" in executor or "@nx/express" in executor:
|
|
128
|
+
return "nodejs"
|
|
129
|
+
if "@nx/plugin" in executor:
|
|
130
|
+
return "nx-plugin"
|
|
131
|
+
|
|
132
|
+
return None
|
|
133
|
+
|
|
134
|
+
|
|
135
|
+
def build_nx_project_graph(
|
|
136
|
+
workspace: dict[str, Any],
|
|
137
|
+
file_dependencies: dict[str, list[str]] | None = None,
|
|
138
|
+
) -> dict[str, list[str]]:
|
|
139
|
+
"""Build adjacency list of project -> dependent project names.
|
|
140
|
+
|
|
141
|
+
Uses both implicitDependencies from project.json and file-level imports
|
|
142
|
+
(when file_dependencies is provided) to derive cross-project edges.
|
|
143
|
+
"""
|
|
144
|
+
projects = workspace.get("projects", {})
|
|
145
|
+
tsconfig_paths = workspace.get("tsconfig_paths", {})
|
|
146
|
+
|
|
147
|
+
# Build reverse lookups
|
|
148
|
+
alias_to_project: dict[str, str] = {}
|
|
149
|
+
for name, proj in projects.items():
|
|
150
|
+
for alias, target in tsconfig_paths.items():
|
|
151
|
+
if target.startswith(proj["root"] + "/") or target == proj["root"]:
|
|
152
|
+
alias_to_project[alias] = name
|
|
153
|
+
|
|
154
|
+
# Map file path -> project name
|
|
155
|
+
file_to_project: dict[str, str] = {}
|
|
156
|
+
for name, proj in projects.items():
|
|
157
|
+
root = proj["root"]
|
|
158
|
+
for file_path in (file_dependencies or {}).keys():
|
|
159
|
+
if file_path.startswith(root + "/") or file_path == root:
|
|
160
|
+
file_to_project[file_path] = name
|
|
161
|
+
|
|
162
|
+
graph: dict[str, set[str]] = {name: set() for name in projects}
|
|
163
|
+
|
|
164
|
+
# Implicit dependencies from project.json
|
|
165
|
+
for name, proj in projects.items():
|
|
166
|
+
for dep in proj.get("implicitDependencies", []):
|
|
167
|
+
if dep in projects:
|
|
168
|
+
graph[name].add(dep)
|
|
169
|
+
|
|
170
|
+
# Derive from file dependencies: if a file in project A imports a file in project B
|
|
171
|
+
if file_dependencies:
|
|
172
|
+
for source_file, target_files in file_dependencies.items():
|
|
173
|
+
source_proj = file_to_project.get(source_file)
|
|
174
|
+
if not source_proj:
|
|
175
|
+
continue
|
|
176
|
+
for target_file in target_files:
|
|
177
|
+
target_proj = file_to_project.get(target_file)
|
|
178
|
+
if target_proj and target_proj != source_proj:
|
|
179
|
+
graph[source_proj].add(target_proj)
|
|
180
|
+
|
|
181
|
+
# Also derive from tsconfig alias usage in imports
|
|
182
|
+
# (file_dependencies already captures resolved files, so this is redundant
|
|
183
|
+
# but kept as a fallback if file_deps resolution missed something)
|
|
184
|
+
return {name: sorted(deps) for name, deps in graph.items()}
|
|
185
|
+
|
|
186
|
+
|
|
187
|
+
def detect_shell_app(workspace: dict[str, Any], project_graph: dict[str, list[str]] | None = None) -> str | None:
|
|
188
|
+
"""Identify the likely shell / root application in an Nx workspace.
|
|
189
|
+
|
|
190
|
+
Heuristics (in order of priority):
|
|
191
|
+
1. A project tagged "shell" or "host" or "root".
|
|
192
|
+
2. An application with the most outgoing project dependencies.
|
|
193
|
+
3. An application named "shell", "host", "app", or matching the repo name.
|
|
194
|
+
"""
|
|
195
|
+
projects = workspace.get("projects", {})
|
|
196
|
+
if not projects:
|
|
197
|
+
return None
|
|
198
|
+
|
|
199
|
+
apps = {n: p for n, p in projects.items() if p.get("projectType") == "application"}
|
|
200
|
+
if not apps:
|
|
201
|
+
return None
|
|
202
|
+
|
|
203
|
+
# 1. Tag-based detection
|
|
204
|
+
shell_tags = {"shell", "host", "root", "entry", "main"}
|
|
205
|
+
for name, proj in apps.items():
|
|
206
|
+
tags = {t.lower() for t in proj.get("tags", [])}
|
|
207
|
+
if tags & shell_tags:
|
|
208
|
+
return name
|
|
209
|
+
|
|
210
|
+
# 2. Most outgoing deps among apps
|
|
211
|
+
graph = project_graph or build_nx_project_graph(workspace)
|
|
212
|
+
if graph:
|
|
213
|
+
sorted_apps = sorted(
|
|
214
|
+
apps.keys(),
|
|
215
|
+
key=lambda n: len(graph.get(n, [])),
|
|
216
|
+
reverse=True,
|
|
217
|
+
)
|
|
218
|
+
if sorted_apps and graph.get(sorted_apps[0]):
|
|
219
|
+
return sorted_apps[0]
|
|
220
|
+
|
|
221
|
+
# 3. Name heuristic
|
|
222
|
+
for keyword in ("shell", "host", "app", "main", "root"):
|
|
223
|
+
for name in apps:
|
|
224
|
+
if keyword in name.lower():
|
|
225
|
+
return name
|
|
226
|
+
|
|
227
|
+
# Fallback: first application
|
|
228
|
+
return next(iter(apps))
|
|
@@ -33,7 +33,7 @@ def index_file(
|
|
|
33
33
|
except (UnicodeDecodeError, OSError):
|
|
34
34
|
return None
|
|
35
35
|
|
|
36
|
-
rel_path =
|
|
36
|
+
rel_path = file_path.relative_to(root).as_posix()
|
|
37
37
|
|
|
38
38
|
plugin_symbols = plugin_registry.extract_symbols(content, ext, rel_path)
|
|
39
39
|
if plugin_symbols is not None:
|
|
@@ -1,6 +1,8 @@
|
|
|
1
1
|
from pathlib import Path
|
|
2
2
|
from typing import Any
|
|
3
3
|
|
|
4
|
+
from .nx_projects import nx_framework_from_executor
|
|
5
|
+
|
|
4
6
|
|
|
5
7
|
FRONTEND_FRAMEWORKS = {"react", "react-native", "nextjs", "angular", "expo", "flutter", "swiftui", "uikit", "remix"}
|
|
6
8
|
|
|
@@ -42,7 +44,7 @@ def normalize_framework(framework: str | None) -> str | None:
|
|
|
42
44
|
return normalized
|
|
43
45
|
|
|
44
46
|
|
|
45
|
-
def infer_file_role(rel_path: str, file_data: dict[str, Any]) -> str:
|
|
47
|
+
def infer_file_role(rel_path: str, file_data: dict[str, Any], nx_workspace: dict[str, Any] | None = None) -> str:
|
|
46
48
|
normalized_path = rel_path.replace("\\", "/").lower()
|
|
47
49
|
file_name = Path(normalized_path).name
|
|
48
50
|
symbols = file_data.get("symbols", [])
|
|
@@ -54,6 +56,27 @@ def infer_file_role(rel_path: str, file_data: dict[str, Any]) -> str:
|
|
|
54
56
|
if isinstance(sym, dict) and sym.get("framework")
|
|
55
57
|
}
|
|
56
58
|
|
|
59
|
+
# Nx monorepo: infer role from project type (app vs lib) and path
|
|
60
|
+
if nx_workspace:
|
|
61
|
+
projects = nx_workspace.get("projects", {})
|
|
62
|
+
for proj in projects.values():
|
|
63
|
+
root = proj.get("root", "")
|
|
64
|
+
if root and normalized_path.startswith(root.lower() + "/"):
|
|
65
|
+
ptype = proj.get("projectType", "")
|
|
66
|
+
tags = [t.lower() for t in proj.get("tags", [])]
|
|
67
|
+
if ptype == "application":
|
|
68
|
+
return "app"
|
|
69
|
+
# Library roles from tags
|
|
70
|
+
if any(t in tags for t in ("ui", "frontend", "component")):
|
|
71
|
+
return "ui"
|
|
72
|
+
if any(t in tags for t in ("data", "model", "entity", "db")):
|
|
73
|
+
return "data"
|
|
74
|
+
if any(t in tags for t in ("api", "service", "controller", "route")):
|
|
75
|
+
return "api"
|
|
76
|
+
if any(t in tags for t in ("util", "shared", "helper", "infra")):
|
|
77
|
+
return "infra"
|
|
78
|
+
return "lib"
|
|
79
|
+
|
|
57
80
|
if file_name in ("cli.py", "manage.py") or any(segment in normalized_path for segment in ("/cli/", "/commands/")):
|
|
58
81
|
return "cli"
|
|
59
82
|
if routes or any(segment in normalized_path for segment in ("/api/", "/routes/", "/route/", "/controllers/", "/handlers/", "/endpoints/")):
|
|
@@ -137,6 +160,7 @@ def build_project_profile(
|
|
|
137
160
|
file_symbols: dict[str, Any],
|
|
138
161
|
call_graph: dict[str, list[str]],
|
|
139
162
|
file_deps: dict[str, list[str]],
|
|
163
|
+
nx_workspace: dict[str, Any] | None = None,
|
|
140
164
|
) -> dict[str, Any]:
|
|
141
165
|
framework_counts: dict[str, int] = {}
|
|
142
166
|
symbol_type_counts: dict[str, int] = {}
|
|
@@ -155,7 +179,7 @@ def build_project_profile(
|
|
|
155
179
|
symbols = file_data.get("symbols", [])
|
|
156
180
|
api_routes = file_data.get("api_routes", [])
|
|
157
181
|
entities = file_data.get("entities", [])
|
|
158
|
-
role = infer_file_role(rel_path, file_data)
|
|
182
|
+
role = infer_file_role(rel_path, file_data, nx_workspace)
|
|
159
183
|
role_by_file[rel_path] = role
|
|
160
184
|
|
|
161
185
|
bucket = layer_stats.setdefault(role, {"files": 0, "symbols": 0, "routes": 0, "entities": 0})
|
|
@@ -189,6 +213,15 @@ def build_project_profile(
|
|
|
189
213
|
|
|
190
214
|
entry_points.extend(infer_file_entry_points(rel_path, file_data, callers))
|
|
191
215
|
|
|
216
|
+
# Augment with Nx project-level framework detection from executors
|
|
217
|
+
if nx_workspace:
|
|
218
|
+
for project in nx_workspace.get("projects", {}).values():
|
|
219
|
+
fw = nx_framework_from_executor(project.get("targets", {}))
|
|
220
|
+
if fw:
|
|
221
|
+
nf = normalize_framework(fw)
|
|
222
|
+
if nf:
|
|
223
|
+
framework_counts[nf] = framework_counts.get(nf, 0) + 1
|
|
224
|
+
|
|
192
225
|
layer_dependencies: dict[tuple[str, str], int] = {}
|
|
193
226
|
for source_file, target_files in file_deps.items():
|
|
194
227
|
source_role = role_by_file.get(source_file, "core")
|
|
@@ -249,7 +282,7 @@ def build_project_profile(
|
|
|
249
282
|
|
|
250
283
|
recommendations = build_recommendations(frameworks, route_samples, entity_samples, layers)
|
|
251
284
|
|
|
252
|
-
|
|
285
|
+
profile = {
|
|
253
286
|
"frameworks": frameworks,
|
|
254
287
|
"symbol_types": [
|
|
255
288
|
{"name": symbol_type, "count": count}
|
|
@@ -269,3 +302,15 @@ def build_project_profile(
|
|
|
269
302
|
"entity_samples": entity_samples[:20],
|
|
270
303
|
"recommendations": recommendations,
|
|
271
304
|
}
|
|
305
|
+
|
|
306
|
+
# Add Nx project graph if available
|
|
307
|
+
if nx_workspace:
|
|
308
|
+
from cortexcode.indexing.nx_projects import build_nx_project_graph, detect_shell_app
|
|
309
|
+
nx_graph = build_nx_project_graph(nx_workspace, file_deps)
|
|
310
|
+
profile["nx_project_graph"] = nx_graph
|
|
311
|
+
profile["nx_projects"] = list(nx_workspace.get("projects", {}).keys())
|
|
312
|
+
shell = detect_shell_app(nx_workspace, nx_graph)
|
|
313
|
+
if shell:
|
|
314
|
+
profile["nx_shell_app"] = shell
|
|
315
|
+
|
|
316
|
+
return profile
|
|
@@ -33,6 +33,7 @@ def build_exports_by_file(file_symbols: dict[str, Any]) -> dict[str, dict[str, d
|
|
|
33
33
|
"defined_in": rel_path,
|
|
34
34
|
"type": exp.get("type", "export"),
|
|
35
35
|
"line": exp.get("line"),
|
|
36
|
+
"source": exp.get("source"),
|
|
36
37
|
}
|
|
37
38
|
|
|
38
39
|
exports_by_file[rel_path] = file_exports
|
|
@@ -74,7 +75,12 @@ def build_module_lookup(file_symbols: dict[str, Any]) -> dict[str, set[str]]:
|
|
|
74
75
|
return module_lookup
|
|
75
76
|
|
|
76
77
|
|
|
77
|
-
def candidate_module_keys(
|
|
78
|
+
def candidate_module_keys(
|
|
79
|
+
rel_path: str,
|
|
80
|
+
imp: dict[str, Any],
|
|
81
|
+
tsconfig_paths: dict[str, str] | None = None,
|
|
82
|
+
root_path: str | None = None,
|
|
83
|
+
) -> list[str]:
|
|
78
84
|
module = str(imp.get("module", "")).strip()
|
|
79
85
|
imported_names = [name for name in imp.get("imported", []) if name and name != "*"]
|
|
80
86
|
if not module:
|
|
@@ -86,7 +92,7 @@ def candidate_module_keys(rel_path: str, imp: dict[str, Any]) -> list[str]:
|
|
|
86
92
|
|
|
87
93
|
if module.startswith("."):
|
|
88
94
|
dot_prefix = len(module) - len(module.lstrip("."))
|
|
89
|
-
remainder = module[dot_prefix:]
|
|
95
|
+
remainder = module[dot_prefix:].lstrip("/")
|
|
90
96
|
base_dir = current_dir
|
|
91
97
|
for _ in range(max(dot_prefix - 1, 0)):
|
|
92
98
|
base_dir = base_dir.parent
|
|
@@ -99,6 +105,27 @@ def candidate_module_keys(rel_path: str, imp: dict[str, Any]) -> list[str]:
|
|
|
99
105
|
for imported_name in imported_names:
|
|
100
106
|
candidates.append(str(base_candidate / imported_name.replace(".", "/")))
|
|
101
107
|
else:
|
|
108
|
+
# Nx / tsconfig path mapping: @scope/lib -> libs/scope/lib
|
|
109
|
+
if tsconfig_paths and module in tsconfig_paths:
|
|
110
|
+
mapped = tsconfig_paths[module]
|
|
111
|
+
candidates.append(mapped)
|
|
112
|
+
for imported_name in imported_names:
|
|
113
|
+
candidates.append(f"{mapped}/{imported_name.replace('.', '/')}")
|
|
114
|
+
return candidates
|
|
115
|
+
# Partial prefix match for sub-path imports like @scope/lib/sub
|
|
116
|
+
if tsconfig_paths and "/" in module:
|
|
117
|
+
parts = module.split("/")
|
|
118
|
+
# Handle scoped packages: @scope/lib/sub -> prefix is @scope/lib
|
|
119
|
+
prefix = "/".join(parts[:2]) if parts[0].startswith("@") else parts[0]
|
|
120
|
+
if prefix in tsconfig_paths:
|
|
121
|
+
mapped = tsconfig_paths[prefix]
|
|
122
|
+
suffix = module[len(prefix):].lstrip("/")
|
|
123
|
+
if suffix:
|
|
124
|
+
candidates.append(f"{mapped}/{suffix.replace('.', '/')}")
|
|
125
|
+
else:
|
|
126
|
+
candidates.append(mapped)
|
|
127
|
+
return candidates
|
|
128
|
+
|
|
102
129
|
cleaned = module.replace("@/", "src/").replace("~/", "")
|
|
103
130
|
candidates.append(cleaned)
|
|
104
131
|
if "/" not in cleaned and "." in cleaned:
|
|
@@ -113,9 +140,10 @@ def resolve_import_to_files(
|
|
|
113
140
|
rel_path: str,
|
|
114
141
|
imp: dict[str, Any],
|
|
115
142
|
module_lookup: dict[str, set[str]],
|
|
143
|
+
tsconfig_paths: dict[str, str] | None = None,
|
|
116
144
|
) -> list[str]:
|
|
117
145
|
resolved_files = set()
|
|
118
|
-
for candidate in candidate_module_keys(rel_path, imp):
|
|
146
|
+
for candidate in candidate_module_keys(rel_path, imp, tsconfig_paths):
|
|
119
147
|
normalized_candidate = normalize_module_key(candidate)
|
|
120
148
|
if not normalized_candidate:
|
|
121
149
|
continue
|
|
@@ -131,7 +159,29 @@ def resolve_import_to_files(
|
|
|
131
159
|
return sorted(file_path for file_path in resolved_files if file_path != normalized_rel)
|
|
132
160
|
|
|
133
161
|
|
|
134
|
-
def
|
|
162
|
+
def _resolve_reexport_source(barrel_path: str, source: str, file_symbols: dict[str, Any]) -> str | None:
|
|
163
|
+
"""Resolve a re-export source path like './lib/button' to an actual file path."""
|
|
164
|
+
if not source:
|
|
165
|
+
return None
|
|
166
|
+
source = source.lstrip("./")
|
|
167
|
+
dir_parts = barrel_path.split("/")[:-1]
|
|
168
|
+
while source.startswith("../"):
|
|
169
|
+
source = source[3:]
|
|
170
|
+
if dir_parts:
|
|
171
|
+
dir_parts.pop()
|
|
172
|
+
base = "/".join(dir_parts) + "/" + source if dir_parts else source
|
|
173
|
+
candidates = [base]
|
|
174
|
+
for ext in (".ts", ".tsx", ".js", ".jsx"):
|
|
175
|
+
candidates.append(base + ext)
|
|
176
|
+
for idx in ("index.ts", "index.tsx", "index.js", "index.jsx"):
|
|
177
|
+
candidates.append(base + "/" + idx)
|
|
178
|
+
for candidate in candidates:
|
|
179
|
+
if candidate in file_symbols:
|
|
180
|
+
return candidate
|
|
181
|
+
return None
|
|
182
|
+
|
|
183
|
+
|
|
184
|
+
def build_type_map(file_symbols: dict[str, Any], tsconfig_paths: dict[str, str] | None = None) -> dict[str, dict[str, Any]]:
|
|
135
185
|
exports_by_file = build_exports_by_file(file_symbols)
|
|
136
186
|
module_lookup = build_module_lookup(file_symbols)
|
|
137
187
|
|
|
@@ -142,7 +192,7 @@ def build_type_map(file_symbols: dict[str, Any]) -> dict[str, dict[str, Any]]:
|
|
|
142
192
|
|
|
143
193
|
for imp in file_data.get("imports", []):
|
|
144
194
|
imported_names = imp.get("imported", [])
|
|
145
|
-
target_files = resolve_import_to_files(rel_path, imp, module_lookup)
|
|
195
|
+
target_files = resolve_import_to_files(rel_path, imp, module_lookup, tsconfig_paths)
|
|
146
196
|
if not target_files:
|
|
147
197
|
continue
|
|
148
198
|
|
|
@@ -158,19 +208,34 @@ def build_type_map(file_symbols: dict[str, Any]) -> dict[str, dict[str, Any]]:
|
|
|
158
208
|
defn = target_exports[imported_name]
|
|
159
209
|
if defn["defined_in"] == rel_path:
|
|
160
210
|
continue
|
|
211
|
+
# Trace re-exports through barrel files to actual definitions
|
|
212
|
+
traced = defn
|
|
213
|
+
depth = 0
|
|
214
|
+
while traced.get("type") == "re-export" and traced.get("source") and depth < 3:
|
|
215
|
+
resolved = _resolve_reexport_source(traced["defined_in"], traced["source"], file_symbols)
|
|
216
|
+
if not resolved:
|
|
217
|
+
break
|
|
218
|
+
next_exports = exports_by_file.get(resolved, {})
|
|
219
|
+
if imported_name not in next_exports:
|
|
220
|
+
break
|
|
221
|
+
traced = next_exports[imported_name]
|
|
222
|
+
depth += 1
|
|
161
223
|
key = f"{rel_path}:{imported_name}"
|
|
162
224
|
type_map[key] = {
|
|
163
225
|
"imported_in": rel_path,
|
|
164
226
|
"name": imported_name,
|
|
165
|
-
"defined_in":
|
|
166
|
-
"type":
|
|
167
|
-
"line":
|
|
227
|
+
"defined_in": traced["defined_in"],
|
|
228
|
+
"type": traced.get("type"),
|
|
229
|
+
"line": traced.get("line"),
|
|
168
230
|
}
|
|
169
231
|
|
|
170
232
|
return type_map
|
|
171
233
|
|
|
172
234
|
|
|
173
|
-
def build_file_dependencies(
|
|
235
|
+
def build_file_dependencies(
|
|
236
|
+
file_symbols: dict[str, Any],
|
|
237
|
+
tsconfig_paths: dict[str, str] | None = None,
|
|
238
|
+
) -> dict[str, list[str]]:
|
|
174
239
|
deps = {}
|
|
175
240
|
module_lookup = build_module_lookup(file_symbols)
|
|
176
241
|
|
|
@@ -183,7 +248,7 @@ def build_file_dependencies(file_symbols: dict[str, Any]) -> dict[str, list[str]
|
|
|
183
248
|
|
|
184
249
|
dep_files = set()
|
|
185
250
|
for imp in imports:
|
|
186
|
-
dep_files.update(resolve_import_to_files(rel_path, imp, module_lookup))
|
|
251
|
+
dep_files.update(resolve_import_to_files(rel_path, imp, module_lookup, tsconfig_paths))
|
|
187
252
|
|
|
188
253
|
if dep_files:
|
|
189
254
|
deps[rel_path] = sorted(dep_files)
|
|
@@ -34,6 +34,26 @@ def detect_monorepo(root_path: Path) -> Optional[Dict[str, Any]]:
|
|
|
34
34
|
|
|
35
35
|
nx_json = root_path / "nx.json"
|
|
36
36
|
if nx_json.exists():
|
|
37
|
+
from cortexcode.indexing.nx_projects import parse_nx_workspace
|
|
38
|
+
|
|
39
|
+
nx_workspace = parse_nx_workspace(root_path)
|
|
40
|
+
if nx_workspace:
|
|
41
|
+
projects = nx_workspace.get("projects", {})
|
|
42
|
+
if projects:
|
|
43
|
+
# Use actual source roots as include patterns
|
|
44
|
+
patterns = []
|
|
45
|
+
for proj in projects.values():
|
|
46
|
+
source_root = proj.get("sourceRoot", proj.get("root", ""))
|
|
47
|
+
if source_root:
|
|
48
|
+
patterns.append(f"{source_root}/**/*")
|
|
49
|
+
# Always include root-level configs (nx.json, tsconfig, package.json)
|
|
50
|
+
patterns.extend(["nx.json", "package.json", "tsconfig*.json"])
|
|
51
|
+
return {
|
|
52
|
+
"type": "nx",
|
|
53
|
+
"include_patterns": patterns,
|
|
54
|
+
"nx_workspace": nx_workspace,
|
|
55
|
+
}
|
|
56
|
+
# Fallback: old-style nx.json with projects array (pre-Nx v15)
|
|
37
57
|
try:
|
|
38
58
|
data = json.loads(nx_json.read_text(encoding="utf-8"))
|
|
39
59
|
projects = data.get("projects", [])
|