codebeacon 0.1.7__tar.gz → 0.2.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (107)
  1. {codebeacon-0.1.7 → codebeacon-0.2.0}/PKG-INFO +1 -1
  2. codebeacon-0.2.0/codebeacon/__init__.py +1 -0
  3. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/cli.py +12 -7
  4. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/export/obsidian.py +6 -0
  5. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/extract/base.py +1 -1
  6. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/graph/analyze.py +129 -35
  7. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/graph/enrich.py +95 -6
  8. {codebeacon-0.1.7 → codebeacon-0.2.0}/pyproject.toml +1 -1
  9. codebeacon-0.1.7/codebeacon/__init__.py +0 -1
  10. {codebeacon-0.1.7 → codebeacon-0.2.0}/.cursorrules +0 -0
  11. {codebeacon-0.1.7 → codebeacon-0.2.0}/.github/CODEOWNERS +0 -0
  12. {codebeacon-0.1.7 → codebeacon-0.2.0}/.github/dependabot.yml +0 -0
  13. {codebeacon-0.1.7 → codebeacon-0.2.0}/.github/workflows/ci.yml +0 -0
  14. {codebeacon-0.1.7 → codebeacon-0.2.0}/.github/workflows/release.yml +0 -0
  15. {codebeacon-0.1.7 → codebeacon-0.2.0}/.gitignore +0 -0
  16. {codebeacon-0.1.7 → codebeacon-0.2.0}/AGENTS.md +0 -0
  17. {codebeacon-0.1.7 → codebeacon-0.2.0}/CLAUDE.md +0 -0
  18. {codebeacon-0.1.7 → codebeacon-0.2.0}/LICENSE +0 -0
  19. {codebeacon-0.1.7 → codebeacon-0.2.0}/README.de.md +0 -0
  20. {codebeacon-0.1.7 → codebeacon-0.2.0}/README.es.md +0 -0
  21. {codebeacon-0.1.7 → codebeacon-0.2.0}/README.fr.md +0 -0
  22. {codebeacon-0.1.7 → codebeacon-0.2.0}/README.ja.md +0 -0
  23. {codebeacon-0.1.7 → codebeacon-0.2.0}/README.ko.md +0 -0
  24. {codebeacon-0.1.7 → codebeacon-0.2.0}/README.md +0 -0
  25. {codebeacon-0.1.7 → codebeacon-0.2.0}/README.pt-BR.md +0 -0
  26. {codebeacon-0.1.7 → codebeacon-0.2.0}/README.zh-CN.md +0 -0
  27. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/__main__.py +0 -0
  28. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/cache.py +0 -0
  29. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/common/__init__.py +0 -0
  30. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/common/filters.py +0 -0
  31. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/common/symbols.py +0 -0
  32. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/common/types.py +0 -0
  33. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/config.py +0 -0
  34. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/contextmap/__init__.py +0 -0
  35. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/contextmap/generator.py +0 -0
  36. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/discover/__init__.py +0 -0
  37. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/discover/detector.py +0 -0
  38. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/discover/scanner.py +0 -0
  39. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/export/__init__.py +0 -0
  40. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/export/mcp.py +0 -0
  41. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/extract/__init__.py +0 -0
  42. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/extract/components.py +0 -0
  43. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/extract/dependencies.py +0 -0
  44. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/extract/entities.py +0 -0
  45. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/extract/queries/README.md +0 -0
  46. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/extract/queries/actix.scm +0 -0
  47. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/extract/queries/angular.scm +0 -0
  48. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/extract/queries/aspnet.scm +0 -0
  49. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/extract/queries/django.scm +0 -0
  50. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/extract/queries/express.scm +0 -0
  51. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/extract/queries/fastapi.scm +0 -0
  52. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/extract/queries/flask.scm +0 -0
  53. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/extract/queries/gin.scm +0 -0
  54. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/extract/queries/ktor.scm +0 -0
  55. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/extract/queries/laravel.scm +0 -0
  56. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/extract/queries/nestjs.scm +0 -0
  57. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/extract/queries/rails.scm +0 -0
  58. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/extract/queries/react.scm +0 -0
  59. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/extract/queries/spring_boot.scm +0 -0
  60. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/extract/queries/svelte.scm +0 -0
  61. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/extract/queries/tauri.scm +0 -0
  62. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/extract/queries/vapor.scm +0 -0
  63. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/extract/queries/vue.scm +0 -0
  64. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/extract/routes.py +0 -0
  65. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/extract/semantic.py +0 -0
  66. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/extract/services.py +0 -0
  67. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/graph/__init__.py +0 -0
  68. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/graph/build.py +0 -0
  69. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/graph/cluster.py +0 -0
  70. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/skill/SKILL.md +0 -0
  71. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/wave.py +0 -0
  72. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/wiki/__init__.py +0 -0
  73. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/wiki/generator.py +0 -0
  74. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/wiki/index.py +0 -0
  75. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon/wiki/templates.py +0 -0
  76. {codebeacon-0.1.7 → codebeacon-0.2.0}/codebeacon.yaml.example +0 -0
  77. {codebeacon-0.1.7 → codebeacon-0.2.0}/docs/TRANSLATION_STATUS.md +0 -0
  78. {codebeacon-0.1.7 → codebeacon-0.2.0}/public-plan.md +0 -0
  79. {codebeacon-0.1.7 → codebeacon-0.2.0}/skill/SKILL.md +0 -0
  80. {codebeacon-0.1.7 → codebeacon-0.2.0}/skill/install.py +0 -0
  81. {codebeacon-0.1.7 → codebeacon-0.2.0}/tests/__init__.py +0 -0
  82. {codebeacon-0.1.7 → codebeacon-0.2.0}/tests/conftest.py +0 -0
  83. {codebeacon-0.1.7 → codebeacon-0.2.0}/tests/fixtures/actix/main.rs +0 -0
  84. {codebeacon-0.1.7 → codebeacon-0.2.0}/tests/fixtures/angular/app.component.ts +0 -0
  85. {codebeacon-0.1.7 → codebeacon-0.2.0}/tests/fixtures/aspnet/UserController.cs +0 -0
  86. {codebeacon-0.1.7 → codebeacon-0.2.0}/tests/fixtures/django/views.py +0 -0
  87. {codebeacon-0.1.7 → codebeacon-0.2.0}/tests/fixtures/express/userRouter.js +0 -0
  88. {codebeacon-0.1.7 → codebeacon-0.2.0}/tests/fixtures/fastapi/main.py +0 -0
  89. {codebeacon-0.1.7 → codebeacon-0.2.0}/tests/fixtures/flask/app.py +0 -0
  90. {codebeacon-0.1.7 → codebeacon-0.2.0}/tests/fixtures/gin/main.go +0 -0
  91. {codebeacon-0.1.7 → codebeacon-0.2.0}/tests/fixtures/ktor/UserRoutes.kt +0 -0
  92. {codebeacon-0.1.7 → codebeacon-0.2.0}/tests/fixtures/laravel/UserController.php +0 -0
  93. {codebeacon-0.1.7 → codebeacon-0.2.0}/tests/fixtures/nestjs/user.controller.ts +0 -0
  94. {codebeacon-0.1.7 → codebeacon-0.2.0}/tests/fixtures/rails/users_controller.rb +0 -0
  95. {codebeacon-0.1.7 → codebeacon-0.2.0}/tests/fixtures/react/UserPage.tsx +0 -0
  96. {codebeacon-0.1.7 → codebeacon-0.2.0}/tests/fixtures/spring_boot/UserController.java +0 -0
  97. {codebeacon-0.1.7 → codebeacon-0.2.0}/tests/fixtures/sveltekit/+page.svelte +0 -0
  98. {codebeacon-0.1.7 → codebeacon-0.2.0}/tests/fixtures/vapor/routes.swift +0 -0
  99. {codebeacon-0.1.7 → codebeacon-0.2.0}/tests/fixtures/vue/UserList.vue +0 -0
  100. {codebeacon-0.1.7 → codebeacon-0.2.0}/tests/test_discover.py +0 -0
  101. {codebeacon-0.1.7 → codebeacon-0.2.0}/tests/test_entities.py +0 -0
  102. {codebeacon-0.1.7 → codebeacon-0.2.0}/tests/test_filters.py +0 -0
  103. {codebeacon-0.1.7 → codebeacon-0.2.0}/tests/test_graph.py +0 -0
  104. {codebeacon-0.1.7 → codebeacon-0.2.0}/tests/test_resolve.py +0 -0
  105. {codebeacon-0.1.7 → codebeacon-0.2.0}/tests/test_routes.py +0 -0
  106. {codebeacon-0.1.7 → codebeacon-0.2.0}/tests/test_services.py +0 -0
  107. {codebeacon-0.1.7 → codebeacon-0.2.0}/tests/test_wiki.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: codebeacon
3
- Version: 0.1.7
3
+ Version: 0.2.0
4
4
  Summary: Source code AST analysis tool for AI context generation — unified multi-framework knowledge graph
5
5
  Project-URL: Homepage, https://github.com/codebeacon/codebeacon
6
6
  Project-URL: Repository, https://github.com/codebeacon/codebeacon
@@ -0,0 +1 @@
1
+ __version__ = "0.2.0"
@@ -127,7 +127,7 @@ def _run_pipeline(projects, output_dir: str, args) -> int:
127
127
  from codebeacon.cache import Cache
128
128
  from codebeacon.wave import auto_wave
129
129
  from codebeacon.graph.build import build_graph
130
- from codebeacon.graph.enrich import enrich_http_api, enrich_shared_db
130
+ from codebeacon.graph.enrich import enrich_http_api, enrich_shared_db, enrich_ipc_invoke
131
131
  from codebeacon.graph.cluster import cluster, apply_communities, score_all
132
132
 
133
133
  cache = Cache(output_dir)
@@ -176,8 +176,13 @@ def _run_pipeline(projects, output_dir: str, args) -> int:
176
176
  # Enrichment
177
177
  api_edges = enrich_http_api(G)
178
178
  db_edges = enrich_shared_db(G)
179
- if api_edges or db_edges:
180
- print(f" Enriched: +{api_edges} calls_api, +{db_edges} shares_db_entity edges")
179
+ ipc_edges = enrich_ipc_invoke(G)
180
+ enriched_parts = []
181
+ if api_edges: enriched_parts.append(f"+{api_edges} calls_api")
182
+ if db_edges: enriched_parts.append(f"+{db_edges} shares_db_entity")
183
+ if ipc_edges: enriched_parts.append(f"+{ipc_edges} invokes_command")
184
+ if enriched_parts:
185
+ print(f" Enriched: {', '.join(enriched_parts)} edges")
181
186
 
182
187
  # Community detection
183
188
  print(" Detecting communities ...")
@@ -188,7 +193,7 @@ def _run_pipeline(projects, output_dir: str, args) -> int:
188
193
  print(f" {n_communities} communities detected")
189
194
 
190
195
  # Analysis
191
- report = analyze(G, communities, cohesion)
196
+ report = analyze(G, communities, cohesion, project_paths={p.name: p.path for p in projects})
192
197
 
193
198
  # Save outputs
194
199
  import networkx.readwrite.json_graph as nxjson
@@ -249,7 +254,7 @@ def _run_deep_dive_pipeline(projects, workspace_output_dir: str, args) -> int:
249
254
  from pathlib import Path
250
255
  from codebeacon.graph.analyze import analyze, report_to_markdown
251
256
  from codebeacon.graph.build import build_graph
252
- from codebeacon.graph.enrich import enrich_http_api, enrich_shared_db
257
+ from codebeacon.graph.enrich import enrich_http_api, enrich_shared_db, enrich_ipc_invoke
253
258
  from codebeacon.graph.cluster import cluster, apply_communities, score_all
254
259
  from codebeacon.wiki.generator import generate_wiki
255
260
  from codebeacon.export.obsidian import generate_obsidian_vault
@@ -364,7 +369,7 @@ def _run_deep_dive_pipeline(projects, workspace_output_dir: str, args) -> int:
364
369
  n_communities = len(set(communities.values())) if communities else 0
365
370
  print(f" {n_communities} communities")
366
371
 
367
- report = analyze(G, communities, cohesion)
372
+ report = analyze(G, communities, cohesion, project_paths={project.name: project.path})
368
373
 
369
374
  beacon_path = Path(proj_output_dir) / "beacon.json"
370
375
  beacon_path.write_text(
@@ -418,7 +423,7 @@ def _run_deep_dive_pipeline(projects, workspace_output_dir: str, args) -> int:
418
423
  n_communities_all = len(set(communities_all.values())) if communities_all else 0
419
424
  print(f" {n_communities_all} communities detected")
420
425
 
421
- report_all = analyze(G_all, communities_all, cohesion_all)
426
+ report_all = analyze(G_all, communities_all, cohesion_all, project_paths={p.name: p.path for p in projects})
422
427
 
423
428
  beacon_path = workspace_path / "beacon.json"
424
429
  beacon_path.write_text(
@@ -74,6 +74,12 @@ def generate_obsidian_vault(
74
74
  vault = Path(obsidian_dir) if obsidian_dir else Path(output_dir) / "obsidian"
75
75
  vault.mkdir(parents=True, exist_ok=True)
76
76
 
77
+ # Clear stale notes from previous runs so step 5 can overwrite them
78
+ for svc_dir in vault.iterdir():
79
+ if svc_dir.is_dir() and not svc_dir.name.startswith("."):
80
+ for md in svc_dir.glob("*.md"):
81
+ md.unlink()
82
+
77
83
  # Step 1 — basic note generation
78
84
  _step1_generate_notes(G, communities, vault)
79
85
 
@@ -104,7 +104,7 @@ def is_grammar_allowed(query_name: str, lang: Language) -> bool:
104
104
  # Reverse-lookup the grammar name from the cached Language object
105
105
  gram_name = next((k for k, v in _LANG_CACHE.items() if v is lang), None)
106
106
  if gram_name is None:
107
- return True # can't determine — let it run
107
+ return False # can't determine — deny when allowlist exists
108
108
  return gram_name in allowed
109
109
 
110
110
 
@@ -3,15 +3,18 @@
3
3
  These metrics help users understand their codebase structure at a glance.
4
4
 
5
5
  Public API:
6
- god_nodes(G, top_n, min_degree) → list[GodNode]
7
- surprising_connections(G, communities) → list[SurprisingConnection]
8
- hub_files(G, top_n) → list[HubFile]
9
- analyze(G, communities, cohesion_scores) → GraphReport
10
- report_to_markdown(report) → str
6
+ god_nodes(G, top_n, min_degree, project_paths) → list[GodNode]
7
+ surprising_connections(G, communities) → list[SurprisingConnection]
8
+ hub_files(G, top_n) → list[HubFile]
9
+ analyze(G, communities, cohesion_scores,
10
+ project_paths) → GraphReport
11
+ report_to_markdown(report) → str
11
12
  """
12
13
 
13
14
  from __future__ import annotations
14
15
 
16
+ import os
17
+ from collections import defaultdict
15
18
  from dataclasses import dataclass, field
16
19
  from typing import Optional
17
20
 
@@ -22,15 +25,15 @@ import networkx as nx
22
25
 
23
26
  @dataclass
24
27
  class GodNode:
25
- """A node with unusually high degree (hub / bottleneck)."""
26
- node_id: str
27
- label: str
28
- type: str
29
- in_degree: int
30
- out_degree: int
31
- degree: int
32
- centrality: float
33
- source_file: str
28
+ """A directory with unusually high cross-boundary coupling."""
29
+ folder_path: str # relative path within project: "lib/utils" or "src-tauri/src"
30
+ label: str # folder name: "utils"
31
+ project: str # owning project: "desktop"
32
+ child_count: int # number of nodes inside this folder
33
+ in_degree: int # external → folder edges
34
+ out_degree: int # folder → external edges
35
+ degree: int # total cross-boundary edges
36
+ centrality: float # degree / (total_nodes - child_count)
34
37
 
35
38
 
36
39
  @dataclass
@@ -70,37 +73,123 @@ class GraphReport:
70
73
 
71
74
  # ── Analysis functions ────────────────────────────────────────────────────────
72
75
 
76
+ def _infer_project_paths(G: nx.DiGraph) -> dict[str, str]:
77
+ """Infer project root paths from source_file attributes in the graph.
78
+
79
+ Groups nodes by their ``project`` attribute, then finds the common path
80
+ prefix of all source_file directories within each project.
81
+ """
82
+ project_dirs: dict[str, list[str]] = defaultdict(list)
83
+ for _node_id, data in G.nodes(data=True):
84
+ sf = data.get("source_file", "")
85
+ proj = data.get("project", "")
86
+ if sf and proj:
87
+ project_dirs[proj].append(os.path.dirname(os.path.abspath(sf)))
88
+
89
+ result: dict[str, str] = {}
90
+ for proj, dirs in project_dirs.items():
91
+ if dirs:
92
+ result[proj] = os.path.commonpath(dirs)
93
+ return result
94
+
95
+
73
96
  def god_nodes(
74
97
  G: nx.DiGraph,
75
98
  top_n: int = 20,
76
99
  min_degree: int = 5,
100
+ project_paths: Optional[dict[str, str]] = None,
77
101
  ) -> list[GodNode]:
78
- """Find nodes with the highest degree (potential god classes / bottlenecks).
102
+ """Find directories with the highest cross-boundary coupling.
103
+
104
+ Counts only edges that cross folder boundaries (cross-boundary edges).
105
+ Intra-folder edges are ignored, so a single large wrapper file can no
106
+ longer dominate solely because of its high node-level degree.
79
107
 
80
108
  Args:
81
109
  G: the knowledge graph
82
- top_n: return at most this many nodes
83
- min_degree: minimum total degree to qualify
110
+ top_n: return at most this many folders
111
+ min_degree: minimum cross-boundary edge count to qualify
112
+ project_paths: optional dict mapping project name → absolute project
113
+ root path. When None, paths are inferred automatically
114
+ from source_file attributes via ``_infer_project_paths``.
84
115
 
85
116
  Returns:
86
- List of GodNode sorted by degree descending.
117
+ List of GodNode (folder-level) sorted by degree descending.
87
118
  """
88
- centrality = nx.degree_centrality(G)
119
+ if project_paths is None:
120
+ project_paths = _infer_project_paths(G)
121
+
122
+ total_nodes = G.number_of_nodes()
123
+
124
+ # Step 1: build node → (folder_key, folder_path, project) mapping.
125
+ # folder_key uses "{project}/{rel}" for cross-project uniqueness.
126
+ # folder_path stores only the relative portion shown in the report.
127
+ node_folder_key: dict[str, str] = {}
128
+ key_to_rel: dict[str, str] = {}
129
+ key_to_project: dict[str, str] = {}
89
130
 
90
- results: list[GodNode] = []
91
131
  for node_id, data in G.nodes(data=True):
92
- deg = G.degree(node_id)
93
- if deg < min_degree:
132
+ sf = data.get("source_file", "")
133
+ proj = data.get("project", "")
134
+ if not sf:
135
+ continue
136
+ dirname = os.path.dirname(os.path.abspath(sf))
137
+ if proj and proj in project_paths:
138
+ try:
139
+ rel = os.path.relpath(dirname, project_paths[proj])
140
+ except ValueError:
141
+ rel = dirname
142
+ # Skip nodes whose source lives outside the project root
143
+ if rel.startswith(".."):
144
+ rel = dirname
145
+ else:
146
+ rel = dirname
147
+ key = f"{proj}/{rel}" if proj else rel
148
+ node_folder_key[node_id] = key
149
+ key_to_rel[key] = rel
150
+ key_to_project[key] = proj
151
+
152
+ # Step 2: count cross-boundary edges in a single pass.
153
+ folder_in: dict[str, int] = defaultdict(int)
154
+ folder_out: dict[str, int] = defaultdict(int)
155
+ folder_children: dict[str, set] = defaultdict(set)
156
+
157
+ for node_id in G.nodes():
158
+ fk = node_folder_key.get(node_id)
159
+ if fk:
160
+ folder_children[fk].add(node_id)
161
+
162
+ for src, tgt in G.edges():
163
+ src_key = node_folder_key.get(src)
164
+ tgt_key = node_folder_key.get(tgt)
165
+ if src_key is None or tgt_key is None:
94
166
  continue
167
+ if src_key != tgt_key:
168
+ folder_out[src_key] += 1
169
+ folder_in[tgt_key] += 1
170
+
171
+ # Step 3: filter, build GodNode list, sort.
172
+ results: list[GodNode] = []
173
+ for folder_key in folder_children:
174
+ in_d = folder_in.get(folder_key, 0)
175
+ out_d = folder_out.get(folder_key, 0)
176
+ degree = in_d + out_d
177
+ if degree < min_degree:
178
+ continue
179
+ child_count = len(folder_children[folder_key])
180
+ centrality = degree / max(1, total_nodes - child_count)
181
+ rel = key_to_rel.get(folder_key, folder_key)
182
+ proj = key_to_project.get(folder_key, "")
183
+ label = os.path.basename(rel) if rel not in (".", "") else "(root)"
95
184
  results.append(GodNode(
96
- node_id=node_id,
97
- label=data.get("label", node_id),
98
- type=data.get("type", "unknown"),
99
- in_degree=G.in_degree(node_id),
100
- out_degree=G.out_degree(node_id),
101
- degree=deg,
102
- centrality=centrality.get(node_id, 0.0),
103
- source_file=data.get("source_file", ""),
185
+ folder_path=rel,
186
+ label=label,
187
+ project=proj,
188
+ child_count=child_count,
189
+ in_degree=in_d,
190
+ out_degree=out_d,
191
+ degree=degree,
192
+ centrality=centrality,
104
193
  ))
105
194
 
106
195
  results.sort(key=lambda n: n.degree, reverse=True)
@@ -207,6 +296,7 @@ def analyze(
207
296
  G: nx.DiGraph,
208
297
  communities: Optional[dict[str, int]] = None,
209
298
  cohesion_scores: Optional[dict[int, float]] = None,
299
+ project_paths: Optional[dict[str, str]] = None,
210
300
  ) -> GraphReport:
211
301
  """Run all analyses and return a unified GraphReport.
212
302
 
@@ -214,6 +304,8 @@ def analyze(
214
304
  G: built knowledge graph (output of build.py + optional enrich.py)
215
305
  communities: optional community mapping from cluster.py
216
306
  cohesion_scores: optional per-community cohesion scores from cluster.score_all()
307
+ project_paths: optional dict mapping project name → absolute project root path.
308
+ When None, paths are inferred automatically from the graph.
217
309
  """
218
310
  report = GraphReport(
219
311
  node_count=G.number_of_nodes(),
@@ -224,7 +316,7 @@ def analyze(
224
316
  isolated_nodes=sum(1 for n in G.nodes() if G.degree(n) == 0),
225
317
  )
226
318
 
227
- report.god_nodes = god_nodes(G)
319
+ report.god_nodes = god_nodes(G, project_paths=project_paths)
228
320
  report.hub_files = hub_files(G)
229
321
 
230
322
  if communities:
@@ -248,12 +340,14 @@ def report_to_markdown(report: GraphReport) -> str:
248
340
  ]
249
341
 
250
342
  if report.god_nodes:
251
- lines += ["## God Nodes (High Coupling)", ""]
252
- lines.append(f"{'Node':<40} {'Type':<12} {'Degree':>6} {'Centrality':>10}")
253
- lines.append("-" * 72)
343
+ lines += ["## God Nodes (High-Coupling Directories)", ""]
344
+ lines.append(
345
+ f"{'Folder':<44} {'Project':<12} {'Cross-Edges':>11} {'Children':>8} {'Centrality':>10}"
346
+ )
347
+ lines.append("-" * 89)
254
348
  for gn in report.god_nodes[:10]:
255
349
  lines.append(
256
- f"{gn.label:<40} {gn.type:<12} {gn.degree:>6} {gn.centrality:>10.4f}"
350
+ f"{gn.folder_path:<44} {gn.project:<12} {gn.degree:>11} {gn.child_count:>8} {gn.centrality:>10.4f}"
257
351
  )
258
352
  lines.append("")
259
353
 
@@ -1,8 +1,10 @@
1
- """Graph enrichment: HTTP API cross-service edges + shared DB entity edges.
1
+ """Graph enrichment: HTTP/IPC cross-service edges + shared DB entity edges.
2
2
 
3
- Two enrichment passes run AFTER the base graph is built by build.py:
4
- 1. enrich_http_api() — frontend URL calls → backend controller routes (calls_api edges)
5
- 2. enrich_shared_db() — same DAO/Entity used by multiple services (shares_db_entity edges)
3
+ Three enrichment passes run AFTER the base graph is built by build.py:
4
+ 1. enrich_http_api() — frontend URL calls → backend routes (calls_api edges)
5
+ 2. enrich_shared_db() — same DAO/Entity across services (shares_db_entity edges)
6
+ 3. enrich_ipc_invoke() — frontend invoke("cmd") → IPC command routes (invokes_command edges)
7
+ Covers Tauri, Electron ipcRenderer, and any invoke()-pattern IPC framework.
6
8
  """
7
9
 
8
10
  from __future__ import annotations
@@ -21,6 +23,8 @@ _API_URL_RES = [
21
23
  re.compile(r'''(?:api|http|client|instance|request)\.\w+\b[^`"']*[`"']([^`"'$]+)[`"']'''),
22
24
  re.compile(r'''url\s*[:=]\s*[`"']([^`"'$]+)[`"']'''),
23
25
  re.compile(r'''["'](/api/[^"'`\s]+)["'`]'''),
26
+ # Rust reqwest: format!("{}/api/...", base_url)
27
+ re.compile(r'''"\{\}(/api/[^"'\s]+)'''),
24
28
  ]
25
29
  _URL_LIKE = re.compile(r'^/[a-zA-Z]')
26
30
 
@@ -66,9 +70,9 @@ def enrich_http_api(G: nx.DiGraph) -> int:
66
70
  if not route_map:
67
71
  return 0
68
72
 
69
- # Find component/class nodes and scan their source for API calls
73
+ # Find nodes that may call external APIs and scan their source files
70
74
  for node_id, data in G.nodes(data=True):
71
- if data.get("type") not in ("component", "class"):
75
+ if data.get("type") not in ("component", "class", "route", "service"):
72
76
  continue
73
77
  src_proj = data.get("project", "")
74
78
 
@@ -191,6 +195,91 @@ def enrich_shared_db(G: nx.DiGraph) -> int:
191
195
  return added
192
196
 
193
197
 
198
+ # ── IPC invoke enrichment (Tauri, Electron, etc.) ────────────────────────────
199
+
200
+ # Regexes for IPC invoke patterns across desktop/hybrid frameworks:
201
+ # Tauri: invoke("cmd") invoke<T>("cmd")
202
+ # Electron: ipcRenderer.invoke("cmd") ipcRenderer.send("cmd")
203
+ _IPC_INVOKE_RES = [
204
+ re.compile(r"""invoke\s*(?:<[^>]*>)?\s*\(\s*["'](\w+)["']"""),
205
+ re.compile(r"""ipcRenderer\.(?:invoke|send)\s*\(\s*["']([^"']+)["']"""),
206
+ ]
207
+
208
+
209
+ def _extract_ipc_commands(source_file: str) -> list[str]:
210
+ """Extract IPC invoke/send command names from a frontend source file."""
211
+ try:
212
+ content = Path(source_file).read_text(encoding="utf-8", errors="replace")
213
+ except OSError:
214
+ return []
215
+ commands: set[str] = set()
216
+ for pat in _IPC_INVOKE_RES:
217
+ for m in pat.finditer(content):
218
+ commands.add(m.group(1))
219
+ return list(commands)
220
+
221
+
222
+ def enrich_ipc_invoke(G: nx.DiGraph) -> int:
223
+ """Add invokes_command edges: frontend invoke("cmd") → backend IPC command route.
224
+
225
+ Framework-agnostic — works with any route whose method is INVOKE,
226
+ regardless of backend framework (Tauri, Electron, etc.).
227
+
228
+ Strategy:
229
+ - Collect all 'route' nodes where method == "INVOKE"
230
+ - Extract the command name from the route handler
231
+ - For each frontend component, scan for invoke()/ipcRenderer.invoke() calls
232
+ - Match command names across projects
233
+
234
+ Returns:
235
+ Number of new invokes_command edges added.
236
+ """
237
+ added = 0
238
+
239
+ # Build command lookup: handler_name → (node_id, project)
240
+ cmd_map: dict[str, tuple[str, str]] = {}
241
+ for node_id, data in G.nodes(data=True):
242
+ if data.get("type") != "route":
243
+ continue
244
+ method = data.get("method", "")
245
+ if method != "INVOKE":
246
+ continue
247
+ handler = data.get("label", "").split(" ")[0] # "handler [INVOKE /...]" → "handler"
248
+ if handler:
249
+ cmd_map[handler] = (node_id, data.get("project", ""))
250
+
251
+ if not cmd_map:
252
+ return 0
253
+
254
+ # Find frontend component nodes and scan for IPC calls
255
+ for node_id, data in G.nodes(data=True):
256
+ if data.get("type") != "component":
257
+ continue
258
+ src_proj = data.get("project", "")
259
+ src_file = data.get("source_file", "")
260
+ if not src_file:
261
+ continue
262
+
263
+ commands = _extract_ipc_commands(src_file)
264
+ for cmd in commands:
265
+ if cmd not in cmd_map:
266
+ continue
267
+ target_id, target_proj = cmd_map[cmd]
268
+ if target_proj == src_proj:
269
+ continue
270
+ if not G.has_edge(node_id, target_id):
271
+ G.add_edge(
272
+ node_id, target_id,
273
+ relation="invokes_command",
274
+ confidence="EXTRACTED",
275
+ confidence_score=1.0,
276
+ source_file=src_file,
277
+ )
278
+ added += 1
279
+
280
+ return added
281
+
282
+
194
283
  # ── URL / path utilities ──────────────────────────────────────────────────────
195
284
 
196
285
  def _normalize_path(path: str) -> str:
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"
4
4
 
5
5
  [project]
6
6
  name = "codebeacon"
7
- version = "0.1.7"
7
+ version = "0.2.0"
8
8
  description = "Source code AST analysis tool for AI context generation — unified multi-framework knowledge graph"
9
9
  readme = "README.md"
10
10
  license = { text = "MIT" }
@@ -1 +0,0 @@
1
- __version__ = "0.1.7"
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes