@dtt_siye/atool 1.3.0 → 1.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (28)
  1. package/lib/install-skills.sh +16 -0
  2. package/lib/knowledge-graph.sh +483 -81
  3. package/lib/pre-scan.sh +70 -5
  4. package/package.json +1 -1
  5. package/skills/project-analyze/SKILL.md +34 -15
  6. package/skills/project-analyze/phases/phase2-understand.md +7 -1
  7. package/skills/project-analyze/phases/phase2.5-refine.md +284 -0
  8. package/skills/project-analyze/phases/phase4-synthesize.md +100 -119
  9. package/skills/project-analyze/phases/phase5-export.md +78 -32
  10. package/skills/project-analyze/prompts/understand-agent.md +17 -0
  11. package/skills/project-analyze/rules/android.md +61 -260
  12. package/skills/project-analyze/rules/devops.md +61 -421
  13. package/skills/project-analyze/rules/generic.md +53 -221
  14. package/skills/project-analyze/rules/go.md +60 -275
  15. package/skills/project-analyze/rules/harmony.md +64 -237
  16. package/skills/project-analyze/rules/java.md +47 -485
  17. package/skills/project-analyze/rules/mobile-flutter.md +57 -292
  18. package/skills/project-analyze/rules/mobile-react-native.md +65 -262
  19. package/skills/project-analyze/rules/mobile-swift.md +58 -303
  20. package/skills/project-analyze/rules/python.md +50 -296
  21. package/skills/project-analyze/rules/rust-tauri.md +51 -217
  22. package/skills/project-analyze/rules/rust.md +50 -274
  23. package/skills/project-analyze/rules/web-nextjs.md +61 -335
  24. package/skills/project-analyze/rules/web-react.md +50 -272
  25. package/skills/project-analyze/rules/web-vue.md +58 -352
  26. package/skills/project-analyze/rules/web.md +55 -347
  27. package/skills/requirements-writer/SKILL.md +48 -1
  28. package/skills/software-architecture/SKILL.md +73 -3
@@ -9,6 +9,51 @@
9
9
  # in .atool-docs/inventory/ and output structured data to .atool-docs/knowledge-graph.json
10
10
  set -euo pipefail
11
11
 
12
+ # === Internal Helper: Safe jq -s with Invalid JSON Filtering ===
13
+
14
+ # Run jq -s safely on a directory's JSON files, skipping invalid ones.
15
+ # Returns the jq output or the fallback value if no valid files exist.
16
+ #
17
+ # Args:
18
+ # $1 - filter: jq filter expression
19
+ # $2 - json_dir: directory containing *.json files (NOT a glob — bash expands globs before function call)
20
+ # $3 - fallback: value to return if jq fails or no valid files (default: "{}")
21
+ # Output: jq result or fallback to stdout
22
+ _jq_slurp_safe() {
23
+ local filter="$1"
24
+ local json_dir="$2"
25
+ local fallback="${3:-}"
26
+ [[ -z "$fallback" ]] && fallback="{}"
27
+
28
+ if [[ ! -d "$json_dir" ]]; then
29
+ echo "$fallback"
30
+ return
31
+ fi
32
+
33
+ # Collect only valid JSON files from the directory
34
+ local valid_files=()
35
+ local f
36
+ for f in "$json_dir"/*.json; do
37
+ [[ ! -f "$f" ]] && continue
38
+ if jq '.' "$f" > /dev/null 2>&1; then
39
+ valid_files+=("$f")
40
+ fi
41
+ done
42
+
43
+ if [[ ${#valid_files[@]} -eq 0 ]]; then
44
+ echo "$fallback"
45
+ return
46
+ fi
47
+
48
+ local result
49
+ result=$(jq -s "$filter" "${valid_files[@]}" 2>/dev/null) || true
50
+ if [[ -n "$result" && "$result" != "null" ]]; then
51
+ echo "$result"
52
+ else
53
+ echo "$fallback"
54
+ fi
55
+ }
56
+
12
57
  # === Node Counting ===
13
58
 
14
59
  # Count nodes by type from inventory JSON files.
@@ -32,12 +77,10 @@ count_graph_nodes() {
32
77
  return
33
78
  fi
34
79
 
35
- # Aggregate node counts from all inventory JSON files.
36
- # Each file has "files" array with "type" field per entry.
37
- # Null-coalesce missing types to 0.
80
+ # Aggregate node counts from all valid inventory JSON files.
38
81
  local result
39
- result=$(jq -s \
40
- '[ .[].files // [] | .[].type ]
82
+ result=$(_jq_slurp_safe '
83
+ [ .[].files // [] | .[].type ]
41
84
  | group_by(.)
42
85
  | map({key: .[0], value: length})
43
86
  | from_entries
@@ -52,13 +95,9 @@ count_graph_nodes() {
52
95
  api_endpoint: (.api_endpoint // 0),
53
96
  route: (.route // 0),
54
97
  config: (.config // 0)
55
- }' "$inventory_dir"/*.json 2>/dev/null)
98
+ }' "$inventory_dir" "{}")
56
99
 
57
- if [[ -n "$result" ]]; then
58
- echo "$result"
59
- else
60
- echo "{}"
61
- fi
100
+ echo "$result"
62
101
  }
63
102
 
64
103
  # === Edge Counting ===
@@ -82,11 +121,10 @@ count_graph_edges() {
82
121
  return
83
122
  fi
84
123
 
85
- # Aggregate edge counts from all inventory files.
86
- # Each file has "outgoing_edges" array with "type" field per entry.
124
+ # Aggregate edge counts from all valid inventory files.
87
125
  local result
88
- result=$(jq -s \
89
- '[ .[].outgoing_edges // [] | .[].type ]
126
+ result=$(_jq_slurp_safe '
127
+ [ .[].outgoing_edges // [] | .[].type ]
90
128
  | group_by(.)
91
129
  | map({key: .[0], value: length})
92
130
  | from_entries
@@ -103,13 +141,9 @@ count_graph_edges() {
103
141
  persists_to: (.persists_to // 0),
104
142
  implements: (.implements // 0),
105
143
  extends: (.extends // 0)
106
- }' "$inventory_dir"/*.json 2>/dev/null)
144
+ }' "$inventory_dir" "{}")
107
145
 
108
- if [[ -n "$result" ]]; then
109
- echo "$result"
110
- else
111
- echo "{}"
112
- fi
146
+ echo "$result"
113
147
  }
114
148
 
115
149
  # === Cycle Detection ===
@@ -135,17 +169,16 @@ detect_cycles() {
135
169
  fi
136
170
 
137
171
  # Build adjacency list from depends_on edges: source_module -> [target_modules]
138
- # Each edge in outgoing_edges has: type, source, target_module
139
172
  local adj_json
140
- adj_json=$(jq -s \
141
- '[ .[].outgoing_edges // []
173
+ adj_json=$(_jq_slurp_safe '
174
+ [ .[].outgoing_edges // []
142
175
  | .[]
143
176
  | select(.type == "depends_on")
144
177
  | {source: (.source // ""), target: (.target_module // "")}
145
178
  | select(.source != "" and .target != "") ]
146
179
  | group_by(.source)
147
180
  | map({key: .[0].source, value: (map(.target) | unique)})
148
- | from_entries' "$inventory_dir"/*.json 2>/dev/null)
181
+ | from_entries' "$inventory_dir" "{}")
149
182
 
150
183
  if [[ -z "$adj_json" || "$adj_json" == "null" || "$adj_json" == "{}" ]]; then
151
184
  echo "[]"
@@ -209,8 +242,8 @@ compute_coupling_metrics() {
209
242
  fi
210
243
 
211
244
  local result
212
- result=$(jq -s \
213
- '[ .[].outgoing_edges // []
245
+ result=$(_jq_slurp_safe '
246
+ [ .[].outgoing_edges // []
214
247
  | .[]
215
248
  | select(.type == "depends_on")
216
249
  | {source: (.source // ""), target: (.target_module // "")}
@@ -233,13 +266,9 @@ compute_coupling_metrics() {
233
266
  else 0.5 end) as $inst |
234
267
 
235
268
  {key: $m, value: {ca: $ca, ce: $ce, instability: $inst}}
236
- ] | from_entries' "$inventory_dir"/*.json 2>/dev/null)
269
+ ] | from_entries' "$inventory_dir" "{}")
237
270
 
238
- if [[ -n "$result" ]]; then
239
- echo "$result"
240
- else
241
- echo "{}"
242
- fi
271
+ echo "$result"
243
272
  }
244
273
 
245
274
  # === Dependency Structure Matrix ===
@@ -265,8 +294,8 @@ generate_dsm() {
265
294
  fi
266
295
 
267
296
  local result
268
- result=$(jq -s \
269
- '[ .[].outgoing_edges // []
297
+ result=$(_jq_slurp_safe '
298
+ [ .[].outgoing_edges // []
270
299
  | .[]
271
300
  | select(.type == "depends_on")
272
301
  | {
@@ -287,13 +316,9 @@ generate_dsm() {
287
316
  value: (map(.symbols) | add)
288
317
  }) | from_entries)
289
318
  }
290
- ] | from_entries' "$inventory_dir"/*.json 2>/dev/null)
319
+ ] | from_entries' "$inventory_dir" "{}")
291
320
 
292
- if [[ -n "$result" ]]; then
293
- echo "$result"
294
- else
295
- echo "{}"
296
- fi
321
+ echo "$result"
297
322
  }
298
323
 
299
324
  # === Architectural Layer Detection ===
@@ -552,43 +577,372 @@ assemble_enhanced_graph() {
552
577
  fi
553
578
 
554
579
  local inventory_dir="$docs_dir/inventory"
580
+ local modules_dir="$docs_dir/modules"
555
581
  local output_file="$docs_dir/knowledge-graph.json"
556
582
 
557
- # Step 1: Node assembly from inventory/*.json + modules/*/analysis.json
583
+ # Detect data source: prefer inventory/ (Phase 1 inventory data), fall back to modules/*/data.json (Phase 2 analysis)
558
584
  local nodes
559
- nodes=$(jq -s '
560
- # Collect all nodes from inventory files
561
- [ .[].files // [] | .[] |
562
- {
563
- id: ((.module // .path // "") | tostring),
564
- name: (.name // (.path // "") | split("/") | .[-1]),
565
- type: (.type // "unknown"),
566
- path: (.path // ""),
567
- module: (.module // "")
568
- }
569
- | select(.id != "")
570
- ] | unique_by(.id)
571
- ' "$inventory_dir"/*.json 2>/dev/null)
585
+ if [[ -d "$inventory_dir" ]] && compgen -G "${inventory_dir}/*.json" > /dev/null 2>&1; then
586
+ # === 多层级节点生成 ===
587
+ # pre-scan 数据中提取: module file → function/class/data_entity/api_endpoint
588
+ nodes=$(_jq_slurp_safe '
589
+ # 收集所有模块级节点
590
+ ([ .[].module // "" | select(. != "") ] | unique) as $module_names |
591
+
592
+ # 模块级节点 (level 1)
593
+ [ $module_names[] as $mod |
594
+ { id: "mod:\($mod)", name: $mod, type: "module", path: "", module: $mod, level: 1, parent: null }
595
+ ]
596
+
597
+ # 文件级 + 子元素节点 (level 2-3)
598
+ + [ .[] | . as $inv |
599
+ .module as $mod |
600
+ (.files // [])[] | . as $file |
601
+
602
+ # 文件级节点 (level 2) — wrapped in [] for valid array concatenation
603
+ [{
604
+ id: "file:\($mod)/\($file.path | split("/") | .[-1])",
605
+ name: ($file.path | split("/") | .[-1]),
606
+ type: (if ($file.path | test("\\.tsx?$"; "i")) then "component"
607
+ elif ($file.path | test("store"; "i")) then "store"
608
+ elif ($file.path | test("parser|parsers"; "i")) then "repository"
609
+ elif ($file.path | test("service|commands|chat"; "i")) then "service"
610
+ elif ($file.path | test("model|types"; "i")) then "data_entity"
611
+ else "service" end),
612
+ path: $file.path,
613
+ module: $mod,
614
+ level: 2,
615
+ parent: "mod:\($mod)"
616
+ }]
617
+
618
+ # 函数级节点 (level 3)
619
+ + [ ($file.functions // [])[] | {
620
+ id: "fn:\($mod)/\($file.path | split("/") | .[-1])#\(.name)",
621
+ name: .name,
622
+ type: "function",
623
+ path: $file.path,
624
+ module: $mod,
625
+ level: 3,
626
+ parent: "file:\($mod)/\($file.path | split("/") | .[-1])",
627
+ params: (.params // "")
628
+ } ]
629
+
630
+ # 类级节点 (level 3)
631
+ + [ ($file.classes // [])[] | {
632
+ id: "cls:\($mod)/\($file.path | split("/") | .[-1])#\(.name)",
633
+ name: .name,
634
+ type: (if .type == "interface" then "api_endpoint"
635
+ elif .type == "enum" then "data_entity"
636
+ else "service" end),
637
+ path: $file.path,
638
+ module: $mod,
639
+ level: 3,
640
+ parent: "file:\($mod)/\($file.path | split("/") | .[-1])",
641
+ bases: (.bases // [])
642
+ } ]
643
+
644
+ # 数据模型节点 (level 3)
645
+ + [ ($file.data_models // [])[] | {
646
+ id: "data:\($mod)/\($file.path | split("/") | .[-1])#\(.name)",
647
+ name: .name,
648
+ type: "data_entity",
649
+ path: $file.path,
650
+ module: $mod,
651
+ level: 3,
652
+ parent: "file:\($mod)/\($file.path | split("/") | .[-1])",
653
+ model_type: (.type // "unknown")
654
+ } ]
655
+
656
+ # API 端点节点 (level 3)
657
+ + [ ($file.api_endpoints // [])[] | {
658
+ id: "api:\($mod)/\($file.path | split("/") | .[-1])#\(.method)-\(.path // "unknown")",
659
+ name: "\(.method) \(.path)",
660
+ type: "api_endpoint",
661
+ path: $file.path,
662
+ module: $mod,
663
+ level: 3,
664
+ parent: "file:\($mod)/\($file.path | split("/") | .[-1])",
665
+ http_method: .method,
666
+ http_path: .path
667
+ } ]
668
+ ]
669
+
670
+ # 展平并去重
671
+ | flatten
672
+ | map(select(.id != null and .id != ""))
673
+ | unique_by(.id)
674
+ ' "$inventory_dir" "[]")
675
+ elif [[ -d "$modules_dir" ]]; then
676
+ # Fallback: build nodes from modules/*/data.json (Phase 2 output)
677
+ local nodes_tmp_dir
678
+ nodes_tmp_dir=$(mktemp -d)
679
+ local idx=0
680
+ for mf in "$modules_dir"/*/data.json; do
681
+ [[ ! -f "$mf" ]] && continue
682
+ # Skip invalid JSON files gracefully
683
+ jq '.' "$mf" > /dev/null 2>&1 || continue
684
+ local mod_slug
685
+ mod_slug=$(basename "$(dirname "$mf")")
686
+ # Extract all node types in one jq pass
687
+ # Handle both "files" (detailed) and "sourceFiles" (simpler) formats
688
+ jq --arg slug "$mod_slug" '
689
+ # Module-level node
690
+ [{id: $slug, name: (.responsibility // .description // $slug), type: "module", path: "", module: $slug}]
691
+ # Sub-component nodes from files[] or sourceFiles[]
692
+ + [((.files // .sourceFiles) // [])[] | {
693
+ id: (.path | split("/") | .[-1]),
694
+ name: .role,
695
+ type: (if (.path | test("\\.tsx?$"; "i")) then "component"
696
+ elif (.path | test("store"; "i")) then "store"
697
+ elif (.role | test("解析|parser|parse"; "i")) then "repository"
698
+ else "service" end),
699
+ path: .path,
700
+ module: $slug
701
+ }]
702
+ # Data entity nodes
703
+ + [(.data_entities // [])[] | {
704
+ id: .name,
705
+ name: .name,
706
+ type: "data_entity",
707
+ path: "",
708
+ module: $slug
709
+ }]
710
+ # Exposed API nodes
711
+ + [(.exposed_apis // [])[] | {
712
+ id: .name,
713
+ name: .description,
714
+ type: "api_endpoint",
715
+ path: "",
716
+ module: $slug
717
+ }]
718
+ | map(select(.id != null and .id != ""))
719
+ ' "$mf" > "$nodes_tmp_dir/$idx.json" 2>/dev/null || true
720
+ idx=$((idx + 1))
721
+ done
722
+ nodes=$(jq -s '[ .[] | .[] ] | unique_by(.id)' "$nodes_tmp_dir"/*.json 2>/dev/null) || true
723
+ rm -rf "$nodes_tmp_dir"
724
+ else
725
+ nodes="[]"
726
+ fi
572
727
 
573
728
  if [[ -z "$nodes" || "$nodes" == "null" ]]; then
574
729
  nodes="[]"
575
730
  fi
576
-
577
- # Step 2: Edge assembly with weights
731
+ # Step 2: Edge assembly — 原始模块级边 + 从 imports 推断的文件级边 + 父子 contains 边
578
732
  local raw_edges
579
- raw_edges=$(jq -s '
580
- [ .[].outgoing_edges // [] | .[] |
581
- {
582
- source: (.source // ""),
583
- target: (.target_module // .target // ""),
584
- type: (.type // "depends_on"),
585
- symbols: (.symbols // []),
586
- confidence: (.confidence // 1.0),
587
- frequency: 1
588
- }
589
- | select(.source != "" and .target != "")
590
- ]
591
- ' "$inventory_dir"/*.json 2>/dev/null)
733
+ if [[ -d "$inventory_dir" ]] && compgen -G "${inventory_dir}/*.json" > /dev/null 2>&1; then
734
+ raw_edges=$(_jq_slurp_safe '
735
+ # Pre-compute lookup maps (must come before array construction; jq cannot use bindings as + operands)
736
+ (reduce (.[] | .module as $mod | (.files // [])[] | {file: ($mod + "/" + (.path | split("/") | .[-1])), exports: (.exports // [])}) as $f
737
+ ({}; .[$f.file] = $f.exports)) as $export_map |
738
+ (reduce (.[] | .module as $mod | (.files // [])[] | {fn: (.path | split("/") | .[-1]), mod: $mod}) as $entry
739
+ ({}; .[$entry.fn] = $entry.mod)) as $path_mod |
740
+
741
+ # === Part A: 原始 outgoing_edges (模块级) ===
742
+ [ .[].outgoing_edges // [] | .[] |
743
+ {
744
+ source: (.source // ""),
745
+ target: (.target_module // .target // ""),
746
+ type: (.type // "depends_on"),
747
+ symbols: (.symbols // []),
748
+ confidence: (.confidence // 1.0),
749
+ frequency: 1
750
+ }
751
+ | select(.source != "" and .target != "")
752
+ ]
753
+
754
+ # === Part B: 从 imports 推断文件级依赖边 ===
755
+ + [ .[] | . as $inv |
756
+ .module as $mod |
757
+ (.files // [])[] | . as $file |
758
+ ($file.path | split("/") | .[-1]) as $filename |
759
+ ($file.imports // [])[] |
760
+ # 只处理本地 import (is_local == true 或以 ./ 开头)
761
+ select(.is_local == true or ((.from // "") | test("^\\."))) |
762
+ (.from // "") as $from_path |
763
+ # 推断目标模块:从 import 路径中提取
764
+ ($from_path | sub("^\\./"; "") | sub("^\\.\\./"; "") | split("/") | .[0]) as $target_hint |
765
+ # 生成文件级边
766
+ {
767
+ source: "file:\($mod)/\($filename)",
768
+ target: (if $target_hint != "" then "file:\($target_hint)/?" else "" end),
769
+ type: "imports",
770
+ symbols: [(.import // .from // "")] | map(select(. != "" and . != null)),
771
+ confidence: 0.7,
772
+ frequency: 1
773
+ }
774
+ | select(.source != "" and .target != "" and .target != "file:/?")
775
+ ]
776
+
777
+ # === Part C: 父子 contains 边 (module→file, file→function/class) ===
778
+ + [ .[] |
779
+ .module as $mod |
780
+ (.files // [])[] | . as $file |
781
+ ($file.path | split("/") | .[-1]) as $filename |
782
+
783
+ # module → file contains 边
784
+ [{ source: "mod:\($mod)", target: "file:\($mod)/\($filename)", type: "contains", symbols: [], confidence: 1.0, frequency: 1 }]
785
+
786
+ # file → function contains 边
787
+ + [ ($file.functions // [])[] |
788
+ { source: "file:\($mod)/\($filename)", target: "fn:\($mod)/\($filename)#\(.name)", type: "contains", symbols: [], confidence: 1.0, frequency: 1 }
789
+ ]
790
+
791
+ # file → class contains 边
792
+ + [ ($file.classes // [])[] |
793
+ { source: "file:\($mod)/\($filename)", target: "cls:\($mod)/\($filename)#\(.name)", type: "contains", symbols: [], confidence: 1.0, frequency: 1 }
794
+ ]
795
+
796
+ # file → data_model contains 边
797
+ + [ ($file.data_models // [])[] |
798
+ { source: "file:\($mod)/\($filename)", target: "data:\($mod)/\($filename)#\(.name)", type: "contains", symbols: [], confidence: 1.0, frequency: 1 }
799
+ ]
800
+ ]
801
+
802
+ # === Part D: import → export 交叉引用边(函数级调用关系) ===
803
+ + [ .[] | .module as $mod |
804
+ (.files // [])[] | . as $file |
805
+ ($file.path | split("/") | .[-1]) as $filename |
806
+ ($file.imports // [])[] |
807
+ select(.is_local == true or ((.from // "") | test("^\\."))) |
808
+ (.from // "") as $from_path |
809
+ # 解析 import source → 目标文件名
810
+ ($from_path | sub("^\\./"; "") | sub("^\\.\\./"; "") | split("/") | .[-1] | sub("\\.(ts|tsx|js|jsx|vue|svelte)$"; "")) as $target_hint |
811
+ # 从 import symbols 和 export_map 做交叉引用
812
+ (.symbols // []) as $imported_syms |
813
+ ($imported_syms | length) as $sym_count |
814
+ # 如果有具名 import symbols,用它们生成函数级边
815
+ (if $sym_count > 0 then
816
+ [$imported_syms[] | . as $sym |
817
+ {
818
+ source: "file:\($mod)/\($filename)",
819
+ target: ("file:" + (if $from_path | test("^\\.\\.") then "" else "\($mod)/" end) + $target_hint),
820
+ type: "calls",
821
+ symbols: [$sym],
822
+ confidence: 0.8,
823
+ frequency: 1
824
+ }
825
+ | select(.target != "file:" and .target != .source)
826
+ ]
827
+ # 否则生成文件级 imports 边(旧数据回退)
828
+ else
829
+ [{
830
+ source: "file:\($mod)/\($filename)",
831
+ target: "file:\($target_hint)",
832
+ type: "imports",
833
+ symbols: [],
834
+ confidence: 0.6,
835
+ frequency: 1
836
+ }
837
+ | select(.target != "file:" and .target != .source)]
838
+ end)
839
+ ]
840
+
841
+ # 展平、去重、过滤
842
+ | flatten
843
+ | map(select(.source != "" and .target != "" and .source != .target))
844
+ | unique_by({source, target, type})
845
+ ' "$inventory_dir" "[]")
846
+ elif [[ -d "$modules_dir" ]]; then
847
+ # Fallback: build edges from modules/*/data.json dependencies
848
+ local edges_tmp_dir
849
+ edges_tmp_dir=$(mktemp -d)
850
+ local eidx=0
851
+
852
+ # Build multi-strategy path→module lookup over VALID data.json files
853
+ local valid_data_files=()
854
+ for mf in "$modules_dir"/*/data.json; do
855
+ [[ ! -f "$mf" ]] && continue
856
+ jq '.' "$mf" > /dev/null 2>&1 && valid_data_files+=("$mf")
857
+ done
858
+
859
+ local lookup_json="{}"
860
+ if [[ ${#valid_data_files[@]} -gt 0 ]]; then
861
+ # Build lookup: file paths + directory prefixes + module names → module slug
862
+ lookup_json=$(jq -s '
863
+ # Strategy 1: module name → module slug (direct reference)
864
+ [ .[] |
865
+ (.module // "") as $slug |
866
+ select($slug != "") |
867
+ {key: $slug, value: $slug}
868
+ ]
869
+
870
+ # Strategy 2: file paths from files/sourceFiles
871
+ + [ .[] |
872
+ (.module // "") as $slug |
873
+ select($slug != "") |
874
+ [((.files // .sourceFiles) // [])[].path] | map(select(. != "" and . != null)) | unique |
875
+ map(. as $p | {key: $p, value: $slug})
876
+ ]
877
+
878
+ # Strategy 3: first directory segment → module slug
879
+ + [ .[] |
880
+ (.module // "") as $slug |
881
+ select($slug != "") |
882
+ [((.files // .sourceFiles) // [])[].path] | map(select(. != "" and . != null)) | unique |
883
+ map(. as $p | ($p | split("/") | .[0]) | select(. != "") | {key: ., value: $slug})
884
+ ]
885
+
886
+ | flatten | map(select(.key != null and .key != "")) |
887
+ reduce .[] as $entry ({}; .[$entry.key] = $entry.value)
888
+ ' "${valid_data_files[@]}" 2>/dev/null) || true
889
+ fi
890
+
891
+ [[ -z "$lookup_json" || "$lookup_json" == "null" ]] && lookup_json="{}"
892
+
893
+ # Add 2 and 3 segment prefix lookups from file paths
894
+ lookup_json=$(echo "$lookup_json" | jq '
895
+ reduce to_entries[] as $e (.;
896
+ ($e.key | split("/")) as $parts |
897
+ if ($parts | length) >= 2 then
898
+ .[($parts[0:2] | join("/"))] = $e.value
899
+ else . end |
900
+ if ($parts | length) >= 3 then
901
+ .[($parts[0:3] | join("/"))] = $e.value
902
+ else . end
903
+ )
904
+ ' 2>/dev/null) || true
905
+
906
+ [[ -z "$lookup_json" || "$lookup_json" == "null" ]] && lookup_json="{}"
907
+
908
+ for mf in "$modules_dir"/*/data.json; do
909
+ [[ ! -f "$mf" ]] && continue
910
+ # Skip invalid JSON files (e.g., frontend-core with parse errors)
911
+ jq '.' "$mf" > /dev/null 2>&1 || continue
912
+ local mod_slug
913
+ mod_slug=$(jq -r '.module // ""' "$mf" 2>/dev/null || true)
914
+ [[ -z "$mod_slug" ]] && mod_slug=$(basename "$(dirname "$mf")")
915
+ jq --arg slug "$mod_slug" --argjson pmap "$lookup_json" '
916
+ [(.dependencies.internal // [])[] |
917
+ (if type == "string" then sub(" *\\(.*\\)"; "") else . end) as $raw_path |
918
+ # Multi-strategy lookup: exact → 3-seg prefix → 2-seg prefix → 1-seg prefix
919
+ ($pmap[$raw_path] //
920
+ ($raw_path | split("/") | .[0:3] | join("/") as $k | $pmap[$k]) //
921
+ ($raw_path | split("/") | .[0:2] | join("/") as $k | $pmap[$k]) //
922
+ ($raw_path | split("/") | .[0] as $k | $pmap[$k]) //
923
+ "UNKNOWN") as $target_mod |
924
+ select($target_mod != $slug and $target_mod != "UNKNOWN") |
925
+ {source: $slug, target: $target_mod, type: "depends_on", weight: 1, confidence: 0.8}
926
+ ] | map(select(.source != "" and .target != ""))
927
+ ' "$mf" > "$edges_tmp_dir/$eidx.json" 2>/dev/null || true
928
+ eidx=$((eidx + 1))
929
+ done
930
+
931
+ # Collect numbered edge files, skip empty ones
932
+ local edge_files_str=""
933
+ for ef in "$edges_tmp_dir"/[0-9]*.json; do
934
+ [[ ! -f "$ef" ]] && continue
935
+ [[ -s "$ef" ]] || continue
936
+ edge_files_str="$edge_files_str $ef"
937
+ done
938
+ if [[ -n "$edge_files_str" ]]; then
939
+ raw_edges=$(jq -s '[ .[] | if type == "array" then .[] else empty end ]' $edge_files_str 2>/dev/null \
940
+ | jq '[ .[] | select(.source != .target) ] | unique_by({source,target,type})' 2>/dev/null) || true
941
+ fi
942
+ rm -rf "$edges_tmp_dir"
943
+ else
944
+ raw_edges="[]"
945
+ fi
592
946
 
593
947
  if [[ -z "$raw_edges" || "$raw_edges" == "null" ]]; then
594
948
  raw_edges="[]"
@@ -596,15 +950,35 @@ assemble_enhanced_graph() {
596
950
 
597
951
  # Build a temporary graph to compute edge weights
598
952
  local weighted_edges
599
- weighted_edges=$(echo "{\"nodes\":$nodes,\"edges\":$raw_edges}" | compute_edge_weights -)
953
+ weighted_edges=$(echo "{\"nodes\":$nodes,\"edges\":$raw_edges}" | compute_edge_weights -) || weighted_edges="$raw_edges"
600
954
 
601
955
  # Step 3: Layer detection
602
956
  local layers
603
- layers=$(detect_architectural_layers "$project_root")
957
+ layers=$(detect_architectural_layers "$project_root") || layers="{}"
604
958
 
605
- # Step 4: Graph metrics (Ca, Ce, I via existing compute_coupling_metrics)
959
+ # Step 4: Graph metrics (Ca, Ce, I)
606
960
  local coupling_metrics
607
- coupling_metrics=$(compute_coupling_metrics "$inventory_dir")
961
+ if [[ -d "$inventory_dir" ]] && compgen -G "$inventory_dir/*.json" > /dev/null 2>&1; then
962
+ coupling_metrics=$(compute_coupling_metrics "$inventory_dir") || coupling_metrics="{}"
963
+ else
964
+ # Compute coupling from the edges we already assembled
965
+ coupling_metrics=$(echo "$weighted_edges" | jq '
966
+ [.[] | select(.type == "depends_on" or .type == "imports")] as $dep_edges |
967
+ ($dep_edges | map(.source) + map(.target) | unique) as $modules |
968
+ [ $modules[] as $m |
969
+ ($dep_edges | map(select(.source == $m).target) | unique | length) as $ce |
970
+ ($dep_edges | map(select(.target == $m).source) | unique | length) as $ca |
971
+ {
972
+ key: $m,
973
+ value: {
974
+ ca: $ca,
975
+ ce: $ce,
976
+ instability: (if ($ce + $ca) > 0 then (($ce * 100 / ($ce + $ca)) | round) / 100 else 0 end)
977
+ }
978
+ }
979
+ ] | from_entries
980
+ ' 2>/dev/null) || coupling_metrics="{}"
981
+ fi
608
982
 
609
983
  # Compute aggregate metrics from coupling data
610
984
  local aggregate_metrics
@@ -623,7 +997,7 @@ assemble_enhanced_graph() {
623
997
  avg_efferent: (($avg_ce * 100 | round) / 100),
624
998
  coupling_per_module: $cm
625
999
  }
626
- ')
1000
+ ' 2>/dev/null) || aggregate_metrics='{"total_modules":0,"avg_afferent":0,"avg_efferent":0,"coupling_per_module":{}}'
627
1001
 
628
1002
  # Step 5: Importance score integration
629
1003
  local importance_scores="{}"
@@ -637,15 +1011,43 @@ assemble_enhanced_graph() {
637
1011
 
638
1012
  # Step 6: Index construction
639
1013
  local indexes
640
- indexes=$(echo "{\"nodes\":$nodes,\"edges\":$weighted_edges}" | build_graph_indexes -)
1014
+ indexes=$(echo "{\"nodes\":$nodes,\"edges\":$weighted_edges}" | build_graph_indexes -) || indexes="{}"
641
1015
 
642
1016
  # Detect cycles
643
1017
  local cycles
644
- cycles=$(detect_cycles "$inventory_dir")
1018
+ if [[ -d "$inventory_dir" ]] && compgen -G "$inventory_dir/*.json" > /dev/null 2>&1; then
1019
+ cycles=$(detect_cycles "$inventory_dir") || cycles="[]"
1020
+ else
1021
+ # Compute cycles from the edges we already assembled
1022
+ cycles=$(echo "$weighted_edges" | jq '
1023
+ [.[] | select(.type == "depends_on")] as $edges |
1024
+ ($edges | map(.source) | unique) as $modules |
1025
+ # Build adjacency list
1026
+ ($edges | group_by(.source) | map({key: .[0].source, value: (map(.target) | unique)}) | from_entries) as $adj |
1027
+ # 2-cycles
1028
+ [ $modules[] as $a |
1029
+ ($adj[$a] // [])[] as $b |
1030
+ select($b > $a) |
1031
+ select(($adj[$b] // []) | index($a)) |
1032
+ [$a, $b] | sort
1033
+ ] | unique
1034
+ ' 2>/dev/null) || cycles="[]"
1035
+ fi
645
1036
 
646
1037
  # Generate DSM
647
1038
  local dsm
648
- dsm=$(generate_dsm "$inventory_dir" 2>/dev/null || echo "{}")
1039
+ if [[ -d "$inventory_dir" ]] && compgen -G "$inventory_dir/*.json" > /dev/null 2>&1; then
1040
+ dsm=$(generate_dsm "$inventory_dir" 2>/dev/null || echo "{}")
1041
+ else
1042
+ dsm=$(echo "$weighted_edges" | jq '
1043
+ [.[] | select(.type == "depends_on")] |
1044
+ group_by(.source) |
1045
+ map({
1046
+ key: .[0].source,
1047
+ value: (group_by(.target) | map({key: .[0].target, value: length}) | from_entries)
1048
+ }) | from_entries
1049
+ ' 2>/dev/null) || dsm="{}"
1050
+ fi
649
1051
 
650
1052
  # Assemble final graph
651
1053
  local timestamp
@@ -810,9 +1212,9 @@ compute_ck_metrics() {
810
1212
  return
811
1213
  fi
812
1214
 
813
- # Collect class nodes and their method/field information from inventory
1215
+ # Collect class nodes and their method/field information from valid inventory files
814
1216
  local inventory_data
815
- inventory_data=$(jq -s '
1217
+ inventory_data=$(_jq_slurp_safe '
816
1218
  {
817
1219
  # All class-type nodes
818
1220
  classes: [ .[].files // [] | .[] |
@@ -830,7 +1232,7 @@ compute_ck_metrics() {
830
1232
  # All edges for dependency analysis
831
1233
  edges: [ .[].outgoing_edges // [] | .[] | . ]
832
1234
  }
833
- ' "$inventory_dir"/*.json 2>/dev/null)
1235
+ ' "$inventory_dir" "{}")
834
1236
 
835
1237
  if [[ -z "$inventory_data" || "$inventory_data" == "null" ]]; then
836
1238
  echo "{}"