aidp 0.3.0 → 0.7.0

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registry.
Files changed (51)
  1. checksums.yaml +4 -4
  2. data/README.md +191 -5
  3. data/lib/aidp/analysis/kb_inspector.rb +456 -0
  4. data/lib/aidp/analysis/seams.rb +188 -0
  5. data/lib/aidp/analysis/tree_sitter_grammar_loader.rb +493 -0
  6. data/lib/aidp/analysis/tree_sitter_scan.rb +703 -0
  7. data/lib/aidp/analyze/agent_personas.rb +1 -1
  8. data/lib/aidp/analyze/agent_tool_executor.rb +5 -11
  9. data/lib/aidp/analyze/data_retention_manager.rb +0 -5
  10. data/lib/aidp/analyze/database.rb +99 -82
  11. data/lib/aidp/analyze/error_handler.rb +12 -79
  12. data/lib/aidp/analyze/export_manager.rb +0 -7
  13. data/lib/aidp/analyze/focus_guidance.rb +2 -2
  14. data/lib/aidp/analyze/incremental_analyzer.rb +1 -11
  15. data/lib/aidp/analyze/large_analysis_progress.rb +0 -5
  16. data/lib/aidp/analyze/memory_manager.rb +34 -60
  17. data/lib/aidp/analyze/metrics_storage.rb +336 -0
  18. data/lib/aidp/analyze/parallel_processor.rb +0 -6
  19. data/lib/aidp/analyze/performance_optimizer.rb +0 -3
  20. data/lib/aidp/analyze/prioritizer.rb +2 -2
  21. data/lib/aidp/analyze/repository_chunker.rb +14 -21
  22. data/lib/aidp/analyze/ruby_maat_integration.rb +6 -102
  23. data/lib/aidp/analyze/runner.rb +107 -191
  24. data/lib/aidp/analyze/steps.rb +35 -30
  25. data/lib/aidp/analyze/storage.rb +233 -178
  26. data/lib/aidp/analyze/tool_configuration.rb +21 -36
  27. data/lib/aidp/cli/jobs_command.rb +489 -0
  28. data/lib/aidp/cli/terminal_io.rb +52 -0
  29. data/lib/aidp/cli.rb +160 -45
  30. data/lib/aidp/core_ext/class_attribute.rb +36 -0
  31. data/lib/aidp/database/pg_adapter.rb +148 -0
  32. data/lib/aidp/database_config.rb +69 -0
  33. data/lib/aidp/database_connection.rb +72 -0
  34. data/lib/aidp/execute/runner.rb +65 -92
  35. data/lib/aidp/execute/steps.rb +81 -82
  36. data/lib/aidp/job_manager.rb +41 -0
  37. data/lib/aidp/jobs/base_job.rb +45 -0
  38. data/lib/aidp/jobs/provider_execution_job.rb +83 -0
  39. data/lib/aidp/provider_manager.rb +25 -0
  40. data/lib/aidp/providers/agent_supervisor.rb +348 -0
  41. data/lib/aidp/providers/anthropic.rb +160 -3
  42. data/lib/aidp/providers/base.rb +153 -6
  43. data/lib/aidp/providers/cursor.rb +245 -43
  44. data/lib/aidp/providers/gemini.rb +164 -3
  45. data/lib/aidp/providers/supervised_base.rb +317 -0
  46. data/lib/aidp/providers/supervised_cursor.rb +22 -0
  47. data/lib/aidp/version.rb +1 -1
  48. data/lib/aidp.rb +31 -34
  49. data/templates/ANALYZE/01_REPOSITORY_ANALYSIS.md +4 -4
  50. data/templates/ANALYZE/06a_tree_sitter_scan.md +217 -0
  51. metadata +91 -36
data/lib/aidp/analysis/tree_sitter_scan.rb
@@ -0,0 +1,703 @@
+# frozen_string_literal: true
+
+require "json"
+require "fileutils"
+require "digest"
+require "concurrent"
+require "etc"
+
+require_relative "tree_sitter_grammar_loader"
+require_relative "seams"
+
+module Aidp
+  module Analysis
+    class TreeSitterScan
+      def initialize(root: Dir.pwd, kb_dir: ".aidp/kb", langs: %w[ruby], threads: Etc.nprocessors)
+        @root = File.expand_path(root)
+        @kb_dir = File.expand_path(kb_dir, @root)
+        @langs = Array(langs)
+        @threads = threads
+        @grammar_loader = TreeSitterGrammarLoader.new(@root)
+
+        # Data structures to accumulate analysis results
+        @symbols = []
+        @imports = []
+        @calls = []
+        @metrics = []
+        @seams = []
+        @hotspots = []
+        @tests = []
+        @cycles = []
+
+        # Cache for parsed files
+        @cache = {}
+        @cache_file = File.join(@kb_dir, ".cache")
+      end
+
+      def run
+        puts "🔍 Starting Tree-sitter static analysis..."
+        puts "📁 Root: #{@root}"
+        puts "🗂️ KB Directory: #{@kb_dir}"
+        puts "🌐 Languages: #{@langs.join(", ")}"
+        puts "🧵 Threads: #{@threads}"
+
+        files = discover_files
+        puts "📄 Found #{files.length} files to analyze"
+
+        prepare_kb_dir
+        load_cache
+
+        parallel_parse(files)
+        write_kb_files
+
+        puts "✅ Tree-sitter analysis complete!"
+        puts "📊 Generated KB files in #{@kb_dir}"
+      end
+
+      private
+
+      def discover_files
+        files = []
+
+        @langs.each do |lang|
+          patterns = @grammar_loader.file_patterns_for_language(lang)
+          patterns.each do |pattern|
+            files.concat(Dir.glob(File.join(@root, pattern)))
+          end
+        end
+
+        # Filter out files that should be ignored
+        files = filter_ignored_files(files)
+
+        # Sort for consistent processing
+        files.sort
+      end
+
+      def filter_ignored_files(files)
+        # Respect .gitignore
+        gitignore_path = File.join(@root, ".gitignore")
+        ignored_patterns = []
+
+        if File.exist?(gitignore_path)
+          File.readlines(gitignore_path).each do |line|
+            line = line.strip
+            next if line.empty? || line.start_with?("#")
+
+            # Convert gitignore patterns to glob patterns
+            pattern = convert_gitignore_to_glob(line)
+            ignored_patterns << pattern
+          end
+        end
+
+        # Add common ignore patterns
+        ignored_patterns.concat([
+          "**/.git/**", "**/node_modules/**", "**/vendor/**",
+          "**/tmp/**", "tmp/**", "**/log/**", "log/**", "**/.aidp/**"
+        ])
+
+        files.reject do |file|
+          relative_path = file.sub(/^#{Regexp.escape(@root)}\/?/, "")
+          ignored_patterns.any? { |pattern| File.fnmatch?(pattern, relative_path) }
+        end
+      end
+
+      # Convert gitignore patterns to Ruby glob patterns
+      def convert_gitignore_to_glob(gitignore_pattern)
+        # Handle different gitignore pattern types
+        case gitignore_pattern
+        when /^\//
+          # Absolute path from root: /foo -> foo
+          gitignore_pattern[1..]
+        when /\/$/
+          # Directory only: foo/ -> **/foo/**
+          "**/" + gitignore_pattern.chomp("/") + "/**"
+        when /\*\*/
+          # Already contains **: leave as-is for glob
+          gitignore_pattern
+        else
+          # Regular pattern: foo -> **/foo
+          if gitignore_pattern.include?("/")
+            # Contains path separator: keep relative structure
+            gitignore_pattern
+          else
+            # Simple filename: match anywhere
+            "**/" + gitignore_pattern
+          end
+        end
+      end
+
+      def prepare_kb_dir
+        FileUtils.mkdir_p(@kb_dir)
+      end
+
+      def load_cache
+        return unless File.exist?(@cache_file)
+
+        begin
+          @cache = JSON.parse(File.read(@cache_file), symbolize_names: true)
+        rescue JSON::ParserError
+          @cache = {}
+        end
+      end
+
+      def save_cache
+        File.write(@cache_file, JSON.pretty_generate(@cache))
+      end
+
+      def parallel_parse(files)
+        puts "🔄 Parsing files in parallel..."
+
+        # Group files by language for efficient processing
+        files_by_lang = files.group_by { |file| detect_language(file) }
+
+        # Process each language group
+        files_by_lang.each do |lang, lang_files|
+          puts "📝 Processing #{lang_files.length} #{lang} files..."
+
+          # Load grammar for this language
+          grammar = @grammar_loader.load_grammar(lang)
+
+          # Process files in parallel using Concurrent gem
+          pool = Concurrent::FixedThreadPool.new(@threads)
+          futures = []
+
+          lang_files.each do |file|
+            future = Concurrent::Promise.execute(executor: pool) do
+              parse_file(file, grammar)
+            end
+            futures << future
+          end
+
+          # Wait for all futures to complete
+          futures.each(&:value!)
+          pool.shutdown
+          pool.wait_for_termination
+        end
+
+        save_cache
+      end
+
+      def detect_language(file_path)
+        case File.extname(file_path)
+        when ".rb"
+          "ruby"
+        when ".js", ".jsx"
+          "javascript"
+        when ".ts", ".tsx"
+          "typescript"
+        when ".py"
+          "python"
+        else
+          "unknown"
+        end
+      end
+
+      def parse_file(file_path, grammar)
+        relative_path = file_path.sub(@root + File::SEPARATOR, "")
+
+        # Check cache first
+        cache_key = relative_path
+        file_mtime = File.mtime(file_path).to_i
+
+        if @cache[cache_key] && @cache[cache_key][:mtime] == file_mtime
+          # Use cached results
+          cached_data = @cache[cache_key][:data]
+          merge_cached_data(cached_data)
+          return
+        end
+
+        # Parse the file
+        # Set current file for context in helper methods
+        @current_file_path = file_path
+
+        source_code = File.read(file_path)
+        ast = grammar[:parser][:parse].call(source_code)
+
+        # Extract data from AST
+        file_data = extract_file_data(file_path, ast, source_code)
+
+        # Cache the results
+        @cache[cache_key] = {
+          mtime: file_mtime,
+          data: file_data
+        }
+
+        # Merge into global data structures
+        merge_file_data(file_data)
+      end
+
+      def extract_file_data(file_path, ast, source_code)
+        relative_path = file_path.sub(@root + File::SEPARATOR, "")
+
+        {
+          symbols: extract_symbols(ast, relative_path),
+          imports: extract_imports(ast, relative_path),
+          calls: extract_calls(ast, relative_path),
+          metrics: calculate_metrics(ast, source_code, relative_path),
+          seams: extract_seams(ast, relative_path)
+        }
+      end
+
+      def extract_symbols(ast, file_path)
+        symbols = []
+
+        children = ast[:children] || []
+        children = children.is_a?(Array) ? children : []
+
+        children.each do |node|
+          case node[:type].to_s
+          when "class"
+            symbols << {
+              id: "#{file_path}:#{node[:line]}:#{node[:name]}",
+              file: file_path,
+              line: node[:line],
+              kind: "class",
+              name: node[:name],
+              visibility: "public",
+              arity: 0,
+              loc: {
+                start_line: node[:line],
+                end_line: node[:line],
+                start_column: node[:start_column],
+                end_column: node[:end_column]
+              },
+              nesting_depth: calculate_nesting_depth(node)
+            }
+          when "module"
+            symbols << {
+              id: "#{file_path}:#{node[:line]}:#{node[:name]}",
+              file: file_path,
+              line: node[:line],
+              kind: "module",
+              name: node[:name],
+              visibility: "public",
+              arity: 0,
+              loc: {
+                start_line: node[:line],
+                end_line: node[:line],
+                start_column: node[:start_column],
+                end_column: node[:end_column]
+              },
+              nesting_depth: calculate_nesting_depth(node)
+            }
+          when "method"
+            symbols << {
+              id: "#{file_path}:#{node[:line]}:#{node[:name]}",
+              file: file_path,
+              line: node[:line],
+              kind: "method",
+              name: node[:name],
+              visibility: determine_method_visibility(node),
+              arity: calculate_method_arity(node),
+              loc: {
+                start_line: node[:line],
+                end_line: node[:line],
+                start_column: node[:start_column],
+                end_column: node[:end_column]
+              },
+              nesting_depth: calculate_nesting_depth(node)
+            }
+          end
+        end
+
+        symbols
+      end
+
+      def extract_imports(ast, file_path)
+        imports = []
+
+        children = ast[:children] || []
+        children = children.is_a?(Array) ? children : []
+
+        children.each do |node|
+          case node[:type].to_s
+          when "require"
+            imports << {
+              file: file_path,
+              kind: "require",
+              target: node[:target],
+              line: node[:line]
+            }
+          when "require_relative"
+            imports << {
+              file: file_path,
+              kind: "require_relative",
+              target: node[:target],
+              line: node[:line]
+            }
+          when "call"
+            # Handle require statements that are parsed as call nodes
+            # Check if this is a require call by looking at the first child (identifier)
+            # The children are nested in the structure
+            actual_children = (node[:children] && node[:children][:children]) ? node[:children][:children] : node[:children]
+            if actual_children&.is_a?(Array) && actual_children.first
+              first_child = actual_children.first
+              # Extract the actual identifier name from the source code
+              identifier_name = extract_identifier_name(first_child, file_path)
+              if identifier_name == "require"
+                # Extract the target from the argument list
+                target = extract_require_target(node)
+                if target
+                  imports << {
+                    file: file_path,
+                    kind: "require",
+                    target: target,
+                    line: node[:line]
+                  }
+                end
+              elsif identifier_name == "require_relative"
+                # Extract the target from the argument list
+                target = extract_require_target(node)
+                if target
+                  imports << {
+                    file: file_path,
+                    kind: "require_relative",
+                    target: target,
+                    line: node[:line]
+                  }
+                end
+              end
+            end
+          end
+        end
+
+        imports
+      end
+
+      def extract_require_target(node)
+        # Recursively search for string literals in the require call
+        find_string_in_node(node)
+      end
+
+      # Recursively find string content in a Tree-sitter node
+      def find_string_in_node(node)
+        return nil unless node.is_a?(Hash)
+
+        case node[:type].to_s
+        when "string"
+          # Found a string node - extract its content
+          return extract_string_literal_content(node)
+        when "string_content"
+          # Direct string content - extract text
+          return extract_node_text_from_source(node)
+        end
+
+        # Recursively search in children
+        children = get_node_children(node)
+        if children&.is_a?(Array)
+          children.each do |child|
+            result = find_string_in_node(child)
+            return result if result
+          end
+        end
+
+        nil
+      end
+
+      # Extract content from a string literal, handling quotes properly
+      def extract_string_literal_content(string_node)
+        # Get the full text of the string node including quotes
+        full_text = extract_node_text_from_source(string_node)
+        return nil unless full_text
+
+        # Remove quotes and handle escape sequences
+        case full_text[0]
+        when '"'
+          # Double-quoted string - handle escape sequences
+          content = full_text[1..-2] # Remove surrounding quotes
+          unescape_string(content)
+        when "'"
+          # Single-quoted string - minimal escaping
+          content = full_text[1..-2] # Remove surrounding quotes
+          content.gsub("\\'", "'").gsub("\\\\", "\\")
+        when "%"
+          # Percent notation strings (%q, %Q, %w, etc.)
+          handle_percent_string(full_text)
+        else
+          # Fallback: try to extract content between quotes
+          if (match = full_text.match(/^["'](.*)["']$/))
+            match[1]
+          else
+            full_text
+          end
+        end
+      end
+
+      # Handle Ruby's percent notation strings
+      def handle_percent_string(text)
+        return text unless text.start_with?("%")
+
+        case text[1]
+        when "q", "Q"
+          # %q{...} or %Q{...}
+          delimiter = text[2]
+          closing_delimiter = get_closing_delimiter(delimiter)
+          content = text[3..-(closing_delimiter.length + 1)]
+          (text[1] == "Q") ? unescape_string(content) : content
+        else
+          text
+        end
+      end
+
+      # Get closing delimiter for percent strings
+      def get_closing_delimiter(opening)
+        case opening
+        when "(" then ")"
+        when "[" then "]"
+        when "{" then "}"
+        when "<" then ">"
+        else; opening
+        end
+      end
+
+      # Unescape common Ruby escape sequences
+      def unescape_string(str)
+        str.gsub("\\n", "\n")
+          .gsub("\\t", "\t")
+          .gsub("\\r", "\r")
+          .gsub('\"', '"')
+          .gsub("\\\\", "\\")
+      end
+
+      # Safely extract children from a node, handling nested structures
+      def get_node_children(node)
+        if node[:children].is_a?(Hash) && node[:children][:children]
+          node[:children][:children]
+        else
+          node[:children]
+        end
+      end
+
+      def extract_string_content(string_content_node, _line_number)
+        # Extract the actual string content from the source code using node position
+        extract_node_text_from_source(string_content_node)
+      end
+
+      def extract_identifier_name(identifier_node, file_path)
+        # Extract the actual identifier name from the source code using node position
+        extract_node_text_from_source(identifier_node, file_path)
+      end
+
+      # Generalized method to extract text from any Tree-sitter node using source position
+      def extract_node_text_from_source(node, file_path = nil)
+        return nil unless node&.dig(:start_line) && node.dig(:end_line)
+        return nil unless node&.dig(:start_column) && node.dig(:end_column)
+
+        # Get source file path - either from parameter or from current parsing context
+        source_file = file_path ? File.join(@root, file_path) : @current_file_path
+        return nil unless source_file && File.exist?(source_file)
+
+        # Read source lines
+        source_lines = File.readlines(source_file)
+
+        start_line = node[:start_line]
+        end_line = node[:end_line]
+        start_col = node[:start_column]
+        end_col = node[:end_column]
+
+        # Handle single line nodes
+        if start_line == end_line
+          line_content = source_lines[start_line - 1] || ""
+          return line_content[start_col...end_col]
+        end
+
+        # Handle multi-line nodes
+        result = ""
+        (start_line..end_line).each do |line_num|
+          line_content = source_lines[line_num - 1] || ""
+
+          result += if line_num == start_line
+            # First line: from start_col to end
+            line_content[start_col..] || ""
+          elsif line_num == end_line
+            # Last line: from start to end_col
+            line_content[0...end_col] || ""
+          else
+            # Middle lines: entire line
+            line_content
+          end
+        end
+
+        result
+      end
+
+      def extract_calls(_ast, _file_path)
+        []
+
+        # This would extract method calls from the AST
+        # For now, return empty array
+      end
+
+      def calculate_metrics(ast, source_code, file_path)
+        metrics = []
+
+        ast[:children]&.each do |node|
+          if node[:type] == "method"
+            method_metrics = {
+              symbol_id: "#{file_path}:#{node[:line]}:#{node[:name]}",
+              file: file_path,
+              method: node[:name],
+              cyclomatic_proxy: calculate_cyclomatic_complexity(node),
+              branch_count: count_branches(node),
+              max_nesting: calculate_max_nesting(node),
+              fan_out: calculate_fan_out(node),
+              lines: calculate_method_lines(node)
+            }
+            metrics << method_metrics
+          end
+        end
+
+        # Add file-level metrics
+        children = ast[:children] || []
+        children = children.is_a?(Array) ? children : []
+
+        file_metrics = {
+          file: file_path,
+          total_lines: source_code.lines.count,
+          total_methods: children.count { |n| n[:type].to_s == "method" },
+          total_classes: children.count { |n| n[:type].to_s == "class" },
+          total_modules: children.count { |n| n[:type].to_s == "module" }
+        }
+        metrics << file_metrics
+
+        metrics
+      end
+
+      def extract_seams(ast, file_path)
+        children = ast[:children] || []
+        children = children.is_a?(Array) ? children : []
+        Seams.detect_seams_in_ast(children, file_path)
+      end
+
+      def calculate_nesting_depth(_node)
+        # Simple nesting depth calculation
+        # In practice, this would analyze the actual AST structure
+        0
+      end
+
+      def determine_method_visibility(_node)
+        # Determine method visibility based on context
+        # In practice, this would analyze the AST structure
+        "public"
+      end
+
+      def calculate_method_arity(_node)
+        # Calculate method arity from parameters
+        # In practice, this would analyze the method's parameter list
+        0
+      end
+
+      def calculate_cyclomatic_complexity(node)
+        # Calculate cyclomatic complexity proxy
+        # Count control flow statements
+        count_branches(node) + 1
+      end
+
+      def count_branches(_node)
+        # Count branching statements in the method
+        # This would analyze the method's AST for if/elsif/else/case/when/while/until/rescue
+        0
+      end
+
+      def calculate_max_nesting(_node)
+        # Calculate maximum nesting depth in the method
+        0
+      end
+
+      def calculate_fan_out(_node)
+        # Calculate fan-out (number of distinct method calls)
+        0
+      end
+
+      def calculate_method_lines(_node)
+        # Calculate lines of code in the method
+        1
+      end
+
+      def merge_cached_data(cached_data)
+        @symbols.concat(cached_data[:symbols] || [])
+        @imports.concat(cached_data[:imports] || [])
+        @calls.concat(cached_data[:calls] || [])
+        @metrics.concat(cached_data[:metrics] || [])
+        @seams.concat(cached_data[:seams] || [])
+      end
+
+      def merge_file_data(file_data)
+        @symbols.concat(file_data[:symbols])
+        @imports.concat(file_data[:imports])
+        @calls.concat(file_data[:calls])
+        @metrics.concat(file_data[:metrics])
+        @seams.concat(file_data[:seams])
+      end
+
+      def write_kb_files
+        puts "💾 Writing knowledge base files..."
+
+        prepare_kb_dir
+
+        write_json_file("symbols.json", @symbols)
+        write_json_file("imports.json", @imports)
+        write_json_file("calls.json", @calls)
+        write_json_file("metrics.json", @metrics)
+        write_json_file("seams.json", @seams)
+
+        # Generate derived data
+        generate_hotspots
+        generate_tests
+        generate_cycles
+
+        write_json_file("hotspots.json", @hotspots)
+        write_json_file("tests.json", @tests)
+        write_json_file("cycles.json", @cycles)
+      end
+
+      def write_json_file(filename, data)
+        file_path = File.join(@kb_dir, filename)
+        File.write(file_path, JSON.pretty_generate(data))
+        puts "📄 Written #{filename} (#{data.length} entries)"
+      end
+
+      def generate_hotspots
+        # Merge structural metrics with git churn data
+        # For now, create mock hotspots based on complexity
+        @hotspots = @metrics.select { |m| m[:symbol_id] }
+          .map do |metric|
+            {
+              symbol_id: metric[:symbol_id],
+              score: (metric[:cyclomatic_proxy] || 1) * (metric[:fan_out] || 1),
+              complexity: metric[:cyclomatic_proxy] || 1,
+              touches: 1, # This would come from git log analysis
+              file: metric[:file],
+              method: metric[:method]
+            }
+          end
+          .sort_by { |h| -h[:score] }
+          .first(20)
+      end
+
+      def generate_tests
+        # Map public APIs to tests based on naming conventions
+        public_methods = @symbols.select { |s| s[:kind] == "method" && s[:visibility] == "public" }
+
+        @tests = public_methods.map do |method|
+          {
+            symbol_id: method[:id],
+            tests: find_tests_for_method(method)
+          }
+        end
+      end
+
+      def generate_cycles
+        # Detect import cycles
+        # For now, return empty array
+        @cycles = []
+      end
+
+      def find_tests_for_method(_method)
+        # Find test files that might test this method
+        # This would analyze test file naming and content
+        []
+      end
+    end
+  end
+end
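For orientation, here is a minimal invocation sketch of the Aidp::Analysis::TreeSitterScan class added above, based only on the constructor and run method visible in this diff. The require path and argument values are illustrative assumptions, not documented usage.

```ruby
# Sketch: running the new Tree-sitter scan directly (assumed require path and values).
require "aidp/analysis/tree_sitter_scan" # assumes the gem's standard lib/ layout

scan = Aidp::Analysis::TreeSitterScan.new(
  root: Dir.pwd,       # project to analyze
  kb_dir: ".aidp/kb",  # where the *.json knowledge-base files are written
  langs: %w[ruby],     # languages to scan; detect_language also recognizes js/ts/py
  threads: 4           # size of the Concurrent::FixedThreadPool used for parsing
)
scan.run # writes symbols.json, imports.json, metrics.json, seams.json, hotspots.json, etc.
```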
data/lib/aidp/analyze/agent_personas.rb
@@ -9,7 +9,7 @@ module Aidp
  "name" => "Repository Analyst",
  "description" => "Expert in analyzing version control data, code evolution patterns, and repository metrics. Specializes in identifying hotspots, technical debt, and code quality trends over time.",
  "expertise" => ["Git analysis", "Code metrics", "Temporal patterns", "Hotspot identification"],
- "tools" => ["Code Maat", "Git log analysis", "Statistical analysis"]
+ "tools" => ["ruby-maat", "Git log analysis", "Statistical analysis"]
  },
  "Architecture Analyst" => {
  "name" => "Architecture Analyst",