ai-codeindex 0.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
codeindex/README_AI.md ADDED
@@ -0,0 +1,767 @@
1
+ <!-- Generated by codeindex at 2026-02-03T16:03:18+08:00 -->
2
+
3
+ # README_AI.md - codeindex
4
+
5
+ ## Overview
6
+
7
+ - **Files**: 28
8
+ - **Symbols**: 183
9
+
10
+ ## Files
11
+
12
+ ### __init__.py
13
+ _codeindex - AI-native code indexing tool for large codebases
14
+
15
+ Usage:
16
+ codeindex scan <path> # Scan a directory and generate README_AI.md
17
+ co_
18
+
19
+ ### adaptive_config.py
20
+ _Adaptive symbols configuration.
21
+
22
+ This module defines the configuration structure for adaptive symbol extraction,
23
+ which allows dynamically adjusting th_
24
+
25
+ **class** `class AdaptiveSymbolsConfig`
26
+ > Configuration for adaptive symbol extraction.
27
+
28
+ Adaptive symbol extraction adjusts the number of
29
+
30
+ ### adaptive_selector.py
31
+ _Adaptive symbol selector for dynamic symbol limit calculation.
32
+
33
+ This module implements the core algorithm for adaptive symbol extraction,
34
+ which adjust_
35
+
36
+ **class** `class AdaptiveSymbolSelector`
37
+ > Selects appropriate symbol limit based on file size.
38
+
39
+ This selector implements a tiered approach
40
+
41
+ **Methods:**
42
+ - `def calculate_limit(self, file_lines: int, total_symbols: int) -> int`
43
+ - `def _determine_size_category(self, lines: int) -> str`
44
+ - `def _apply_constraints(self, limit: int, total_symbols: int) -> int`
45
+
46
+ ### ai_enhancement.py
47
+ _AI enhancement strategies for super large files (Epic 3.2).
48
+
49
+ This module provides intelligent file size detection and strategy selection
50
+ for optimizin_
51
+
52
+ **class** `class SuperLargeFileDetection`
53
+ > Result of super large file detection.
54
+
55
+ **class** `class SymbolGroup`
56
+ > Group of symbols by responsibility.
57
+
58
+ **class** `class MultiTurnResult`
59
+ > Result of multi-turn dialogue enhancement.
60
+
61
+ **Functions:**
62
+ - `def is_super_large_file(parse_result: ParseResult, config: Config) -> SuperLargeFileDetection`
63
+ - `def select_enhancement_strategy(
64
+ parse_result: ParseResult,
65
+ config: Config,
66
+ ) -> EnhancementStrategy`
67
+ - `def _group_symbols_by_responsibility(parse_result: ParseResult) -> list[SymbolGroup]`
68
+ - `def _generate_round1_prompt(parse_result: ParseResult) -> str`
69
+ - `def _generate_round2_prompt(
70
+ round1_output: str,
71
+ symbol_groups: list[SymbolGroup],
72
+ ) -> str`
73
+ - `def _generate_round3_prompt(
74
+ round1_output: str,
75
+ round2_output: str,
76
+ parse_result: ParseResult,
77
+ ) -> str`
78
+ - `def multi_turn_ai_enhancement(
79
+ parse_result: ParseResult,
80
+ config: Config,
81
+ ai_command: str,
82
+ timeout_per_round: int = 180,
83
+ ) -> MultiTurnResult`
84
+
85
+ ### ai_helper.py
86
+ _AI enhancement helper functions (Epic 4 Story 4.1).
87
+
88
+ This module provides reusable functions for AI enhancement operations,
89
+ eliminating code duplicati_
90
+
91
+ **Functions:**
92
+ - `def aggregate_parse_results(
93
+ parse_results: list[ParseResult],
94
+ path: Path,
95
+ ) -> ParseResult`
96
+
97
+ ### cli.py
98
+ _CLI entry point for codeindex.
99
+
100
+ This module serves as the main entry point for the codeindex CLI tool.
101
+ It imports and registers commands from speciali_
102
+
103
+ **Functions:**
104
+ - `def main()`
105
+
106
+ ### cli_common.py
107
+ _Common utilities for CLI modules.
108
+
109
+ This module provides shared resources used across all CLI command modules,
110
+ such as the Rich console instance for fo_
111
+
112
+ ### cli_config.py
113
+ _CLI commands for configuration and project status.
114
+
115
+ This module provides commands for initializing configuration files,
116
+ checking indexing status, and _
117
+
118
+ **Functions:**
119
+ - `def init(force: bool)`
120
+ - `def status(root: Path)`
121
+ - `def list_dirs(root: Path)`
122
+
123
+ ### cli_scan.py
124
+ _CLI commands for scanning directories and generating README files.
125
+
126
+ This module provides the core scanning functionality, including single directory
127
+ s_
128
+
129
+ **Functions:**
130
+ - `def _process_directory_with_smartwriter(
131
+ dir_path: Path,
132
+ tree: DirectoryTree,
133
+ config: Config,
134
+ ) -> tuple[Path, bool, str, int]`
135
+ - `def scan(
136
+ path: Path,
137
+ dry_run: bool,
138
+ fallback: bool,
139
+ quiet: bool,
140
+ timeout: int,
141
+ parallel: int | None,
142
+ docstring_mode: str | None,
143
+ show_cost: bool,
144
+ )`
145
+ - `def scan_all(
146
+ root: Path | None,
147
+ parallel: int | None,
148
+ timeout: int,
149
+ no_ai: bool,
150
+ fallback: bool,
151
+ quiet: bool,
152
+ hierarchical: bool,
153
+ docstring_mode: str | None,
154
+ show_cost: bool,
155
+ )`
156
+
157
+ ### cli_symbols.py
158
+ _CLI commands for symbol indexing and dependency analysis.
159
+
160
+ This module provides commands for generating project-wide indices
161
+ and analyzing code depend_
162
+
163
+ **Functions:**
164
+ - `def extract_module_purpose(
165
+ dir_path: Path,
166
+ config: Config,
167
+ output_file: str = "README_AI.md"
168
+ ) -> str`
169
+ - `def index(root: Path, output: str)`
170
+ - `def symbols(root: Path, output: str, quiet: bool)`
171
+ - `def affected(since: str, until: str, as_json: bool)`
172
+
173
+ ### cli_tech_debt.py
174
+ _CLI commands for technical debt analysis.
175
+
176
+ This module provides the tech-debt command for analyzing technical debt
177
+ in a directory, including file size_
178
+
179
+ **Functions:**
180
+ - `def _find_source_files(
181
+ path: Path, recursive: bool, languages: list[str] | None = None
182
+ ) -> list[Path]`
183
+ - `def _analyze_files(
184
+ files: list[Path],
185
+ detector: TechDebtDetector,
186
+ reporter: TechDebtReporter,
187
+ show_progress: bool,
188
+ ) -> None`
189
+ - `def _format_and_output(
190
+ report: TechDebtReport,
191
+ format: str,
192
+ output: Path | None,
193
+ quiet: bool,
194
+ ) -> None`
195
+ - `def tech_debt(path: Path, format: str, output: Path | None, recursive: bool, quiet: bool)`
196
+
197
+ ### config.py
198
+ _Configuration management for codeindex._
199
+
200
+ **class** `class SymbolsConfig`
201
+ > Configuration for symbol extraction.
202
+
203
+ **class** `class GroupingConfig`
204
+ > Configuration for file grouping.
205
+
206
+ **class** `class SemanticConfig`
207
+ > Configuration for semantic extraction.
208
+
209
+ **class** `class IndexingConfig`
210
+ > Configuration for smart indexing.
211
+
212
+ **class** `class IncrementalConfig`
213
+ > Configuration for incremental updates.
214
+
215
+ **class** `class DocstringConfig`
216
+ > Configuration for docstring extraction.
217
+
218
+ **class** `class Config`
219
+ > Configuration for codeindex.
220
+
221
+ ### directory_tree.py
222
+ _Directory tree structure for hierarchical indexing._
223
+
224
+ **class** `class DirectoryNode`
225
+ > A node in the directory tree.
226
+
227
+ **class** `class DirectoryTree`
228
+ > Pre-scanned directory tree for determining index levels.
229
+
230
+ This enables two-pass indexing:
231
+ 1.
232
+
233
+ **Methods:**
234
+ - `def _build_tree(self)`
235
+ - `def print_tree(self, max_depth: int = 3)`
236
+
237
+ ### file_classifier.py
238
+ _Unified file size classification system (Epic 4 Story 4.2).
239
+
240
+ This module provides a unified approach to file size classification,
241
+ replacing hard-coded_
242
+
243
+ **class** `class FileSizeCategory(Enum)`
244
+ > File size categories for classification.
245
+
246
+ **class** `class FileSizeAnalysis`
247
+ > Result of file size analysis.
248
+
249
+ Attributes:
250
+ category: File size category (enum)
251
+ f
252
+
253
+ **class** `class FileSizeClassifier`
254
+ > Unified file size classifier for all modules.
255
+
256
+ This classifier provides consistent file size det
257
+
258
+ **Methods:**
259
+ - `def classify(self, parse_result: ParseResult) -> FileSizeAnalysis`
260
+ - `def is_super_large(self, parse_result: ParseResult) -> bool`
261
+ - `def is_large(self, parse_result: ParseResult) -> bool`
262
+
263
+ ### framework_detect.py
264
+ _Framework detection and pattern extraction for PHP projects._
265
+
266
+ **class** `class RouteInfo`
267
+ > Information about a route.
268
+
269
+ **class** `class ModelInfo`
270
+ > Information about a model.
271
+
272
+ **class** `class FrameworkInfo`
273
+ > Detected framework information.
274
+
275
+ **Functions:**
276
+ - `def detect_framework(root: Path) -> FrameworkType`
277
+ - `def extract_thinkphp_routes(
278
+ parse_results: list[ParseResult],
279
+ module_name: str,
280
+ ) -> list[RouteInfo]`
281
+ - `def extract_thinkphp_models(
282
+ parse_results: list[ParseResult],
283
+ ) -> list[ModelInfo]`
284
+ - `def analyze_thinkphp_project(
285
+ root: Path,
286
+ parse_results_by_dir: dict[Path, list[ParseResult]],
287
+ ) -> FrameworkInfo`
288
+ - `def format_framework_info(info: FrameworkInfo, max_routes: int = 20) -> str`
289
+
290
+ ### hierarchical.py
291
+ _Bottom-up hierarchical processing for codeindex._
292
+
293
+ **class** `class DirectoryInfo`
294
+ > Information about a directory in the hierarchy.
295
+
296
+ **Functions:**
297
+ - `def build_directory_hierarchy(directories: List[Path]) -> Tuple[Dict[Path, DirectoryInfo], List[Path]]`
298
+ - `def create_processing_batches(dir_info: Dict[Path, DirectoryInfo], max_workers: int) -> List[List[Path]]`
299
+ - `def process_directory_batch(
300
+ batch: List[Path],
301
+ config: Config,
302
+ use_fallback: bool = False,
303
+ quiet: bool = False,
304
+ timeout: int = 120,
305
+ root_path: Path = None,
306
+ ) -> Dict[Path, bool]`
307
+ - `def process_normal(path: Path, config: Config, use_fallback: bool, quiet: bool, timeout: int, root_path: Path = None) -> bool`
308
+ - `def process_with_children(path: Path, config: Config, use_fallback: bool, quiet: bool, timeout: int) -> bool`
309
+ - `def scan_directories_hierarchical(
310
+ root: Path,
311
+ config: Config,
312
+ max_workers: int = 8,
313
+ use_fallback: bool = True,
314
+ quiet: bool = False,
315
+ timeout: int = 120
316
+ ) -> bool`
317
+ - `def generate_enhanced_fallback_readme(
318
+ dir_path: Path,
319
+ parse_results: list,
320
+ child_readmes: List[Path],
321
+ output_file: str = "README_AI.md"
322
+ )`
323
+
324
+ ### incremental.py
325
+ _Incremental update logic for codeindex.
326
+
327
+ This module analyzes git changes and determines which directories
328
+ need README_AI.md updates based on configur_
329
+
330
+ **class** `class UpdateLevel(Enum)`
331
+ > Update decision levels.
332
+
333
+ **class** `class FileChange`
334
+ > Represents a changed file.
335
+
336
+ **class** `class ChangeAnalysis`
337
+ > Analysis result of git changes.
338
+
339
+ **Methods:**
340
+ - `def to_dict(self) -> dict`
341
+
342
+ **Functions:**
343
+ - `def run_git_command(args: list[str], cwd: Path | None = None) -> str`
344
+ - `def filter_code_files(
345
+ changes: list[FileChange],
346
+ languages: list[str],
347
+ ) -> list[FileChange]`
348
+ - `def analyze_changes(
349
+ config: Config,
350
+ since: str = "HEAD~1",
351
+ until: str = "HEAD",
352
+ cwd: Path | None = None,
353
+ ) -> ChangeAnalysis`
354
+ - `def should_update_project_index(analysis: ChangeAnalysis, config: Config) -> bool`
355
+
356
+ ### invoker.py
357
+ _AI CLI invoker - calls external AI CLI tools._
358
+
359
+ **class** `class InvokeResult`
360
+ > Result of invoking AI CLI.
361
+
362
+ **Functions:**
363
+ - `def clean_ai_output(output: str) -> str`
364
+ - `def validate_markdown_output(output: str) -> bool`
365
+ - `def format_prompt(
366
+ dir_path: Path,
367
+ files_info: str,
368
+ symbols_info: str,
369
+ imports_info: str,
370
+ ) -> str`
371
+ - `def invoke_ai_cli(
372
+ command_template: str,
373
+ prompt: str,
374
+ timeout: int = 120,
375
+ dry_run: bool = False,
376
+ ) -> InvokeResult`
377
+ - `def invoke_ai_cli_stdin(
378
+ command: str,
379
+ prompt: str,
380
+ timeout: int = 120,
381
+ dry_run: bool = False,
382
+ ) -> InvokeResult`
383
+
384
+ ### parallel.py
385
+ _Parallel processing utilities for codeindex._
386
+
387
+ **class** `class BatchResult`
388
+ > Result of processing a batch of files.
389
+
390
+ **Functions:**
391
+ - `def parse_files_parallel(
392
+ files: List[Path],
393
+ config: Config,
394
+ quiet: bool = False
395
+ ) -> list[ParseResult]`
396
+ - `def scan_directories_parallel(
397
+ directories: List[Path],
398
+ config: Config,
399
+ quiet: bool = False
400
+ ) -> List[Path]`
401
+
402
+ ### parser.py
403
+ _Multi-language AST parser using tree-sitter._
404
+
405
+ **class** `class Symbol`
406
+ > Represents a code symbol (class, function, etc.).
407
+
408
+ **class** `class Import`
409
+ > Represents an import statement.
410
+
411
+ **class** `class ParseResult`
412
+ > Result of parsing a file.
413
+
414
+ **Functions:**
415
+ - `def _get_node_text(node, source_bytes: bytes) -> str`
416
+ - `def _extract_docstring(node, source_bytes: bytes) -> str`
417
+ - `def _parse_function(
418
+ node,
419
+ source_bytes: bytes,
420
+ class_name: str = "",
421
+ decorators: list[str] | None = None
422
+ ) -> Symbol`
423
+ - `def _parse_class(node, source_bytes: bytes) -> list[Symbol]`
424
+ - `def _parse_import(node, source_bytes: bytes) -> Import | None`
425
+ - `def _extract_module_docstring(tree, source_bytes: bytes) -> str`
426
+ - `def parse_file(path: Path) -> ParseResult`
427
+ - `def parse_directory(paths: list[Path]) -> list[ParseResult]`
428
+ - `def _get_language(file_path: Path) -> str`
429
+ - `def _extract_php_docstring(node, source_bytes: bytes) -> str`
430
+ - `def _parse_php_function(node, source_bytes: bytes, class_name: str = "") -> Symbol`
431
+ - `def _parse_php_method(node, source_bytes: bytes, class_name: str) -> Symbol`
432
+
433
+ _... and 5 more symbols_
434
+
435
+ ### scanner.py
436
+ _Directory scanner for codeindex._
437
+
438
+ **class** `class ScanResult`
439
+ > Result of scanning a directory.
440
+
441
+ **Functions:**
442
+ - `def should_exclude(path: Path, exclude_patterns: list[str], base_path: Path) -> bool`
443
+ - `def scan_directory(
444
+ path: Path,
445
+ config: Config,
446
+ base_path: Path | None = None,
447
+ recursive: bool = True
448
+ ) -> ScanResult`
449
+ - `def find_all_directories(root: Path, config: Config) -> list[Path]`
450
+
451
+ ### semantic_extractor.py
452
+ _Business Semantic Extractor
453
+
454
+ Story 4.4: Extract business semantics from directory structure
455
+ Task 4.4.5: KISS Universal Description Generator
456
+
457
+ This mod_
458
+
459
+ **class** `class DirectoryContext`
460
+ > Context information about a directory
461
+
462
+ Used to collect information for semantic extraction.
463
+
464
+ **class** `class BusinessSemantic`
465
+ > Business semantic information
466
+
467
+ Extracted description of what a directory does.
468
+
469
+ **class** `class SimpleDescriptionGenerator`
470
+ > Universal description generator: zero assumptions, zero semantic understanding
471
+
472
+ Only extracts ob
473
+
474
+ **class** `class SemanticExtractor`
475
+ > Extract business semantics from directory context
476
+
477
+ Supports two modes:
478
+ - Heuristic mode: KIS
479
+
480
+ **Methods:**
481
+ - `def generate(self, context: DirectoryContext) -> str`
482
+ - `def _extract_path_context(self, path: str) -> str`
483
+ - `def _analyze_symbol_pattern(self, symbols: List[str]) -> str`
484
+ - `def _pluralize(self, suffix: str) -> str`
485
+ - `def _extract_entity_names(self, symbols: List[str]) -> List[str]`
486
+ - `def extract_directory_semantic(
487
+ self,
488
+ context: DirectoryContext
489
+ ) -> BusinessSemantic`
490
+ - `def _heuristic_extract(self, context: DirectoryContext) -> BusinessSemantic`
491
+ - `def _ai_extract(self, context: DirectoryContext) -> BusinessSemantic`
492
+ - `def _build_ai_prompt(self, context: DirectoryContext) -> str`
493
+ - `def _parse_ai_response(self, response: str) -> BusinessSemantic`
494
+
495
+ ### smart_writer.py
496
+ _Smart README writer with grouping, size limits, and hierarchical levels._
497
+
498
+ **class** `class WriteResult`
499
+ > Result of writing a README file.
500
+
501
+ **class** `class SmartWriter`
502
+ > Smart README writer that generates appropriate content based on level.
503
+
504
+ Levels:
505
+ - overview:
506
+
507
+ **Methods:**
508
+ - `def write_readme(
509
+ self,
510
+ dir_path: Path,
511
+ parse_results: list[ParseResult],
512
+ level: LevelType = "detailed",
513
+ child_dirs: list[Path] | None = None,
514
+ output_file: str = "README_AI.md",
515
+ ) -> WriteResult`
516
+ - `def _generate_overview(
517
+ self,
518
+ dir_path: Path,
519
+ parse_results: list[ParseResult],
520
+ child_dirs: list[Path],
521
+ ) -> str`
522
+ - `def _generate_navigation(
523
+ self,
524
+ dir_path: Path,
525
+ parse_results: list[ParseResult],
526
+ child_dirs: list[Path],
527
+ ) -> str`
528
+ - `def _generate_detailed(
529
+ self,
530
+ dir_path: Path,
531
+ parse_results: list[ParseResult],
532
+ child_dirs: list[Path],
533
+ ) -> str`
534
+ - `def _group_files(self, results: list[ParseResult]) -> dict[str, list[ParseResult]]`
535
+ - `def _filter_symbols(self, symbols: list[Symbol]) -> list[Symbol]`
536
+ - `def _get_key_symbols(self, symbols: list[Symbol]) -> list[Symbol]`
537
+ - `def _extract_module_description(self, dir_path: Path, output_file: str = "README_AI.md") -> str`
538
+ - `def _extract_module_description_semantic(
539
+ self,
540
+ dir_path: Path,
541
+ parse_result: Optional[ParseResult] = None
542
+ ) -> str`
543
+ - `def _truncate_content(self, content: str, max_size: int) -> tuple[str, bool]`
544
+
545
+ **Functions:**
546
+ - `def determine_level(
547
+ dir_path: Path,
548
+ root_path: Path,
549
+ has_children: bool,
550
+ config: IndexingConfig,
551
+ ) -> LevelType`
552
+
553
+ ### symbol_index.py
554
+ _Global symbol index generator for PROJECT_SYMBOLS.md._
555
+
556
+ **class** `class SymbolEntry`
557
+ > A symbol entry in the global index.
558
+
559
+ **class** `class GlobalSymbolIndex`
560
+ > Generates a global symbol index (PROJECT_SYMBOLS.md) for a project.
561
+
562
+ Collects all classes, funct
563
+
564
+ **Methods:**
565
+ - `def collect_symbols(self, quiet: bool = False) -> dict`
566
+ - `def generate_index(self, output_file: str = "PROJECT_SYMBOLS.md") -> Path`
567
+ - `def _group_by_type(self) -> dict[str, list[SymbolEntry]]`
568
+
569
+ ### symbol_scorer.py
570
+ _Symbol importance scoring system.
571
+
572
+ This module provides functionality to score symbols based on their importance,
573
+ helping to prioritize which symbols _
574
+
575
+ **class** `class ScoringContext`
576
+ > Scoring context for symbols.
577
+
578
+ Attributes:
579
+ framework: The framework being used (e.g., 'th
580
+
581
+ **class** `class SymbolImportanceScorer`
582
+ > Score symbols by importance for inclusion in documentation.
583
+
584
+ This scorer evaluates symbols acros
585
+
586
+ **Methods:**
587
+ - `def _score_visibility(self, symbol: Symbol) -> float`
588
+ - `def _score_semantics(self, symbol: Symbol) -> float`
589
+ - `def _score_documentation(self, symbol: Symbol) -> float`
590
+ - `def _score_complexity(self, symbol: Symbol) -> float`
591
+ - `def _score_naming_pattern(self, symbol: Symbol) -> float`
592
+ - `def score(self, symbol: Symbol) -> float`
593
+
594
+ ### tech_debt.py
595
+ _Technical debt detection for code analysis.
596
+
597
+ This module provides tools to detect and analyze technical debt in codebases,
598
+ including file size issues,_
599
+
600
+ **class** `class DebtSeverity(IntEnum)`
601
+ > Severity levels for technical debt issues.
602
+
603
+ Lower values indicate higher severity (CRITICAL is m
604
+
605
+ **class** `class DebtIssue`
606
+ > Represents a technical debt issue detected in code.
607
+
608
+ Attributes:
609
+ severity: The severity
610
+
611
+ **class** `class DebtAnalysisResult`
612
+ > Result of analyzing a file for technical debt.
613
+
614
+ Attributes:
615
+ issues: List of detected tec
616
+
617
+ **class** `class SymbolOverloadAnalysis`
618
+ > Analysis result of symbol overload detection.
619
+
620
+ Attributes:
621
+ total_symbols: Total number o
622
+
623
+ **class** `class FileReport`
624
+ > Report for a single file's technical debt analysis.
625
+
626
+ Attributes:
627
+ file_path: Path to the
628
+
629
+ **class** `class TechDebtReport`
630
+ > Aggregate report for technical debt across multiple files.
631
+
632
+ Attributes:
633
+ file_reports: Li
634
+
635
+ **class** `class TechDebtReporter`
636
+ > Reporter for aggregating technical debt analysis across multiple files.
637
+
638
+ This class collects ana
639
+
640
+ **class** `class TechDebtDetector`
641
+ > Detector for technical debt in code.
642
+
643
+ This class analyzes parsed code to identify technical debt
644
+
645
+ **Methods:**
646
+ - `def add_file_result(
647
+ self,
648
+ file_path: Path,
649
+ debt_analysis: DebtAnalysisResult,
650
+ symbol_analysis: SymbolOverloadAnalysis | None = None,
651
+ )`
652
+ - `def generate_report(self) -> TechDebtReport`
653
+ - `def analyze_file(
654
+ self, parse_result: ParseResult, scorer: SymbolImportanceScorer
655
+ ) -> DebtAnalysisResult`
656
+ - `def _detect_file_size_issues(self, parse_result: ParseResult) -> list[DebtIssue]`
657
+ - `def _detect_god_class(self, parse_result: ParseResult) -> list[DebtIssue]`
658
+ - `def _calculate_quality_score(
659
+ self, parse_result: ParseResult, issues: list[DebtIssue]
660
+ ) -> float`
661
+ - `def analyze_symbol_overload(
662
+ self, parse_result: ParseResult, scorer: SymbolImportanceScorer
663
+ ) -> tuple[list[DebtIssue], SymbolOverloadAnalysis]`
664
+
665
+ _... and 4 more symbols_
666
+
667
+ ### tech_debt_formatters.py
668
+ _Formatters for technical debt reports.
669
+
670
+ This module provides different output formatters for technical debt reports:
671
+ - ConsoleFormatter: Human-readabl_
672
+
673
+ **class** `class ReportFormatter(ABC)`
674
+ > Abstract base class for report formatters.
675
+
676
+ **class** `class ConsoleFormatter(ReportFormatter)`
677
+ > Formatter for console output with ANSI colors.
678
+
679
+ **class** `class MarkdownFormatter(ReportFormatter)`
680
+ > Formatter for Markdown output.
681
+
682
+ **class** `class JSONFormatter(ReportFormatter)`
683
+ > Formatter for JSON output.
684
+
685
+ **Methods:**
686
+ - `def format(self, report: TechDebtReport) -> str`
687
+ - `def _get_severity_color(self, severity: DebtSeverity) -> str`
688
+ - `def format(self, report: TechDebtReport) -> str`
689
+ - `def _format_issues_table(
690
+ self, report: TechDebtReport, severity: DebtSeverity
691
+ ) -> list[str]`
692
+ - `def format(self, report: TechDebtReport) -> str`
693
+
694
+ ### writer.py
695
+ _Markdown writer for README_AI.md files._
696
+
697
+ **class** `class WriteResult`
698
+ > Result of writing a README_AI.md file.
699
+
700
+ **Functions:**
701
+ - `def format_symbols_for_prompt(results: list[ParseResult]) -> str`
702
+ - `def format_imports_for_prompt(results: list[ParseResult]) -> str`
703
+ - `def format_files_for_prompt(results: list[ParseResult]) -> str`
704
+ - `def write_readme(
705
+ dir_path: Path,
706
+ content: str,
707
+ output_file: str = "README_AI.md",
708
+ ) -> WriteResult`
709
+ - `def generate_fallback_readme(
710
+ dir_path: Path,
711
+ results: list[ParseResult],
712
+ output_file: str = "README_AI.md",
713
+ ) -> WriteResult`
714
+
715
+ ## Dependencies
716
+
717
+ - .adaptive_selector
718
+ - .cli_common
719
+ - .cli_config
720
+ - .cli_scan
721
+ - .cli_symbols
722
+ - .cli_tech_debt
723
+ - .config
724
+ - .directory_tree
725
+ - .framework_detect
726
+ - .incremental
727
+ - .invoker
728
+ - .parallel
729
+ - .parser
730
+ - .scanner
731
+ - .semantic_extractor
732
+ - .smart_writer
733
+ - .symbol_index
734
+ - .symbol_scorer
735
+ - .tech_debt
736
+ - .tech_debt_formatters
737
+ _... and 29 more_
738
+
739
+
740
+ **Commit `d9c40ec`**: feat(json): implement ParseResult serialization (Story 1)
741
+
742
+ Changed files:
743
+ - `parser.py`
744
+
745
+
746
+ **Commit `5a89ba2`**: feat(json): add --output json to scan and scan-all commands (Stories 2 & 3)
747
+
748
+ Changed files:
749
+ - `cli_scan.py`
750
+ - `scanner.py`
751
+
752
+
753
+ **Commit `(hash unavailable)`**: _(commit message not recorded)_
754
+
755
+ Changed files:
756
+ - `cli_scan.py`
757
+ - `errors.py`
758
+ - `parser.py`
759
+
760
+ ---
761
+
762
+ ## Recent Changes
763
+
764
+ **Commit `(hash unavailable)`**: _(commit message not recorded)_
765
+
766
+ Changed files:
767
+ - `config.py`