autosar-calltree 0.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- autosar_calltree/__init__.py +24 -0
- autosar_calltree/analyzers/__init__.py +5 -0
- autosar_calltree/analyzers/call_tree_builder.py +369 -0
- autosar_calltree/cli/__init__.py +5 -0
- autosar_calltree/cli/main.py +330 -0
- autosar_calltree/config/__init__.py +10 -0
- autosar_calltree/config/module_config.py +179 -0
- autosar_calltree/database/__init__.py +23 -0
- autosar_calltree/database/function_database.py +505 -0
- autosar_calltree/database/models.py +189 -0
- autosar_calltree/generators/__init__.py +5 -0
- autosar_calltree/generators/mermaid_generator.py +488 -0
- autosar_calltree/parsers/__init__.py +6 -0
- autosar_calltree/parsers/autosar_parser.py +314 -0
- autosar_calltree/parsers/c_parser.py +415 -0
- autosar_calltree/version.py +5 -0
- autosar_calltree-0.3.0.dist-info/METADATA +482 -0
- autosar_calltree-0.3.0.dist-info/RECORD +22 -0
- autosar_calltree-0.3.0.dist-info/WHEEL +5 -0
- autosar_calltree-0.3.0.dist-info/entry_points.txt +2 -0
- autosar_calltree-0.3.0.dist-info/licenses/LICENSE +21 -0
- autosar_calltree-0.3.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,505 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Function database module.
|
|
3
|
+
|
|
4
|
+
This module manages the database of all functions found in the codebase,
|
|
5
|
+
including caching for performance and lookup methods.
|
|
6
|
+
|
|
7
|
+
Requirements:
|
|
8
|
+
- SWR_CONFIG_00003: Module Configuration Integration
|
|
9
|
+
- SWR_CACHE_00001: File-by-File Cache Loading Progress
|
|
10
|
+
- SWR_CACHE_00002: Cache Status Indication
|
|
11
|
+
- SWR_CACHE_00003: Cache Loading Errors
|
|
12
|
+
- SWR_CACHE_00004: Performance Considerations
|
|
13
|
+
"""
|
|
14
|
+
|
|
15
|
+
import hashlib
|
|
16
|
+
import pickle
|
|
17
|
+
from dataclasses import dataclass, field
|
|
18
|
+
from datetime import datetime
|
|
19
|
+
from pathlib import Path
|
|
20
|
+
from typing import Any, Dict, List, Optional
|
|
21
|
+
|
|
22
|
+
from ..config.module_config import ModuleConfig
|
|
23
|
+
from ..parsers.autosar_parser import AutosarParser
|
|
24
|
+
from ..parsers.c_parser import CParser
|
|
25
|
+
from .models import FunctionInfo
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
@dataclass
class CacheMetadata:
    """Metadata stored alongside the pickled database.

    _load_from_cache uses this to decide whether an existing cache file may
    be reused for the current source tree.
    """

    # Timestamp of when the cache was written.
    created_at: datetime
    # Source tree the cache was built from; a mismatch invalidates the cache.
    source_directory: str
    # Number of files scanned when the cache was created.
    file_count: int
    # Per-file checksums.  NOTE(review): this field is never populated or
    # checked by _save_to_cache/_load_from_cache in this module — confirm
    # whether content-based cache invalidation was intended.
    file_checksums: Dict[str, str] = field(default_factory=dict)
|
36
|
+
|
|
37
|
+
|
|
38
|
+
class FunctionDatabase:
|
|
39
|
+
"""
|
|
40
|
+
Database of all functions in the codebase.
|
|
41
|
+
|
|
42
|
+
This class scans source files, parses function definitions using both
|
|
43
|
+
AUTOSAR and traditional C parsers, and maintains a searchable database
|
|
44
|
+
with optional caching support.
|
|
45
|
+
"""
|
|
46
|
+
|
|
47
|
+
def __init__(
|
|
48
|
+
self,
|
|
49
|
+
source_dir: str,
|
|
50
|
+
cache_dir: Optional[str] = None,
|
|
51
|
+
module_config: Optional[ModuleConfig] = None,
|
|
52
|
+
):
|
|
53
|
+
"""
|
|
54
|
+
Initialize the function database.
|
|
55
|
+
|
|
56
|
+
Args:
|
|
57
|
+
source_dir: Root directory containing source files
|
|
58
|
+
cache_dir: Directory for cache files (default: .cache in source_dir)
|
|
59
|
+
module_config: Module configuration for SW module mappings
|
|
60
|
+
"""
|
|
61
|
+
self.source_dir = Path(source_dir)
|
|
62
|
+
|
|
63
|
+
if cache_dir:
|
|
64
|
+
self.cache_dir = Path(cache_dir)
|
|
65
|
+
else:
|
|
66
|
+
self.cache_dir = self.source_dir / ".cache"
|
|
67
|
+
|
|
68
|
+
self.cache_dir.mkdir(parents=True, exist_ok=True)
|
|
69
|
+
self.cache_file = self.cache_dir / "function_db.pkl"
|
|
70
|
+
|
|
71
|
+
# Database: function_name -> List[FunctionInfo]
|
|
72
|
+
# Multiple entries for functions with same name (static in different files)
|
|
73
|
+
self.functions: Dict[str, List[FunctionInfo]] = {}
|
|
74
|
+
|
|
75
|
+
# Qualified function keys: "file::function" -> FunctionInfo
|
|
76
|
+
# Used for resolving static functions
|
|
77
|
+
self.qualified_functions: Dict[str, FunctionInfo] = {}
|
|
78
|
+
|
|
79
|
+
# All functions by file
|
|
80
|
+
self.functions_by_file: Dict[str, List[FunctionInfo]] = {}
|
|
81
|
+
|
|
82
|
+
# Parsers
|
|
83
|
+
self.autosar_parser = AutosarParser()
|
|
84
|
+
self.c_parser = CParser()
|
|
85
|
+
|
|
86
|
+
# Module configuration
|
|
87
|
+
self.module_config = module_config
|
|
88
|
+
self.module_stats: Dict[str, int] = {}
|
|
89
|
+
|
|
90
|
+
# Statistics
|
|
91
|
+
self.total_files_scanned = 0
|
|
92
|
+
self.total_functions_found = 0
|
|
93
|
+
self.parse_errors: List[str] = []
|
|
94
|
+
|
|
95
|
+
def build_database(
|
|
96
|
+
self, use_cache: bool = True, rebuild_cache: bool = False, verbose: bool = False
|
|
97
|
+
) -> None:
|
|
98
|
+
"""
|
|
99
|
+
Build the function database by scanning all source files.
|
|
100
|
+
|
|
101
|
+
Args:
|
|
102
|
+
use_cache: Whether to use cached data if available
|
|
103
|
+
rebuild_cache: Force rebuild of cache even if valid
|
|
104
|
+
verbose: Print progress information
|
|
105
|
+
"""
|
|
106
|
+
if verbose:
|
|
107
|
+
print(f"Scanning source directory: {self.source_dir}")
|
|
108
|
+
|
|
109
|
+
# Try to load from cache first
|
|
110
|
+
if use_cache and not rebuild_cache:
|
|
111
|
+
if self._load_from_cache(verbose):
|
|
112
|
+
if verbose:
|
|
113
|
+
print(f"Loaded {self.total_functions_found} functions from cache")
|
|
114
|
+
return
|
|
115
|
+
|
|
116
|
+
# Clear existing data
|
|
117
|
+
self.functions.clear()
|
|
118
|
+
self.qualified_functions.clear()
|
|
119
|
+
self.functions_by_file.clear()
|
|
120
|
+
self.parse_errors.clear()
|
|
121
|
+
self.total_files_scanned = 0
|
|
122
|
+
self.total_functions_found = 0
|
|
123
|
+
|
|
124
|
+
# Find all C source files
|
|
125
|
+
c_files = list(self.source_dir.rglob("*.c"))
|
|
126
|
+
|
|
127
|
+
if verbose:
|
|
128
|
+
print(f"Found {len(c_files)} C source files")
|
|
129
|
+
|
|
130
|
+
# Parse each file
|
|
131
|
+
for idx, file_path in enumerate(c_files, 1):
|
|
132
|
+
if verbose and idx % 100 == 0:
|
|
133
|
+
print(f"Processing file {idx}/{len(c_files)}: {file_path.name}")
|
|
134
|
+
|
|
135
|
+
try:
|
|
136
|
+
self._parse_file(file_path)
|
|
137
|
+
except Exception as e:
|
|
138
|
+
error_msg = f"Error parsing {file_path}: {e}"
|
|
139
|
+
self.parse_errors.append(error_msg)
|
|
140
|
+
if verbose:
|
|
141
|
+
print(f"Warning: {error_msg}")
|
|
142
|
+
|
|
143
|
+
self.total_files_scanned = len(c_files)
|
|
144
|
+
|
|
145
|
+
if verbose:
|
|
146
|
+
print("\nDatabase built successfully:")
|
|
147
|
+
print(f" - Files scanned: {self.total_files_scanned}")
|
|
148
|
+
print(f" - Functions found: {self.total_functions_found}")
|
|
149
|
+
print(f" - Unique function names: {len(self.functions)}")
|
|
150
|
+
print(f" - Parse errors: {len(self.parse_errors)}")
|
|
151
|
+
|
|
152
|
+
# Save to cache
|
|
153
|
+
if use_cache:
|
|
154
|
+
self._save_to_cache(verbose)
|
|
155
|
+
|
|
156
|
+
def _parse_file(self, file_path: Path) -> None:
|
|
157
|
+
"""
|
|
158
|
+
Parse a single file and add functions to database.
|
|
159
|
+
|
|
160
|
+
Args:
|
|
161
|
+
file_path: Path to source file
|
|
162
|
+
"""
|
|
163
|
+
# Use C parser which handles both traditional C and AUTOSAR via fallback
|
|
164
|
+
functions = self.c_parser.parse_file(file_path)
|
|
165
|
+
|
|
166
|
+
# Add functions to database
|
|
167
|
+
for func_info in functions:
|
|
168
|
+
self._add_function(func_info)
|
|
169
|
+
|
|
170
|
+
# Track functions by file
|
|
171
|
+
if functions:
|
|
172
|
+
file_key = str(file_path)
|
|
173
|
+
self.functions_by_file[file_key] = functions
|
|
174
|
+
|
|
175
|
+
def _add_function(self, func_info: FunctionInfo) -> None:
|
|
176
|
+
"""
|
|
177
|
+
Add a function to the database.
|
|
178
|
+
|
|
179
|
+
Args:
|
|
180
|
+
func_info: Function information to add
|
|
181
|
+
"""
|
|
182
|
+
# Apply module mapping if configuration is available
|
|
183
|
+
if self.module_config:
|
|
184
|
+
func_info.sw_module = self.module_config.get_module_for_file(
|
|
185
|
+
func_info.file_path
|
|
186
|
+
)
|
|
187
|
+
|
|
188
|
+
# Track module statistics
|
|
189
|
+
if func_info.sw_module:
|
|
190
|
+
self.module_stats[func_info.sw_module] = (
|
|
191
|
+
self.module_stats.get(func_info.sw_module, 0) + 1
|
|
192
|
+
)
|
|
193
|
+
|
|
194
|
+
# Add to main functions dictionary
|
|
195
|
+
if func_info.name not in self.functions:
|
|
196
|
+
self.functions[func_info.name] = []
|
|
197
|
+
self.functions[func_info.name].append(func_info)
|
|
198
|
+
|
|
199
|
+
# Add to qualified functions (for static function resolution)
|
|
200
|
+
file_path = Path(func_info.file_path).stem # Get filename without extension
|
|
201
|
+
qualified_key = f"{file_path}::{func_info.name}"
|
|
202
|
+
self.qualified_functions[qualified_key] = func_info
|
|
203
|
+
|
|
204
|
+
self.total_functions_found += 1
|
|
205
|
+
|
|
206
|
+
def lookup_function(
|
|
207
|
+
self, function_name: str, context_file: Optional[str] = None
|
|
208
|
+
) -> List[FunctionInfo]:
|
|
209
|
+
"""
|
|
210
|
+
Lookup a function by name.
|
|
211
|
+
|
|
212
|
+
Args:
|
|
213
|
+
function_name: Name of function to lookup
|
|
214
|
+
context_file: File path for context (helps resolve static functions)
|
|
215
|
+
|
|
216
|
+
Returns:
|
|
217
|
+
List of FunctionInfo objects matching the name
|
|
218
|
+
"""
|
|
219
|
+
# If context file is provided and function is qualified, try qualified lookup
|
|
220
|
+
if context_file and "::" in function_name:
|
|
221
|
+
qualified_info = self.qualified_functions.get(function_name)
|
|
222
|
+
if qualified_info:
|
|
223
|
+
return [qualified_info]
|
|
224
|
+
|
|
225
|
+
# Try direct lookup
|
|
226
|
+
if function_name in self.functions:
|
|
227
|
+
results = self.functions[function_name]
|
|
228
|
+
|
|
229
|
+
# If multiple definitions, select the best one
|
|
230
|
+
if len(results) > 1:
|
|
231
|
+
best_match = self._select_best_function_match(results, context_file)
|
|
232
|
+
if best_match:
|
|
233
|
+
return [best_match]
|
|
234
|
+
|
|
235
|
+
return results
|
|
236
|
+
|
|
237
|
+
return []
|
|
238
|
+
|
|
239
|
+
    def _select_best_function_match(
        self, candidates: List[FunctionInfo], context_file: Optional[str] = None
    ) -> Optional[FunctionInfo]:
        """
        Select the best function from multiple candidates.

        Implements: SWR_CONFIG_00003 (Module Configuration Integration - Smart Function Selection)

        Selection strategy (filters applied in order; each stage either
        returns a unique winner or narrows `candidates` for the next stage):
        1. Prefer functions that have actual implementations (have function calls)
        2. Prefer functions from files that match the function name pattern
        3. Avoid functions from the calling file (for cross-module calls)
        4. Prefer functions with assigned modules over those without

        Args:
            candidates: List of FunctionInfo objects to choose from
            context_file: File path of the calling function (optional)

        Returns:
            Best matching FunctionInfo; None only when `candidates` is empty
            (for a non-empty list the first surviving candidate is returned
            even when the heuristics could not discriminate)
        """
        if not candidates:
            return None

        # Single candidate: nothing to disambiguate.
        if len(candidates) == 1:
            return candidates[0]

        # Strategy 1: Prefer functions with actual implementations (have calls).
        # A definition with recorded callees is assumed to be the real body,
        # as opposed to a declaration-like entry with an empty call list.
        implementations = [f for f in candidates if f.calls]
        if len(implementations) == 1:
            return implementations[0]
        elif len(implementations) > 1:
            candidates = implementations

        # Strategy 2: Prefer functions from files matching the function name
        # e.g., COM_InitCommunication should be in com_*.c or communication.c
        func_name_lower = candidates[0].name.lower()

        # Check for matching files
        for func_info in candidates:
            file_stem = Path(func_info.file_path).stem.lower()

            # Underscores are stripped from the stem so e.g. "com_stack"
            # matches a "COMStack_..." prefix.
            if func_name_lower.startswith(file_stem.replace("_", "")):
                # NOTE(review): "DemoModule" is a hard-coded module name that
                # is excluded here — looks like leftover demo/test filtering;
                # confirm it is still wanted in production use.
                if func_info.sw_module and func_info.sw_module != "DemoModule":
                    return func_info

        # Strategy 3: For cross-module calls, avoid the calling file
        if context_file:
            context_stem = Path(context_file).stem
            # Prefer functions NOT from the calling file
            others = [f for f in candidates if Path(f.file_path).stem != context_stem]
            if len(others) == 1:
                return others[0]
            elif len(others) > 1:
                candidates = others

        # Strategy 4: Prefer functions with assigned modules over those without
        with_modules = [f for f in candidates if f.sw_module]
        if len(with_modules) == 1:
            return with_modules[0]
        elif len(with_modules) > 1:
            candidates = with_modules

        # If all else fails, return the first candidate
        return candidates[0]
|
|
306
|
+
|
|
307
|
+
def get_function_by_qualified_name(
|
|
308
|
+
self, qualified_name: str
|
|
309
|
+
) -> Optional[FunctionInfo]:
|
|
310
|
+
"""
|
|
311
|
+
Get a function by its qualified name (file::function).
|
|
312
|
+
|
|
313
|
+
Args:
|
|
314
|
+
qualified_name: Qualified function name
|
|
315
|
+
|
|
316
|
+
Returns:
|
|
317
|
+
FunctionInfo or None if not found
|
|
318
|
+
"""
|
|
319
|
+
return self.qualified_functions.get(qualified_name)
|
|
320
|
+
|
|
321
|
+
def get_all_function_names(self) -> List[str]:
|
|
322
|
+
"""
|
|
323
|
+
Get all unique function names in the database.
|
|
324
|
+
|
|
325
|
+
Returns:
|
|
326
|
+
Sorted list of function names
|
|
327
|
+
"""
|
|
328
|
+
return sorted(self.functions.keys())
|
|
329
|
+
|
|
330
|
+
def get_functions_in_file(self, file_path: str) -> List[FunctionInfo]:
|
|
331
|
+
"""
|
|
332
|
+
Get all functions defined in a specific file.
|
|
333
|
+
|
|
334
|
+
Args:
|
|
335
|
+
file_path: Path to source file
|
|
336
|
+
|
|
337
|
+
Returns:
|
|
338
|
+
List of FunctionInfo objects
|
|
339
|
+
"""
|
|
340
|
+
return self.functions_by_file.get(file_path, [])
|
|
341
|
+
|
|
342
|
+
def search_functions(self, pattern: str) -> List[FunctionInfo]:
|
|
343
|
+
"""
|
|
344
|
+
Search for functions matching a pattern.
|
|
345
|
+
|
|
346
|
+
Args:
|
|
347
|
+
pattern: Search pattern (substring match)
|
|
348
|
+
|
|
349
|
+
Returns:
|
|
350
|
+
List of matching FunctionInfo objects
|
|
351
|
+
"""
|
|
352
|
+
results = []
|
|
353
|
+
pattern_lower = pattern.lower()
|
|
354
|
+
|
|
355
|
+
for func_name, func_list in self.functions.items():
|
|
356
|
+
if pattern_lower in func_name.lower():
|
|
357
|
+
results.extend(func_list)
|
|
358
|
+
|
|
359
|
+
return results
|
|
360
|
+
|
|
361
|
+
def get_statistics(self) -> Dict[str, Any]:
|
|
362
|
+
"""
|
|
363
|
+
Get database statistics.
|
|
364
|
+
|
|
365
|
+
Returns:
|
|
366
|
+
Dictionary with statistics
|
|
367
|
+
"""
|
|
368
|
+
static_count = sum(
|
|
369
|
+
1
|
|
370
|
+
for funcs in self.functions.values()
|
|
371
|
+
for func in funcs
|
|
372
|
+
if func.function_type.name == "STATIC"
|
|
373
|
+
)
|
|
374
|
+
|
|
375
|
+
return {
|
|
376
|
+
"total_files_scanned": self.total_files_scanned,
|
|
377
|
+
"total_functions_found": self.total_functions_found,
|
|
378
|
+
"unique_function_names": len(self.functions),
|
|
379
|
+
"static_functions": static_count,
|
|
380
|
+
"parse_errors": len(self.parse_errors),
|
|
381
|
+
"files_with_functions": len(self.functions_by_file),
|
|
382
|
+
"module_stats": self.module_stats.copy(),
|
|
383
|
+
}
|
|
384
|
+
|
|
385
|
+
def _compute_file_checksum(self, file_path: Path) -> str:
|
|
386
|
+
"""
|
|
387
|
+
Compute checksum of a file.
|
|
388
|
+
|
|
389
|
+
Args:
|
|
390
|
+
file_path: Path to file
|
|
391
|
+
|
|
392
|
+
Returns:
|
|
393
|
+
MD5 checksum as hex string
|
|
394
|
+
"""
|
|
395
|
+
md5 = hashlib.md5()
|
|
396
|
+
try:
|
|
397
|
+
with open(file_path, "rb") as f:
|
|
398
|
+
for chunk in iter(lambda: f.read(8192), b""):
|
|
399
|
+
md5.update(chunk)
|
|
400
|
+
return md5.hexdigest()
|
|
401
|
+
except Exception:
|
|
402
|
+
return ""
|
|
403
|
+
|
|
404
|
+
def _save_to_cache(self, verbose: bool = False) -> None:
|
|
405
|
+
"""
|
|
406
|
+
Save database to cache file.
|
|
407
|
+
|
|
408
|
+
Args:
|
|
409
|
+
verbose: Print progress information
|
|
410
|
+
"""
|
|
411
|
+
try:
|
|
412
|
+
# Create metadata
|
|
413
|
+
metadata = CacheMetadata(
|
|
414
|
+
created_at=datetime.now(),
|
|
415
|
+
source_directory=str(self.source_dir),
|
|
416
|
+
file_count=self.total_files_scanned,
|
|
417
|
+
)
|
|
418
|
+
|
|
419
|
+
# Create cache data
|
|
420
|
+
cache_data = {
|
|
421
|
+
"metadata": metadata,
|
|
422
|
+
"functions": self.functions,
|
|
423
|
+
"qualified_functions": self.qualified_functions,
|
|
424
|
+
"functions_by_file": self.functions_by_file,
|
|
425
|
+
"total_files_scanned": self.total_files_scanned,
|
|
426
|
+
"total_functions_found": self.total_functions_found,
|
|
427
|
+
"parse_errors": self.parse_errors,
|
|
428
|
+
}
|
|
429
|
+
|
|
430
|
+
# Save to pickle
|
|
431
|
+
with open(self.cache_file, "wb") as f:
|
|
432
|
+
pickle.dump(cache_data, f, protocol=pickle.HIGHEST_PROTOCOL)
|
|
433
|
+
|
|
434
|
+
if verbose:
|
|
435
|
+
print(f"Cache saved to {self.cache_file}")
|
|
436
|
+
|
|
437
|
+
except Exception as e:
|
|
438
|
+
if verbose:
|
|
439
|
+
print(f"Warning: Failed to save cache: {e}")
|
|
440
|
+
|
|
441
|
+
def _load_from_cache(self, verbose: bool = False) -> bool:
|
|
442
|
+
"""
|
|
443
|
+
Load database from cache file.
|
|
444
|
+
|
|
445
|
+
Implements: SWR_CACHE_00001 (File-by-File Cache Loading Progress)
|
|
446
|
+
Implements: SWR_CACHE_00002 (Cache Status Indication)
|
|
447
|
+
Implements: SWR_CACHE_00003 (Cache Loading Errors)
|
|
448
|
+
|
|
449
|
+
Args:
|
|
450
|
+
verbose: Print progress information
|
|
451
|
+
|
|
452
|
+
Returns:
|
|
453
|
+
True if cache loaded successfully, False otherwise
|
|
454
|
+
"""
|
|
455
|
+
if not self.cache_file.exists():
|
|
456
|
+
return False
|
|
457
|
+
|
|
458
|
+
try:
|
|
459
|
+
with open(self.cache_file, "rb") as f:
|
|
460
|
+
cache_data = pickle.load(f)
|
|
461
|
+
|
|
462
|
+
# Validate metadata
|
|
463
|
+
metadata: CacheMetadata = cache_data.get("metadata")
|
|
464
|
+
if not metadata:
|
|
465
|
+
if verbose:
|
|
466
|
+
print("Cache invalid: missing metadata")
|
|
467
|
+
return False
|
|
468
|
+
|
|
469
|
+
# Check source directory matches
|
|
470
|
+
if metadata.source_directory != str(self.source_dir):
|
|
471
|
+
if verbose:
|
|
472
|
+
print("Cache invalid: source directory mismatch")
|
|
473
|
+
return False
|
|
474
|
+
|
|
475
|
+
# Load data
|
|
476
|
+
self.functions = cache_data.get("functions", {})
|
|
477
|
+
self.qualified_functions = cache_data.get("qualified_functions", {})
|
|
478
|
+
self.functions_by_file = cache_data.get("functions_by_file", {})
|
|
479
|
+
self.total_files_scanned = cache_data.get("total_files_scanned", 0)
|
|
480
|
+
self.total_functions_found = cache_data.get("total_functions_found", 0)
|
|
481
|
+
self.parse_errors = cache_data.get("parse_errors", [])
|
|
482
|
+
|
|
483
|
+
# Show file-by-file progress in verbose mode
|
|
484
|
+
if verbose:
|
|
485
|
+
print(f"Loading {self.total_files_scanned} files from cache...")
|
|
486
|
+
for idx, (file_path, functions) in enumerate(
|
|
487
|
+
self.functions_by_file.items(), 1
|
|
488
|
+
):
|
|
489
|
+
file_name = Path(file_path).name
|
|
490
|
+
func_count = len(functions)
|
|
491
|
+
print(
|
|
492
|
+
f" [{idx}/{self.total_files_scanned}] {file_name}: {func_count} functions"
|
|
493
|
+
)
|
|
494
|
+
|
|
495
|
+
return True
|
|
496
|
+
|
|
497
|
+
except Exception as e:
|
|
498
|
+
if verbose:
|
|
499
|
+
print(f"Warning: Failed to load cache: {e}")
|
|
500
|
+
return False
|
|
501
|
+
|
|
502
|
+
def clear_cache(self) -> None:
|
|
503
|
+
"""Delete the cache file if it exists."""
|
|
504
|
+
if self.cache_file.exists():
|
|
505
|
+
self.cache_file.unlink()
|
|
@@ -0,0 +1,189 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Core data models for the AUTOSAR Call Tree Analyzer.
|
|
3
|
+
|
|
4
|
+
This module defines the data structures used throughout the package.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from dataclasses import dataclass, field
|
|
8
|
+
from datetime import datetime
|
|
9
|
+
from enum import Enum
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
from typing import Dict, List, Optional, Set
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class FunctionType(Enum):
    """Classification of how a function definition was recognized.

    NOTE: there is no STATIC member — whether a function is static is
    tracked separately by FunctionInfo.is_static.
    """

    AUTOSAR_FUNC = "autosar_func"  # FUNC(rettype, memclass) name()
    AUTOSAR_FUNC_P2VAR = "autosar_func_p2var"  # FUNC_P2VAR(type, ...)
    AUTOSAR_FUNC_P2CONST = "autosar_func_p2const"  # FUNC_P2CONST(type, ...)
    TRADITIONAL_C = "traditional_c"  # Standard C: rettype name()
    RTE_CALL = "rte_call"  # Rte_Read_*, Rte_Write_*, etc.
    UNKNOWN = "unknown"  # Parser could not classify the definition
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
@dataclass
class Parameter:
    """One formal parameter of a parsed function."""

    name: str
    param_type: str  # Actual type (uint32, uint8*, etc.)
    is_pointer: bool = False
    is_const: bool = False
    memory_class: Optional[str] = None  # AUTOSAR memory class (AUTOMATIC, etc.)

    def __str__(self) -> str:
        """Render the parameter roughly as it would appear in C source."""
        rendered = "{}{}{} {}".format(
            "const " if self.is_const else "",
            self.param_type,
            "*" if self.is_pointer else "",
            self.name,
        )
        # The AUTOSAR memory class, when known, is appended in brackets.
        if self.memory_class:
            rendered += f" [{self.memory_class}]"
        return rendered
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
@dataclass
class FunctionInfo:
    """Everything known about a single parsed function definition."""

    name: str
    return_type: str
    file_path: Path
    line_number: int
    is_static: bool
    function_type: FunctionType
    memory_class: Optional[str] = None  # AUTOSAR memory class (RTE_CODE, etc.)
    parameters: List[Parameter] = field(default_factory=list)
    calls: List[str] = field(default_factory=list)  # Functions called within
    called_by: Set[str] = field(default_factory=set)  # Functions that call this

    # AUTOSAR specific: originating macro ("FUNC", "FUNC_P2VAR", etc.)
    macro_type: Optional[str] = None

    # "file::function" key, used to disambiguate static functions
    qualified_name: Optional[str] = None

    # SW module name assigned from configuration
    sw_module: Optional[str] = None

    def __hash__(self) -> int:
        """Hash on the identity triple (name, file, line)."""
        return hash((self.name, str(self.file_path), self.line_number))

    def __eq__(self, other: object) -> bool:
        """Two entries are equal when name, file and line all match."""
        if isinstance(other, FunctionInfo):
            return (self.name, self.file_path, self.line_number) == (
                other.name,
                other.file_path,
                other.line_number,
            )
        return False

    def get_signature(self) -> str:
        """Render the C-style signature of this function."""
        joined = ", ".join(map(str, self.parameters))
        return f"{self.return_type} {self.name}({joined})"

    def is_rte_function(self) -> bool:
        """True for RTE functions (Rte_* name or RTE_CALL type)."""
        if self.name.startswith("Rte_"):
            return True
        return self.function_type == FunctionType.RTE_CALL
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
@dataclass
class CallTreeNode:
    """Node in the function call tree."""

    function_info: FunctionInfo
    depth: int
    children: List["CallTreeNode"] = field(default_factory=list)
    parent: Optional["CallTreeNode"] = None
    is_recursive: bool = False  # True if function already in call stack
    is_truncated: bool = False  # True if depth limit reached
    call_count: int = 1  # Number of times this function is called

    def add_child(self, child: "CallTreeNode") -> None:
        """Attach *child* below this node and set its parent pointer."""
        child.parent = self
        self.children.append(child)

    def get_all_functions(self) -> Set[FunctionInfo]:
        """Collect the unique functions of this whole subtree."""
        collected = {self.function_info}
        for node in self.children:
            collected |= node.get_all_functions()
        return collected

    def get_max_depth(self) -> int:
        """Depth of the deepest node in this subtree."""
        if self.children:
            return max(node.get_max_depth() for node in self.children)
        # A leaf's subtree bottoms out at its own depth.
        return self.depth
|
|
123
|
+
|
|
124
|
+
|
|
125
|
+
@dataclass
class CircularDependency:
    """Represents a circular call chain."""

    cycle: List[str]  # Function names along the cycle
    depth: int

    def __str__(self) -> str:
        """Render the cycle as an arrow-separated chain, e.g. "a -> b -> a"."""
        return " -> ".join(self.cycle)
|
|
135
|
+
|
|
136
|
+
|
|
137
|
+
@dataclass
class AnalysisStatistics:
    """Counters collected during call tree analysis."""

    total_functions: int = 0
    unique_functions: int = 0
    max_depth_reached: int = 0
    total_function_calls: int = 0
    static_functions: int = 0
    rte_functions: int = 0
    autosar_functions: int = 0
    circular_dependencies_found: int = 0

    def to_dict(self) -> Dict[str, int]:
        """Return the counters as a plain dict (e.g. for serialization)."""
        # Key order matters for stable output, so it is listed explicitly.
        keys = (
            "total_functions",
            "unique_functions",
            "max_depth_reached",
            "total_function_calls",
            "static_functions",
            "rte_functions",
            "autosar_functions",
            "circular_dependencies_found",
        )
        return {key: getattr(self, key) for key in keys}
|
|
162
|
+
|
|
163
|
+
|
|
164
|
+
@dataclass
class AnalysisResult:
    """Complete result of one call-tree analysis run."""

    root_function: str
    call_tree: Optional[CallTreeNode]
    statistics: AnalysisStatistics
    circular_dependencies: List[CircularDependency] = field(default_factory=list)
    errors: List[str] = field(default_factory=list)
    timestamp: datetime = field(default_factory=datetime.now)
    source_directory: Optional[Path] = None
    max_depth_limit: int = 3

    def get_all_functions(self) -> Set[FunctionInfo]:
        """All unique functions in the call tree (empty set when no tree)."""
        if self.call_tree is None:
            return set()
        return self.call_tree.get_all_functions()

    def has_circular_dependencies(self) -> bool:
        """True when at least one circular dependency was recorded."""
        return bool(self.circular_dependencies)
|
|
186
|
+
|
|
187
|
+
|
|
188
|
+
# Type aliases for convenience
# A name key maps to every definition sharing that name (several entries can
# coexist, e.g. static functions defined in different files).
FunctionDict = Dict[str, List[FunctionInfo]]  # Maps name to list of FunctionInfo
|