qgis-plugin-analyzer 1.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,145 @@
1
+ """Configuration and TOML parsing utilities."""
2
+
3
+ import logging
4
+ import pathlib
5
+ import re
6
+ from typing import Any, Dict
7
+
8
+ logger = logging.getLogger("qgis_analyzer")
9
+
10
+
11
+ def _parse_toml_value(val_str: str) -> Any:
12
+ """Converts a TOML value string to appropriate Python type."""
13
+ if val_str.lower() == "true":
14
+ return True
15
+ elif val_str.lower() == "false":
16
+ return False
17
+ elif re.match(r"^-?\d+$", val_str):
18
+ return int(val_str)
19
+ elif (val_str.startswith('"') and val_str.endswith('"')) or (
20
+ val_str.startswith("'") and val_str.endswith("'")
21
+ ):
22
+ return val_str[1:-1]
23
+ return val_str
24
+
25
+
26
+ def _process_section_header(line: str, profile_regex: re.Pattern, rules_regex: re.Pattern) -> tuple:
27
+ """Processes a TOML section header."""
28
+ rules_match = rules_regex.match(line)
29
+ if rules_match:
30
+ return rules_match.group(1), True
31
+
32
+ profile_match = profile_regex.match(line)
33
+ if profile_match:
34
+ return profile_match.group(1), False
35
+
36
+ return None, False
37
+
38
+
39
+ def _ensure_profile_structure(data: dict, profile_name: str, is_rules: bool) -> None:
40
+ """Ensures the profile structure exists in the data dictionary."""
41
+ if profile_name not in data["tool"]["qgis-analyzer"]["profiles"]:
42
+ data["tool"]["qgis-analyzer"]["profiles"][profile_name] = {}
43
+
44
+ if is_rules and "rules" not in data["tool"]["qgis-analyzer"]["profiles"][profile_name]:
45
+ data["tool"]["qgis-analyzer"]["profiles"][profile_name]["rules"] = {}
46
+
47
+
48
def _minimal_toml_load(file_obj) -> Dict[str, Any]:
    """EXTREMELY minimal TOML parser for pyproject.toml.

    Only understands ``[tool.qgis-analyzer.profiles.*]`` sections and
    simple ``key = value`` pairs; used as a fallback when tomllib is
    unavailable.

    Args:
        file_obj: Binary file object positioned at the start of the file.

    Returns:
        A dict shaped like tomllib's output for the sections of interest.
    """
    data: Dict[str, Any] = {"tool": {"qgis-analyzer": {"profiles": {}}}}
    active_profile = None
    rules_mode = False

    profile_regex = re.compile(r"^\[tool\.qgis-analyzer\.profiles\.([\w-]+)\]")
    rules_regex = re.compile(r"^\[tool\.qgis-analyzer\.profiles\.([\w-]+)\.rules\]")

    try:
        for raw_line in file_obj.read().decode("utf-8").splitlines():
            stripped = raw_line.strip()
            # Skip blanks and full-line comments.
            if not stripped or stripped.startswith("#"):
                continue

            if stripped.startswith("[") and stripped.endswith("]"):
                name, is_rules = _process_section_header(stripped, profile_regex, rules_regex)
                if name:
                    active_profile, rules_mode = name, is_rules
                    _ensure_profile_structure(data, active_profile, rules_mode)
                else:
                    # Unrelated section header: stop collecting keys.
                    active_profile, rules_mode = None, False
                continue

            if active_profile and "=" in stripped:
                raw_key, raw_val = stripped.split("=", 1)
                value = _parse_toml_value(raw_val.strip())
                target = data["tool"]["qgis-analyzer"]["profiles"][active_profile]
                if rules_mode:
                    target = target["rules"]
                target[raw_key.strip()] = value
    except Exception as e:
        logger.error(f"Error in minimal TOML parser: {e}")

    return data
88
+
89
+
90
# tomllib ships with the standard library from Python 3.11 onwards; on
# older interpreters it is unavailable and we fall back to the minimal
# parser above (load_profile_config checks this binding for None).
try:
    import tomllib
except ImportError:
    tomllib = None
94
+
95
+
96
def load_profile_config(
    project_path: pathlib.Path, profile_name: str = "default"
) -> Dict[str, Any]:
    """Loads a specific profile configuration from pyproject.toml.

    Args:
        project_path: Root path of the project.
        profile_name: Name of the configuration profile.

    Returns:
        A dictionary containing the profile configuration merged over
        built-in defaults. The defaults are returned unchanged when the
        file or the profile is missing, or on any parse error.
    """
    pyproject = project_path / "pyproject.toml"
    default_config = {
        "strict": False,
        "generate_html": False,
        "fail_on_error": False,
        "rules": {},
    }

    if not pyproject.exists():
        return default_config

    try:
        with open(pyproject, "rb") as f:
            # Bug fix: the previous version re-imported tomllib under an
            # alias inside this block; the module (or None) is already
            # bound at module scope, so use it directly.
            if tomllib:
                data = tomllib.load(f)
            else:
                data = _minimal_toml_load(f)

        profiles = data.get("tool", {}).get("qgis-analyzer", {}).get("profiles", {})
        profile_data = profiles.get(profile_name)

        if not profile_data:
            # Only warn for non-default profiles; a missing "default"
            # section is the normal unconfigured case.
            if profile_name != "default":
                logger.warning(f"Profile '{profile_name}' not found. Using default values.")
            return default_config

        # Profile values override defaults; "rules" comes from the
        # profile's rules table (or stays empty).
        rules_config = profile_data.get("rules", {})
        return {
            **default_config,
            **profile_data,
            "rules": rules_config,
        }
    except Exception as e:
        logger.error(f"Error loading profile: {e}")
        return default_config
@@ -0,0 +1,46 @@
1
+ """Logging utilities for the QGIS Plugin Analyzer."""
2
+
3
+ import logging
4
+ import pathlib
5
+ import sys
6
+
7
+
8
def setup_logger(output_dir: pathlib.Path) -> logging.Logger:
    """Configures the global logger with console and file handlers.

    Args:
        output_dir: Directory where the log file will be created.

    Returns:
        The configured logger instance.
    """
    log = logging.getLogger("qgis_analyzer")
    log.setLevel(logging.DEBUG)

    # Already configured on a previous call: avoid duplicate handlers.
    if log.handlers:
        return log

    # Console handler: user-facing, INFO and above, bare messages.
    console = logging.StreamHandler(sys.stdout)
    console.setLevel(logging.INFO)
    console.setFormatter(logging.Formatter("%(message)s"))
    log.addHandler(console)

    # File handler: full DEBUG detail with timestamps for troubleshooting.
    file_handler = logging.FileHandler(output_dir / "analyzer.log", encoding="utf-8")
    file_handler.setLevel(logging.DEBUG)
    file_handler.setFormatter(
        logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
    )
    log.addHandler(file_handler)

    return log
42
+
43
+
44
# Global logger instance shared by the package.
# Bug fix: the original code also rebound the builtin ``AttributeError``
# to the string "logger", which would break any later
# ``except AttributeError`` clause in this module; that line is removed.
logger = logging.getLogger("qgis_analyzer")
@@ -0,0 +1,135 @@
1
+ """Path and file matching utilities."""
2
+
3
+ import fnmatch
4
+ import os
5
+ import pathlib
6
+ from typing import Dict, List
7
+
8
# Default patterns to ignore if not specified.
# Entries ending in "/" are treated as directory patterns by
# IgnoreMatcher (matching the directory and everything under it); the
# rest are matched by fnmatch-style globbing.
DEFAULT_EXCLUDE = {
    ".venv/",
    "venv/",
    "__pycache__/",
    ".git/",
    ".github/",
    "build/",
    "dist/",
    ".pytest_cache/",
    ".ruff_cache/",
    ".mypy_cache/",
    ".analyzerignore",
    "analysis_results/",
}
23
+
24
+
25
def safe_path_resolve(base_path: pathlib.Path, target_path_str: str) -> pathlib.Path:
    """Resolves a target path safely relative to a base path.

    Args:
        base_path: The root directory for resolution.
        target_path_str: The path string to resolve.

    Returns:
        The resolved absolute Path.

    Raises:
        ValueError: If path traversal is detected.
    """
    resolved_base = base_path.resolve()
    resolved_target = (base_path / target_path_str).resolve()

    # relative_to raises when the resolved target escapes the base dir,
    # which is exactly the traversal case we want to reject.
    try:
        resolved_target.relative_to(resolved_base)
    except (ValueError, RuntimeError):
        raise ValueError(
            f"Path traversal detected: '{target_path_str}' is outside base '{base_path}'"
        ) from None

    return resolved_target
50
+
51
+
52
class IgnoreMatcher:
    """Handles .analyzerignore patterns using fnmatch-style globbing."""

    def __init__(self, root_path: pathlib.Path, patterns: List[str]):
        """Initializes the matcher with root path and patterns.

        Args:
            root_path: The project root path.
            patterns: List of ignore pattern strings.
        """
        self.root_path = root_path
        # Combine user patterns with defaults; drop blanks and comments.
        all_patterns = set(p.strip() for p in patterns if p.strip() and not p.startswith("#"))
        all_patterns.update(DEFAULT_EXCLUDE)
        self.patterns = list(all_patterns)
        # Memoizes verdicts per absolute path string.
        self._cache: Dict[str, bool] = {}

    def is_ignored(self, path: pathlib.Path) -> bool:
        """Checks if a path matches any ignore pattern.

        Args:
            path: The path to check.

        Returns:
            True if ignored, False otherwise. Paths outside the root
            are never ignored.
        """
        str_path = str(path)
        if str_path in self._cache:
            return self._cache[str_path]

        try:
            rel_path = path.relative_to(self.root_path)
        except ValueError:
            # Path lies outside the project root.
            return False

        # Bug fix: normalize the relative path to forward slashes so that
        # "/"-style patterns match on Windows too. The old code split on
        # os.sep while testing "/" membership in patterns, so any pattern
        # containing a slash could never match on Windows.
        result = self._check_patterns(rel_path.as_posix(), path.name)
        self._cache[str_path] = result
        return result

    def _check_patterns(self, str_rel_path: str, name: str) -> bool:
        """Tests a "/"-separated relative path against all patterns."""
        for pattern in self.patterns:
            # Patterns starting with "/" are anchored to the project root.
            is_anchored = pattern.startswith("/")
            clean_pattern = pattern.lstrip("/")

            # Patterns ending with "/" match a directory and its contents.
            is_dir_pattern = clean_pattern.endswith("/")
            clean_pattern = clean_pattern.rstrip("/")

            if is_dir_pattern:
                if is_anchored:
                    if str_rel_path == clean_pattern or str_rel_path.startswith(
                        clean_pattern + "/"
                    ):
                        return True
                else:
                    # Match the directory name anywhere in the path.
                    if clean_pattern in str_rel_path.split("/"):
                        return True
            else:
                if fnmatch.fnmatch(str_rel_path, clean_pattern):
                    return True
                # Non-anchored, slash-free patterns also match by basename.
                if (
                    not is_anchored
                    and "/" not in clean_pattern
                    and fnmatch.fnmatch(name, clean_pattern)
                ):
                    return True
        return False
121
+
122
+
123
def load_ignore_patterns(ignore_file: pathlib.Path) -> List[str]:
    """Loads ignore patterns from a file.

    Args:
        ignore_file: Path to the .analyzerignore file.

    Returns:
        List of pattern strings, one per line, without trailing newlines.
        Empty when the file does not exist.
    """
    if not ignore_file.exists():
        return []
    # Bug fix: readlines() kept a trailing "\n" on every pattern (the
    # docstring promises clean pattern strings) and relied on the
    # platform default encoding; read as UTF-8 and split into lines.
    with open(ignore_file, encoding="utf-8") as f:
        return f.read().splitlines()
@@ -0,0 +1,150 @@
1
+ """Performance and progress tracking utilities."""
2
+
3
+ import pathlib
4
+ import signal
5
+ import sys
6
+ import threading
7
+ import time
8
+ from collections import OrderedDict
9
+ from contextlib import contextmanager
10
+ from typing import Any, Dict
11
+
12
+
13
class LRUCache:
    """Efficient Least Recently Used (LRU) Cache.

    Attributes:
        maxsize: Maximum number of items in the cache.
    """

    def __init__(self, maxsize: int = 256):
        # Insertion order doubles as recency order: most recent at the end.
        self.cache: OrderedDict[str, Any] = OrderedDict()
        self._lock = threading.Lock()
        self.maxsize = maxsize
        self.hits = 0
        self.misses = 0

    def get(self, key: str) -> Any:
        """Retrieves an item from the cache.

        Args:
            key: The cache key.

        Returns:
            The cached value or None if not found.
        """
        with self._lock:
            if key not in self.cache:
                self.misses += 1
                return None
            # Mark as most recently used before returning.
            self.cache.move_to_end(key)
            self.hits += 1
            return self.cache[key]

    def set(self, key: str, value: Any) -> None:
        """Adds an item to the cache.

        Args:
            key: The cache key.
            value: The value to store.
        """
        with self._lock:
            if key in self.cache:
                self.cache.move_to_end(key)
            self.cache[key] = value
            # Evict the least recently used entry when over capacity.
            while len(self.cache) > self.maxsize:
                self.cache.popitem(last=False)

    def stats(self) -> Dict[str, Any]:
        """Returns cache performance statistics.

        Returns:
            A dictionary with size, hits, misses, and hit rate.
        """
        with self._lock:
            lookups = self.hits + self.misses
            rate = self.hits / lookups if lookups > 0 else 0
            return {
                "size": len(self.cache),
                "hits": self.hits,
                "misses": self.misses,
                "hit_rate": rate,
            }
72
+
73
+
74
class ProgressTracker:
    """Real-time progress tracker for file processing.

    Attributes:
        total: Total number of files to process.
    """

    def __init__(self, total_files: int):
        self.total = total_files
        self.processed = 0
        self.start_time = time.time()
        self.avg_time = 0.0
        self.last_update = 0.0

    def update(self, file_path: pathlib.Path, processing_time: float) -> None:
        """Updates the progress status.

        Args:
            file_path: Current file path (kept for interface
                compatibility; not used in the display).
            processing_time: Time taken to process the file.
        """
        self.processed += 1
        # Exponentially weighted moving average feeds the ETA estimate.
        self.avg_time = (
            processing_time
            if self.avg_time == 0
            else self.avg_time * 0.9 + processing_time * 0.1
        )

        now = time.time()
        # Throttle display: every 10th file, or at most every 2 seconds.
        if self.processed % 10 == 0 or now - self.last_update > 2:
            self._display_progress()
            self.last_update = now

    def _display_progress(self) -> None:
        """Writes the single-line progress display to stdout."""
        percent = (self.processed / self.total) * 100 if self.total > 0 else 0
        if self.processed > 0:
            eta = (self.total - self.processed) * self.avg_time
            eta_str = f"{eta:.0f}s"
        else:
            eta_str = "..."
        sys.stdout.write(
            f"\r\033[K📊 Progress: {self.processed}/{self.total} ({percent:.1f}%) | ETA: {eta_str}"
        )
        sys.stdout.flush()

    def complete(self) -> Dict[str, Any]:
        """Finalizes the progress tracking and returns final metrics.

        Returns:
            A dictionary with elapsed time and throughput.
        """
        elapsed = time.time() - self.start_time
        print()  # Step past the carriage-returned progress line.
        return {
            "elapsed": elapsed,
            "files_per_second": self.processed / elapsed if elapsed > 0 else 0,
        }
132
+
133
+
134
@contextmanager
def timeout_manager(seconds: int):
    """Context manager for enforcing operation timeouts via SIGALRM.

    NOTE: SIGALRM is Unix-only and handlers may only be installed from
    the main thread; this manager is unusable on Windows.

    Args:
        seconds: Timeout duration in seconds.

    Raises:
        TimeoutError: If the wrapped block exceeds the timeout.
    """

    def signal_handler(signum, frame):
        raise TimeoutError(f"Operation exceeded {seconds}s")

    # Bug fix: remember and restore the previous SIGALRM handler so that
    # nested use (or outer code relying on SIGALRM) is not clobbered.
    previous_handler = signal.signal(signal.SIGALRM, signal_handler)
    signal.alarm(seconds)
    try:
        yield
    finally:
        signal.alarm(0)
        signal.signal(signal.SIGALRM, previous_handler)