pytest-fastcollect 0.5.2__cp312-cp312-musllinux_1_2_i686.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,12 @@
+ """pytest-fastcollect: High-performance test collection using Rust."""
+
+ __version__ = "0.5.2"
+
+ try:
+     from .pytest_fastcollect import FastCollector, get_version
+ except ImportError:
+     # Fallback when the Rust extension is not built
+     FastCollector = None
+     get_version = lambda: __version__
+
+ __all__ = ["FastCollector", "get_version", "__version__"]
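
The `try`/`except ImportError` above means importing the package never fails when the compiled extension is absent; callers can detect the pure-Python fallback by checking whether `FastCollector` is `None`. A minimal sketch of that probe (not part of the package; the top-level import name `pytest_fastcollect` is an assumption based on the relative import above):

```python
# Hypothetical usage sketch: the top-level module name is assumed.
import pytest_fastcollect

if pytest_fastcollect.FastCollector is None:
    # Fallback path: get_version() is the lambda defined in __init__
    print("Rust extension not built; version:", pytest_fastcollect.get_version())
else:
    print("Rust collector active; version:", pytest_fastcollect.get_version())
```
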
@@ -0,0 +1,171 @@
+ """Caching layer for pytest-fastcollect to enable incremental collection."""
+
+ import json
+ import os
+ from pathlib import Path
+ from typing import Dict, Any, Optional, Tuple, List
+ from dataclasses import dataclass, asdict
+
+ from .constants import CACHE_VERSION, MTIME_TOLERANCE_SECONDS
+
+
+ @dataclass
+ class CacheStats:
+     """Statistics about cache usage."""
+     cache_hits: int = 0
+     cache_misses: int = 0
+     files_parsed: int = 0
+     files_from_cache: int = 0
+
+     @property
+     def hit_rate(self) -> float:
+         """Calculate cache hit rate."""
+         total = self.cache_hits + self.cache_misses
+         return self.cache_hits / total if total > 0 else 0.0
+
+     def __str__(self) -> str:
+         return (f"FastCollect Cache: {self.files_from_cache} files from cache, "
+                 f"{self.files_parsed} parsed ({self.hit_rate:.1%} hit rate)")
+
+
+ class CollectionCache:
+     """Manages persistent cache of parsed test data with file modification times."""
+
+     def __init__(self, cache_dir: Path):
+         """
+         Initialize cache manager.
+
+         Args:
+             cache_dir: Directory to store cache (typically .pytest_cache/v/fastcollect)
+         """
+         self.cache_dir = cache_dir
+         self.cache_file = cache_dir / "cache.json"
+         self.cache_data: Dict[str, Dict[str, Any]] = {}
+         self.stats = CacheStats()
+         self._load_cache()
+
+     def _load_cache(self) -> None:
+         """Load cache from disk if it exists."""
+         if self.cache_file.exists():
+             try:
+                 with open(self.cache_file, 'r') as f:
+                     data = json.load(f)
+
+                 # Check cache version
+                 if data.get('version') == CACHE_VERSION:
+                     self.cache_data = data.get('entries', {})
+                 else:
+                     # Cache version mismatch, start fresh
+                     self.cache_data = {}
+             except (json.JSONDecodeError, IOError):
+                 # Corrupted cache, start fresh
+                 self.cache_data = {}
+
+     def save_cache(self) -> None:
+         """Save cache to disk."""
+         self.cache_dir.mkdir(parents=True, exist_ok=True)
+
+         cache_structure = {
+             'version': CACHE_VERSION,
+             'entries': self.cache_data
+         }
+
+         try:
+             with open(self.cache_file, 'w') as f:
+                 json.dump(cache_structure, f, indent=2)
+         except IOError:
+             # Silently fail if we can't write cache
+             pass
+
+     def get_cached_data(self, file_path: str, current_mtime: float) -> Optional[List[Any]]:
+         """
+         Get cached data for a file if it's still valid.
+
+         Args:
+             file_path: Absolute path to the file
+             current_mtime: Current modification time of the file
+
+         Returns:
+             Cached test items if still valid, None otherwise
+         """
+         if file_path not in self.cache_data:
+             self.stats.cache_misses += 1
+             return None
+
+         cached_entry = self.cache_data[file_path]
+         cached_mtime = cached_entry.get('mtime', 0)
+
+         # Treat the file as unmodified if its mtime is within tolerance
+         if abs(cached_mtime - current_mtime) < MTIME_TOLERANCE_SECONDS:  # allow small floating point difference
+             self.stats.cache_hits += 1
+             self.stats.files_from_cache += 1
+             return cached_entry.get('items', [])
+         else:
+             self.stats.cache_misses += 1
+             return None
+
+     def update_cache(self, file_path: str, mtime: float, items: List[Any]) -> None:
+         """
+         Update cache with newly parsed data.
+
+         Args:
+             file_path: Absolute path to the file
+             mtime: Modification time of the file
+             items: List of test items found in the file
+         """
+         self.cache_data[file_path] = {
+             'mtime': mtime,
+             'items': items
+         }
+         self.stats.files_parsed += 1
+
+     def merge_with_rust_data(self, rust_metadata: Dict[str, Dict[str, Any]]) -> Tuple[Dict[str, List[Any]], bool]:
+         """
+         Merge Rust-collected metadata with cache.
+
+         Args:
+             rust_metadata: Dictionary from Rust collector with {file_path: {mtime: float, items: list}}
+
+         Returns:
+             Tuple of (merged data dict, cache_updated flag)
+         """
+         merged_data = {}
+         cache_updated = False
+
+         for file_path, metadata in rust_metadata.items():
+             current_mtime = metadata['mtime']
+             rust_items = metadata['items']
+
+             # Try to use cached data
+             cached_items = self.get_cached_data(file_path, current_mtime)
+
+             if cached_items is not None:
+                 # Use cached data
+                 merged_data[file_path] = cached_items
+             else:
+                 # Use newly parsed data and update cache
+                 merged_data[file_path] = rust_items
+                 self.update_cache(file_path, current_mtime, rust_items)
+                 cache_updated = True
+
+         # Remove deleted files from cache
+         current_files = set(rust_metadata.keys())
+         cached_files = set(self.cache_data.keys())
+         deleted_files = cached_files - current_files
+
+         if deleted_files:
+             for file_path in deleted_files:
+                 del self.cache_data[file_path]
+             cache_updated = True
+
+         return merged_data, cache_updated
+
+     def clear(self) -> None:
+         """Clear the entire cache."""
+         self.cache_data = {}
+         self.stats = CacheStats()
+         if self.cache_file.exists():
+             try:
+                 self.cache_file.unlink()
+             except IOError:
+                 pass
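
Taken together, `get_cached_data`, `update_cache`, and `merge_with_rust_data` implement an mtime-keyed incremental collection protocol. A minimal sketch of the round trip (not shipped with the package; the `rust_metadata` shape follows the `merge_with_rust_data` docstring, and the values are invented):

```python
from pathlib import Path

# Illustrative only: exercise the CollectionCache round trip defined above.
cache = CollectionCache(Path(".pytest_cache/v/fastcollect"))

rust_metadata = {
    "/repo/tests/test_example.py": {
        "mtime": 1700000000.0,              # invented timestamp
        "items": ["test_add", "test_sub"],  # invented test items
    },
}

# First pass: cache miss, so the Rust-parsed items are stored.
merged, updated = cache.merge_with_rust_data(rust_metadata)
if updated:
    cache.save_cache()  # persist only when something changed

# Second pass with an unchanged mtime: served from the cache.
merged, updated = cache.merge_with_rust_data(rust_metadata)

print(cache.stats)  # FastCollect Cache: 1 files from cache, 1 parsed (50.0% hit rate)
```
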
@@ -0,0 +1,89 @@
+ """Constants for pytest-fastcollect.
+
+ This module centralizes magic numbers and configuration values to improve
+ code maintainability and make it easier to tune performance.
+ """
+
+ # === Daemon Configuration ===
+
+ # Maximum size for incoming requests (10MB)
+ MAX_REQUEST_SIZE_BYTES = 10 * 1024 * 1024
+
+ # Maximum number of concurrent client connections
+ MAX_CONCURRENT_CONNECTIONS = 10
+
+ # Socket accept timeout in seconds
+ SOCKET_ACCEPT_TIMEOUT_SECONDS = 1.0
+
+ # Maximum time to process a single request
+ REQUEST_TIMEOUT_SECONDS = 30.0
+
+ # Interval for daemon health checks
+ HEALTH_CHECK_INTERVAL_SECONDS = 60.0
+
+ # Log file rotation settings
+ LOG_FILE_MAX_BYTES = 10 * 1024 * 1024  # 10MB
+ LOG_FILE_BACKUP_COUNT = 5
+
+ # Brief pause to prevent tight loops in daemon
+ DAEMON_LOOP_PAUSE_SECONDS = 0.1
+
+
+ # === Client Configuration ===
+
+ # Default number of retry attempts for failed requests
+ DEFAULT_MAX_RETRIES = 3
+
+ # Default timeout for client requests
+ DEFAULT_REQUEST_TIMEOUT_SECONDS = 5.0
+
+ # Timeout for health check requests
+ HEALTH_CHECK_TIMEOUT_SECONDS = 1.0
+
+ # Number of retries for health checks
+ HEALTH_CHECK_RETRIES = 1
+
+ # Timeout for stop command
+ STOP_COMMAND_TIMEOUT_SECONDS = 2.0
+
+ # Base sleep time for exponential backoff (in seconds)
+ RETRY_BACKOFF_BASE_SECONDS = 0.1
+
+
+ # === Process Management ===
+
+ # Sleep time after sending stop command
+ STOP_COMMAND_SLEEP_SECONDS = 0.5
+
+ # Sleep time after SIGTERM before checking if process stopped
+ SIGTERM_WAIT_SECONDS = 0.5
+
+ # Sleep time after SIGKILL
+ SIGKILL_WAIT_SECONDS = 0.2
+
+ # Timeout for Windows tasklist command
+ TASKLIST_TIMEOUT_SECONDS = 5
+
+
+ # === Performance ===
+
+ # Fallback CPU count if os.cpu_count() returns None
+ DEFAULT_CPU_COUNT = 4
+
+ # Benchmark timeout for standard pytest collection
+ BENCHMARK_TIMEOUT_SECONDS = 120
+
+
+ # === Cache Configuration ===
+
+ # Cache version string
+ CACHE_VERSION = "1.0"
+
+ # Tolerance for file modification time comparison (in seconds)
+ MTIME_TOLERANCE_SECONDS = 0.01
+
+
+ # === Socket Path ===
+
+ # MD5 hash length for socket path generation
+ SOCKET_PATH_HASH_LENGTH = 8
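
Two of these constants imply behavior that is easy to misread, so here is a hedged sketch of how they would typically be applied; `send_request`, `call_with_retries`, `socket_path_for`, and the socket path scheme are hypothetical stand-ins, not the package's actual client code:

```python
import hashlib
import time

# Hypothetical retry helper: exponential backoff doubling from the base,
# i.e. sleeps of 0.1s, 0.2s, 0.4s across DEFAULT_MAX_RETRIES attempts.
def call_with_retries(send_request, max_retries=DEFAULT_MAX_RETRIES):
    for attempt in range(max_retries):
        try:
            return send_request()
        except ConnectionError:
            if attempt == max_retries - 1:
                raise
            time.sleep(RETRY_BACKOFF_BASE_SECONDS * (2 ** attempt))

# Hypothetical socket path derivation: SOCKET_PATH_HASH_LENGTH suggests the
# path embeds a truncated MD5 digest; the exact scheme here is a guess.
def socket_path_for(project_root: str) -> str:
    digest = hashlib.md5(project_root.encode()).hexdigest()[:SOCKET_PATH_HASH_LENGTH]
    return f"/tmp/pytest-fastcollect-{digest}.sock"
```
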