pytest-fastcollect 0.5.1__cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,601 @@
1
+ """Pytest plugin that monkey-patches the collection mechanism with Rust implementation."""
2
+
3
+ import os
4
+ import sys
5
+ import importlib.util
6
+ from pathlib import Path
7
+ from typing import List, Optional, Any, Set, Tuple, Dict
8
+ from concurrent.futures import ThreadPoolExecutor, as_completed
9
+ import pytest
10
+ from _pytest.python import Module, Class, Function
11
+ from _pytest.main import Session
12
+ from _pytest.config import Config
13
+ from _pytest.nodes import Collector
14
+
15
+ try:
16
+ from .pytest_fastcollect import FastCollector
17
+ from .cache import CollectionCache, CacheStats
18
+ from .filter import get_files_with_matching_tests
19
+ from .daemon import start_daemon_background
20
+ from .daemon_client import (
21
+ DaemonClient, get_socket_path, save_daemon_pid,
22
+ get_daemon_pid, stop_daemon, is_process_running
23
+ )
24
+ from .constants import DEFAULT_CPU_COUNT, BENCHMARK_TIMEOUT_SECONDS
25
+ RUST_AVAILABLE = True
26
+ except ImportError:
27
+ RUST_AVAILABLE = False
28
+ FastCollector = None
29
+ CollectionCache = None
30
+ CacheStats = None
31
+ get_files_with_matching_tests = None
32
+ start_daemon_background = None
33
+ DaemonClient = None
34
+ get_socket_path = None
35
+
36
+
37
def pytest_configure(config: Config) -> None:
    """Register the plugin and pre-collect test metadata.

    Order of operations:
      1. Bail out when the Rust extension is unavailable.
      2. Handle one-shot commands (--benchmark-collect, --daemon-*), which
         print a report and exit pytest.
      3. Run the Rust collector (optionally merged with the on-disk cache)
         and record which files contain tests so pytest_ignore_collect can
         prune collection.
    """
    global _test_files_cache, _collection_cache, _cache_stats, _collected_data

    if not RUST_AVAILABLE:
        if config.option.verbose > 0:
            print("Warning: pytest-fastcollect Rust extension not available, using standard collection",
                  file=sys.stderr)
        return

    # Handle benchmark command first
    if hasattr(config.option, 'benchmark_collect') and config.option.benchmark_collect:
        _run_benchmark(config)
        pytest.exit("Benchmark completed", returncode=0)

    # Daemon commands are one-shot: they report and exit pytest.
    root_path = str(config.rootpath)
    socket_path = get_socket_path(root_path) if get_socket_path else None

    # Handle --daemon-stop
    if hasattr(config.option, 'daemon_stop') and config.option.daemon_stop:
        if socket_path and stop_daemon:
            if stop_daemon(socket_path):
                print("Daemon stopped", file=sys.stderr)
            else:
                print("No daemon running", file=sys.stderr)
        pytest.exit("Daemon stopped", returncode=0)

    # Handle --daemon-status
    if hasattr(config.option, 'daemon_status') and config.option.daemon_status:
        if socket_path and DaemonClient:
            _report_daemon_status(socket_path)
        pytest.exit("Daemon status checked", returncode=0)

    # Handle --daemon-health
    if hasattr(config.option, 'daemon_health') and config.option.daemon_health:
        if socket_path and DaemonClient:
            _report_daemon_health(socket_path)
        pytest.exit("Daemon health checked", returncode=0)

    # Check if fast collection is disabled
    if hasattr(config.option, 'use_fast_collect') and not config.option.use_fast_collect:
        return

    # Initialize the collector and cache early
    fast_collector = FastCollector(root_path)
    use_cache = getattr(config.option, 'fastcollect_cache', True)

    # Clear cache if requested
    if hasattr(config.option, 'fastcollect_clear_cache') and config.option.fastcollect_clear_cache:
        cache_dir = _get_cache_dir(config)
        if CollectionCache:
            cache = CollectionCache(cache_dir)
            cache.clear()
            if config.option.verbose >= 0:
                print("FastCollect: Cache cleared", file=sys.stderr)

    # Pre-collect test files, merging with the incremental cache when enabled.
    if use_cache:
        cache_dir = _get_cache_dir(config)
        _collection_cache = CollectionCache(cache_dir)
        rust_metadata = fast_collector.collect_with_metadata()
        collected_data, cache_updated = _collection_cache.merge_with_rust_data(rust_metadata)

        if cache_updated:
            _collection_cache.save_cache()

        _cache_stats = _collection_cache.stats
    else:
        collected_data = fast_collector.collect()
        _cache_stats = None

    _collected_data = collected_data

    # Apply selective import filtering based on -k and -m options
    keyword_expr = config.getoption("-k", default=None)
    marker_expr = config.getoption("-m", default=None)

    if keyword_expr or marker_expr:
        # Only include files with tests matching the filter
        _test_files_cache = get_files_with_matching_tests(
            collected_data,
            keyword_expr=keyword_expr,
            marker_expr=marker_expr
        )
        if config.option.verbose >= 1:
            total_files = len(collected_data)
            filtered_files = len(_test_files_cache)
            print(
                f"FastCollect: Selective import - {filtered_files}/{total_files} files match filter",
                file=sys.stderr
            )
    else:
        # No filtering, collect all files
        _test_files_cache = set(collected_data.keys())

    # Handle --daemon-start
    if hasattr(config.option, 'daemon_start') and config.option.daemon_start:
        if socket_path and start_daemon_background:
            print("Starting collection daemon...", file=sys.stderr)
            pid = start_daemon_background(root_path, socket_path, _test_files_cache)
            if pid > 0:
                save_daemon_pid(socket_path, pid)
                print(f"Daemon started (PID {pid})", file=sys.stderr)
                print("Future pytest runs will use instant collection!", file=sys.stderr)
        pytest.exit("Daemon started", returncode=0)

    # Parallel import optimization (if enabled)
    if hasattr(config.option, 'parallel_import') and config.option.parallel_import:
        _parallel_import_modules(_test_files_cache, config)


def _report_daemon_status(socket_path: str) -> None:
    """Print the running daemon's status report, or NOT RUNNING if unreachable."""
    try:
        client = DaemonClient(socket_path)
        status = client.get_status()
        print("Daemon: RUNNING", file=sys.stderr)
        print(f"  PID: {status.get('pid')}", file=sys.stderr)
        # Prefer the human-readable uptime when the daemon provides one.
        uptime_display = status.get('uptime_human')
        if not uptime_display:
            uptime_display = f"{status.get('uptime', 0):.1f}s"
        print(f"  Uptime: {uptime_display}", file=sys.stderr)
        print(f"  Cached modules: {status.get('cached_modules', 0)}", file=sys.stderr)
        metrics = status.get('metrics', {})
        if metrics:
            print("  Metrics:", file=sys.stderr)
            print(f"    Total requests: {metrics.get('total_requests', 0)}", file=sys.stderr)
            print(f"    Successful: {metrics.get('successful_requests', 0)}", file=sys.stderr)
            print(f"    Failed: {metrics.get('failed_requests', 0)}", file=sys.stderr)
    except Exception:
        # Was a bare `except:`; narrowed so KeyboardInterrupt/SystemExit propagate.
        print("Daemon: NOT RUNNING", file=sys.stderr)


def _report_daemon_health(socket_path: str) -> None:
    """Print the daemon's health-check report, or an error if unreachable."""
    try:
        client = DaemonClient(socket_path)
        health = client.get_health()
        status = health.get('status', 'unknown')

        if status == 'healthy':
            print("Daemon Health: ✅ HEALTHY", file=sys.stderr)
        elif status == 'degraded':
            print("Daemon Health: ⚠️ DEGRADED", file=sys.stderr)
        else:
            print("Daemon Health: ❌ UNHEALTHY", file=sys.stderr)

        print(f"  Uptime: {health.get('uptime', 0):.1f}s", file=sys.stderr)

        checks = health.get('checks', {})
        if checks:
            print("  Checks:", file=sys.stderr)
            for check_name, check_value in checks.items():
                # Booleans must be True; numeric checks pass below a 10% threshold.
                icon = "✅" if check_value is True or (isinstance(check_value, (int, float)) and check_value < 0.1) else "❌"
                if isinstance(check_value, bool):
                    print(f"    {icon} {check_name}: {check_value}", file=sys.stderr)
                elif isinstance(check_value, float):
                    print(f"    {icon} {check_name}: {check_value:.1%}", file=sys.stderr)
                else:
                    print(f"    {icon} {check_name}: {check_value}", file=sys.stderr)
    except Exception as e:
        print("Daemon: NOT RUNNING or UNREACHABLE", file=sys.stderr)
        print(f"  Error: {e}", file=sys.stderr)
193
+
194
def pytest_collection_modifyitems(session: Session, config: Config, items: List[Any]) -> None:
    """Hook invoked once pytest has finished collecting items.

    Collection is already complete when this runs, so the plugin has
    nothing left to optimize here; kept as an explicit no-op.
    """
198
+
199
+
200
+
201
+
202
def pytest_addoption(parser: Any) -> None:
    """Register the fastcollect command-line options with pytest."""
    group = parser.getgroup('fastcollect')

    # (flag, keyword arguments) pairs, registered in order below.
    option_specs = [
        ('--use-fast-collect',
         dict(action='store_true', default=True,
              help='Use Rust-based fast collection (default: True)')),
        ('--no-fast-collect',
         dict(dest='use_fast_collect', action='store_false',
              help='Disable fast collection and use standard pytest collection')),
        ('--fastcollect-cache',
         dict(action='store_true', default=True,
              help='Enable incremental caching (default: True)')),
        ('--no-fastcollect-cache',
         dict(dest='fastcollect_cache', action='store_false',
              help='Disable caching and parse all files')),
        ('--fastcollect-clear-cache',
         dict(action='store_true', default=False,
              help='Clear the fastcollect cache before collection')),
        ('--benchmark-collect',
         dict(action='store_true', default=False,
              help='Benchmark collection time (fast vs standard)')),
        ('--parallel-import',
         dict(action='store_true', default=False,
              help='Pre-import test modules in parallel (experimental)')),
        ('--parallel-workers',
         dict(type=int, default=None,
              help='Number of parallel import workers (default: CPU count)')),
        ('--daemon-start',
         dict(action='store_true', default=False,
              help='Start collection daemon (keeps modules imported for instant re-collection)')),
        ('--daemon-stop',
         dict(action='store_true', default=False,
              help='Stop collection daemon')),
        ('--daemon-status',
         dict(action='store_true', default=False,
              help='Show collection daemon status')),
        ('--daemon-health',
         dict(action='store_true', default=False,
              help='Check collection daemon health')),
    ]

    for flag, kwargs in option_specs:
        group.addoption(flag, **kwargs)
277
+
278
+
279
def pytest_report_header(config: Config) -> Optional[str]:
    """Contribute a status line to pytest's startup header."""
    if not RUST_AVAILABLE:
        return "fastcollect: Rust extension not available"
    from . import get_version
    return f"fastcollect: v{get_version()} (Rust-accelerated collection enabled)"
286
+
287
+
288
# Store test files cache and collection cache.
# Module-level state written by pytest_configure and read by the later
# collection hooks (pytest_ignore_collect, pytest_collection_finish).
_test_files_cache = None    # set of absolute file paths known to contain tests, or None before configure
_collection_cache = None    # CollectionCache instance when caching is enabled
_cache_stats = None         # cache statistics printed in pytest_collection_finish; None when caching is off
_collected_data = None      # mapping of file path -> collected test data from the Rust collector
293
+
294
+
295
def _get_cache_dir(config: Config) -> Path:
    """Return the directory where fastcollect stores its cache files.

    Nested under pytest's own cache directory so it lives alongside the
    rest of the project's ``.pytest_cache`` data.
    """
    return Path(config.cache._cachedir) / "v" / "fastcollect"
300
+
301
+
302
+ def _import_test_module(file_path: str, root_path: str) -> Tuple[str, bool, Optional[str]]:
303
+ """Import a single test module.
304
+
305
+ Returns: (file_path, success, error_message)
306
+ """
307
+ try:
308
+ # Convert file path to module name
309
+ path_obj = Path(file_path)
310
+ root_obj = Path(root_path)
311
+
312
+ # Get relative path from root
313
+ try:
314
+ rel_path = path_obj.relative_to(root_obj)
315
+ except ValueError:
316
+ # If file is not under root, use absolute path
317
+ rel_path = path_obj
318
+
319
+ # Convert path to module name (remove .py and replace / with .)
320
+ module_name = str(rel_path.with_suffix('')).replace(os.sep, '.')
321
+
322
+ # Check if already imported
323
+ if module_name in sys.modules:
324
+ return (file_path, True, None)
325
+
326
+ # Import the module using importlib
327
+ spec = importlib.util.spec_from_file_location(module_name, file_path)
328
+ if spec and spec.loader:
329
+ module = importlib.util.module_from_spec(spec)
330
+ sys.modules[module_name] = module
331
+ spec.loader.exec_module(module)
332
+ return (file_path, True, None)
333
+ else:
334
+ return (file_path, False, "Could not create module spec")
335
+
336
+ except Exception as e:
337
+ return (file_path, False, str(e))
338
+
339
+
340
def _parallel_import_modules(file_paths: Set[str], config: Config) -> Tuple[int, int]:
    """Pre-import test modules in parallel to warm sys.modules.

    This runs before pytest's collection phase, so when pytest later
    imports each test module it is already present in ``sys.modules``.
    Threads are used because module import is dominated by file I/O,
    which releases the GIL.

    Args:
        file_paths: Absolute paths of test files to pre-import.
        config: Active pytest config (reads ``parallel_workers``/``verbose``).

    Returns:
        ``(success_count, error_count)``.  The original version was
        annotated with this return type but returned None on every path.
    """
    import time

    if not file_paths:
        return (0, 0)

    # Worker count: CLI option, else CPU count, else the packaged default.
    workers = getattr(config.option, 'parallel_workers', None)
    if workers is None:
        workers = os.cpu_count() or DEFAULT_CPU_COUNT

    root_path = str(config.rootpath)

    if config.option.verbose >= 1:
        print(f"\nFastCollect: Parallel import ({workers} workers) - importing {len(file_paths)} modules...",
              file=sys.stderr, end=" ", flush=True)

    start_time = time.time()
    success_count = 0
    error_count = 0

    # Even with the GIL, reading .py/.pyc files overlaps across threads.
    with ThreadPoolExecutor(max_workers=workers) as executor:
        future_to_path = {
            executor.submit(_import_test_module, file_path, root_path): file_path
            for file_path in file_paths
        }

        for future in as_completed(future_to_path):
            file_path, success, error = future.result()
            if success:
                success_count += 1
            else:
                error_count += 1
                if config.option.verbose >= 2:
                    print(f"\n  Warning: Failed to import {file_path}: {error}",
                          file=sys.stderr)

    elapsed = time.time() - start_time

    if config.option.verbose >= 1:
        print(f"Done ({elapsed:.2f}s)", file=sys.stderr)
        if error_count > 0:
            print(f"  Imported: {success_count}/{len(file_paths)} modules ({error_count} errors)",
                  file=sys.stderr)

    return (success_count, error_count)
393
+
394
+
395
def pytest_ignore_collect(collection_path: Any, config: Config) -> Optional[bool]:
    """Decide whether a path should be skipped during collection.

    Uses the Rust-parsed metadata gathered in pytest_configure: any ``.py``
    file the parser found no tests in is ignored outright.  Returning None
    defers the decision to pytest's normal rules.
    """
    global _test_files_cache

    if not RUST_AVAILABLE:
        return None

    # Respect --no-fast-collect.
    if hasattr(config.option, 'use_fast_collect') and not config.option.use_fast_collect:
        return None

    # pytest_configure has not populated the cache yet; don't filter.
    if _test_files_cache is None:
        return None

    # Only Python files are filtered; directories and other files pass through.
    if not (collection_path.is_file() and collection_path.suffix == ".py"):
        return None

    return str(collection_path.absolute()) not in _test_files_cache
422
+
423
+
424
def _run_benchmark(config: Config) -> None:
    """Benchmark fast (Rust) collection against standard pytest collection.

    Prints project stats, both collection timings, a graded recommendation,
    and a project-size analysis to stderr.  Fixes over the original:
    the fast path now actually times the Rust collector (previously the
    timer was started and stopped around nothing, always reporting ~0s),
    the standard path is timed directly instead of being scraped out of
    subprocess stderr (which could run the subprocess twice), and the
    timeout message reflects BENCHMARK_TIMEOUT_SECONDS.
    """
    import time
    import subprocess

    root_path = str(config.rootpath)

    print("\n" + "=" * 70, file=sys.stderr)
    print("pytest-fastcollect Benchmark", file=sys.stderr)
    print("=" * 70, file=sys.stderr)
    print("\nAnalyzing your test suite to determine if pytest-fastcollect is beneficial...\n", file=sys.stderr)

    # Benchmark 1 measurement: time the Rust collector itself.
    fast_collector = FastCollector(root_path)
    start = time.time()
    try:
        collected_data = fast_collector.collect()
        fast_time = time.time() - start
        fast_error = None
    except Exception as e:
        collected_data = {}
        fast_time = None
        fast_error = e

    num_files = len(collected_data)
    num_tests = sum(len(tests) for tests in collected_data.values())

    print("📊 Project Stats:", file=sys.stderr)
    print(f"   Test files: {num_files}", file=sys.stderr)
    print(f"   Test items: {num_tests}", file=sys.stderr)
    print(file=sys.stderr)

    print("⚡ Benchmark 1: WITH pytest-fastcollect", file=sys.stderr)
    print("   Running collection with Rust acceleration...", end=" ", flush=True, file=sys.stderr)
    if fast_time is not None:
        print(f"Done! ({fast_time:.3f}s)", file=sys.stderr)
    else:
        print(f"Failed: {fast_error}", file=sys.stderr)

    # Benchmark 2: standard pytest collection in a subprocess, timed directly.
    print("\n🐌 Benchmark 2: WITHOUT pytest-fastcollect", file=sys.stderr)
    print("   Running standard pytest collection...", end=" ", flush=True, file=sys.stderr)

    slow_time = None
    try:
        start = time.time()
        subprocess.run(
            [sys.executable, "-m", "pytest", "--collect-only", "--no-fast-collect", "-q"],
            cwd=root_path,
            capture_output=True,
            timeout=BENCHMARK_TIMEOUT_SECONDS,
        )
        slow_time = time.time() - start
        print(f"Done! ({slow_time:.3f}s)", file=sys.stderr)
    except subprocess.TimeoutExpired:
        print(f"Timeout (>{BENCHMARK_TIMEOUT_SECONDS}s)", file=sys.stderr)
    except Exception as e:
        print(f"Failed: {e}", file=sys.stderr)

    # Results and recommendation
    print("\n" + "=" * 70, file=sys.stderr)
    print("📈 Results", file=sys.stderr)
    print("=" * 70, file=sys.stderr)

    if fast_time is not None and slow_time is not None:
        speedup = slow_time / fast_time if fast_time > 0 else 1.0
        time_saved = slow_time - fast_time

        print("\n⏱️ Collection Time:", file=sys.stderr)
        print(f"   Standard pytest: {slow_time:.3f}s", file=sys.stderr)
        print(f"   With fastcollect: {fast_time:.3f}s", file=sys.stderr)
        print(f"   Time saved: {time_saved:.3f}s", file=sys.stderr)
        print(f"   Speedup: {speedup:.2f}x", file=sys.stderr)

        _print_speedup_recommendation(speedup, time_saved)
        _print_project_size_analysis(num_files)
    else:
        print("\n⚠️ Could not complete benchmark comparison.", file=sys.stderr)
        print("   Please ensure pytest is properly installed.", file=sys.stderr)

    print("\n" + "=" * 70, file=sys.stderr)
    print("💡 Tips:", file=sys.stderr)
    print("   - Use -k or -m filters for additional speedup (selective import)", file=sys.stderr)
    print("   - Enable caching for incremental builds (default: enabled)", file=sys.stderr)
    print("   - Try --parallel-import for further speedup (experimental)", file=sys.stderr)
    print("=" * 70 + "\n", file=sys.stderr)


def _print_speedup_recommendation(speedup: float, time_saved: float) -> None:
    """Print a graded adoption recommendation based on the measured speedup."""
    print("\n💡 Recommendation:", file=sys.stderr)

    if speedup >= 2.0:
        grade = "⭐⭐⭐ EXCELLENT"
        recommendation = (
            f"   {grade}\n"
            f"   pytest-fastcollect provides SIGNIFICANT speedup ({speedup:.1f}x faster)!\n"
            f"   ✅ Highly recommended for your project.\n"
            f"   ✅ You'll save {time_saved:.1f}s on every test run."
        )
    elif speedup >= 1.5:
        grade = "⭐⭐ GOOD"
        recommendation = (
            f"   {grade}\n"
            f"   pytest-fastcollect provides good speedup ({speedup:.1f}x faster).\n"
            f"   ✅ Recommended for your project.\n"
            f"   ✅ You'll save {time_saved:.1f}s on every test run."
        )
    elif speedup >= 1.2:
        grade = "⭐ MODERATE"
        recommendation = (
            f"   {grade}\n"
            f"   pytest-fastcollect provides moderate speedup ({speedup:.1f}x faster).\n"
            f"   ⚖️ May be beneficial, especially if you run tests frequently.\n"
            f"   💾 Caching will improve performance over time."
        )
    elif speedup >= 1.0:
        grade = "→ MINIMAL"
        recommendation = (
            f"   {grade}\n"
            f"   pytest-fastcollect provides minimal speedup ({speedup:.1f}x).\n"
            f"   ⚠️ May not be worth it for such a small project.\n"
            f"   💡 Consider using it if:\n"
            f"      - Your project will grow significantly\n"
            f"      - You use selective test execution (-k, -m)"
        )
    else:
        grade = "❌ OVERHEAD"
        recommendation = (
            f"   {grade}\n"
            f"   pytest-fastcollect is SLOWER ({speedup:.1f}x) on your project.\n"
            f"   ❌ NOT recommended - disable with --no-fast-collect\n"
            f"   💡 The plugin works best on projects with 200+ test files."
        )

    print(recommendation, file=sys.stderr)


def _print_project_size_analysis(num_files: int) -> None:
    """Print guidance keyed to the number of collected test files."""
    print("\n📦 Project Size Analysis:", file=sys.stderr)
    if num_files >= 500:
        print(f"   Your project is LARGE ({num_files} files).", file=sys.stderr)
        print("   ✅ pytest-fastcollect is designed for projects like yours!", file=sys.stderr)
    elif num_files >= 200:
        print(f"   Your project is MEDIUM-LARGE ({num_files} files).", file=sys.stderr)
        print("   ✅ Good fit for pytest-fastcollect.", file=sys.stderr)
    elif num_files >= 100:
        print(f"   Your project is MEDIUM ({num_files} files).", file=sys.stderr)
        print("   ⚖️ pytest-fastcollect may help, especially with selective testing.", file=sys.stderr)
    elif num_files >= 50:
        print(f"   Your project is SMALL-MEDIUM ({num_files} files).", file=sys.stderr)
        print("   💡 Benefits will be modest but may grow as project expands.", file=sys.stderr)
    else:
        print(f"   Your project is SMALL ({num_files} files).", file=sys.stderr)
        print("   ℹ️ pytest-fastcollect overhead may exceed benefits.", file=sys.stderr)
        print("   💡 Best for projects with 200+ test files.", file=sys.stderr)
593
+
594
+
595
def pytest_collection_finish(session: Session) -> None:
    """Report cache statistics once collection has completed."""
    global _cache_stats

    stats = _cache_stats
    # Stats exist only when caching was enabled in pytest_configure.
    if not stats:
        return
    if session.config.option.verbose < 0:
        return
    print(f"\n{stats}", file=sys.stderr)
File without changes