pintest-cli 0.2.3__tar.gz → 0.2.6__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (27)
  1. {pintest_cli-0.2.3 → pintest_cli-0.2.6}/PKG-INFO +1 -1
  2. {pintest_cli-0.2.3 → pintest_cli-0.2.6}/pintest/__init__.py +1 -1
  3. {pintest_cli-0.2.3 → pintest_cli-0.2.6}/pintest/build_mapping_iterative.py +4 -14
  4. {pintest_cli-0.2.3 → pintest_cli-0.2.6}/pintest/cli.py +41 -8
  5. {pintest_cli-0.2.3 → pintest_cli-0.2.6}/pintest/cloud_mapping_db.py +71 -25
  6. {pintest_cli-0.2.3 → pintest_cli-0.2.6}/pintest/git_diff_parser.py +7 -1
  7. {pintest_cli-0.2.3 → pintest_cli-0.2.6}/pintest/pre_commit_hook.py +8 -136
  8. pintest_cli-0.2.6/pintest/push_cache.py +61 -0
  9. {pintest_cli-0.2.3 → pintest_cli-0.2.6}/pintest/update_mapping.py +4 -11
  10. {pintest_cli-0.2.3 → pintest_cli-0.2.6}/pintest_cli.egg-info/PKG-INFO +1 -1
  11. {pintest_cli-0.2.3 → pintest_cli-0.2.6}/pintest_cli.egg-info/SOURCES.txt +1 -0
  12. {pintest_cli-0.2.3 → pintest_cli-0.2.6}/setup.py +1 -1
  13. {pintest_cli-0.2.3 → pintest_cli-0.2.6}/README.md +0 -0
  14. {pintest_cli-0.2.3 → pintest_cli-0.2.6}/pintest/config.py +0 -0
  15. {pintest_cli-0.2.3 → pintest_cli-0.2.6}/pintest/coverage_mapper.py +0 -0
  16. {pintest_cli-0.2.3 → pintest_cli-0.2.6}/pintest/post_commit_hook.py +0 -0
  17. {pintest_cli-0.2.3 → pintest_cli-0.2.6}/pintest/range_set.py +0 -0
  18. {pintest_cli-0.2.3 → pintest_cli-0.2.6}/pintest/test_mapping_db_v2.py +0 -0
  19. {pintest_cli-0.2.3 → pintest_cli-0.2.6}/pintest_cli.egg-info/dependency_links.txt +0 -0
  20. {pintest_cli-0.2.3 → pintest_cli-0.2.6}/pintest_cli.egg-info/entry_points.txt +0 -0
  21. {pintest_cli-0.2.3 → pintest_cli-0.2.6}/pintest_cli.egg-info/requires.txt +0 -0
  22. {pintest_cli-0.2.3 → pintest_cli-0.2.6}/pintest_cli.egg-info/top_level.txt +0 -0
  23. {pintest_cli-0.2.3 → pintest_cli-0.2.6}/setup.cfg +0 -0
  24. {pintest_cli-0.2.3 → pintest_cli-0.2.6}/tests/__init__.py +0 -0
  25. {pintest_cli-0.2.3 → pintest_cli-0.2.6}/tests/test_git_diff_parser.py +0 -0
  26. {pintest_cli-0.2.3 → pintest_cli-0.2.6}/tests/test_new_feature.py +0 -0
  27. {pintest_cli-0.2.3 → pintest_cli-0.2.6}/tests/test_range_set.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: pintest-cli
3
- Version: 0.2.3
3
+ Version: 0.2.6
4
4
  Summary: Run only the tests affected by your code changes.
5
5
  Author: Pintest Contributors
6
6
  Classifier: Development Status :: 3 - Alpha
@@ -1,6 +1,6 @@
1
1
  """Pintest - Run only tests affected by code changes."""
2
2
 
3
- __version__ = "0.2.0"
3
+ __version__ = "0.2.5"
4
4
  __author__ = "Pintest Contributors"
5
5
  __description__ = "Intelligently select and run only tests affected by code changes"
6
6
 
@@ -28,8 +28,6 @@ from pintest.test_mapping_db_v2 import TestMappingDBV2
28
28
  from pintest.pre_commit_hook import (
29
29
  collect_all_tests,
30
30
  find_unmapped_tests,
31
- ensure_docker_containers,
32
- run_preflight_scripts
33
31
  )
34
32
 
35
33
 
@@ -54,7 +52,7 @@ def run_test_chunk_with_mapping(
54
52
  # Run chunk of tests with coverage
55
53
  cmd = [
56
54
  sys.executable, "-m", "pytest",
57
- "--cov=src",
55
+ "--cov",
58
56
  "--cov-context=test",
59
57
  "--cov-append",
60
58
  "--cov-report=",
@@ -85,7 +83,9 @@ def run_test_chunk_with_mapping(
85
83
 
86
84
  # Find and use coverage data files
87
85
  coverage_file = repo_root / ".coverage"
88
- coverage_chunks = list(repo_root.glob(".coverage.*"))
86
+ if not coverage_file.exists() and (repo_root / "coverage" / ".coverage").exists():
87
+ coverage_file = repo_root / "coverage" / ".coverage"
88
+ coverage_chunks = list(repo_root.glob(".coverage.*")) + list((repo_root / "coverage").glob(".coverage.*"))
89
89
 
90
90
  # Filter out empty files
91
91
  valid_chunks = []
@@ -171,16 +171,6 @@ def build_mapping_iteratively(
171
171
  print(f"Repository: {repo_root}")
172
172
  print(f"Mapping DB: {mapping_db}")
173
173
 
174
- # Ensure Docker and preflight checks
175
- print("\n🔧 Pre-checks...")
176
- if not ensure_docker_containers(repo_root, verbose):
177
- print("❌ Docker container check failed.")
178
- return 1
179
-
180
- if not run_preflight_scripts(repo_root, verbose):
181
- print("❌ Preflight checks failed.")
182
- return 1
183
-
184
174
  # Initialize mapping database
185
175
  with TestMappingDBV2(mapping_db) as db:
186
176
  db.initialize_schema()
@@ -70,7 +70,7 @@ class PintestRunner:
70
70
  # CloudMappingDB or optimized interface
71
71
  # Format changes for the API: [{"file": "path", "lines": [1, 2]}, ...]
72
72
  formatted_changes = [
73
- {"file": path, "lines": change.get_all_changed_lines()}
73
+ {"file": path, "lines": list(change.get_all_changed_lines())}
74
74
  for path, change in python_changes.items()
75
75
  if not change.is_new and change.get_all_changed_lines()
76
76
  ]
@@ -138,7 +138,10 @@ class PintestRunner:
138
138
  return 0
139
139
 
140
140
  # Build pytest command
141
- cmd = ["pytest"]
141
+ cmd = [sys.executable, "-m", "pytest"]
142
+
143
+ # Automatically generate coverage for local updates (suppress terminal report)
144
+ cmd.extend(["--cov", "--cov-context=test", "--cov-append", "--cov-report="])
142
145
 
143
146
  if verbose:
144
147
  cmd.append("-v")
@@ -187,11 +190,28 @@ def cmd_run(args):
187
190
  if not mapping_db:
188
191
  mapping_db = repo_root / ".test_mapping.db"
189
192
 
190
- if mapping_db.exists():
191
- if args.verbose:
192
- print(f"🖥️ Local mode: {mapping_db}", file=sys.stderr)
193
- mapping_db_obj = TestMappingDBV2(mapping_db)
194
- mapping_db_obj.connect()
193
+ if not mapping_db.exists():
194
+ print("🏗️ No local mapping DB found. Initializing build...", file=sys.stderr)
195
+ from .build_mapping_iterative import build_mapping_iteratively
196
+
197
+ # Use test_dir from config or default "tests"
198
+ cfg_local = Config.load()
199
+ default_test_dir = "tests"
200
+ if getattr(cfg_local.cloud, "test_dir", None):
201
+ default_test_dir = cfg_local.cloud.test_dir
202
+
203
+ exit_code = build_mapping_iteratively(
204
+ repo_root,
205
+ mapping_db,
206
+ test_dir=default_test_dir,
207
+ verbose=args.verbose
208
+ )
209
+ sys.exit(exit_code)
210
+
211
+ if args.verbose:
212
+ print(f"🖥️ Local mode: {mapping_db}", file=sys.stderr)
213
+ mapping_db_obj = TestMappingDBV2(mapping_db)
214
+ mapping_db_obj.connect()
195
215
 
196
216
  # Initialize runner
197
217
  runner = PintestRunner(
@@ -201,9 +221,13 @@ def cmd_run(args):
201
221
  )
202
222
 
203
223
  try:
224
+ base_branch = args.base_branch
225
+ if base_branch == "master" and cfg.is_cloud_enabled and getattr(cfg.cloud, "branch", None):
226
+ base_branch = cfg.cloud.branch
227
+
204
228
  # Find affected tests
205
229
  affected_tests = runner.find_affected_tests(
206
- args.base_branch
230
+ base_branch
207
231
  )
208
232
 
209
233
  # Unmapped tests discovery (Cloud mode only)
@@ -240,6 +264,13 @@ def cmd_run(args):
240
264
  verbose=args.verbose,
241
265
  pytest_args=pytest_extra_args
242
266
  )
267
+
268
+ # Auto-update the local database with the new coverage
269
+ if not use_cloud and not args.dry_run:
270
+ print("\n🔄 Updating mapping database with new coverage...")
271
+ from .update_mapping import update_mapping
272
+ update_mapping(repo_root, mapping_db=mapping_db, verbose=args.verbose)
273
+
243
274
  sys.exit(exit_code)
244
275
 
245
276
  except KeyboardInterrupt:
@@ -483,6 +514,8 @@ def cmd_push(args):
483
514
  sys.exit(1)
484
515
 
485
516
  coverage_file = repo_root / ".coverage"
517
+ if not coverage_file.exists() and (repo_root / "coverage" / ".coverage").exists():
518
+ coverage_file = repo_root / "coverage" / ".coverage"
486
519
  if not coverage_file.exists():
487
520
  print(f"❌ Coverage file not found at {coverage_file}")
488
521
  print("Run your tests first (e.g. 'pytest --cov --cov-context=test')")
@@ -108,14 +108,14 @@ class CloudMappingDB:
108
108
  True on success, False on failure
109
109
  """
110
110
  from .coverage_mapper import CoverageMapper # existing module
111
+ from .push_cache import PushCache
111
112
 
112
113
  if not coverage_file.exists():
113
114
  if verbose:
114
115
  print(f"⚠️ No coverage file at {coverage_file} — skipping cloud push")
115
116
  return False
116
117
 
117
- if verbose:
118
- print("☁️ Parsing coverage data for cloud upload...", flush=True)
118
+ print("☁️ Parsing coverage data for cloud upload...", flush=True)
119
119
 
120
120
  mapper = CoverageMapper(coverage_file)
121
121
  try:
@@ -138,47 +138,93 @@ class CloudMappingDB:
138
138
  test_file_ranges[key] = RangeSet()
139
139
  test_file_ranges[key].add_range(line_num, line_num)
140
140
 
141
+ cache_db_path = coverage_file.parent / ".pintest_push_cache.db"
142
+ push_cache = PushCache(cache_db_path)
143
+ cached_state = push_cache.get_cached_state(self._branch)
144
+
141
145
  mappings = []
142
146
  for (test_name, file_path), rs in test_file_ranges.items():
143
- mappings.append({
144
- "test_name": test_name,
145
- "file_path": file_path,
146
- "ranges": rs.to_compact_string(),
147
- })
147
+ compact_ranges = rs.to_compact_string()
148
+ key = (test_name, file_path)
149
+ if key not in cached_state or cached_state[key] != compact_ranges:
150
+ mappings.append({
151
+ "test_name": test_name,
152
+ "file_path": file_path,
153
+ "ranges": compact_ranges,
154
+ })
148
155
 
149
156
  if not mappings:
150
- if verbose:
151
- print("ℹ️ No coverage mappings to push")
157
+ print("ℹ️ All coverage mappings are up-to-date with Pintest Cloud (0 deltas)")
152
158
  return True
153
159
 
154
- if verbose:
155
- print(f"☁️ Pushing {len(mappings)} mappings to Pintest...", flush=True)
160
+ print(f"☁️ Pushing {len(mappings)} delta mappings to Pintest...", flush=True)
156
161
 
157
- payload = {
162
+ payload_base = {
158
163
  "branch": self._branch,
159
- "mappings": mappings,
160
164
  }
161
165
  if run_stats:
162
- payload["run_stats"] = run_stats
166
+ payload_base["run_stats"] = run_stats
163
167
 
164
- try:
168
+ from concurrent.futures import ThreadPoolExecutor, as_completed
169
+
170
+ # Send in chunks of 1,000 parallelized in 10 threads
171
+ CHUNK_SIZE = 1000
172
+ success = True
173
+ total_inserted = 0
174
+ total_updated = 0
175
+ total_tests_cloud = 0
176
+
177
+ chunks = [mappings[i:i + CHUNK_SIZE] for i in range(0, len(mappings), CHUNK_SIZE)]
178
+ total_chunks = len(chunks)
179
+
180
+ def push_chunk(chunk_num, chunk_data):
181
+ print(f"☁️ Pushing chunk {chunk_num}/{total_chunks} ({len(chunk_data)} mappings)...", flush=True)
182
+ payload = payload_base.copy()
183
+ payload["mappings"] = chunk_data
165
184
  resp = self.session.post(
166
185
  f"{self._api}/api/v1/repos/{self._repo_id}/coverage",
167
186
  json=payload,
168
187
  timeout=60,
169
188
  )
170
189
  resp.raise_for_status()
190
+ push_cache.batch_upsert(self._branch, chunk_data)
171
191
  data = resp.json()
172
- if verbose:
173
- print(
174
- f"☁️ Cloud sync: {data['inserted']} new, "
175
- f"{data['updated']} updated, "
176
- f"{data['total_tests']} total tests"
177
- )
178
- return True
179
- except requests.RequestException as e:
180
- print(f"⚠️ Cloud push failed: {e}", file=sys.stderr)
181
- return False
192
+ inserted = data.get('inserted', 0)
193
+ updated = data.get('updated', 0)
194
+ t_tests = data.get('total_tests', 0)
195
+ print(f" ✓ Chunk {chunk_num}/{total_chunks} complete ({inserted} new, {updated} updated)", flush=True)
196
+ return inserted, updated, t_tests
197
+
198
+ with ThreadPoolExecutor(max_workers=10) as executor:
199
+ futures = {executor.submit(push_chunk, idx + 1, chunk): idx + 1 for idx, chunk in enumerate(chunks)}
200
+ for future in as_completed(futures):
201
+ chunk_num = futures[future]
202
+ try:
203
+ inserted, updated, t_tests = future.result()
204
+ total_inserted += inserted
205
+ total_updated += updated
206
+ if t_tests > total_tests_cloud:
207
+ total_tests_cloud = t_tests
208
+ except requests.RequestException as e:
209
+ print(f"⚠️ Cloud push failed on chunk {chunk_num}: {e}", file=sys.stderr)
210
+ success = False
211
+ for f in futures:
212
+ f.cancel()
213
+ except Exception as e:
214
+ if success:
215
+ print(f"⚠️ Cloud push failed on chunk {chunk_num}: {e}", file=sys.stderr)
216
+ success = False
217
+ for f in futures:
218
+ f.cancel()
219
+
220
+ if success:
221
+ print(
222
+ f"☁️ Cloud sync complete: {total_inserted} new, "
223
+ f"{total_updated} updated, "
224
+ f"{total_tests_cloud} total tests",
225
+ flush=True
226
+ )
227
+ return success
182
228
 
183
229
  def get_all_test_names(self) -> Set[str]:
184
230
  """Fetch all unique test names from the cloud."""
@@ -62,7 +62,13 @@ class GitDiffParser:
62
62
  # For pre-commit hooks, we usually want staged changes
63
63
  cmd = ["git", "diff", "--cached", "--unified=0", "HEAD"]
64
64
  else:
65
- cmd = ["git", "diff", "--unified=0", f"{base_branch}...HEAD"]
65
+ # Find merge base to handle branches properly
66
+ mb_cmd = ["git", "merge-base", base_branch, "HEAD"]
67
+ mb_result = subprocess.run(mb_cmd, cwd=self.repo_root, capture_output=True, text=True, check=True)
68
+ merge_base = mb_result.stdout.strip()
69
+
70
+ # Diff from merge base to working tree (includes uncommitted changes)
71
+ cmd = ["git", "diff", "--unified=0", merge_base]
66
72
 
67
73
  result = subprocess.run(
68
74
  cmd,
@@ -115,140 +115,6 @@ def ensure_git_lfs(repo_root: Path, verbose: bool = False) -> bool:
115
115
  return True # Not critical - continue anyway
116
116
 
117
117
 
118
- def ensure_docker_containers(repo_root: Path, verbose: bool = False) -> bool:
119
- """
120
- Ensure Docker containers are running from a docker-compose-ut.yml or docker-compose.yml file.
121
-
122
- Searches for a compose file in common locations under the repo root.
123
-
124
- Args:
125
- repo_root: Repository root directory
126
- verbose: Print verbose output
127
-
128
- Returns:
129
- True if containers are running (or no compose file found), False on failure
130
- """
131
- # Search for a docker-compose file in common locations
132
- candidates = [
133
- repo_root / "docker-compose-ut.yml",
134
- repo_root / "docker-compose.yml",
135
- ] + list(repo_root.rglob("docker-compose-ut.yml"))
136
-
137
- docker_compose_path = next((p for p in candidates if p.exists()), None)
138
-
139
- if docker_compose_path is None:
140
- if verbose:
141
- print("ℹ️ No docker-compose file found, skipping Docker check")
142
- return True # No compose file in this repo, skip Docker check
143
-
144
- if verbose:
145
- print("🐳 Checking Docker containers...", flush=True)
146
-
147
- try:
148
- # Check if containers are running
149
- # Note: In Docker Compose v5.x, ps -q may return exit code 1 when no containers exist
150
- result = subprocess.run(
151
- ["docker-compose", "-f", str(docker_compose_path), "ps", "-q"],
152
- capture_output=True,
153
- text=True,
154
- check=False # Don't raise on non-zero exit (containers may not exist yet)
155
- )
156
-
157
- # If ps command failed or no containers running, start them
158
- if result.returncode != 0 or not result.stdout.strip():
159
- # No containers running or project doesn't exist yet, start them
160
- if verbose:
161
- print(" Starting Docker containers...")
162
- start_result = subprocess.run(
163
- ["docker-compose", "-f", str(docker_compose_path), "up", "-d"],
164
- capture_output=not verbose,
165
- text=True,
166
- check=False
167
- )
168
-
169
- if start_result.returncode != 0:
170
- print(f"❌ Failed to start Docker containers (exit code {start_result.returncode})", file=sys.stderr)
171
- if start_result.stderr:
172
- print(start_result.stderr, file=sys.stderr)
173
- return False
174
-
175
- if verbose:
176
- print(" ✓ Docker containers started")
177
- else:
178
- if verbose:
179
- print(" ✓ Docker containers already running")
180
-
181
- return True
182
-
183
- except FileNotFoundError:
184
- print("❌ docker-compose not found. Install it: https://docs.docker.com/compose/install/", file=sys.stderr)
185
- return False
186
-
187
-
188
- def run_preflight_scripts(repo_root: Path, verbose: bool = False) -> bool:
189
- """
190
- Run preflight scripts for Neo4j and Postgres.
191
-
192
- Args:
193
- repo_root: Repository root directory
194
- verbose: Print verbose output
195
-
196
- Returns:
197
- True if all preflight scripts passed, False otherwise
198
- """
199
- script_names = [
200
- "preflight-postgres.sh",
201
- "preflight-neo4j.sh"
202
- ]
203
-
204
- if verbose:
205
- print("🔧 Running preflight checks...", flush=True)
206
-
207
- for script_name in script_names:
208
- # Try multiple possible locations for the script
209
- possible_locations = [
210
- repo_root / "scripts" / script_name, # Workspace root scripts
211
- Path.cwd() / "scripts" / script_name, # Current directory scripts
212
- ]
213
-
214
- script = None
215
- for location in possible_locations:
216
- if location.exists():
217
- script = location
218
- break
219
-
220
- if script is None:
221
- if verbose:
222
- print(f" ⚠️ Preflight script not found: {script_name} (tried {len(possible_locations)} locations)")
223
- continue
224
-
225
- try:
226
- if verbose:
227
- print(f" Running {script.name}...", flush=True)
228
-
229
- result = subprocess.run(
230
- [str(script)],
231
- cwd=repo_root,
232
- capture_output=not verbose,
233
- text=True,
234
- check=True
235
- )
236
-
237
- if verbose:
238
- print(f" ✓ {script.name} passed")
239
-
240
- except subprocess.CalledProcessError as e:
241
- print(f"❌ Preflight check failed: {script.name}", file=sys.stderr)
242
- if not verbose and e.stderr:
243
- print(e.stderr, file=sys.stderr)
244
- return False
245
-
246
- if verbose:
247
- print(" ✓ All preflight checks passed")
248
-
249
- return True
250
-
251
-
252
118
  def should_exclude_test(test_name: str) -> bool:
253
119
  """
254
120
  Check if a test should be excluded from runs and mapping.
@@ -560,7 +426,7 @@ def run_test_chunk_with_mapping_update(
560
426
  # Run tests with coverage - output flows directly to terminal
561
427
  cmd = [
562
428
  sys.executable, "-m", "pytest",
563
- "--cov=src",
429
+ "--cov",
564
430
  "--cov-context=test",
565
431
  "--cov-append",
566
432
  "--cov-report=",
@@ -591,6 +457,8 @@ def run_test_chunk_with_mapping_update(
591
457
 
592
458
  try:
593
459
  coverage_file = repo_root / ".coverage"
460
+ if not coverage_file.exists() and (repo_root / "coverage" / ".coverage").exists():
461
+ coverage_file = repo_root / "coverage" / ".coverage"
594
462
  if not coverage_file.exists():
595
463
  print(f" ⚠️ No coverage file found at {coverage_file}", flush=True)
596
464
  print(f" Tests may have crashed before generating coverage data.", flush=True)
@@ -690,7 +558,7 @@ def run_single_test_with_mapping_update(
690
558
  # Run single test with coverage
691
559
  cmd = [
692
560
  "python", "-m", "pytest",
693
- "--cov=src",
561
+ "--cov",
694
562
  "--cov-context=test",
695
563
  "--cov-append",
696
564
  "--cov-report=",
@@ -713,6 +581,8 @@ def run_single_test_with_mapping_update(
713
581
  # Update mapping database with new coverage
714
582
  try:
715
583
  coverage_file = repo_root / ".coverage"
584
+ if not coverage_file.exists() and (repo_root / "coverage" / ".coverage").exists():
585
+ coverage_file = repo_root / "coverage" / ".coverage"
716
586
  if coverage_file.exists():
717
587
  with TestMappingDBV2(mapping_db_path) as db:
718
588
  db.import_from_coverage(coverage_file, incremental=True)
@@ -925,6 +795,8 @@ def combine_coverage_files(repo_root: Path, new_coverage: Path):
925
795
  new_coverage: Path to new coverage file from test run
926
796
  """
927
797
  existing_coverage = repo_root / ".coverage"
798
+ if not existing_coverage.exists() and (repo_root / "coverage" / ".coverage").exists():
799
+ existing_coverage = repo_root / "coverage" / ".coverage"
928
800
 
929
801
  if not new_coverage.exists():
930
802
  print(f"Warning: New coverage file not found: {new_coverage}", file=sys.stderr)
@@ -0,0 +1,61 @@
1
+ import sqlite3
2
+ from pathlib import Path
3
+ from typing import Dict, List, Tuple
4
+
5
+
6
+ class PushCache:
7
+ """
8
+ Manages the local SQLite cache for Pintest Cloud delta pushes.
9
+ Tracks previously synchronized test coverage mappings to filter out unmodified entries.
10
+ """
11
+
12
+ def __init__(self, db_path: Path):
13
+ self.db_path = db_path
14
+ self._init_db()
15
+
16
+ def _init_db(self):
17
+ """Initialize the local push cache database schema."""
18
+ with sqlite3.connect(self.db_path) as conn:
19
+ conn.execute("PRAGMA journal_mode=WAL")
20
+ conn.execute("""
21
+ CREATE TABLE IF NOT EXISTS push_cache (
22
+ branch TEXT,
23
+ test_name TEXT,
24
+ file_path TEXT,
25
+ ranges TEXT,
26
+ last_pushed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
27
+ PRIMARY KEY (branch, test_name, file_path)
28
+ )
29
+ """)
30
+ conn.execute("CREATE INDEX IF NOT EXISTS idx_push_cache_lookup ON push_cache(branch)")
31
+
32
+ def get_cached_state(self, branch: str) -> Dict[Tuple[str, str], str]:
33
+ """
34
+ Fetch the last successfully pushed state for the active branch.
35
+
36
+ Returns:
37
+ {(test_name, file_path): ranges_string}
38
+ """
39
+ with sqlite3.connect(self.db_path) as conn:
40
+ cursor = conn.execute(
41
+ "SELECT test_name, file_path, ranges FROM push_cache WHERE branch = ?",
42
+ (branch,)
43
+ )
44
+ return {(row[0], row[1]): row[2] for row in cursor}
45
+
46
+ def batch_upsert(self, branch: str, mappings: List[Dict[str, str]]):
47
+ """
48
+ Atomically update the cache with successfully pushed mappings.
49
+
50
+ Args:
51
+ branch: Active git branch
52
+ mappings: List of dicts [{"test_name": ..., "file_path": ..., "ranges": ...}]
53
+ """
54
+ with sqlite3.connect(self.db_path) as conn:
55
+ conn.executemany("""
56
+ INSERT INTO push_cache (branch, test_name, file_path, ranges, last_pushed_at)
57
+ VALUES (?, ?, ?, ?, CURRENT_TIMESTAMP)
58
+ ON CONFLICT(branch, test_name, file_path) DO UPDATE SET
59
+ ranges = EXCLUDED.ranges,
60
+ last_pushed_at = CURRENT_TIMESTAMP
61
+ """, [(branch, m["test_name"], m["file_path"], m["ranges"]) for m in mappings])
@@ -15,7 +15,7 @@ SCRIPT_DIR = Path(__file__).parent
15
15
  sys.path.insert(0, str(SCRIPT_DIR))
16
16
 
17
17
  from pintest.test_mapping_db_v2 import TestMappingDBV2
18
- from pintest.pre_commit_hook import ensure_docker_containers, run_preflight_scripts
18
+ # from pintest.pre_commit_hook import ensure_docker_containers, run_preflight_scripts
19
19
 
20
20
 
21
21
  def update_mapping(
@@ -38,21 +38,14 @@ def update_mapping(
38
38
  # Default paths
39
39
  if coverage_file is None:
40
40
  coverage_file = repo_root / ".coverage"
41
+ if not coverage_file.exists() and (repo_root / "coverage" / ".coverage").exists():
42
+ coverage_file = repo_root / "coverage" / ".coverage"
41
43
 
42
44
  if mapping_db is None:
43
45
  mapping_db = repo_root / ".test_mapping.db"
44
46
 
45
- # Ensure Docker containers are running (if a compose file is present)
46
47
  if verbose:
47
- print("\n🔧 Pre-checks...")
48
-
49
- if not ensure_docker_containers(repo_root, verbose):
50
- print("❌ Docker container check failed.", file=sys.stderr)
51
- return 1
52
-
53
- if not run_preflight_scripts(repo_root, verbose):
54
- print("❌ Preflight checks failed.", file=sys.stderr)
55
- return 1
48
+ pass
56
49
 
57
50
  if not coverage_file.exists():
58
51
  print(f"❌ Error: Coverage file not found: {coverage_file}", file=sys.stderr)
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: pintest-cli
3
- Version: 0.2.3
3
+ Version: 0.2.6
4
4
  Summary: Run only the tests affected by your code changes.
5
5
  Author: Pintest Contributors
6
6
  Classifier: Development Status :: 3 - Alpha
@@ -9,6 +9,7 @@ pintest/coverage_mapper.py
9
9
  pintest/git_diff_parser.py
10
10
  pintest/post_commit_hook.py
11
11
  pintest/pre_commit_hook.py
12
+ pintest/push_cache.py
12
13
  pintest/range_set.py
13
14
  pintest/test_mapping_db_v2.py
14
15
  pintest/update_mapping.py
@@ -5,7 +5,7 @@ with open("README.md", "r", encoding="utf-8") as fh:
5
5
 
6
6
  setup(
7
7
  name="pintest-cli",
8
- version="0.2.3",
8
+ version="0.2.6",
9
9
  description="Run only the tests affected by your code changes.",
10
10
  long_description=long_description,
11
11
  long_description_content_type="text/markdown",
File without changes
File without changes