tracepipe-0.2.0-py3-none-any.whl → tracepipe-0.3.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,309 @@
1
+ # tracepipe/value_provenance.py
2
+ """
3
+ Cell-level value provenance tracking.
4
+
5
+ Provides detailed history of how specific cell values changed
6
+ throughout the pipeline, including null introduction tracking.
7
+
8
+ Usage:
9
+ # Get history of a specific cell
10
+ history = tp.explain_value(row_id=123, column="price", df=result)
11
+
12
+ # Analyze where nulls came from in a column
13
+ analysis = tp.null_analysis("email", df)
14
+ """
15
+
16
+ from dataclasses import dataclass
17
+ from typing import Any, Optional
18
+
19
+ import pandas as pd
20
+
21
+ from .context import get_context
22
+
23
+
24
+ @dataclass
25
+ class ValueEvent:
26
+ """Single change event for a cell."""
27
+
28
+ step_id: int
29
+ operation: str
30
+ old_value: Any
31
+ new_value: Any
32
+ change_type: str
33
+ timestamp: float
34
+ code_location: Optional[str]
35
+
36
+ def to_dict(self) -> dict:
37
+ """Export to dictionary."""
38
+ return {
39
+ "step_id": self.step_id,
40
+ "operation": self.operation,
41
+ "old_value": self.old_value,
42
+ "new_value": self.new_value,
43
+ "change_type": self.change_type,
44
+ "timestamp": self.timestamp,
45
+ "code_location": self.code_location,
46
+ }
47
+
48
+
49
+ @dataclass
50
+ class ValueHistory:
51
+ """Complete history of a cell's value."""
52
+
53
+ row_id: int
54
+ column: str
55
+ current_value: Any
56
+ events: list[ValueEvent]
57
+ became_null_at: Optional[int] = None # step_id
58
+ became_null_by: Optional[str] = None # operation
59
+
60
+ def __repr__(self) -> str:
61
+ lines = [f"Value History: row {self.row_id}, column '{self.column}'"]
62
+ lines.append(f" Current: {self.current_value}")
63
+ lines.append(f" Changes: {len(self.events)}")
64
+
65
+ if self.became_null_at is not None:
66
+ lines.append(f" ! Became null at step {self.became_null_at} by {self.became_null_by}")
67
+
68
+ for event in self.events[-5:]:
69
+ lines.append(f" {event.operation}: {event.old_value} -> {event.new_value}")
70
+
71
+ if len(self.events) > 5:
72
+ lines.append(f" ... and {len(self.events) - 5} more events")
73
+
74
+ return "\n".join(lines)
75
+
76
+ @property
77
+ def was_modified(self) -> bool:
78
+ """True if value was ever modified."""
79
+ return len(self.events) > 0
80
+
81
+ @property
82
+ def is_null(self) -> bool:
83
+ """True if current value is null."""
84
+ return pd.isna(self.current_value)
85
+
86
+ def to_dict(self) -> dict:
87
+ """Export to dictionary."""
88
+ return {
89
+ "row_id": self.row_id,
90
+ "column": self.column,
91
+ "current_value": self.current_value,
92
+ "events": [e.to_dict() for e in self.events],
93
+ "became_null_at": self.became_null_at,
94
+ "became_null_by": self.became_null_by,
95
+ }
96
+
97
+
98
+ def explain_value(
99
+ row_id: int,
100
+ column: str,
101
+ df: Optional[pd.DataFrame] = None,
102
+ follow_lineage: bool = True,
103
+ ) -> ValueHistory:
104
+ """
105
+ Get complete history of a specific cell's value.
106
+
107
+ Args:
108
+ row_id: Row ID to trace
109
+ column: Column name
110
+ df: Optional DataFrame for current value lookup
111
+ follow_lineage: If True, include pre-merge parent history (default: True)
112
+
113
+ Returns:
114
+ ValueHistory with all changes to this cell
115
+ """
116
+ ctx = get_context()
117
+ store = ctx.store
118
+
119
+ # Get current value if df provided
120
+ current_value = None
121
+ if df is not None:
122
+ rids = ctx.row_manager.get_ids_array(df)
123
+ if rids is not None:
124
+ # Find position of this row_id
125
+ matches = (rids == row_id).nonzero()[0]
126
+ if len(matches) > 0 and column in df.columns:
127
+ current_value = df.iloc[matches[0]][column]
128
+
129
+ # Collect events - use lineage-aware method if requested
130
+ if follow_lineage and hasattr(store, "get_cell_history_with_lineage"):
131
+ # Get cell history including pre-merge parent history
132
+ raw_events = store.get_cell_history_with_lineage(row_id, column)
133
+ else:
134
+ # Fallback to direct row_id lookup only
135
+ raw_events = [e for e in store.get_row_history(row_id) if e["col"] == column]
136
+
137
+ # Convert to ValueEvent objects
138
+ events = []
139
+ became_null_at = None
140
+ became_null_by = None
141
+
142
+ for diff in raw_events:
143
+ events.append(
144
+ ValueEvent(
145
+ step_id=diff["step_id"],
146
+ operation=diff.get("operation", "unknown"),
147
+ old_value=diff["old_val"],
148
+ new_value=diff["new_val"],
149
+ change_type=diff.get("change_type", "UNKNOWN"),
150
+ timestamp=diff.get("timestamp", 0) or 0,
151
+ code_location=diff.get("code_location"),
152
+ )
153
+ )
154
+
155
+ # Track when value became null
156
+ if became_null_at is None and pd.isna(diff["new_val"]) and not pd.isna(diff["old_val"]):
157
+ became_null_at = diff["step_id"]
158
+ became_null_by = diff.get("operation", "unknown")
159
+
160
+ # Sort chronologically by step_id (the lineage path should already return ordered events)
161
+ events.sort(key=lambda e: e.step_id)
162
+
163
+ return ValueHistory(
164
+ row_id=row_id,
165
+ column=column,
166
+ current_value=current_value,
167
+ events=events,
168
+ became_null_at=became_null_at,
169
+ became_null_by=became_null_by,
170
+ )
171
+
172
+
173
+ @dataclass
174
+ class NullAnalysis:
175
+ """Analysis of how nulls appeared in a column."""
176
+
177
+ column: str
178
+ total_nulls: int
179
+ null_sources: dict[str, int] # operation -> count
180
+ sample_row_ids: list[int]
181
+
182
+ def __repr__(self) -> str:
183
+ lines = [f"Null Analysis: '{self.column}'"]
184
+ lines.append(f" Total nulls: {self.total_nulls}")
185
+
186
+ if self.null_sources:
187
+ lines.append(" Sources:")
188
+ for op, count in sorted(self.null_sources.items(), key=lambda x: -x[1]):
189
+ lines.append(f" {op}: {count}")
190
+ else:
191
+ lines.append(" No tracked null introductions")
192
+
193
+ if self.sample_row_ids:
194
+ lines.append(f" Sample row IDs: {self.sample_row_ids[:5]}")
195
+
196
+ return "\n".join(lines)
197
+
198
+ @property
199
+ def has_untracked_nulls(self) -> bool:
200
+ """True if some nulls were not tracked by TracePipe."""
201
+ tracked = sum(self.null_sources.values())
202
+ return tracked < self.total_nulls
203
+
204
+ def to_dict(self) -> dict:
205
+ """Export to dictionary."""
206
+ return {
207
+ "column": self.column,
208
+ "total_nulls": self.total_nulls,
209
+ "null_sources": self.null_sources,
210
+ "sample_row_ids": self.sample_row_ids,
211
+ "has_untracked_nulls": self.has_untracked_nulls,
212
+ }
213
+
214
+
215
+ def null_analysis(column: str, df: pd.DataFrame) -> NullAnalysis:
216
+ """
217
+ Analyze how nulls appeared in a column.
218
+
219
+ Returns breakdown of which operations introduced nulls.
220
+
221
+ Args:
222
+ column: Column name to analyze
223
+ df: Current DataFrame
224
+
225
+ Returns:
226
+ NullAnalysis with breakdown of null sources
227
+ """
228
+ ctx = get_context()
229
+ store = ctx.store
230
+
231
+ if column not in df.columns:
232
+ return NullAnalysis(column=column, total_nulls=0, null_sources={}, sample_row_ids=[])
233
+
234
+ rids = ctx.row_manager.get_ids_array(df)
235
+ if rids is None:
236
+ return NullAnalysis(
237
+ column=column,
238
+ total_nulls=int(df[column].isna().sum()),
239
+ null_sources={},
240
+ sample_row_ids=[],
241
+ )
242
+
243
+ # Find null rows
244
+ null_mask = df[column].isna()
245
+ null_rids = set(rids[null_mask].tolist())
246
+
247
+ # Track which operations introduced nulls
248
+ null_sources: dict[str, int] = {}
249
+ step_map = {s.step_id: s for s in store.steps}
250
+ sample_ids: list[int] = []
251
+
252
+ for diff in store._iter_all_diffs():
253
+ if diff["col"] == column and diff["row_id"] in null_rids:
254
+ if pd.isna(diff["new_val"]) and not pd.isna(diff["old_val"]):
255
+ step = step_map.get(diff["step_id"])
256
+ op = step.operation if step else "unknown"
257
+ null_sources[op] = null_sources.get(op, 0) + 1
258
+ if len(sample_ids) < 10:
259
+ sample_ids.append(diff["row_id"])
260
+
261
+ return NullAnalysis(
262
+ column=column,
263
+ total_nulls=len(null_rids),
264
+ null_sources=null_sources,
265
+ sample_row_ids=sample_ids,
266
+ )
267
+
268
+
269
+ def column_changes_summary(column: str, df: pd.DataFrame) -> dict[str, Any]:
270
+ """
271
+ Get summary of all changes to a column.
272
+
273
+ Args:
274
+ column: Column name
275
+ df: Current DataFrame
276
+
277
+ Returns:
278
+ Dict with summary statistics
279
+ """
280
+ ctx = get_context()
281
+ store = ctx.store
282
+
283
+ rids = ctx.row_manager.get_ids_array(df)
284
+ if rids is None:
285
+ return {
286
+ "column": column,
287
+ "total_changes": 0,
288
+ "changes_by_operation": {},
289
+ "unique_rows_modified": 0,
290
+ }
291
+
292
+ rid_set = set(rids.tolist())
293
+ changes_by_op: dict[str, int] = {}
294
+ modified_rows: set = set()
295
+ step_map = {s.step_id: s for s in store.steps}
296
+
297
+ for diff in store._iter_all_diffs():
298
+ if diff["col"] == column and diff["row_id"] in rid_set:
299
+ step = step_map.get(diff["step_id"])
300
+ op = step.operation if step else "unknown"
301
+ changes_by_op[op] = changes_by_op.get(op, 0) + 1
302
+ modified_rows.add(diff["row_id"])
303
+
304
+ return {
305
+ "column": column,
306
+ "total_changes": sum(changes_by_op.values()),
307
+ "changes_by_operation": changes_by_op,
308
+ "unique_rows_modified": len(modified_rows),
309
+ }
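
The three public helpers in this new module compose naturally. Below is a minimal sketch of how they might be exercised, following the module docstring for `explain_value` and `null_analysis`; the `tracepipe.value_provenance` import path comes from the file header above, and the concrete `row_id` and tiny DataFrame are illustrative assumptions only.

```python
# Sketch only: exercising the module's public helpers end to end.
# tp.explain_value / tp.null_analysis follow the module docstring;
# row IDs are TracePipe-internal, so row_id=1 is purely illustrative.
import pandas as pd
import tracepipe as tp
from tracepipe.value_provenance import column_changes_summary

tp.enable(mode="debug")

df = pd.DataFrame({"price": [10.0, None, 30.0],
                   "email": ["a@x.com", None, "c@x.com"]})
df["price"] = df["price"].fillna(0)

# Full change history for one cell
history = tp.explain_value(row_id=1, column="price", df=df)
print(history)                 # repr: current value, change count, last 5 events
print(history.was_modified)    # True once any tracked change touched the cell

# Which operations introduced nulls into a column?
analysis = tp.null_analysis("email", df)
print(analysis.has_untracked_nulls)  # True for nulls TracePipe never saw change

# Aggregate view of every change to a column
summary = column_changes_summary("price", df)
print(summary["changes_by_operation"])  # e.g. {"DataFrame.fillna": 1}
```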
@@ -156,12 +156,17 @@ def _get_groups_summary(ctx) -> list[dict]:
156
156
  groups = []
157
157
  for mapping in ctx.store.aggregation_mappings:
158
158
  for group_key, row_ids in mapping.membership.items():
159
- is_count_only = isinstance(row_ids, int)
159
+ # Count-only groups are stored as [-count] (list with one negative element)
160
+ is_count_only = len(row_ids) == 1 and row_ids[0] < 0
161
+ if is_count_only:
162
+ row_count = abs(row_ids[0])
163
+ else:
164
+ row_count = len(row_ids)
160
165
  groups.append(
161
166
  {
162
167
  "key": str(group_key),
163
168
  "column": mapping.group_column,
164
- "row_count": row_ids if is_count_only else len(row_ids),
169
+ "row_count": row_count,
165
170
  "is_count_only": is_count_only,
166
171
  "row_ids": [] if is_count_only else row_ids[:100], # First 100 only
167
172
  "agg_functions": mapping.agg_functions,
@@ -1059,9 +1064,13 @@ document.addEventListener('DOMContentLoaded', () => {
1059
1064
  """
1060
1065
 
1061
1066
 
1062
- def save(filepath: str) -> None:
1067
+ def save(filepath: str, title: str = "TracePipe Dashboard") -> None:
1063
1068
  """
1064
1069
  Save interactive lineage report as HTML.
1070
+
1071
+ Args:
1072
+ filepath: Path to save the HTML file
1073
+ title: Title for the report (shown in browser tab and header)
1065
1074
  """
1066
1075
  ctx = get_context()
1067
1076
 
@@ -1073,7 +1082,9 @@ def save(filepath: str) -> None:
1073
1082
  row_index = _build_row_index(ctx)
1074
1083
 
1075
1084
  # Total registered rows (approximate)
1076
- total_registered = ctx.row_manager.next_row_id if hasattr(ctx.row_manager, "next_row_id") else 0
1085
+ total_registered = (
1086
+ ctx.row_manager._next_row_id if hasattr(ctx.row_manager, "_next_row_id") else 0
1087
+ )
1077
1088
 
1078
1089
  # Identify Suggested Rows for UX
1079
1090
  suggested_rows = {"dropped": [], "modified": [], "survivors": []}
@@ -1181,13 +1192,16 @@ def save(filepath: str) -> None:
1181
1192
  </div>
1182
1193
  """
1183
1194
 
1195
+ # Escape title for HTML
1196
+ escaped_title = html.escape(title)
1197
+
1184
1198
  html_content = f"""
1185
1199
  <!DOCTYPE html>
1186
1200
  <html lang="en">
1187
1201
  <head>
1188
1202
  <meta charset="utf-8">
1189
1203
  <meta name="viewport" content="width=device-width, initial-scale=1">
1190
- <title>TracePipe Dashboard</title>
1204
+ <title>{escaped_title}</title>
1191
1205
  {CSS}
1192
1206
  </head>
1193
1207
  <body>
@@ -1217,7 +1231,7 @@ def save(filepath: str) -> None:
1217
1231
  <div class="main-content">
1218
1232
  <!-- Top Bar -->
1219
1233
  <div class="top-bar">
1220
- <div class="page-title">Data Lineage Report</div>
1234
+ <div class="page-title">{escaped_title}</div>
1221
1235
  <div class="search-wrapper">
1222
1236
  <i class="search-icon-abs">🔍</i>
1223
1237
  <input type="text" id="globalSearch" class="search-input"
@@ -1236,7 +1250,8 @@ def save(filepath: str) -> None:
1236
1250
  </div>
1237
1251
  <div class="card">
1238
1252
  <h3>Retention</h3>
1239
- <div class="metric-value">{(final_rows / initial_rows * 100) if initial_rows else 0:.1f}%</div>
1253
+ <div class="metric-value">{
1254
+ (final_rows / initial_rows * 100) if initial_rows else 0:.1f}%</div>
1240
1255
  <div class="metric-sub">{_format_number(final_rows)} of {
1241
1256
  _format_number(initial_rows)
1242
1257
  } rows</div>
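
A usage sketch for the new `title` parameter of `save()`, matching the signature shown in the hunks above; the import path is an assumption, since the diff does not show which module defines `save`.

```python
# Sketch: the new title parameter of save(), per the hunks above.
# The import path below is an assumption; adjust to wherever save() lives.
from tracepipe.report import save

# The title is passed through html.escape() before interpolation, so a raw
# user string is safe in both the <title> tag and the page header.
save("q3_audit.html", title="Q3 Pipeline Audit <beta>")
```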
@@ -0,0 +1,308 @@
1
+ Metadata-Version: 2.4
2
+ Name: tracepipe
3
+ Version: 0.3.1
4
+ Summary: Row-level data lineage tracking for pandas pipelines
5
+ Project-URL: Homepage, https://github.com/tracepipe/tracepipe
6
+ Project-URL: Documentation, https://tracepipe.github.io/tracepipe/
7
+ Project-URL: Repository, https://github.com/tracepipe/tracepipe.git
8
+ Project-URL: Issues, https://github.com/tracepipe/tracepipe/issues
9
+ Project-URL: Changelog, https://tracepipe.github.io/tracepipe/changelog/
10
+ Author: Gauthier Piarrette
11
+ License: MIT License
12
+
13
+ Copyright (c) 2026 Gauthier Piarrette
14
+
15
+ Permission is hereby granted, free of charge, to any person obtaining a copy
16
+ of this software and associated documentation files (the "Software"), to deal
17
+ in the Software without restriction, including without limitation the rights
18
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
19
+ copies of the Software, and to permit persons to whom the Software is
20
+ furnished to do so, subject to the following conditions:
21
+
22
+ The above copyright notice and this permission notice shall be included in all
23
+ copies or substantial portions of the Software.
24
+
25
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
26
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
27
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
28
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
29
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
30
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
31
+ SOFTWARE.
32
+ License-File: LICENSE
33
+ Keywords: data-engineering,data-lineage,data-quality,debugging,observability,pandas
34
+ Classifier: Development Status :: 4 - Beta
35
+ Classifier: Intended Audience :: Developers
36
+ Classifier: Intended Audience :: Science/Research
37
+ Classifier: License :: OSI Approved :: MIT License
38
+ Classifier: Operating System :: OS Independent
39
+ Classifier: Programming Language :: Python :: 3
40
+ Classifier: Programming Language :: Python :: 3.9
41
+ Classifier: Programming Language :: Python :: 3.10
42
+ Classifier: Programming Language :: Python :: 3.11
43
+ Classifier: Programming Language :: Python :: 3.12
44
+ Classifier: Topic :: Scientific/Engineering
45
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
46
+ Requires-Python: >=3.9
47
+ Requires-Dist: numpy>=1.20.0
48
+ Requires-Dist: pandas>=1.5.0
49
+ Provides-Extra: all
50
+ Requires-Dist: psutil>=5.9.0; extra == 'all'
51
+ Requires-Dist: pyarrow>=10.0.0; extra == 'all'
52
+ Provides-Extra: arrow
53
+ Requires-Dist: pyarrow>=10.0.0; extra == 'arrow'
54
+ Provides-Extra: dev
55
+ Requires-Dist: black>=23.0.0; extra == 'dev'
56
+ Requires-Dist: pre-commit>=3.5.0; extra == 'dev'
57
+ Requires-Dist: pytest-cov>=4.0.0; extra == 'dev'
58
+ Requires-Dist: pytest>=7.0.0; extra == 'dev'
59
+ Requires-Dist: ruff>=0.1.0; extra == 'dev'
60
+ Requires-Dist: taskipy>=1.12.0; extra == 'dev'
61
+ Provides-Extra: docs
62
+ Requires-Dist: mkdocs-material>=9.5.0; extra == 'docs'
63
+ Requires-Dist: mkdocs>=1.5.0; extra == 'docs'
64
+ Requires-Dist: mkdocstrings[python]>=0.24.0; extra == 'docs'
65
+ Requires-Dist: pymdown-extensions>=10.0.0; extra == 'docs'
66
+ Provides-Extra: memory
67
+ Requires-Dist: psutil>=5.9.0; extra == 'memory'
68
+ Description-Content-Type: text/markdown
69
+
70
+ <div align="center">
71
+
72
+ # TracePipe
73
+
74
+ ### Row-level data lineage for pandas pipelines
75
+
76
+ **Know exactly where every row went, why values changed, and how your data was transformed.**
77
+
78
+ [![PyPI version](https://img.shields.io/pypi/v/tracepipe.svg)](https://pypi.org/project/tracepipe/)
79
+ [![Python 3.9+](https://img.shields.io/pypi/pyversions/tracepipe.svg)](https://pypi.org/project/tracepipe/)
80
+ [![CI](https://github.com/gauthierpiarrette/tracepipe/actions/workflows/ci.yml/badge.svg)](https://github.com/gauthierpiarrette/tracepipe/actions/workflows/ci.yml)
81
+ [![codecov](https://codecov.io/gh/gauthierpiarrette/tracepipe/branch/main/graph/badge.svg)](https://codecov.io/gh/gauthierpiarrette/tracepipe)
82
+ [![License: MIT](https://img.shields.io/badge/License-MIT-green.svg)](https://opensource.org/licenses/MIT)
83
+ [![Docs](https://img.shields.io/badge/docs-mkdocs-blue.svg)](https://gauthierpiarrette.github.io/tracepipe/)
84
+
85
+ [Getting Started](#getting-started) · [Documentation](https://gauthierpiarrette.github.io/tracepipe/) · [Examples](#real-world-example)
86
+
87
+ </div>
88
+
89
+ ---
90
+
91
+ ## Why TracePipe?
92
+
93
+ Data pipelines are black boxes. Rows vanish. Values change. You're left guessing.
94
+
95
+ ```python
96
+ df = pd.read_csv("customers.csv")
97
+ df = df.dropna() # Some rows disappear
98
+ df = df.merge(regions, on="zip") # New rows appear, some vanish
99
+ df["income"] = df["income"].fillna(0) # Values change silently
100
+ df = df[df["age"] >= 18] # More rows gone
101
+ # What happened to customer C-789? 🤷
102
+ ```
103
+
104
+ **TracePipe gives you the complete audit trail — zero code changes required.**
105
+
106
+ ---
107
+
108
+ ## Getting Started
109
+
110
+ ```bash
111
+ pip install tracepipe
112
+ ```
113
+
114
+ ```python
115
+ import tracepipe as tp
116
+ import pandas as pd
117
+
118
+ tp.enable(mode="debug", watch=["income"])
119
+
120
+ df = pd.read_csv("customers.csv")
121
+ df = df.dropna()
122
+ df["income"] = df["income"].fillna(0)
123
+ df = df[df["age"] >= 18]
124
+
125
+ tp.check(df) # See what happened
126
+ ```
127
+
128
+ ```
129
+ TracePipe Check: [OK] Pipeline healthy
130
+
131
+ Retention: 847/1000 (84.7%)
132
+ Dropped: 153 rows
133
+ • DataFrame.dropna: 42
134
+ • DataFrame.__getitem__[mask]: 111
135
+
136
+ Value changes: 23 cells modified
137
+ • DataFrame.fillna: 23 (income)
138
+ ```
139
+
140
+ That's it. **One import, full visibility.**
141
+
142
+ ---
143
+
144
+ ## Core API
145
+
146
+ | Function | What it does |
147
+ |----------|--------------|
148
+ | `tp.enable()` | Start tracking |
149
+ | `tp.check(df)` | Health check — retention, drops, changes |
150
+ | `tp.trace(df, where={"id": "C-789"})` | Follow a row's complete journey |
151
+ | `tp.why(df, col="income", row=5)` | Explain why a cell has its current value |
152
+ | `tp.report(df, "audit.html")` | Export interactive HTML report |
153
+
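Read top to bottom, the table is a workflow. A sketch chaining the five calls on one pipeline; only the signatures shown above are taken from the table, and the CSV path and column names are placeholders:

```python
# Sketch: the Core API table as one pass over a pipeline. Signatures come
# from the table above; the data file and column names are placeholders.
import pandas as pd
import tracepipe as tp

tp.enable()                              # start tracking
df = pd.read_csv("customers.csv")
df = df.dropna()

tp.check(df)                             # retention, drops, changes
tp.trace(df, where={"id": "C-789"})      # one row's complete journey
tp.why(df, col="income", row=5)          # explain a cell's current value
tp.report(df, "audit.html")              # interactive HTML export
```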
154
+ ---
155
+
156
+ ## Key Features
157
+
158
+ <table>
159
+ <tr>
160
+ <td width="50%">
161
+
162
+ ### 🔍 Zero-Code Instrumentation
163
+ TracePipe patches pandas at runtime. Your existing code works unchanged.
164
+
165
+ ### 📊 Complete Provenance
166
+ Track drops, transforms, merges, and cell-level changes with before/after values.
167
+
168
+ </td>
169
+ <td width="50%">
170
+
171
+ ### 🎯 Business-Key Lookups
172
+ Find rows by their values: `tp.trace(df, where={"email": "alice@example.com"})`
173
+
174
+ ### ⚡ Production-Ready
175
+ 1.0-2.8x overhead (varies by operation). Tested on DataFrames up to 1M rows.
176
+
177
+ </td>
178
+ </tr>
179
+ </table>
180
+
181
+ ---
182
+
183
+ ## Real-World Example
184
+
185
+ ```python
186
+ import tracepipe as tp
187
+ import pandas as pd
188
+
189
+ tp.enable(mode="debug", watch=["age", "income", "label"])
190
+
191
+ # Load and clean
192
+ df = pd.read_csv("training_data.csv")
193
+ df = df.dropna(subset=["label"])
194
+ df["income"] = df["income"].fillna(df["income"].median())
195
+ df = df[df["age"] >= 18]
196
+
197
+ # Audit
198
+ print(tp.check(df))
199
+ ```
200
+
201
+ ```
202
+ Retention: 8234/10000 (82.3%)
203
+ Dropped: 1766 rows
204
+ • DataFrame.dropna: 423
205
+ • DataFrame.__getitem__[mask]: 1343
206
+
207
+ Value changes: 892 cells
208
+ • DataFrame.fillna: 892 (income)
209
+ ```
210
+
211
+ ```python
212
+ # Why does this customer have a filled income?
213
+ tp.why(df, col="income", where={"customer_id": "C-789"})
214
+ ```
215
+
216
+ ```
217
+ Cell History: row 156, column 'income'
218
+ Current value: 45000.0
219
+ [i] Was null at step 1 (later recovered)
220
+
221
+ History (1 change):
222
+ None -> 45000.0
223
+ by: DataFrame.fillna
224
+ ```
225
+
226
+ ---
227
+
228
+ ## Two Modes
229
+
230
+ | Mode | Use Case | What's Tracked |
231
+ |------|----------|----------------|
232
+ | **CI** (default) | Production pipelines | Step counts, retention rates, merge warnings |
233
+ | **Debug** | Development | Full row history, cell diffs, merge parents, group membership |
234
+
235
+ ```python
236
+ tp.enable(mode="ci") # Lightweight
237
+ tp.enable(mode="debug") # Full lineage
238
+ ```
239
+
240
+ ---
241
+
242
+ ## What's Tracked
243
+
244
+ | Operation | Coverage |
245
+ |-----------|----------|
246
+ | `dropna`, `drop_duplicates`, `query`, `df[mask]` | ✅ Full |
247
+ | `fillna`, `replace`, `loc[]=`, `iloc[]=` | ✅ Full (cell diffs) |
248
+ | `merge`, `join` | ✅ Full (parent tracking) |
249
+ | `groupby().agg()` | ✅ Full (group membership) |
250
+ | `sort_values`, `head`, `tail`, `sample` | ✅ Full |
251
+ | `apply`, `pipe` | ⚠️ Partial |
252
+
253
+ ---
254
+
255
+ ## Data Quality Contracts
256
+
257
+ ```python
258
+ (tp.contract()
259
+ .expect_unique("customer_id")
260
+ .expect_no_nulls("email")
261
+ .expect_retention(min_rate=0.9)
262
+ .check(df)
263
+ .raise_if_failed())
264
+ ```
265
+
266
+ ---
267
+
268
+ ## Documentation
269
+
270
+ 📚 **[Full Documentation](https://gauthierpiarrette.github.io/tracepipe/)**
271
+
272
+ - [Quickstart](https://gauthierpiarrette.github.io/tracepipe/getting-started/quickstart/)
273
+ - [User Guide](https://gauthierpiarrette.github.io/tracepipe/guide/concepts/)
274
+ - [API Reference](https://gauthierpiarrette.github.io/tracepipe/api/)
275
+ - [Examples](https://gauthierpiarrette.github.io/tracepipe/examples/ml-pipeline/)
276
+
277
+ ---
278
+
279
+ ## Contributing
280
+
281
+ ```bash
282
+ git clone https://github.com/gauthierpiarrette/tracepipe.git
283
+ cd tracepipe
284
+ pip install -e ".[dev]"
285
+ pytest tests/ -v
286
+ ```
287
+
288
+ See [CONTRIBUTING](https://gauthierpiarrette.github.io/tracepipe/contributing/) for guidelines.
289
+
290
+ ---
291
+
292
+ ## License
293
+
294
+ MIT License. See [LICENSE](LICENSE).
295
+
296
+ ---
297
+
298
+ <div align="center">
299
+
300
+ **Stop guessing where your rows went.**
301
+
302
+ ```bash
303
+ pip install tracepipe
304
+ ```
305
+
306
+ ⭐ Star us on GitHub if TracePipe helps with your data work!
307
+
308
+ </div>