gpclarity 0.0.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
gpclarity/utils.py ADDED
@@ -0,0 +1,411 @@
1
+ """
2
+ Model complexity quantification for Gaussian Processes.
3
+ """
4
+
5
import logging
from typing import Any, Dict, Union

import GPy
import numpy as np
10
+
11
+ logger = logging.getLogger(__name__)
12
+
13
+
14
class ComplexityError(Exception):
    """Signals a failure while quantifying model complexity."""
17
+
18
class LinAlgError(Exception):
    """Signals a failure in a linear-algebra computation."""
21
+
22
+
23
def count_kernel_components(kern: GPy.kern.Kern) -> int:
    """
    Recursively count total kernel components in composite kernels.

    A kernel with no (or empty) ``parts`` attribute is treated as a single
    leaf component; composite kernels contribute the sum of their children.

    Args:
        kern: GPy kernel object

    Returns:
        Total number of kernel components

    Raises:
        ComplexityError: If kernel traversal fails unexpectedly
    """
    try:
        parts = getattr(kern, "parts", None)

        # Leaf kernel (missing or empty parts) counts as one component.
        if not parts:
            return 1

        # Defend against malformed kernel objects before recursing.
        if not hasattr(parts, "__iter__"):
            raise ComplexityError(f"Kernel 'parts' is not iterable: {type(kern.parts)}")

        total = 0
        for sub_kernel in parts:
            total += count_kernel_components(sub_kernel)
        return total

    except ComplexityError:
        raise
    except RecursionError as e:
        logger.error(f"Recursion limit hit in kernel counting (circular reference?): {e}")
        raise ComplexityError("Kernel structure too deep or circular") from e
    except Exception as e:
        logger.error(f"Unexpected error counting kernel components: {e}")
        raise ComplexityError(f"Failed to count kernel components: {e}") from e
57
+
58
+
59
def compute_roughness_score(kern: GPy.kern.Kern) -> float:
    """
    Compute overall function roughness as inverse lengthscale.

    Walks the kernel tree, averaging ``1 / lengthscale`` over every leaf
    kernel that exposes a finite lengthscale.

    Args:
        kern: GPy kernel object

    Returns:
        Roughness score (higher = more wiggly); 0.0 if no lengthscales found

    Raises:
        ComplexityError: If roughness computation fails
    """
    total_inverse_ls = 0.0
    n_lengthscales = 0

    def walk(node):
        nonlocal total_inverse_ls, n_lengthscales

        try:
            if hasattr(node, "parts"):
                # Composite kernel: recurse into children, tolerating
                # failures on individual parts (best-effort traversal).
                if node.parts:
                    for idx, child in enumerate(node.parts):
                        try:
                            walk(child)
                        except Exception as e:
                            logger.warning(f"Failed to traverse kernel part {idx}: {e}")
                return

            # Leaf kernel: accumulate inverse lengthscale if present.
            if not hasattr(node, "lengthscale"):
                return

            ls = node.lengthscale
            ls_mean = np.mean(ls) if hasattr(ls, "__iter__") else ls

            if not np.isfinite(ls_mean):
                logger.warning(f"Non-finite lengthscale encountered: {ls_mean}")
                return

            # 1e-10 guards against division by a zero lengthscale.
            total_inverse_ls += 1.0 / (ls_mean + 1e-10)
            n_lengthscales += 1

        except Exception as e:
            logger.warning(f"Error traversing kernel: {e}")

    try:
        walk(kern)
    except Exception as e:
        logger.error(f"Roughness score computation failed: {e}")
        raise ComplexityError(f"Failed to compute roughness: {e}") from e

    if n_lengthscales == 0:
        logger.warning("No lengthscales found in kernel, returning zero roughness")
        return 0.0

    return total_inverse_ls / n_lengthscales
114
+
115
+
116
def compute_noise_ratio(model: GPy.models.GPRegression) -> float:
    """
    Compute signal-to-noise ratio (SNR) for the model.

    Args:
        model: Trained GPy model

    Returns:
        SNR = signal_variance / noise_variance (returns 1.0 if indeterminate)

    Raises:
        ComplexityError: If SNR computation fails unexpectedly
    """
    try:
        if not hasattr(model, "kern"):
            raise ComplexityError("Model has no kernel")

        if not hasattr(model.kern, "variance"):
            # Combination kernels may expose no top-level variance; fall
            # back to a neutral SNR rather than failing.
            logger.debug("Kernel has no variance attribute, assuming SNR=1.0")
            return 1.0

        signal_var = float(model.kern.variance)

        if not hasattr(model, "Gaussian_noise"):
            raise ComplexityError("Model has no Gaussian_noise attribute")

        if not hasattr(model.Gaussian_noise, "variance"):
            raise ComplexityError("Gaussian_noise has no variance attribute")

        noise_var = float(model.Gaussian_noise.variance)

        # Degenerate numerics: report a neutral SNR instead of garbage.
        if not (np.isfinite(signal_var) and np.isfinite(noise_var)):
            logger.warning(f"Non-finite variance values: signal={signal_var}, noise={noise_var}")
            return 1.0

        if noise_var < 0:
            logger.warning(f"Negative noise variance: {noise_var}")
            return 1.0

        # 1e-10 guards against division by zero noise.
        return float(signal_var / (noise_var + 1e-10))

    except (AttributeError, TypeError, ValueError) as e:
        # Expected failures for non-standard model structures
        logger.debug(f"Could not compute noise ratio (expected for some models): {e}")
        return 1.0
    except ComplexityError:
        raise
    except Exception as e:
        logger.error(f"Unexpected error computing noise ratio: {e}")
        raise ComplexityError(f"Failed to compute noise ratio: {e}") from e
168
+
169
+
170
def _compute_effective_dof(model: GPy.models.GPRegression, X: np.ndarray) -> float:
    """Approximate effective degrees of freedom from the kernel-matrix trace.

    Best-effort: falls back to ``n / 2`` when the kernel matrix or noise
    variance cannot be evaluated.
    """
    effective_dof = X.shape[0] * 0.5  # default fallback

    try:
        K = model.kern.K(X, X)
        noise_var = float(model.Gaussian_noise.variance)

        if not np.isfinite(noise_var):
            logger.warning(f"Non-finite noise variance: {noise_var}")
        else:
            trace_K = np.trace(K)
            if np.isfinite(trace_K) and trace_K >= 0:
                # Fraction of total variance attributable to signal; 1e-10
                # guards against a zero denominator.
                trace_ratio = trace_K / (trace_K + noise_var * X.shape[0] + 1e-10)
                effective_dof = trace_ratio * X.shape[0]

    except (AttributeError, ValueError, np.linalg.LinAlgError) as e:
        logger.debug(f"Could not compute effective DOF: {e}")
    except Exception as e:
        logger.warning(f"Unexpected error in effective DOF computation: {e}")

    return effective_dof


def _interpret_complexity(complexity_score_log: float):
    """Map a log10 complexity score to an (interpretation, suggestions) pair."""
    if complexity_score_log < 0.5:
        return (
            "Simple model (low risk of overfitting)",
            [
                "Model is likely underfitting",
                "Consider more expressive kernel",
            ],
        )
    if complexity_score_log < 1.5:
        return (
            "Moderate complexity (well-balanced)",
            ["Good complexity for most applications"],
        )
    return (
        "High complexity (monitor for overfitting)",
        [
            "Consider simplifying kernel",
            "Add regularization",
            "Collect more data",
        ],
    )


def compute_complexity_score(
    model: GPy.models.GPRegression, X: np.ndarray
) -> Dict[str, Any]:
    """
    Comprehensive model complexity quantification.

    Combines multiple metrics: kernel components, roughness, noise ratio,
    and effective degrees of freedom.

    Args:
        model: Trained GPy model
        X: Training data for degrees of freedom calculation

    Returns:
        Dictionary with complexity score and detailed breakdown

    Raises:
        ComplexityError: If computation fails
        ValueError: If inputs are invalid
    """
    if X is None or not hasattr(X, "shape"):
        raise ValueError("X must be a numpy array")

    if X.shape[0] == 0:
        raise ValueError("X cannot be empty")

    try:
        n_components = count_kernel_components(model.kern)
        roughness = compute_roughness_score(model.kern)
        noise_ratio = compute_noise_ratio(model)
        effective_dof = _compute_effective_dof(model, X)

        # Composite complexity score (0 = simple, ∞ = complex); 1e-10 guards
        # against a zero dof_ratio.
        dof_ratio = effective_dof / X.shape[0]
        complexity_score = (
            n_components * roughness * noise_ratio / (dof_ratio + 1e-10)
        )

        # Log-compress for the adaptive interpretation thresholds.
        complexity_score_log = np.log10(complexity_score + 1)
        interpretation, suggestions = _interpret_complexity(complexity_score_log)

        return {
            "score": float(complexity_score),
            "log_score": float(complexity_score_log),
            "interpretation": interpretation,
            "suggestions": suggestions,
            "components": {
                "n_kernel_parts": n_components,
                "roughness_score": float(roughness),
                "noise_ratio": float(noise_ratio),
                "effective_degrees_of_freedom": float(effective_dof),
            },
            "risk_factors": {
                "too_complex": complexity_score_log > 1.5,
                "too_simple": complexity_score_log < 0.5,
                "high_noise": noise_ratio < 0.1,
            },
        }

    except ComplexityError:
        raise
    except Exception as e:
        logger.error(f"Unexpected error in complexity score computation: {e}")
        raise ComplexityError(f"Failed to compute complexity score: {e}") from e
270
+
271
+ def _validate_kernel_matrix(K: np.ndarray) -> None:
272
+ """
273
+ Validate kernel matrix for numerical issues.
274
+
275
+ Args:
276
+ K: Kernel matrix to validate
277
+
278
+ Raises:
279
+ LinAlgError: If matrix is invalid
280
+ """
281
+ if not np.all(np.isfinite(K)):
282
+ n_nonfinite = np.sum(~np.isfinite(K))
283
+ raise LinAlgError(
284
+ f"Kernel matrix contains {n_nonfinite} non-finite values"
285
+ )
286
+
287
+ if K.shape[0] != K.shape[1]:
288
+ raise LinAlgError(f"Kernel matrix must be square, got {K.shape}")
289
+
290
+ # Check symmetry
291
+ if not np.allclose(K, K.T, rtol=1e-5, atol=1e-8):
292
+ max_asym = np.max(np.abs(K - K.T))
293
+ logger.warning(f"Kernel matrix asymmetric (max diff: {max_asym:.2e})")
294
+
295
+
296
+ def _cholesky_with_jitter(
297
+ K: np.ndarray,
298
+ max_attempts: int = 5,
299
+ initial_jitter: float = 1e-6,
300
+ jitter_growth: float = 10.0,
301
+ ) -> np.ndarray:
302
+ """
303
+ Compute Cholesky decomposition with progressive jitter.
304
+
305
+ Args:
306
+ K: Positive semi-definite matrix
307
+ max_attempts: Maximum jitter attempts
308
+ initial_jitter: Starting jitter magnitude
309
+ jitter_growth: Multiplicative factor for jitter increase
310
+
311
+ Returns:
312
+ Lower triangular Cholesky factor
313
+
314
+ Raises:
315
+ LinAlgError: If decomposition fails after all attempts
316
+ """
317
+ try:
318
+ return np.linalg.cholesky(K)
319
+ except np.linalg.LinAlgError:
320
+ pass
321
+
322
+ K_work = K.copy()
323
+ jitter = initial_jitter
324
+
325
+ for attempt in range(max_attempts):
326
+ K_work = K_work + np.eye(K.shape[0]) * jitter
327
+ try:
328
+ L = np.linalg.cholesky(K_work)
329
+ logger.debug(f"Cholesky succeeded with jitter {jitter:.2e}")
330
+ return L
331
+ except np.linalg.LinAlgError:
332
+ jitter *= jitter_growth
333
+
334
+ raise LinAlgError(
335
+ f"Cholesky decomposition failed after {max_attempts} attempts "
336
+ f"with max jitter {jitter/jitter_growth:.2e}"
337
+ )
338
+
339
+
340
+ def _extract_param_value(param: Any) -> Union[float, np.ndarray]:
341
+ """
342
+ Safely extract scalar or array value from GPy parameter.
343
+
344
+ Args:
345
+ param: GPy parameter object
346
+
347
+ Returns:
348
+ Scalar float or numpy array
349
+ """
350
+ val = param.param_array
351
+
352
+ if val is None:
353
+ return 0.0
354
+
355
+ arr = np.atleast_1d(val)
356
+
357
+ if len(arr) == 1:
358
+ return float(arr[0])
359
+ else:
360
+ return arr.copy()
361
+
362
+
363
+ def _validate_convergence_window(window: int, history_length: int) -> None:
364
+ """
365
+ Validate window size for convergence analysis.
366
+
367
+ Args:
368
+ window: Requested window size
369
+ history_length: Available history length
370
+
371
+ Raises:
372
+ ValueError: If window invalid
373
+ """
374
+ if window <= 0:
375
+ raise ValueError(f"Window must be positive, got {window}")
376
+ if window > history_length // 2:
377
+ raise ValueError(
378
+ f"Window ({window}) too large for history length ({history_length}). "
379
+ f"Max allowed: {history_length // 2}"
380
+ )
381
+
382
+ def _validate_array(arr: Any, name: str = "array") -> np.ndarray:
383
+ """
384
+ Validate and convert input to numpy array.
385
+
386
+ Args:
387
+ arr: Input array-like
388
+ name: Name for error messages
389
+
390
+ Returns:
391
+ Validated numpy array
392
+
393
+ Raises:
394
+ ValueError: If invalid
395
+ """
396
+ if arr is None:
397
+ raise ValueError(f"{name} cannot be None")
398
+
399
+ try:
400
+ arr = np.asarray(arr)
401
+ except Exception as e:
402
+ raise ValueError(f"{name} must be array-like: {e}") from e
403
+
404
+ if arr.size == 0:
405
+ raise ValueError(f"{name} cannot be empty")
406
+
407
+ if not np.all(np.isfinite(arr)):
408
+ n_invalid = np.sum(~np.isfinite(arr))
409
+ raise ValueError(f"{name} contains {n_invalid} non-finite values")
410
+
411
+ return arr
@@ -0,0 +1,248 @@
1
+ Metadata-Version: 2.4
2
+ Name: gpclarity
3
+ Version: 0.0.2
4
+ Summary: Interpretability and Diagnostics Tools for Gaussian Processes
5
+ Project-URL: Homepage, https://github.com/AngadKumar16/gpclarity
6
+ Project-URL: Issues, https://github.com/AngadKumar16/gpclarity/issues
7
+ Project-URL: Documentation, https://gpclarity.readthedocs.io
8
+ Author-email: Angad Kumar <angadkumar16ak@gmail.com>
9
+ License: BSD 3-Clause License
10
+
11
+ Copyright (c) 2026, Angad Kumar
12
+ All rights reserved.
13
+
14
+ Redistribution and use in source and binary forms, with or without
15
+ modification, are permitted provided that the following conditions are met:
16
+
17
+ 1. Redistributions of source code must retain the above copyright notice, this
18
+ list of conditions and the following disclaimer.
19
+
20
+ 2. Redistributions in binary form must reproduce the above copyright notice,
21
+ this list of conditions and the following disclaimer in the documentation
22
+ and/or other materials provided with the distribution.
23
+
24
+ 3. Neither the name of the copyright holder nor the names of its
25
+ contributors may be used to endorse or promote products derived from
26
+ this software without specific prior written permission.
27
+
28
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
29
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
30
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
31
+ DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
32
+ FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
33
+ DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
34
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
35
+ CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
36
+ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
37
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
38
+
39
+
40
+ Academic Citation Request (Non-Binding)
41
+
42
+ If you use GPClarity in academic research, publications, or derived
43
+ scientific work, we kindly request that you cite the software. Citation
44
+ helps support continued development and enables recognition of open
45
+ scientific infrastructure. See the CITATION.cff file for details.
46
+ License-File: LICENSE
47
+ Keywords: gaussian-process,interpretability,machine-learning,uncertainty
48
+ Classifier: Development Status :: 4 - Beta
49
+ Classifier: Intended Audience :: Science/Research
50
+ Classifier: License :: OSI Approved :: BSD License
51
+ Classifier: Programming Language :: Python :: 3
52
+ Classifier: Programming Language :: Python :: 3.9
53
+ Classifier: Programming Language :: Python :: 3.10
54
+ Classifier: Programming Language :: Python :: 3.11
55
+ Classifier: Programming Language :: Python :: 3.12
56
+ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
57
+ Requires-Python: >=3.9
58
+ Requires-Dist: emukit>=0.4.0
59
+ Requires-Dist: gpy>=1.10.0
60
+ Requires-Dist: matplotlib>=3.4.0
61
+ Requires-Dist: numpy>=1.20.0
62
+ Requires-Dist: scipy>=1.7.0
63
+ Provides-Extra: dev
64
+ Requires-Dist: black>=23.0; extra == 'dev'
65
+ Requires-Dist: isort>=5.12; extra == 'dev'
66
+ Requires-Dist: mypy>=1.0; extra == 'dev'
67
+ Requires-Dist: pre-commit>=3.0; extra == 'dev'
68
+ Requires-Dist: pytest-cov>=4.0; extra == 'dev'
69
+ Requires-Dist: pytest>=7.0; extra == 'dev'
70
+ Provides-Extra: docs
71
+ Requires-Dist: myst-parser>=1.0; extra == 'docs'
72
+ Requires-Dist: nbsphinx>=0.9; extra == 'docs'
73
+ Requires-Dist: sphinx-rtd-theme>=1.2; extra == 'docs'
74
+ Requires-Dist: sphinx>=5.0; extra == 'docs'
75
+ Description-Content-Type: text/markdown
76
+
77
+ # GPClarity: Gaussian Process Interpretability Toolkit
78
+ ![Python Version](https://img.shields.io/pypi/pyversions/gpclarity)
79
+ ![License](https://img.shields.io/badge/license-BSD--3--Clause-blue.svg)
80
+ ![Build Status](https://github.com/AngadKumar16/gpclarity/workflows/CI/badge.svg)
81
+
82
+ **GPClarity** is a production-ready library that transforms black-box Gaussian Process models into interpretable, debuggable, and trustworthy tools. Built on GPy and emukit, it provides human-readable insights into kernel behavior, uncertainty patterns, and model complexity.
83
+
84
+ ---
85
+
86
+ ## 🎯 Features
87
+
88
+ - 🔍 **Kernel Interpretation**: Translate raw kernel math into human meaning
89
+ - 📊 **Uncertainty Profiling**: Visualize and diagnose uncertainty behavior
90
+ - 📈 **Hyperparameter Tracking**: Monitor optimization dynamics in real-time
91
+ - 🧮 **Complexity Quantification**: Measure and prevent overfitting
92
+ - 🎯 **Data Influence Analysis**: Identify impactful training points
93
+ - 🔗 **Emukit Integration**: Seamless Bayesian optimization support
94
+
95
+ ---
96
+
97
+ ## 🚀 Quick Start
98
+
99
+ ```python
100
+ import gpclarity
101
+ import GPy
102
+ import numpy as np
103
+
104
+ # Train a Gaussian Process
105
+ X = np.linspace(0, 10, 50).reshape(-1, 1)
106
+ y = np.sin(X).flatten() + 0.1 * np.random.randn(50)
107
+
108
+ kernel = GPy.kern.RBF(1) + GPy.kern.White(1)
109
+ model = GPy.models.GPRegression(X, y[:, None], kernel)
110
+ model.optimize()
111
+
112
+ summary = gpclarity.summarize_kernel(model)
113
+
114
+ profiler = gpclarity.UncertaintyProfiler(model)
115
+ X_test = np.linspace(-2, 12, 200).reshape(-1, 1)
116
+ profiler.plot(X_test, X_train=X, y_train=y)
117
+
118
+ tracker = gpclarity.HyperparameterTracker(model)
119
+ history = tracker.wrapped_optimize(max_iters=50)
120
+ tracker.plot_evolution()
121
+
122
+ complexity = gpclarity.compute_complexity_score(model, X)
123
+ print(f"Complexity: {complexity['score']:.2f} - {complexity['interpretation']}")
124
+ ```
125
+
126
+ ---
127
+
128
+ ## 📦 Installation
129
+
130
+ ### Stable Release
131
+ ```bash
132
+ pip install gpclarity
133
+ ```
134
+
135
+ ### Development Version
136
+ ```bash
137
+ git clone https://github.com/AngadKumar16/gpclarity.git
138
+ cd gpclarity
139
+ pip install -e ".[dev]"
140
+ ```
141
+
142
+ ### Conda (coming soon)
143
+ ```bash
144
+ conda install -c conda-forge gpclarity
145
+ ```
146
+
147
+ ---
148
+
149
+ ## 🏗️ Architecture
150
+
151
+ ```
152
+ gpclarity/
153
+ ├── kernel_summary
154
+ ├── uncertainty_analysis
155
+ ├── hyperparam_tracker
156
+ ├── model_complexity
157
+ ├── data_influence
158
+ └── utils
159
+ ```
160
+
161
+ ---
162
+
163
+ ## 🔬 Advanced Usage
164
+
165
+ ### Emukit Integration
166
+
167
+ ```python
168
+ from gpclarity import ClarityBayesianOptimizationLoop
169
+
170
+ loop = ClarityBayesianOptimizationLoop(model, space)
171
+ loop.run_loop(user_function, stopping_condition)
172
+ loop.plot_diagnostics()
173
+ ```
174
+
175
+ ### Batch Processing
176
+
177
+ ```python
178
+ models = [model1, model2, model3]
179
+ reports = [gpclarity.summarize_kernel(m, verbose=False) for m in models]
180
+ ```
181
+
182
+ ---
183
+
184
+ ## 📊 Example Outputs
185
+
186
+ ### Kernel Summary
187
+
188
+ ```
189
+ 🔍 KERNEL SUMMARY
190
+ Structure: ['RBF', 'White']
191
+ Components: 2
192
+
193
+ 📦 RBF (lengthscale)
194
+ └─ lengthscale: 1.23
195
+ 💡 Moderate flexibility
196
+
197
+ 📦 White (variance)
198
+ └─ variance: 0.01
199
+ 💡 Low observation noise
200
+ ```
201
+
202
+ ### Complexity Report
203
+
204
+ ```json
205
+ {
206
+ "score": 2.34,
207
+ "interpretation": "Moderate complexity (well-balanced)",
208
+ "components": {
209
+ "n_kernel_parts": 2,
210
+ "roughness_score": 0.81,
211
+ "noise_ratio": 4.5
212
+ }
213
+ }
214
+ ```
215
+
216
+ ---
217
+
218
+ ## 🎓 Citation
219
+
220
+ ```bibtex
221
+ @software{gpclarity2026,
222
+ title={gpclarity: Gaussian Process Interpretability Toolkit},
223
+ author={Angad Kumar},
224
+ year={2026},
225
+ url={https://github.com/AngadKumar16/gpclarity},
226
+ version={0.0.2}
227
+ }
228
+ ```
229
+ ## 📝 License
230
+
231
+ GPClarity is licensed under the **BSD 3-Clause License**. See [LICENSE](LICENSE) for details.
232
+
233
+ ## 🤝 Contributing
234
+
235
+ Contributions are welcome!
236
+
237
+ - Report bugs or request features via [GitHub Issues](https://github.com/AngadKumar16/gpclarity/issues)
238
+ - Submit pull requests for fixes or enhancements
239
+ - Make sure to follow the code style and write tests for new features
240
+
241
+ **Author:** Angad Kumar ([GitHub](https://github.com/AngadKumar16), [Email](mailto:angadkumar16ak@gmail.com))
242
+
243
+ ## 🛣️ Roadmap
244
+
245
+ - Conda package support
246
+ - More visualization tools for kernel decomposition
247
+ - Automated tutorials / example notebooks
248
+ - More features overall
@@ -0,0 +1,14 @@
1
+ gpclarity/__init__.py,sha256=3ZmuI3pMR76IAac5VfeYjsj3XZoiiGl5IoyFsb7DWyQ,5965
2
+ gpclarity/_version.py,sha256=TqmJTwRFFMcQ_an21EbHMvk9y7bfvBnH7YUaMsYzvR8,49
3
+ gpclarity/data_influence.py,sha256=Cm45O8twbzQG2JP5LqMzKG_Sulz4v7yuO4Dsy4jEDOA,17552
4
+ gpclarity/exceptions.py,sha256=AGGTsVOQr4VQ84P7qjqsKTEAKNBOyVRRtS8eUHmcesw,950
5
+ gpclarity/hyperparam_tracker.py,sha256=vjx4PZLKjaQBUx79G5ym6s6DKqPxHg4YZCPynJ6Y6K8,24873
6
+ gpclarity/kernel_summary.py,sha256=a8sXs_qEmG2K3m0oYU2fUV9CjTz6jre_P5gk-Wn5s7U,9272
7
+ gpclarity/model_complexity.py,sha256=SRATXpdKX_uIwqk1_RZvS4jvHi7mmBpLpBz0MbkhgQM,22324
8
+ gpclarity/plotting.py,sha256=ZMIvGLqXykD4WmXYoDEeuStY5YEaaMQ9EMun4UAB0Ro,10356
9
+ gpclarity/uncertainty_analysis.py,sha256=XOQv98khDWSMDZb76uVJ6V3e4nsjUlLgyeUNFY2Q-ZI,22294
10
+ gpclarity/utils.py,sha256=rbg0vpUt29_eYOwcXVJ3wvTArd73xfFvll67GSB9NJ8,12834
11
+ gpclarity-0.0.2.dist-info/METADATA,sha256=M0hbeP9xpt9m1YlgHr4dAj8ji-OKFc3tQ8OSDff8MLs,7825
12
+ gpclarity-0.0.2.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
13
+ gpclarity-0.0.2.dist-info/licenses/LICENSE,sha256=Hcj35trYFMm1phdiTIEHqP-5kxrQGZPD-Uis2NkC1N8,1836
14
+ gpclarity-0.0.2.dist-info/RECORD,,
@@ -0,0 +1,4 @@
1
+ Wheel-Version: 1.0
2
+ Generator: hatchling 1.28.0
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any