deepdrift 0.2.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- deepdrift-0.2.0/LICENSE +21 -0
- deepdrift-0.2.0/PKG-INFO +22 -0
- deepdrift-0.2.0/README.md +126 -0
- deepdrift-0.2.0/deepdrift/__init__.py +6 -0
- deepdrift-0.2.0/deepdrift/doctor.py +42 -0
- deepdrift-0.2.0/deepdrift/monitor.py +120 -0
- deepdrift-0.2.0/deepdrift/observer.py +76 -0
- deepdrift-0.2.0/deepdrift/visualization.py +52 -0
- deepdrift-0.2.0/deepdrift.egg-info/PKG-INFO +22 -0
- deepdrift-0.2.0/deepdrift.egg-info/SOURCES.txt +13 -0
- deepdrift-0.2.0/deepdrift.egg-info/dependency_links.txt +1 -0
- deepdrift-0.2.0/deepdrift.egg-info/requires.txt +6 -0
- deepdrift-0.2.0/deepdrift.egg-info/top_level.txt +1 -0
- deepdrift-0.2.0/setup.cfg +4 -0
- deepdrift-0.2.0/setup.py +23 -0
deepdrift-0.2.0/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2025 Alexey
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
deepdrift-0.2.0/PKG-INFO
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: deepdrift
|
|
3
|
+
Version: 0.2.0
|
|
4
|
+
Summary: A Layer-Wise Diagnostic Framework for Neural Network Robustness
|
|
5
|
+
Author: Alexey Evtushenko
|
|
6
|
+
Author-email: alexey@eutonics.ru
|
|
7
|
+
Classifier: Programming Language :: Python :: 3
|
|
8
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
9
|
+
Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
|
|
10
|
+
License-File: LICENSE
|
|
11
|
+
Requires-Dist: torch>=1.10.0
|
|
12
|
+
Requires-Dist: torchvision>=0.11.0
|
|
13
|
+
Requires-Dist: numpy
|
|
14
|
+
Requires-Dist: tqdm
|
|
15
|
+
Requires-Dist: matplotlib
|
|
16
|
+
Requires-Dist: seaborn
|
|
17
|
+
Dynamic: author
|
|
18
|
+
Dynamic: author-email
|
|
19
|
+
Dynamic: classifier
|
|
20
|
+
Dynamic: license-file
|
|
21
|
+
Dynamic: requires-dist
|
|
22
|
+
Dynamic: summary
|
|
@@ -0,0 +1,126 @@
|
|
|
1
|
+
# DeepDrift
|
|
2
|
+
|
|
3
|
+
[](https://doi.org/10.5281/zenodo.18086612)
|
|
4
|
+
[](https://opensource.org/licenses/MIT)
|
|
5
|
+
[](https://pytorch.org/)
|
|
6
|
+
|
|
7
|
+
**A Layer-Wise Diagnostic Framework for Neural Network Robustness.**
|
|
8
|
+
|
|
9
|
+
> "Stop guessing *why* your model failed. See exactly *where* it broke."
|
|
10
|
+
|
|
11
|
+
DeepDrift is an unsupervised diagnostic tool that acts like an **MRI scan for your neural network**. Instead of just monitoring output accuracy (which is a lagging indicator), DeepDrift analyzes how data representations evolve layer-by-layer in real-time.
|
|
12
|
+
|
|
13
|
+
It allows you to distinguish between:
|
|
14
|
+
* **Sensor Failure** (High drift at input layers)
|
|
15
|
+
* **Geometric Collapse** (Drift accumulation in deep layers)
|
|
16
|
+
* **Spurious Correlations** (Anomalies in mid-level features)
|
|
17
|
+
|
|
18
|
+

|
|
19
|
+
|
|
20
|
+
## 🚀 Key Features
|
|
21
|
+
|
|
22
|
+
* **Unsupervised:** No labeled OOD data required. Works in production.
|
|
23
|
+
* **Lightweight:** < 1% inference overhead.
|
|
24
|
+
* **Interpretability:** Maps drift to network depth ($z$-axis).
|
|
25
|
+
* **AI Doctor:** Built-in heuristics to classify failure modes.
|
|
26
|
+
|
|
27
|
+
## 🧠 Under the Hood: Predictive Monitoring
|
|
28
|
+
|
|
29
|
+
DeepDrift isn't just a threshold check. It implements a stateful **Finite State Machine (FSM)** with hysteresis and trend analysis to prevent alert fatigue.
|
|
30
|
+
|
|
31
|
+

|
|
32
|
+
|
|
33
|
+
* **⚠️ Early Warning (Yellow):** Detects rapid drift acceleration ($\beta$-slope) *before* the critical threshold is breached.
|
|
34
|
+
* **🔴 Critical Alert (Red):** Triggers when structural integrity is compromised.
|
|
35
|
+
* **🟢 Hysteresis Recovery:** The alert stays active until the system stabilizes significantly below the threshold, preventing flickering alarms.
|
|
36
|
+
|
|
37
|
+
## 📦 Installation
|
|
38
|
+
|
|
39
|
+
```bash
|
|
40
|
+
git clone https://github.com/Eutonics/DeepDrift.git
|
|
41
|
+
cd DeepDrift
|
|
42
|
+
pip install .
|
|
43
|
+
```
|
|
44
|
+
|
|
45
|
+
## ⚡ Quick Start
|
|
46
|
+
### 1. Real-time Monitoring (Production Mode)
|
|
47
|
+
Use the stateful monitor to track model health with hysteresis and trend detection.
|
|
48
|
+
Example:
|
|
49
|
+
|
|
50
|
+
```
|
|
51
|
+
import torch
|
|
52
|
+
import torchvision.models as models
|
|
53
|
+
from deepdrift import DeepDriftMonitor, ObserverConfig
|
|
54
|
+
|
|
55
|
+
# 1. Load your model
|
|
56
|
+
model = models.resnet18(pretrained=True)
|
|
57
|
+
model.eval()
|
|
58
|
+
|
|
59
|
+
# 2. Configure Sensitivity
|
|
60
|
+
# theta_slope: Detects rapid drift acceleration
|
|
61
|
+
# theta_high: Critical alert threshold (Sigma)
|
|
62
|
+
config = ObserverConfig(theta_high=3.0, theta_slope=0.05, window_size=20)
|
|
63
|
+
monitor = DeepDriftMonitor(model, arch_name='ResNet-18', drift_config=config)
|
|
64
|
+
|
|
65
|
+
# 3. Calibrate on clean data (establish baseline)
|
|
66
|
+
# monitor.calibrate(train_loader, max_batches=50)
|
|
67
|
+
|
|
68
|
+
# 4. Monitoring Loop
|
|
69
|
+
# status, alerts = monitor.step(incoming_batch)
|
|
70
|
+
|
|
71
|
+
# if alerts:
|
|
72
|
+
# for alert in alerts:
|
|
73
|
+
# print(alert)
|
|
74
|
+
# # Output: "⚠️ WARNING [Mid]: Rapid Drift Detected (Slope 0.045)"
|
|
75
|
+
# # Output: "🔴 ALERT [IR]: Threshold Breach (3.2 >= 3.0)"
|
|
76
|
+
```
|
|
77
|
+
|
|
78
|
+
### 2. Static Diagnosis (Research Mode)
|
|
79
|
+
Analyze a single batch to get a spectral signature and diagnosis.
|
|
80
|
+
Example:
|
|
81
|
+
|
|
82
|
+
```
|
|
83
|
+
from deepdrift import diagnose_drift, plot_drift_profile
|
|
84
|
+
|
|
85
|
+
# ... (after calibration) ...
|
|
86
|
+
|
|
87
|
+
# Get raw profile
|
|
88
|
+
drift_profile = monitor.step(ood_batch)[0] # Extract drift values
|
|
89
|
+
|
|
90
|
+
# Get Diagnosis
|
|
91
|
+
diagnosis = diagnose_drift([d['drift'] for d in drift_profile.values()])
|
|
92
|
+
print(f"Diagnosis: {diagnosis}")
|
|
93
|
+
# Output: "WARNING: Avalanche Effect (Geometric Failure)"
|
|
94
|
+
```
|
|
95
|
+
|
|
96
|
+
## 📚 Research & Publications
|
|
97
|
+
DeepDrift is backed by research on Renormalization Group theory in Deep Learning.
|
|
98
|
+
|
|
99
|
+
1. DeepDrift: A Layer-Wise Diagnostic Framework for Neural Network Robustness (2025)
|
|
100
|
+
|
|
101
|
+
* The foundational paper describing the framework and metrics.
|
|
102
|
+
|
|
103
|
+
2. Spatial Dynamics of Memorization in Diffusion Models (2025)
|
|
104
|
+
|
|
105
|
+
* Application of DeepDrift to discover the "Burning Bottleneck" phenomenon in U-Nets.
|
|
106
|
+
|
|
107
|
+
## 📄 Citation
|
|
108
|
+
If you use DeepDrift in your research, please cite:
|
|
109
|
+
BibTeX:
|
|
110
|
+
|
|
111
|
+
```
|
|
112
|
+
@article{evtushenko2025deepdrift,
|
|
113
|
+
title={DeepDrift: A Layer-Wise Diagnostic Framework for Neural Network Robustness},
|
|
114
|
+
author={Evtushenko, Alexey},
|
|
115
|
+
journal={arXiv preprint},
|
|
116
|
+
doi={10.5281/zenodo.18086612},
|
|
117
|
+
year={2025}
|
|
118
|
+
}
|
|
119
|
+
```
|
|
120
|
+
|
|
121
|
+
## License
|
|
122
|
+
This project is licensed under the MIT License.
|
|
123
|
+
codeCode
|
|
124
|
+
|
|
125
|
+
```
|
|
126
|
+
```
|
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
"""DeepDrift: a layer-wise diagnostic framework for neural network robustness.

Re-exports the public API at package level: the stateful monitor, its
configuration/state types, the heuristic diagnosis helper, and plotting.
"""
from .monitor import DeepDriftMonitor
from .observer import ObserverConfig, MonitorState
from .doctor import diagnose_drift
from .visualization import plot_drift_profile

__all__ = ["DeepDriftMonitor", "ObserverConfig", "MonitorState", "diagnose_drift", "plot_drift_profile"]
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
import numpy as np
|
|
2
|
+
|
|
3
|
+
def diagnose_drift(drift_profile, threshold=3.0):
    """Classify a layer-wise drift profile into a failure mode.

    Interprets the FLOW of drift across depth (input -> output) rather
    than isolated per-layer spikes.

    Args:
        drift_profile: Sequence of per-layer drift z-scores ordered
            shallow-to-deep; assumed layout is [UV, Mid, Deep, IR].
        threshold: Z-score above which a layer counts as anomalous.

    Returns:
        A human-readable diagnosis string.
    """
    if not drift_profile or len(drift_profile) < 4:
        return "Unknown (Profile too short)"

    scores = np.array(drift_profile)
    peak = np.max(scores)

    # Map the canonical depth zones (assumes [UV, Mid, Deep, IR] ordering).
    uv, mid, deep, ir = scores[0], scores[1], scores[2], scores[-1]

    # 1. Healthy: nothing crosses the threshold.
    if peak < threshold:
        return "✅ Stable"

    # 2. Benign shift (robustness validated): input layers light up while
    #    the output stays calm -- the model is filtering the noise out.
    if uv > threshold and ir < threshold and ir < uv * 0.6:
        return "ℹ️ INFO: Benign Sensor Shift (Filtered)"

    # 3. Avalanche (accumulation): error grows toward the output, a
    #    signature typical of CNNs. Checked first for specificity.
    if ir > deep and deep > mid and ir > threshold:
        return "⚠️ WARNING: Avalanche Effect (Geometric Instability)"

    # 4. Internal rot (spurious correlation): input and output look fine,
    #    but the middle of the network burns.
    if mid > threshold and mid > uv and mid > ir:
        return "🔴 ALERT: Internal Feature Mismatch (Spurious Correlation)"

    # 5. Critical failure: everything is hot on average (typical for ViTs).
    if np.mean(scores) > threshold * 1.2:
        return "⛔ CRITICAL: Global Collapse (Model Disoriented)"

    # Fallback: anomalous, but matches no known signature.
    return f"Anomaly Detected (Max Z={peak:.1f})"
|
|
@@ -0,0 +1,120 @@
|
|
|
1
|
+
import torch
|
|
2
|
+
import numpy as np
|
|
3
|
+
from .observer import LayerObserver, ObserverConfig, MonitorState
|
|
4
|
+
|
|
5
|
+
class DeepDriftMonitor:
    """
    Monitors layer-wise representation drift with a stateful alerting system.

    Forward hooks capture pooled activations from selected layers. After
    ``calibrate()`` establishes a per-layer baseline (mean vector ``mu`` and
    distance spread ``sigma``), each ``step()`` converts the incoming batch's
    deviation into a z-score and feeds it to a per-layer FSM observer.
    """
    def __init__(self, model, arch_name=None, layers_map=None, drift_config=None):
        """
        Args:
            model: torch.nn.Module to observe (hooks are attached in place).
            arch_name: Optional architecture hint (e.g. 'ResNet-18') used to
                auto-select representative layers.
            layers_map: Optional explicit {name: module} mapping; takes
                precedence over ``arch_name``.
            drift_config: Optional ObserverConfig shared by all observers.
        """
        self.model = model
        self.activations = {}  # name -> latest pooled batch activations
        self.hooks = []        # hook handles, detached by close()
        self.mu = {}           # name -> baseline mean activation vector
        self.sigma = {}        # name -> baseline spread (std of distances)
        self.step_counter = 0

        try:
            self.device = next(model.parameters()).device
        except StopIteration:
            # Parameter-free model: fall back to CPU.
            self.device = 'cpu'

        # 1. Layer selection: explicit map > architecture heuristic > children.
        if layers_map is not None:
            self.layers = layers_map
        elif arch_name is not None:
            self.layers = self._auto_detect_layers(model, arch_name)
        else:
            self.layers = {name: module for name, module in model.named_children()}

        # 2. One stateful observer (hysteresis + trend) per monitored layer.
        cfg = drift_config if drift_config else ObserverConfig()
        self.observers = {
            name: LayerObserver(name, cfg) for name in self.layers
        }

        self._register_hooks()

    def _auto_detect_layers(self, model, arch_name):
        """Heuristically pick four depth zones (UV/Mid/Deep/IR) by arch name.

        Returns an empty dict when the architecture is not recognized or
        none of the expected attributes exist on the model.
        """
        layers = {}
        name_lower = arch_name.lower()
        if 'resnet' in name_lower:
            # torchvision ResNets expose layer1..layer4; other CNNs often
            # expose an indexable `features` stack instead.
            # BUGFIX: the original used `getattr(model, 'layerN', None) or
            # getattr(model, 'features', None)[i]`, which raised
            # TypeError (None[i]) when neither attribute existed, and the
            # `or` also skipped falsy-but-valid modules such as an empty
            # nn.Sequential (which defines __len__). Use explicit
            # `is not None` checks instead.
            features = getattr(model, 'features', None)

            def _pick(attr, idx):
                # Prefer the named residual stage; fall back to indexing
                # into `features` only when it actually exists.
                module = getattr(model, attr, None)
                if module is not None:
                    return module
                if features is not None:
                    return features[idx]
                return None

            layers = {
                'UV': _pick('layer1', 0),
                'Mid': _pick('layer2', 4),
                'Deep': _pick('layer3', 6),
                'IR': _pick('layer4', -1),
            }
        return {k: v for k, v in layers.items() if v is not None}

    def _hook_fn(self, name):
        """Build a forward hook that stores a pooled copy of `name`'s output."""
        def hook(model, input, output):
            # Handle tuple outputs (e.g. from Transformer blocks).
            if isinstance(output, tuple):
                output = output[0]

            if output.dim() == 4:
                # Conv feature map: global average over spatial dims.
                act = output.mean(dim=[2, 3])
            elif output.dim() == 3:
                # Token sequence: take the first token — presumably the
                # CLS token; TODO confirm for non-transformer 3D outputs.
                act = output[:, 0, :]
            else:
                act = output.flatten(1)
            self.activations[name] = act.detach()
        return hook

    def _register_hooks(self):
        """Attach forward hooks to every monitored layer."""
        for name, module in self.layers.items():
            self.hooks.append(module.register_forward_hook(self._hook_fn(name)))

    def calibrate(self, loader, max_batches=50):
        """
        Establish the per-layer baseline (mu, sigma) from clean data.

        Args:
            loader: Iterable of batches — tensors, or (tensor, ...) tuples.
            max_batches: Cap on the number of batches consumed.
        """
        print("⚙️ DeepDrift: Calibrating baseline...")
        self.model.eval()
        acc = {k: [] for k in self.layers}
        with torch.no_grad():
            for i, batch in enumerate(loader):
                if i >= max_batches: break
                x = batch[0] if isinstance(batch, (list, tuple)) else batch
                x = x.to(self.device)
                _ = self.model(x)
                for k in self.layers: acc[k].append(self.activations[k])

        for k in self.layers:
            if len(acc[k]) > 0:
                d = torch.cat(acc[k], dim=0)
                self.mu[k] = d.mean(dim=0)
                # Spread of per-sample distances from the mean; the epsilon
                # guards division by zero for perfectly constant layers.
                dist = torch.norm(d - self.mu[k], dim=1)
                self.sigma[k] = dist.std().item() + 1e-9
        print("✅ Calibration complete.")

    def step(self, inputs):
        """
        Process one batch and return system status.

        Args:
            inputs: Input tensor already on the model's device.

        Returns:
            (status, alerts): ``status`` maps layer name to a dict with
            'drift' (z-score), 'slope' (trend beta) and 'state' (FSM state
            string); ``alerts`` is a list of event strings emitted on
            state transitions this step.
        """
        self.model.eval()
        self.step_counter += 1

        with torch.no_grad():
            _ = self.model(inputs)

        current_status = {}
        alerts = []

        for name in self.layers:
            # Only score layers that fired a hook AND were calibrated.
            if name in self.activations and name in self.mu:
                batch_mu = self.activations[name].mean(dim=0)
                dist = torch.norm(batch_mu - self.mu[name]).item()
                z_score = dist / self.sigma[name]

                state, event = self.observers[name].update(z_score, self.step_counter)

                current_status[name] = {
                    'drift': z_score,
                    'slope': self.observers[name].current_beta,
                    'state': state.value
                }
                if event: alerts.append(event)

        return current_status, alerts

    def close(self):
        """Detach all forward hooks, leaving the model unmodified."""
        for h in self.hooks: h.remove()
|
|
@@ -0,0 +1,76 @@
|
|
|
1
|
+
import numpy as np
|
|
2
|
+
from collections import deque
|
|
3
|
+
from enum import Enum
|
|
4
|
+
from dataclasses import dataclass
|
|
5
|
+
|
|
6
|
+
class MonitorState(Enum):
    """Alert state of a single observed layer's finite state machine."""
    NORMAL = "NORMAL"    # Baseline behaviour, no active alert
    WARNING = "WARNING"  # High Trend (Predictive): drift rising fast, threshold not yet breached
    ALERT = "ALERT"      # Threshold Breach (Critical): z-score crossed theta_high
|
|
10
|
+
|
|
11
|
+
@dataclass
class ObserverConfig:
    """
    Configuration for the layer observer.

    ``theta_high`` / ``theta_low`` form a hysteresis band: an alert fires
    when the drift z-score reaches ``theta_high`` but only clears once it
    falls back to ``theta_low``, preventing flickering alarms.
    """
    theta_high: float = 3.0    # Alert threshold (Z-score)
    theta_low: float = 1.5     # Recovery threshold (Hysteresis)
    theta_slope: float = 0.05  # Trend threshold (Drift per step) - Sensitivity
    window_size: int = 20      # History window for trend calculation
|
|
20
|
+
|
|
21
|
+
class LayerObserver:
    """
    Stateful observer for a single neural layer.

    Implements Hysteresis (Signal Debouncing) and Trend Analysis (Early
    Warning) over a sliding window of (step, drift) samples.
    """
    def __init__(self, layer_name, config=None):
        """
        Args:
            layer_name: Human-readable identifier used in alert messages.
            config: Optional ObserverConfig; when omitted, a fresh default
                config is created for this instance.
        """
        self.name = layer_name
        # BUGFIX: the original signature was `config=ObserverConfig()`, a
        # mutable default argument evaluated once at class-definition time
        # and silently shared by every observer created without an explicit
        # config. Use a None sentinel and build a per-instance default.
        self.cfg = config if config is not None else ObserverConfig()
        self.history = deque(maxlen=self.cfg.window_size)
        self.state = MonitorState.NORMAL
        self.current_beta = 0.0  # latest estimated drift slope (per step)

    def update(self, value, step):
        """
        Updates state based on new drift value.

        Args:
            value: Current drift z-score for this layer.
            step: Monotonic step index (x-axis of the trend regression).

        Returns:
            (MonitorState, event string or None): the (possibly new) state
            and a human-readable event emitted only on state transitions.
        """
        self.history.append((step, value))
        event = None

        # 1. Calculate Trend (Beta / Slope) via least-squares over the
        #    window; requires a few samples and non-degenerate x values.
        self.current_beta = 0.0
        if len(self.history) >= 5:
            # Simple linear regression: y = beta*x + alpha
            x = np.array([h[0] for h in self.history])
            y = np.array([h[1] for h in self.history])
            if np.std(x) > 0:
                self.current_beta = np.polyfit(x, y, 1)[0]

        # 2. State Machine with Hysteresis

        # TRANSITION FROM NORMAL
        if self.state == MonitorState.NORMAL:
            if value >= self.cfg.theta_high:
                self.state = MonitorState.ALERT
                event = f"🔴 ALERT [{self.name}]: Threshold Breach ({value:.2f} >= {self.cfg.theta_high})"
            elif self.current_beta > self.cfg.theta_slope:
                # Predictive: slope alone can raise a warning before the
                # absolute threshold is breached.
                self.state = MonitorState.WARNING
                event = f"⚠️ WARNING [{self.name}]: Rapid Drift Detected (Slope {self.current_beta:.3f})"

        # TRANSITION FROM WARNING
        elif self.state == MonitorState.WARNING:
            if value >= self.cfg.theta_high:
                self.state = MonitorState.ALERT
                event = f"🔴 ALERT [{self.name}]: Escalated to Threshold Breach"
            elif self.current_beta <= 0:
                # Trend flattened or reversed before breaching: stand down.
                self.state = MonitorState.NORMAL
                event = f"✅ INFO [{self.name}]: Trend Stabilized"

        # TRANSITION FROM ALERT (Hysteresis Applied)
        elif self.state == MonitorState.ALERT:
            # Require the signal to fall all the way to theta_low (not just
            # below theta_high) before clearing, preventing flicker.
            if value <= self.cfg.theta_low:
                self.state = MonitorState.NORMAL
                event = f"🟢 RECOVERY [{self.name}]: Signal returned to normal ({value:.2f})"

        return self.state, event
|
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
import matplotlib.pyplot as plt
|
|
2
|
+
import numpy as np
|
|
3
|
+
import io
|
|
4
|
+
from PIL import Image
|
|
5
|
+
|
|
6
|
+
def plot_drift_profile(drift_profile, title="DeepDrift Profile", save_path=None, threshold=3.0):
    """
    Visualizes the layer-wise drift profile as a color-coded bar chart.

    Bars at or above ``threshold`` are drawn red, the rest green; a dashed
    line marks the alert threshold. The first (UV/input) and last
    (IR/output) bar positions get a faint background tint.

    Args:
        drift_profile: Sequence of per-layer drift z-scores, input -> output.
        title: Figure title.
        save_path: If given, the figure is written there and the path is
            returned; otherwise a PIL Image object is returned.
        threshold: Alert threshold used for bar coloring and the dashed
            line. Generalizes the previously hard-coded 3.0; the default
            keeps the original behavior and label text.

    Returns:
        ``save_path`` (str) when saving to disk, else a PIL.Image.Image.
    """
    layers = ["UV (Input)", "Mid-Level", "Deep-Level", "IR (Output)"]

    # Fall back to generic labels when the profile is not the canonical
    # four depth zones.
    if len(drift_profile) != len(layers):
        layers = [f"L{i}" for i in range(len(drift_profile))]

    x = np.arange(len(layers))

    fig = plt.figure(figsize=(8, 5))

    # Dynamic coloring based on threshold
    colors = ['green' if d < threshold else 'red' for d in drift_profile]

    plt.bar(x, drift_profile, color=colors, alpha=0.8, edgecolor='black')

    # Threshold line ({:g} renders the default 3.0 as "3", matching the
    # original hard-coded label)
    plt.axhline(threshold, color='red', linestyle='--', label=f'Alert Threshold ({threshold:g}σ)')

    # Zones background
    plt.axvspan(-0.5, 0.5, color='red', alpha=0.05)  # UV
    plt.axvspan(len(layers)-1.5, len(layers)-0.5, color='blue', alpha=0.05)  # IR

    plt.xticks(x, layers)
    plt.ylabel("Drift Score (Z)")
    plt.title(title)
    plt.legend()
    plt.grid(True, alpha=0.3, axis='y')

    plt.tight_layout()

    if save_path:
        plt.savefig(save_path)
        plt.close(fig)
        return save_path
    else:
        # Convert plot to an in-memory PNG for Gradio/Display.
        buf = io.BytesIO()
        plt.savefig(buf, format='png', dpi=100)
        buf.seek(0)
        img = Image.open(buf)
        plt.close(fig)
        return img
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: deepdrift
|
|
3
|
+
Version: 0.2.0
|
|
4
|
+
Summary: A Layer-Wise Diagnostic Framework for Neural Network Robustness
|
|
5
|
+
Author: Alexey Evtushenko
|
|
6
|
+
Author-email: alexey@eutonics.ru
|
|
7
|
+
Classifier: Programming Language :: Python :: 3
|
|
8
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
9
|
+
Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
|
|
10
|
+
License-File: LICENSE
|
|
11
|
+
Requires-Dist: torch>=1.10.0
|
|
12
|
+
Requires-Dist: torchvision>=0.11.0
|
|
13
|
+
Requires-Dist: numpy
|
|
14
|
+
Requires-Dist: tqdm
|
|
15
|
+
Requires-Dist: matplotlib
|
|
16
|
+
Requires-Dist: seaborn
|
|
17
|
+
Dynamic: author
|
|
18
|
+
Dynamic: author-email
|
|
19
|
+
Dynamic: classifier
|
|
20
|
+
Dynamic: license-file
|
|
21
|
+
Dynamic: requires-dist
|
|
22
|
+
Dynamic: summary
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
LICENSE
|
|
2
|
+
README.md
|
|
3
|
+
setup.py
|
|
4
|
+
deepdrift/__init__.py
|
|
5
|
+
deepdrift/doctor.py
|
|
6
|
+
deepdrift/monitor.py
|
|
7
|
+
deepdrift/observer.py
|
|
8
|
+
deepdrift/visualization.py
|
|
9
|
+
deepdrift.egg-info/PKG-INFO
|
|
10
|
+
deepdrift.egg-info/SOURCES.txt
|
|
11
|
+
deepdrift.egg-info/dependency_links.txt
|
|
12
|
+
deepdrift.egg-info/requires.txt
|
|
13
|
+
deepdrift.egg-info/top_level.txt
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
deepdrift
|
deepdrift-0.2.0/setup.py
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
from setuptools import setup, find_packages

# Packaging metadata for the deepdrift distribution; the values here are
# mirrored into deepdrift.egg-info/PKG-INFO at build time.
setup(
    name="deepdrift",
    version="0.2.0",
    description="A Layer-Wise Diagnostic Framework for Neural Network Robustness",
    author="Alexey Evtushenko",
    author_email="alexey@eutonics.ru",
    packages=find_packages(),
    # Runtime dependencies (kept in sync with deepdrift.egg-info/requires.txt).
    install_requires=[
        "torch>=1.10.0",
        "torchvision>=0.11.0",
        "numpy",
        "tqdm",
        "matplotlib",
        "seaborn"
    ],
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Topic :: Scientific/Engineering :: Artificial Intelligence",
    ],
)
|