pyTRACTnmr 0.1.1b1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pytractnmr-0.1.1b1/.gitignore +10 -0
- pytractnmr-0.1.1b1/.python-version +1 -0
- pytractnmr-0.1.1b1/PKG-INFO +15 -0
- pytractnmr-0.1.1b1/README.md +1 -0
- pytractnmr-0.1.1b1/pyproject.toml +25 -0
- pytractnmr-0.1.1b1/src/pyTRACTnmr/__init__.py +1 -0
- pytractnmr-0.1.1b1/src/pyTRACTnmr/main.py +15 -0
- pytractnmr-0.1.1b1/src/pyTRACTnmr/processing.py +243 -0
- pytractnmr-0.1.1b1/src/pyTRACTnmr/widgets.py +86 -0
- pytractnmr-0.1.1b1/src/pyTRACTnmr/window.py +628 -0
- pytractnmr-0.1.1b1/uv.lock +497 -0
|
@@ -0,0 +1 @@
|
|
|
1
|
+
3.14
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: pyTRACTnmr
|
|
3
|
+
Version: 0.1.1b1
|
|
4
|
+
Summary: A simple gui based application to process and analyse TRACT data from NMR spectroscopy.
|
|
5
|
+
Requires-Python: >=3.14
|
|
6
|
+
Requires-Dist: matplotlib>=3.10.8
|
|
7
|
+
Requires-Dist: nmrglue>=0.11
|
|
8
|
+
Requires-Dist: numpy>=2.4.2
|
|
9
|
+
Requires-Dist: pyside6-stubs>=6.7.3.0
|
|
10
|
+
Requires-Dist: pyside6>=6.10.2
|
|
11
|
+
Requires-Dist: scipy-stubs>=1.17.0.2
|
|
12
|
+
Requires-Dist: scipy>=1.17.0
|
|
13
|
+
Description-Content-Type: text/markdown
|
|
14
|
+
|
|
15
|
+
This is a simple GUI application for processing and analysing TRACT data. Currently this only supports data collected with Bruker spectrometers using the pulse program `tractf3gpphwg`.
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
This is a simple GUI application for processing and analysing TRACT data. Currently this only supports data collected with Bruker spectrometers using the pulse program `tractf3gpphwg`.
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
[project]
|
|
2
|
+
name = "pyTRACTnmr"
|
|
3
|
+
version = "0.1.1b1"
|
|
4
|
+
description = "A simple gui based application to process and analyse TRACT data from NMR spectroscopy."
|
|
5
|
+
readme = "README.md"
|
|
6
|
+
requires-python = ">=3.14"
|
|
7
|
+
dependencies = [
|
|
8
|
+
"matplotlib>=3.10.8",
|
|
9
|
+
"nmrglue>=0.11",
|
|
10
|
+
"numpy>=2.4.2",
|
|
11
|
+
"pyside6>=6.10.2",
|
|
12
|
+
"pyside6-stubs>=6.7.3.0",
|
|
13
|
+
"scipy>=1.17.0",
|
|
14
|
+
"scipy-stubs>=1.17.0.2",
|
|
15
|
+
]
|
|
16
|
+
|
|
17
|
+
[project.scripts]
|
|
18
|
+
pytractnmr = "pyTRACTnmr.main:main"
|
|
19
|
+
|
|
20
|
+
[build-system]
|
|
21
|
+
requires = ["hatchling"]
|
|
22
|
+
build-backend = "hatchling.build"
|
|
23
|
+
|
|
24
|
+
[tool.hatch.build.targets.wheel]
|
|
25
|
+
packages = ["src/pyTRACTnmr"]
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
# Init tract_gui package
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
import sys
|
|
2
|
+
from PySide6.QtWidgets import QApplication
|
|
3
|
+
try:
|
|
4
|
+
from .window import TractApp
|
|
5
|
+
except ImportError:
|
|
6
|
+
from window import TractApp
|
|
7
|
+
|
|
8
|
+
def main():
    """Create the Qt application, show the TRACT main window, and run the event loop."""
    application = QApplication(sys.argv)
    main_window = TractApp()
    main_window.show()
    sys.exit(application.exec())
|
|
13
|
+
|
|
14
|
+
# Allow running this module directly as a script (entry point is also
# exposed as the `pytractnmr` console script via pyproject.toml).
if __name__ == "__main__":
    main()
|
|
@@ -0,0 +1,243 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import numpy as np
|
|
3
|
+
import nmrglue as ng # type: ignore
|
|
4
|
+
from scipy.optimize import curve_fit
|
|
5
|
+
from typing import Optional, Tuple, List, Dict
|
|
6
|
+
import logging
|
|
7
|
+
|
|
8
|
+
# Configure logging
|
|
9
|
+
logging.basicConfig(level=logging.INFO)
|
|
10
|
+
logger = logging.getLogger(__name__)
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class TractBruker:
    """
    Process Bruker TRACT NMR data for 15N relaxation analysis.

    Typical workflow:
        1. ``process_first_trace`` -- process one FID for interactive phasing.
        2. ``split_process``       -- process every FID and split the
           interleaved series into alpha/beta spectra.
        3. ``integrate_ppm`` / ``integrate_indices`` -- integrate a peak region.
        4. ``calc_relaxation``     -- fit exponential decays to obtain Ra, Rb.
        5. ``calc_tc``             -- bootstrap the rotational correlation time.
    """

    # Physical constants (CODATA 2018 values)
    PLANCK = 6.62607015e-34                 # Planck constant [J s]
    VACUUM_PERMEABILITY = 1.25663706212e-6  # mu_0 [T m / A]
    GAMMA_1H = 267.52218744e6               # 1H gyromagnetic ratio [rad / (s T)]
    GAMMA_15N = -27.126e6                   # 15N gyromagnetic ratio [rad / (s T)]
    NH_BOND_LENGTH = 1.02e-10               # amide N-H bond length [m]
    CSA_15N = 160e-6                        # 15N chemical shift anisotropy
    CSA_BOND_ANGLE = 17 * np.pi / 180       # angle between CSA axis and N-H bond [rad]

    def __init__(self, exp_folder: str, delay_list: Optional[str] = None) -> None:
        """Load a Bruker experiment folder and the relaxation delay list.

        Args:
            exp_folder: Path to the Bruker experiment directory.
            delay_list: Optional explicit path to a vdlist-style delay file.
                Falls back to ``<exp_folder>/vdlist``, then to dummy delays.

        Raises:
            ValueError: If the Bruker data cannot be read.
        """
        logger.info(f"Initializing TractBruker with folder: {exp_folder}")

        try:
            self.attributes, self.fids = ng.bruker.read(exp_folder)
            try:
                # Reuse processed phases when present; otherwise start unphased.
                self.phc0 = self.attributes["procs"]["PHC0"]
                self.phc1 = self.attributes["procs"]["PHC1"]
            except KeyError:
                self.phc0 = 0.0
                self.phc1 = 0.0
        except Exception as e:
            raise ValueError(f"Could not load Bruker data: {e}") from e

        # Handle delays
        if delay_list and os.path.exists(delay_list):
            self.delays = self._read_delays(delay_list)
        else:
            # Try standard 'vdlist' in folder
            vdlist_path = os.path.join(exp_folder, "vdlist")
            if os.path.exists(vdlist_path):
                self.delays = self._read_delays(vdlist_path)
            else:
                logger.warning("No delay list found. Using dummy delays.")
                # Assuming interleaved alpha/beta, so 2 FIDs per delay point
                n_delays = self.fids.shape[1] // 2
                self.delays = np.linspace(0.01, 1.0, n_delays)

        self.alpha_spectra: List[np.ndarray] = []
        self.beta_spectra: List[np.ndarray] = []
        # BUG FIX: these must start as None so calc_relaxation()/get_fit_data()
        # can detect "integration not yet run" and raise the intended
        # RuntimeError rather than an AttributeError. (The original had these
        # initializations commented out.)
        self.alpha_integrals: Optional[np.ndarray] = None
        self.beta_integrals: Optional[np.ndarray] = None
        self.unit_converter = None

    def _read_delays(self, file: str) -> np.ndarray:
        """Parse a Bruker-style delay list; 'u'/'m' suffixes mean micro/milli-seconds."""
        with open(file, "r") as list_file:
            delays = list_file.read()
        # Convert unit suffixes into exponent notation so float() can parse them.
        delays = delays.replace("u", "e-6").replace("m", "e-3")
        return np.array([float(x) for x in delays.splitlines() if x.strip()])

    def process_first_trace(
        self,
        p0: float,
        p1: float,
        points: int = 2048,
        off: float = 0.35,
        end: float = 0.98,
        pow: float = 2.0,  # NOTE: shadows builtin, kept for interface compatibility
    ) -> np.ndarray:
        """Process the first FID for interactive phase correction.

        Args:
            p0: Zero-order phase correction (degrees).
            p1: First-order phase correction (degrees).
            points: Zero-fill size (spectrum length).
            off/end/pow: Sine-bell apodization parameters (ng.proc_base.sp).

        Returns:
            The real, reversed, phased spectrum of FID [0, 0].
        """
        fid = self.fids[0, 0]
        # Apply apodization
        data = ng.proc_base.sp(fid, off=off, end=end, pow=pow)
        # Zero filling
        data = ng.proc_base.zf_size(data, points)
        # Fourier transform
        data = ng.proc_base.fft(data)
        # Remove digital filter
        data = ng.bruker.remove_digital_filter(self.attributes, data, post_proc=True)
        # Apply phase correction
        data = ng.proc_base.ps(data, p0=p0, p1=p1)
        # Discard imaginary part
        data = ng.proc_base.di(data)
        # Reverse spectrum
        data = ng.proc_base.rev(data)

        # Set up unit converter so integration ranges can be given in ppm.
        udic = ng.bruker.guess_udic(self.attributes, data)
        self.unit_converter = ng.fileiobase.uc_from_udic(udic)
        return data

    def split_process(
        self,
        p0: float,
        p1: float,
        points: int = 2048,
        off: float = 0.35,
        end: float = 0.98,
        pow: float = 2.0,  # NOTE: shadows builtin, kept for interface compatibility
    ) -> None:
        """Process all FIDs and split the interleaved series into alpha/beta spectra.

        Even inner indices are stored as beta, odd indices as alpha
        (interleaved acquisition order for pulseprogram tractf3gpphwg).
        """
        self.phc0 = p0
        self.phc1 = p1
        self.alpha_spectra = []
        self.beta_spectra = []

        for i in range(self.fids.shape[0]):
            for j in range(self.fids[i].shape[0]):
                data = self.fids[i][j]
                data = ng.proc_base.sp(data, off=off, end=end, pow=pow)
                data = ng.proc_base.zf_size(data, points)
                data = ng.proc_base.fft(data)
                data = ng.bruker.remove_digital_filter(
                    self.attributes, data, post_proc=True
                )
                data = ng.proc_base.ps(data, p0=p0, p1=p1)
                data = ng.proc_base.di(data)
                data = ng.proc_bl.baseline_corrector(data)
                data = ng.proc_base.rev(data)

                if j % 2 == 0:
                    self.beta_spectra.append(data)
                else:
                    self.alpha_spectra.append(data)

        # Unit converter from first spectrum
        if self.beta_spectra:
            udic = ng.bruker.guess_udic(self.attributes, self.beta_spectra[0])
            self.unit_converter = ng.fileiobase.uc_from_udic(udic)

    def integrate_indices(self, start_idx: int, end_idx: int) -> None:
        """Integrate all alpha/beta spectra over a point-index range.

        Raises:
            RuntimeError: If split_process() has not been run.
        """
        if not self.alpha_spectra or not self.beta_spectra:
            raise RuntimeError("No spectra available. Run split_process() first.")

        self.alpha_integrals = np.array(
            [s[start_idx:end_idx].sum() for s in self.alpha_spectra]
        )
        self.beta_integrals = np.array(
            [s[start_idx:end_idx].sum() for s in self.beta_spectra]
        )

    def integrate_ppm(self, start_ppm: float, end_ppm: float) -> None:
        """Integrate all spectra over a ppm range (order of bounds is irrelevant).

        Raises:
            RuntimeError: If no unit converter has been set up yet.
        """
        if self.unit_converter is None:
            raise RuntimeError("Unit converter not initialized.")

        idx1 = self.unit_converter(start_ppm, "ppm")
        idx2 = self.unit_converter(end_ppm, "ppm")

        # ppm axis is reversed relative to point indices, so sort the bounds.
        start = int(min(idx1, idx2))
        end = int(max(idx1, idx2))
        self.integrate_indices(start, end)

    @staticmethod
    def _relax(x, a, r):
        """Mono-exponential decay model: a * exp(-r * x)."""
        return a * np.exp(-r * x)

    def calc_relaxation(self) -> None:
        """Fit normalized alpha/beta decays; sets Ra, Rb and their 1-sigma errors.

        Raises:
            RuntimeError: If integration has not been run, or fitting fails.
        """
        if self.alpha_integrals is None or self.beta_integrals is None:
            raise RuntimeError("Must call integrate() before calc_relaxation()")

        # Truncate delays if mismatch
        n_pts = min(len(self.alpha_integrals), len(self.delays))
        delays: np.ndarray = self.delays[:n_pts]
        alpha_ints = self.alpha_integrals[:n_pts]
        beta_ints = self.beta_integrals[:n_pts]

        # Normalize to the first point so both fits start near 1.0
        alpha_norm = alpha_ints / alpha_ints[0]
        beta_norm = beta_ints / beta_ints[0]

        try:
            self.popt_alpha, self.pcov_alpha = curve_fit(
                self._relax, delays, alpha_norm, p0=[1.0, 5.0], maxfev=5000
            )
            self.popt_beta, self.pcov_beta = curve_fit(
                self._relax, delays, beta_norm, p0=[1.0, 5.0], maxfev=5000
            )
        except Exception as e:
            raise RuntimeError(f"Fitting failed: {e}") from e

        self.Ra: float = self.popt_alpha[1]
        self.Rb: float = self.popt_beta[1]
        # 1-sigma parameter errors from the fit covariance diagonals.
        self.err_Ra: float = np.sqrt(np.diag(self.pcov_alpha))[1]
        self.err_Rb: float = np.sqrt(np.diag(self.pcov_beta))[1]

    def _tc_equation(self, w_N: float, c: float, S2: float = 1.0) -> float:
        """Closed-form root for the correlation time from the cross-correlated
        relaxation rate difference (w_N: 15N Larmor frequency, c: rate term,
        S2: order parameter)."""
        t1 = (5 * c) / (24 * S2)
        A = 336 * (S2**2) * (w_N**2)
        B = 25 * (c**2) * (w_N**4)
        C = 125 * (c**3) * (w_N**6)
        D = 625 * (S2**2) * (c**4) * (w_N**10)
        E = 3025 * (S2**4) * (c**2) * (w_N**8)
        F = 21952 * (S2**6) * (w_N**6)
        G = 1800 * c * (w_N**4)
        term_sqrt = np.sqrt(D - E + F)
        term_cbrt = (C + 24 * np.sqrt(3) * term_sqrt + G * S2**2) ** (1 / 3)
        t2 = (A - B) / (24 * (w_N**2) * S2 * term_cbrt)
        t3 = term_cbrt / (24 * S2 * w_N**2)
        return t1 - t2 + t3

    def calc_tc(
        self, B0: Optional[float] = None, S2: float = 1.0, n_bootstrap: int = 1000
    ) -> None:
        """Estimate the rotational correlation time tau_c (ns) by bootstrap.

        Args:
            B0: Spectrometer 1H frequency in MHz; read from acqus/SFO1 if None.
            S2: Order parameter (rigid limit 1.0).
            n_bootstrap: Number of Monte-Carlo samples drawn from Ra/Rb errors.

        Sets:
            tau_c, err_tau_c (mean and std of the bootstrap distribution, ns).
        """
        if not hasattr(self, "Ra"):
            self.calc_relaxation()
        if B0 is None:
            B0 = self.attributes["acqus"]["SFO1"]
        # Static field strength in Tesla from the 1H frequency.
        B_0 = B0 * 1e6 * 2 * np.pi / self.GAMMA_1H
        # Dipolar coupling term
        p = (
            self.VACUUM_PERMEABILITY * self.GAMMA_1H * self.GAMMA_15N * self.PLANCK
        ) / (16 * np.pi**2 * np.sqrt(2) * self.NH_BOND_LENGTH**3)
        # CSA term
        dN = self.GAMMA_15N * B_0 * self.CSA_15N / (3 * np.sqrt(2))
        w_N = B_0 * self.GAMMA_15N
        # Propagate fit uncertainty by sampling Ra/Rb from normal distributions.
        Ra_samples: np.ndarray = np.random.normal(self.Ra, self.err_Ra, n_bootstrap)
        Rb_samples: np.ndarray = np.random.normal(self.Rb, self.err_Rb, n_bootstrap)
        c_samples = (Rb_samples - Ra_samples) / (
            2 * dN * p * (3 * np.cos(self.CSA_BOND_ANGLE) ** 2 - 1)
        )
        tau_samples: np.ndarray = (
            np.array(
                [self._tc_equation(w_N, c, S2) for c in c_samples if not np.isnan(c)]
            )
            * 1e9  # seconds -> nanoseconds
        )
        self.tau_c = np.mean(tau_samples)
        self.err_tau_c = np.std(tau_samples)

    def get_fit_data(
        self,
    ) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
        """Return (delays, normalized alpha, normalized beta, alpha fit params,
        beta fit params) for plotting.

        Raises:
            RuntimeError: If integration or fitting has not been run yet.
        """
        # BUG FIX: guard against use before integrate()/calc_relaxation() so
        # callers get a clear RuntimeError instead of an AttributeError.
        if self.alpha_integrals is None or self.beta_integrals is None:
            raise RuntimeError("Must call integrate() before get_fit_data()")
        if not hasattr(self, "popt_alpha"):
            raise RuntimeError("Must call calc_relaxation() before get_fit_data()")
        n_pts = min(len(self.alpha_integrals), len(self.delays))
        x = self.delays[:n_pts]
        y_a = self.alpha_integrals[:n_pts] / self.alpha_integrals[0]
        y_b = self.beta_integrals[:n_pts] / self.beta_integrals[0]
        return x, y_a, y_b, self.popt_alpha, self.popt_beta
|
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
from PySide6.QtWidgets import (
|
|
2
|
+
QFontDialog,
|
|
3
|
+
QInputDialog,
|
|
4
|
+
QFileDialog,
|
|
5
|
+
QMessageBox,
|
|
6
|
+
QWidget,
|
|
7
|
+
)
|
|
8
|
+
from matplotlib.backends.backend_qtagg import FigureCanvasQTAgg, NavigationToolbar2QT
|
|
9
|
+
import matplotlib.pyplot as plt
|
|
10
|
+
from matplotlib.figure import Figure
|
|
11
|
+
from typing import Optional
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class CustomNavigationToolbar(NavigationToolbar2QT):
    """Matplotlib Qt navigation toolbar with extra Font and Export actions."""

    def __init__(
        self, canvas: FigureCanvasQTAgg, parent: QWidget, coordinates: bool = True
    ) -> None:
        super().__init__(canvas, parent, coordinates)
        self.addSeparator()
        self.addAction("Font", self.change_font)
        self.addAction("Export", self.export_figure)

    def export_figure(self) -> None:
        """Ask for a DPI and a destination file, then save the current figure."""
        dpi, accepted = QInputDialog.getInt(
            self, "Export Settings", "DPI:", value=300, minValue=72, maxValue=1200
        )
        if not accepted:
            return
        fname, _ = QFileDialog.getSaveFileName(
            self, "Save Figure", "", "PNG (*.png);;PDF (*.pdf);;SVG (*.svg)"
        )
        if not fname:
            return
        try:
            self.canvas.figure.savefig(fname, dpi=dpi, bbox_inches="tight")
        except Exception as e:
            QMessageBox.critical(self, "Error", f"Could not save figure: {e}")

    def change_font(self) -> None:
        """Let the user pick a font and apply it to current and future plots."""
        ok, font = QFontDialog.getFont(self)
        if not ok:
            return

        size = font.pointSize()
        family = font.family()

        # Update rcParams so plots created later inherit the chosen font.
        new_params = {
            "font.size": size,
            "font.family": family,
            "axes.labelsize": size,
            "axes.titlesize": size + 2,
            "xtick.labelsize": size,
            "ytick.labelsize": size,
            "legend.fontsize": size,
        }
        plt.rcParams.update(new_params)

        # Re-style every text element already on the canvas.
        for axis in self.canvas.figure.axes:
            labels = [axis.title, axis.xaxis.label, axis.yaxis.label]
            labels += axis.get_xticklabels()
            labels += axis.get_yticklabels()
            for label in labels:
                label.set_fontsize(size)
                label.set_fontfamily(family)

            legend = axis.get_legend()
            if legend:
                for entry in legend.get_texts():
                    entry.set_fontsize(size)
                    entry.set_fontfamily(family)

        self.canvas.draw()
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
class MplCanvas(FigureCanvasQTAgg):
    """A Qt canvas widget hosting a single-axes matplotlib figure."""

    def __init__(
        self,
        parent: Optional[QWidget] = None,
        width: float = 5,
        height: float = 4,
        dpi: int = 100,
    ) -> None:
        figure = Figure(figsize=(width, height), dpi=dpi)
        self.fig = figure
        # One subplot filling the whole figure.
        self.axes = figure.add_subplot(111)
        super().__init__(figure)
|