pyTRACTnmr 0.1.1b1__py3-none-any.whl → 0.1.2b2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pyTRACTnmr/main.py CHANGED
@@ -1,9 +1,8 @@
  import sys
  from PySide6.QtWidgets import QApplication
- try:
-     from .window import TractApp
- except ImportError:
-     from window import TractApp
+
+ from window import TractApp # type: ignore
+

  def main():
      app = QApplication(sys.argv)
@@ -11,5 +10,6 @@ def main():
      window.show()
      sys.exit(app.exec())

+
  if __name__ == "__main__":
      main()
pyTRACTnmr/processing.py CHANGED
@@ -2,8 +2,9 @@ import os
  import numpy as np
  import nmrglue as ng # type: ignore
  from scipy.optimize import curve_fit
- from typing import Optional, Tuple, List, Dict
+ from typing import Optional, Tuple, List
  import logging
+ from typing_extensions import deprecated

  # Configure logging
  logging.basicConfig(level=logging.INFO)
@@ -25,6 +26,15 @@ class TractBruker:
      CSA_BOND_ANGLE = 17 * np.pi / 180

      def __init__(self, exp_folder: str, delay_list: Optional[str] = None) -> None:
+         """Load a Bruker pseudo-2D TRACT experiment.
+
+         Args:
+             exp_folder (str): Path to the Bruker experiment folder.
+             delay_list (Optional[str], optional): Path to the delay list file. Defaults to None.
+
+         Raises:
+             ValueError: If the experiment cannot be loaded.
+         """
          logger.info(f"Initializing TractBruker with folder: {exp_folder}")

          try:
@@ -47,10 +57,9 @@ class TractBruker:
          if os.path.exists(vdlist_path):
              self.delays = self._read_delays(vdlist_path)
          else:
-             logger.warning("No delay list found. Using dummy delays.")
-             # Assuming interleaved alpha/beta, so 2 FIDs per delay point
-             n_delays = self.fids.shape[1] // 2
-             self.delays = np.linspace(0.01, 1.0, n_delays)
+             raise ValueError(
+                 "No delay list found (vdlist) and no external list provided."
+             )

          self.alpha_spectra: List[np.ndarray] = []
          self.beta_spectra: List[np.ndarray] = []
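A minimal usage sketch of the constructor documented above (the import path follows the file layout shown in this diff; the folder paths are hypothetical):

    from pyTRACTnmr.processing import TractBruker

    exp = TractBruker("/data/tract/4", delay_list="/data/tract/4/vdlist")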
@@ -59,24 +68,37 @@ class TractBruker:
          self.unit_converter = None

      def _read_delays(self, file: str) -> np.ndarray:
+         """Utility function for reading a vdlist file and converting it to a numpy array.
+
+         Args:
+             file (str): Path to the vdlist file.
+
+         Returns:
+             np.ndarray: Numpy array containing the delays in seconds.
+         """
          with open(file, "r") as list_file:
              delays = list_file.read()
          delays = delays.replace("u", "e-6").replace("m", "e-3")
          return np.array([float(x) for x in delays.splitlines() if x.strip()])

-     def process_first_trace(
-         self,
-         p0: float,
-         p1: float,
-         points: int = 2048,
-         off: float = 0.35,
-         end: float = 0.98,
-         pow: float = 2.0,
+     def _get_lb_val(self, lb: float) -> float:
+         """Calculate normalized line broadening value."""
+         try:
+             sw = self.attributes["acqus"]["SW_h"]
+             return lb / sw
+         except (KeyError, ZeroDivisionError):
+             return lb
+
+     def _process_single_fid(
+         self, fid, p0, p1, points, apod_func, lb_val, off, end, pow, nodes
      ) -> np.ndarray:
-         """Process first FID for interactive phase correction."""
-         fid = self.fids[0, 0]
+         """Internal helper to process a single FID."""
          # Apply apodization
-         data = ng.proc_base.sp(fid, off=off, end=end, pow=pow)
+         if apod_func == "em":
+             data = ng.proc_base.em(fid, lb=lb_val)
+         else:
+             data = ng.proc_base.sp(fid, off=off, end=end, pow=pow)
+
          # Zero filling
          data = ng.proc_base.zf_size(data, points)
          # Fourier transform
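A note on the new "em" path: _get_lb_val() divides the line broadening given in Hz by the spectral width SW_h, so the value handed to ng.proc_base.em() is a dimensionless per-point factor rather than Hz. A quick worked number (values are illustrative):

    lb_val = 5.0 / 10_000.0   # 5 Hz line broadening on a 10 kHz sweep width -> 5e-4 per point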
@@ -89,6 +111,42 @@ class TractBruker:
          data = ng.proc_base.di(data)
          # Reverse spectrum
          data = ng.proc_base.rev(data)
+         if nodes is not None and len(nodes) > 1:
+             data = ng.proc_bl.base(data, nodes)
+         return data
+
+     def process_first_trace(
+         self,
+         p0: float,
+         p1: float,
+         points: int = 2048,
+         apod_func: str = "sp",
+         lb: float = 0.0,
+         off: float = 0.35,
+         end: float = 0.98,
+         pow: float = 2.0,
+         nodes=None,
+     ) -> np.ndarray:
+         """Process the first plane in the pseudo-2D experiment. This is useful for phase correction.
+
+         Args:
+             p0 (float): Zeroth order phase correction.
+             p1 (float): First order phase correction.
+             points (int, optional): Zero filling points. Defaults to 2048.
+             apod_func (str, optional): Apodization function to use. Only "sp" and "em" are supported. Defaults to "sp".
+             lb (float, optional): Line broadening in Hz (only for em). Defaults to 0.0.
+             off (float, optional): Offset for sp apodization. Defaults to 0.35.
+             end (float, optional): End of sp apodization. Defaults to 0.98.
+             pow (float, optional): Power for sp apodization. Defaults to 2.0.
+
+         Returns:
+             np.ndarray: Fourier transformed spectrum containing only the real part.
+         """
+         fid = self.fids[0, 0]
+         lb_val = self._get_lb_val(lb) if apod_func == "em" else 0.0
+         data = self._process_single_fid(
+             fid, p0, p1, points, apod_func, lb_val, off, end, pow, nodes
+         )

          # Set up unit converter
          udic = ng.bruker.guess_udic(self.attributes, data)
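A sketch of using the new process_first_trace() signature for interactive phasing (the phase values and line broadening are illustrative):

    spectrum = exp.process_first_trace(p0=45.0, p1=-10.0, apod_func="em", lb=5.0)
    # adjust p0/p1 until the region of interest is absorptive, then reuse the values in split_process()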
@@ -100,30 +158,46 @@ class TractBruker:
          p0: float,
          p1: float,
          points: int = 2048,
+         apod_func: str = "sp",
+         lb: float = 0.0,
          off: float = 0.35,
          end: float = 0.98,
          pow: float = 2.0,
+         nodes=None,
      ) -> None:
-         """Process all FIDs and split into alpha/beta."""
+         """The primary function for processing the pseudo-2D experiment. This splits the data into alpha and beta states and performs the basic processing of the raw FIDs.
+
+         Args:
+             p0 (float): Zeroth order phase correction.
+             p1 (float): First order phase correction.
+             points (int, optional): Zero filling points. Defaults to 2048.
+             apod_func (str, optional): Apodization function to use. Only "sp" and "em" are supported. Defaults to "sp".
+             lb (float, optional): Line broadening in Hz (only for em). Defaults to 0.0.
+             off (float, optional): Offset for sp apodization. Defaults to 0.35.
+             end (float, optional): End of sp apodization. Defaults to 0.98.
+             pow (float, optional): Power for sp apodization. Defaults to 2.0.
+         """
          self.phc0 = p0
          self.phc1 = p1
          self.alpha_spectra = []
          self.beta_spectra = []

+         lb_val = self._get_lb_val(lb) if apod_func == "em" else 0.0
+
          for i in range(self.fids.shape[0]):
              for j in range(self.fids[i].shape[0]):
-                 data = self.fids[i][j]
-                 data = ng.proc_base.sp(data, off=off, end=end, pow=pow)
-                 data = ng.proc_base.zf_size(data, points)
-                 data = ng.proc_base.fft(data)
-                 data = ng.bruker.remove_digital_filter(
-                     self.attributes, data, post_proc=True
+                 data = self._process_single_fid(
+                     self.fids[i][j],
+                     p0,
+                     p1,
+                     points,
+                     apod_func,
+                     lb_val,
+                     off,
+                     end,
+                     pow,
+                     nodes,
                  )
-                 data = ng.proc_base.ps(data, p0=p0, p1=p1)
-                 data = ng.proc_base.di(data)
-                 data = ng.proc_bl.baseline_corrector(data)
-                 data = ng.proc_base.rev(data)
-
                  if j % 2 == 0:
                      self.beta_spectra.append(data)
                  else:
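Once the phases are known, the same settings feed split_process(), which runs the shared helper over every FID and sorts the interleaved planes (values again illustrative):

    exp.split_process(p0=45.0, p1=-10.0, apod_func="em", lb=5.0)
    # exp.alpha_spectra and exp.beta_spectra now hold the processed planes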
@@ -134,8 +208,18 @@ class TractBruker:
          udic = ng.bruker.guess_udic(self.attributes, self.beta_spectra[0])
          self.unit_converter = ng.fileiobase.uc_from_udic(udic)

+     @deprecated("Use integrate_ppm() instead")
      def integrate_indices(self, start_idx: int, end_idx: int) -> None:
-         """Integrate using point indices."""
+         """Integrate the specified region in the spectra. This accounts for all the alpha and beta spectra collected.
+
+         Args:
+             start_idx (int): Start index for integration.
+             end_idx (int): End index for integration.
+
+
+         Raises:
+             RuntimeError: If no spectra are available. Run split_process() first.
+         """
          if not self.alpha_spectra or not self.beta_spectra:
              raise RuntimeError("No spectra available. Run split_process() first.")

@@ -147,7 +231,16 @@ class TractBruker:
          )

      def integrate_ppm(self, start_ppm: float, end_ppm: float) -> None:
-         """Integrate using ppm range."""
+         """Integrate the specified region in all the extracted spectra.
+
+         Args:
+             start_ppm (float): Start of the integration region in ppm.
+             end_ppm (float): End of the integration region in ppm.
+
+         Raises:
+             RuntimeError: If no spectra are available. Run split_process() first.
+             RuntimeError: If no unit converter is available. Run split_process() first.
+         """
          if self.unit_converter is None:
              raise RuntimeError("Unit converter not initialized.")

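Integration then takes a single ppm window applied to every alpha and beta spectrum (the window below is illustrative):

    exp.integrate_ppm(11.5, 10.5)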
@@ -159,10 +252,21 @@ class TractBruker:
          self.integrate_indices(start, end)

      @staticmethod
-     def _relax(x, a, r):
+     def _relax(x, a, r) -> np.ndarray:
+         """Internal function for the exponential decay model a * exp(-r * x).
+
+         Returns:
+             np.ndarray: Y values.
+         """
          return a * np.exp(-r * x)

      def calc_relaxation(self) -> None:
+         """Calculate the relaxation rates for the alpha and beta states. This function does not return any values but sets the fitted rates and their uncertainties as instance attributes.
+
+         Raises:
+             RuntimeError: If no integrals are available. Run integrate_ppm() first.
+             RuntimeError: If fitting fails.
+         """
          if self.alpha_integrals is None or self.beta_integrals is None:
              raise RuntimeError("Must call integrate() before calc_relaxation()")

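The fit behind calc_relaxation() is a plain scipy curve_fit of this mono-exponential model; a self-contained sketch with made-up numbers (not the package's own code):

    import numpy as np
    from scipy.optimize import curve_fit

    def relax(x, a, r):
        return a * np.exp(-r * x)                      # same model as TractBruker._relax

    delays = np.array([0.002, 0.01, 0.02, 0.04])       # hypothetical relaxation delays in s
    ratios = np.array([1.00, 0.85, 0.71, 0.52])        # hypothetical normalized integrals
    popt, pcov = curve_fit(relax, delays, ratios, p0=[1.0, 10.0])
    rate, rate_err = popt[1], np.sqrt(np.diag(pcov))[1]   # mirrors how self.err_Rb is derived above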
@@ -192,6 +296,19 @@ class TractBruker:
          self.err_Rb: float = np.sqrt(np.diag(self.pcov_beta))[1]

      def _tc_equation(self, w_N: float, c: float, S2: float = 1.0) -> float:
+         """Function for calculating the rotational correlation time. The equation is adapted from eq. 15 of:
+         'TRACT revisited: an algebraic solution for determining overall rotational correlation times from cross-correlated relaxation rates'
+         PMID: 34480265
+         doi: 10.1007/s10858-021-00379-5
+
+         Args:
+             w_N (float): Larmor frequency of the nitrogen nucleus.
+             c (float): Constant derived from the relaxation rates of the alpha and beta states.
+             S2 (float, optional): Square of the order parameter. Defaults to 1.0.
+
+         Returns:
+             float: Rotational correlation time in ns.
+         """
          t1 = (5 * c) / (24 * S2)
          A = 336 * (S2**2) * (w_N**2)
          B = 25 * (c**2) * (w_N**4)
@@ -209,6 +326,13 @@ class TractBruker:
      def calc_tc(
          self, B0: Optional[float] = None, S2: float = 1.0, n_bootstrap: int = 1000
      ) -> None:
+         """Calculate the rotational correlation time using bootstrapping. The relaxation rates are resampled based on the error estimates derived from the covariance matrix.
+
+         Args:
+             B0 (Optional[float], optional): Magnetic field in MHz. Defaults to None.
+             S2 (float, optional): Square of the order parameter. Defaults to 1.0.
+             n_bootstrap (int, optional): Number of bootstrap samples. Defaults to 1000.
+         """
          if not hasattr(self, "Ra"):
              self.calc_relaxation()
          if B0 is None:
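A minimal call of the bootstrap routine (the 600 MHz field value is illustrative; B0 is optional per the signature above):

    exp.calc_relaxation()
    exp.calc_tc(B0=600.0, S2=1.0, n_bootstrap=1000)
    print(exp.tau_c, exp.err_tau_c)   # mean and standard deviation of the bootstrapped tau_c samples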
@@ -233,11 +357,49 @@ class TractBruker:
          self.tau_c = np.mean(tau_samples)
          self.err_tau_c = np.std(tau_samples)

+     def calc_confidence_interval(
+         self, x: np.ndarray, popt: np.ndarray, pcov: np.ndarray
+     ) -> np.ndarray:
+         """Calculate 95% confidence interval for the exponential decay."""
+         A, R = popt
+         # Gradient of f(x) = A * exp(-R * x)
+         # df/dA = exp(-R * x)
+         # df/dR = -A * x * exp(-R * x)
+         df_dA = np.exp(-R * x)
+         df_dR = -A * x * np.exp(-R * x)
+
+         J = np.stack([df_dA, df_dR], axis=1)
+
+         # sigma^2 = diag(J @ pcov @ J.T)
+         sigma2 = np.sum((J @ pcov) * J, axis=1)
+         return 1.96 * np.sqrt(sigma2)
+
      def get_fit_data(
          self,
-     ) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
+     ) -> Tuple[
+         np.ndarray,
+         np.ndarray,
+         np.ndarray,
+         np.ndarray,
+         np.ndarray,
+         np.ndarray,
+         np.ndarray,
+     ]:
+         """Returns the fit data for the alpha and beta states.
+
+         Returns:
+             Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray]: (Delays (s), Ratios of alpha state, Ratios of beta state, optimized parameters for alpha state, optimized parameters for beta state, cov matrix alpha, cov matrix beta)
+         """
          n_pts = min(len(self.alpha_integrals), len(self.delays))
          x = self.delays[:n_pts]
          y_a = self.alpha_integrals[:n_pts] / self.alpha_integrals[0]
          y_b = self.beta_integrals[:n_pts] / self.beta_integrals[0]
-         return x, y_a, y_b, self.popt_alpha, self.popt_beta
+         return (
+             x,
+             y_a,
+             y_b,
+             self.popt_alpha,
+             self.popt_beta,
+             self.pcov_alpha,
+             self.pcov_beta,
+         )
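Together, the expanded get_fit_data() and the new calc_confidence_interval() provide everything needed to plot the decays with 95% confidence bands; a plotting sketch (matplotlib is not a dependency shown in this diff and is used here only for illustration):

    import numpy as np
    import matplotlib.pyplot as plt

    x, y_a, y_b, popt_a, popt_b, pcov_a, pcov_b = exp.get_fit_data()
    fit_a = popt_a[0] * np.exp(-popt_a[1] * x)          # alpha-state fit, A * exp(-R * x)
    ci_a = exp.calc_confidence_interval(x, popt_a, pcov_a)

    plt.plot(x, y_a, "o")                               # normalized alpha integrals
    plt.plot(x, fit_a)                                  # fitted decay
    plt.fill_between(x, fit_a - ci_a, fit_a + ci_a, alpha=0.3)   # 95% confidence band
    plt.show()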