pychemstation 0.10.4__py3-none-any.whl → 0.10.6__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry; it is provided for informational purposes only.
Files changed (34)
  1. pychemstation/analysis/__init__.py +8 -1
  2. pychemstation/analysis/chromatogram.py +20 -0
  3. pychemstation/analysis/process_report.py +125 -63
  4. pychemstation/control/__init__.py +2 -0
  5. pychemstation/control/controllers/__init__.py +2 -3
  6. pychemstation/control/controllers/abc_tables/device.py +15 -0
  7. pychemstation/control/controllers/abc_tables/run.py +228 -0
  8. pychemstation/control/controllers/abc_tables/table.py +221 -0
  9. pychemstation/control/controllers/comm.py +25 -106
  10. pychemstation/control/controllers/data_aq/__init__.py +4 -0
  11. pychemstation/control/controllers/{tables → data_aq}/method.py +52 -95
  12. pychemstation/control/controllers/{tables → data_aq}/sequence.py +199 -141
  13. pychemstation/control/controllers/devices/__init__.py +3 -0
  14. pychemstation/control/controllers/devices/injector.py +69 -24
  15. pychemstation/control/hplc.py +15 -17
  16. pychemstation/utils/injector_types.py +23 -3
  17. pychemstation/utils/macro.py +2 -2
  18. pychemstation/utils/method_types.py +1 -1
  19. pychemstation/utils/mocking/__init__.py +0 -0
  20. pychemstation/utils/mocking/abc_comm.py +160 -0
  21. pychemstation/utils/mocking/mock_comm.py +5 -0
  22. pychemstation/utils/mocking/mock_hplc.py +2 -0
  23. pychemstation/utils/sequence_types.py +19 -0
  24. pychemstation/utils/table_types.py +6 -0
  25. pychemstation/utils/tray_types.py +36 -1
  26. {pychemstation-0.10.4.dist-info → pychemstation-0.10.6.dist-info}/METADATA +4 -4
  27. pychemstation-0.10.6.dist-info/RECORD +42 -0
  28. pychemstation/control/controllers/devices/device.py +0 -49
  29. pychemstation/control/controllers/tables/ms.py +0 -24
  30. pychemstation/control/controllers/tables/table.py +0 -375
  31. pychemstation-0.10.4.dist-info/RECORD +0 -37
  32. /pychemstation/control/controllers/{tables → abc_tables}/__init__.py +0 -0
  33. {pychemstation-0.10.4.dist-info → pychemstation-0.10.6.dist-info}/WHEEL +0 -0
  34. {pychemstation-0.10.4.dist-info → pychemstation-0.10.6.dist-info}/licenses/LICENSE +0 -0
pychemstation/analysis/__init__.py
@@ -1,4 +1,11 @@
  from .process_report import CSVProcessor
  from .process_report import TXTProcessor
+ from .chromatogram import AgilentChannelChromatogramData
+ from .chromatogram import AgilentHPLCChromatogram
 
- __all__ = ["CSVProcessor", "TXTProcessor"]
+ __all__ = [
+     "CSVProcessor",
+     "TXTProcessor",
+     "AgilentChannelChromatogramData",
+     "AgilentHPLCChromatogram",
+ ]
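Both chromatogram classes are now re-exported at the package level (previously they were only importable from pychemstation.analysis.chromatogram). An illustrative sketch, not part of the diff:

    from pychemstation.analysis import (
        AgilentChannelChromatogramData,
        AgilentHPLCChromatogram,
        CSVProcessor,
        TXTProcessor,
    )

    # All four names are now listed in pychemstation.analysis.__all__.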
pychemstation/analysis/chromatogram.py
@@ -3,6 +3,7 @@
  import os
  import time
  from dataclasses import dataclass
+ from typing import Dict
 
  import numpy as np
 
@@ -114,3 +115,22 @@ class AgilentChannelChromatogramData:
      F: AgilentHPLCChromatogram
      G: AgilentHPLCChromatogram
      H: AgilentHPLCChromatogram
+
+     @classmethod
+     def from_dict(cls, chroms: Dict[str, AgilentHPLCChromatogram]):
+         keys = chroms.keys()
+         class_keys = vars(AgilentChannelChromatogramData)["__annotations__"].keys()
+         if set(class_keys) == set(keys):
+             return AgilentChannelChromatogramData(
+                 A=chroms["A"],
+                 B=chroms["B"],
+                 C=chroms["C"],
+                 D=chroms["D"],
+                 E=chroms["E"],
+                 F=chroms["F"],
+                 G=chroms["G"],
+                 H=chroms["H"],
+             )
+         else:
+             err = f"{keys} don't match {class_keys}"
+             raise KeyError(err)
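Illustrative use of the new from_dict constructor (not part of the diff): it expects a dict whose keys are exactly the eight channel names A through H and raises KeyError otherwise.

    from pychemstation.analysis import (
        AgilentChannelChromatogramData,
        AgilentHPLCChromatogram,
    )

    # One chromatogram per detector channel; keys must match the class fields exactly.
    chroms = {ch: AgilentHPLCChromatogram() for ch in "ABCDEFGH"}
    channel_data = AgilentChannelChromatogramData.from_dict(chroms)

    # A partial dict such as {"A": AgilentHPLCChromatogram()} raises KeyError.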
pychemstation/analysis/process_report.py
@@ -1,10 +1,12 @@
+ from __future__ import annotations
+
  import abc
  import os
  import re
  from abc import abstractmethod
  from dataclasses import dataclass
  from enum import Enum
- from typing import AnyStr, Dict, List, Optional, Pattern
+ from typing import AnyStr, Dict, List, Optional, Pattern, Union
 
  import pandas as pd
  from aghplctools.ingestion.text import (
@@ -15,6 +17,7 @@ from aghplctools.ingestion.text import (
      _signal_table_re,
      chunk_string,
  )
+ from pandas.errors import EmptyDataError
  from result import Err, Ok, Result
 
  from ..analysis.chromatogram import AgilentHPLCChromatogram
@@ -43,7 +46,7 @@ class Signals:
  class AgilentReport:
      vial_location: Optional[Tray]
      signals: List[Signals]
-     solvents: Optional[Dict[AnyStr, AnyStr]]
+     solvents: Optional[Dict[str, str]]
 
 
  class ReportType(Enum):
@@ -69,6 +72,37 @@ class CSVProcessor(ReportProcessor):
          """
          super().__init__(path)
 
+     def find_csv_prefix(self) -> str:
+         files = [
+             f
+             for f in os.listdir(self.path)
+             if os.path.isfile(os.path.join(self.path, f))
+         ]
+         for file in files:
+             if "00" in file:
+                 name, _, file_extension = file.partition(".")
+                 if "00" in name and file_extension.lower() == "csv":
+                     prefix, _, _ = name.partition("00")
+                     return prefix
+         raise FileNotFoundError("Couldn't find the prefix for CSV")
+
+     def report_contains(self, labels: List[str], want: List[str]):
+         for label in labels:
+             if label in want:
+                 want.remove(label)
+
+         all_labels_seen = False
+         if len(want) != 0:
+             for want_label in want:
+                 label_seen = False
+                 for label in labels:
+                     if want_label in label or want_label == label:
+                         label_seen = True
+                 all_labels_seen = label_seen
+         else:
+             return True
+         return all_labels_seen
+
      def process_report(self) -> Result[AgilentReport, AnyStr]:
          """
          Method to parse details from CSV report.
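The CSV report prefix is no longer assumed to be REPORT: find_csv_prefix locates any <prefix>00.CSV file in the run folder, and report_contains verifies that the label column carries the expected fields. The prefix rule reduces to str.partition, shown standalone here as an illustration (the file name is hypothetical):

    name, _, ext = "MYRUN00.CSV".partition(".")  # -> ("MYRUN00", ".", "CSV")
    prefix, _, _ = name.partition("00")          # -> ("MYRUN", "00", "")
    assert ext.lower() == "csv" and prefix == "MYRUN"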
@@ -76,15 +110,30 @@ class CSVProcessor(ReportProcessor):
          :return: subset of complete report details, specifically the sample location, solvents in pumps,
          and list of peaks at each wavelength channel.
          """
-         labels = os.path.join(self.path, "REPORT00.CSV")
-         if os.path.exists(labels):
-             df_labels: Dict[int, Dict[int:AnyStr]] = pd.read_csv(
+         prefix = self.find_csv_prefix()
+         labels = os.path.join(self.path, f"{prefix}00.CSV")
+         if not os.path.exists(labels):
+             raise ValueError(
+                 "CSV reports do not exist, make sure to turn on the post run CSV report option!"
+             )
+         elif os.path.exists(labels):
+             LOCATION = "Location"
+             NUM_SIGNALS = "Number of Signals"
+             SOLVENT = "Solvent"
+             df_labels: Dict[int, Dict[int, str]] = pd.read_csv(
                  labels, encoding="utf-16", header=None
              ).to_dict()
-             vial_location = []
-             signals = {}
-             solvents = {}
-             for pos, val in df_labels[0].items():
+             vial_location: str = ""
+             signals: Dict[int, list[AgilentPeak]] = {}
+             solvents: Dict[str, str] = {}
+             report_labels: Dict[int, str] = df_labels[0]
+
+             if not self.report_contains(
+                 list(report_labels.values()), [LOCATION, NUM_SIGNALS, SOLVENT]
+             ):
+                 return Err(f"Missing one of: {LOCATION}, {NUM_SIGNALS}, {SOLVENT}")
+
+             for pos, val in report_labels.items():
                  if val == "Location":
                      vial_location = df_labels[1][pos]
                  elif "Solvent" in val:
@@ -93,22 +142,29 @@
                  elif val == "Number of Signals":
                      num_signals = int(df_labels[1][pos])
                      for s in range(1, num_signals + 1):
-                         df = pd.read_csv(
-                             os.path.join(self.path, f"REPORT0{s}.CSV"),
-                             encoding="utf-16",
-                             header=None,
-                         )
-                         peaks = df.apply(lambda row: AgilentPeak(*row), axis=1)
-                         wavelength = df_labels[1][pos + s].partition(",4 Ref=off")[0][
-                             -3:
-                         ]
-                         signals[wavelength] = list(peaks)
+                         try:
+                             df = pd.read_csv(
+                                 os.path.join(self.path, f"{prefix}0{s}.CSV"),
+                                 encoding="utf-16",
+                                 header=None,
+                             )
+                             peaks = df.apply(lambda row: AgilentPeak(*row), axis=1)
+                         except EmptyDataError:
+                             peaks = []
+                         try:
+                             wavelength = df_labels[1][pos + s].partition(",4 Ref=off")[
+                                 0
+                             ][-3:]
+                             signals[int(wavelength)] = list(peaks)
+                         except (IndexError, ValueError):
+                             # TODO: Ask about the MS signals
+                             pass
                      break
 
             return Ok(
                 AgilentReport(
                     signals=[
-                         Signals(wavelength=int(w), peaks=s, data=None)
+                         Signals(wavelength=w, peaks=s, data=None)
                         for w, s in signals.items()
                     ],
                     vial_location=FiftyFourVialPlate.from_int(int(vial_location)),
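Downstream callers still receive a Result from CSVProcessor.process_report; an illustrative sketch (the run-folder path is hypothetical):

    from pychemstation.analysis import CSVProcessor

    processor = CSVProcessor("C:\\Chem32\\1\\Data\\my-run.D")  # hypothetical run folder
    report = processor.process_report()
    if report.is_ok():
        details = report.ok_value
        print(details.vial_location, details.solvents)
        for signal in details.signals:
            print(signal.wavelength, len(signal.peaks))
    else:
        print("CSV report could not be parsed:", report.err_value)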
@@ -154,7 +210,7 @@ class TXTProcessor(ReportProcessor):
          path: str,
          min_ret_time: int = 0,
          max_ret_time: int = 999,
-         target_wavelength_range: List[int] = range(200, 300),
+         target_wavelength_range=None,
      ):
          """
          Class to process reports in CSV form.
@@ -164,12 +220,14 @@
          :param max_ret_time: peaks will only be returned up to this time (min)
          :param target_wavelength_range: range of wavelengths to return
          """
+         if target_wavelength_range is None:
+             target_wavelength_range = list(range(200, 300))
          self.target_wavelength_range = target_wavelength_range
          self.min_ret_time = min_ret_time
          self.max_ret_time = max_ret_time
          super().__init__(path)
 
-     def process_report(self) -> Result[AgilentReport, AnyStr]:
+     def process_report(self) -> Result[AgilentReport, Union[AnyStr, Exception]]:
          """
          Method to parse details from CSV report.
          If you want more functionality, use `aghplctools`.
@@ -179,44 +237,48 @@
          :return: subset of complete report details, specifically the sample location, solvents in pumps,
          and list of peaks at each wavelength channel.
          """
-
-         with open(
-             os.path.join(self.path, "REPORT.TXT"), "r", encoding="utf-16"
-         ) as openfile:
-             text = openfile.read()
-
          try:
-             signals = self.parse_area_report(text)
-         except ValueError as e:
-             return Err("No peaks found: " + str(e))
-
-         signals = {
-             key: signals[key] for key in self.target_wavelength_range if key in signals
-         }
-
-         parsed_signals = []
-         for wavelength, wavelength_dict in signals.items():
-             current_wavelength_signals = Signals(
-                 wavelength=int(wavelength), peaks=[], data=None
-             )
-             for ret_time, ret_time_dict in wavelength_dict.items():
-                 if self.min_ret_time <= ret_time <= self.max_ret_time:
-                     current_wavelength_signals.peaks.append(
-                         AgilentPeak(
-                             retention_time=ret_time,
-                             area=ret_time_dict["Area"],
-                             width=ret_time_dict["Width"],
-                             height=ret_time_dict["Height"],
-                             peak_number=None,
-                             peak_type=ret_time_dict["Type"],
-                             area_percent=None,
+             with open(
+                 os.path.join(self.path, "REPORT.TXT"), "r", encoding="utf-16"
+             ) as openfile:
+                 text = openfile.read()
+
+             try:
+                 signals = self.parse_area_report(text)
+             except ValueError as e:
+                 return Err("No peaks found: " + str(e))
+
+             signals = {
+                 key: signals[key]
+                 for key in self.target_wavelength_range
+                 if key in signals
+             }
+
+             parsed_signals = []
+             for wavelength, wavelength_dict in signals.items():
+                 current_wavelength_signals = Signals(
+                     wavelength=int(wavelength), peaks=[], data=None
+                 )
+                 for ret_time, ret_time_dict in wavelength_dict.items():
+                     if self.min_ret_time <= ret_time <= self.max_ret_time:
+                         current_wavelength_signals.peaks.append(
+                             AgilentPeak(
+                                 retention_time=ret_time,
+                                 area=ret_time_dict["Area"],
+                                 width=ret_time_dict["Width"],
+                                 height=ret_time_dict["Height"],
+                                 peak_number=None,
+                                 peak_type=ret_time_dict["Type"],
+                                 area_percent=None,
+                             )
                          )
-                     )
-             parsed_signals.append(current_wavelength_signals)
+                 parsed_signals.append(current_wavelength_signals)
 
-         return Ok(
-             AgilentReport(vial_location=None, solvents=None, signals=parsed_signals)
-         )
+             return Ok(
+                 AgilentReport(vial_location=None, solvents=None, signals=parsed_signals)
+             )
+         except Exception as e:
+             return Err(e)
 
      def parse_area_report(self, report_text: str) -> Dict:
          """
@@ -234,7 +296,7 @@ class TXTProcessor(ReportProcessor):
          if re.search(_no_peaks_re, report_text): # There are no peaks in Report.txt
              raise ValueError("No peaks found in Report.txt")
          blocks = _header_block_re.split(report_text)
-         signals = {} # output dictionary
+         signals: Dict[int, dict] = {} # output dictionary
          for ind, block in enumerate(blocks):
              # area report block
              if _area_report_re.match(block): # match area report block
@@ -247,7 +309,7 @@
                          # some error state (e.g. 'not found')
                          if si.group("error") != "":
                              continue
-                         wavelength = float(si.group("wavelength"))
+                         wavelength = int(si.group("wavelength"))
                          if wavelength in signals:
                              # placeholder error raise just in case (this probably won't happen)
                              raise KeyError(
@@ -270,15 +332,14 @@
                              for key in self._column_re_dictionary:
                                  if key in peak.re.groupindex:
                                      try: # try float conversion, otherwise continue
-                                         value = float(peak.group(key))
+                                         current[key] = float(peak.group(key))
                                      except ValueError:
-                                         value = peak.group(key)
-                                     current[key] = value
+                                         current[key] = peak.group(key)
                                  else: # ensures defined
                                      current[key] = None
          return signals
 
-     def build_peak_regex(self, signal_table: str) -> Pattern[AnyStr]:
+     def build_peak_regex(self, signal_table: str) -> Pattern[str] | None:
          """
          Builds a peak regex from a signal table. Courtesy of Veronica Lai.
 
@@ -311,6 +372,7 @@
                      f'The header/unit combination "{header}" "{unit}" is not defined in the peak regex '
                      f"dictionary. Let Lars know."
                  )
+
          return re.compile(
              "[ ]+".join(
                  peak_re_string
pychemstation/control/__init__.py
@@ -3,7 +3,9 @@
  """
 
  from .hplc import HPLCController
+ from . import controllers
 
  __all__ = [
      "HPLCController",
+     "controllers",
  ]
pychemstation/control/controllers/__init__.py
@@ -3,7 +3,6 @@
  """
 
  from .comm import CommunicationController
- from .tables.method import MethodController
- from .tables.sequence import SequenceController
+ from . import data_aq
 
- __all__ = ["CommunicationController", "MethodController", "SequenceController"]
+ __all__ = ["CommunicationController", "data_aq"]
pychemstation/control/controllers/abc_tables/device.py (new file)
@@ -0,0 +1,15 @@
+ from __future__ import annotations
+
+ from abc import ABC
+
+ from ....control.controllers import CommunicationController
+ from ....utils.table_types import Table
+ from .table import ABCTableController
+
+
+ class DeviceController(ABCTableController, ABC):
+     def __init__(
+         self, controller: CommunicationController, table: Table, offline: bool
+     ):
+         super().__init__(controller=controller, table=table)
+         self.offline = offline
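DeviceController is a thin abstract base for concrete device-table controllers to build on. A minimal, hypothetical subclass sketch (the class name is illustrative only):

    from pychemstation.control.controllers import CommunicationController
    from pychemstation.control.controllers.abc_tables.device import DeviceController
    from pychemstation.utils.table_types import Table


    class PumpController(DeviceController):  # hypothetical device-table controller
        def __init__(self, controller: CommunicationController, table: Table, offline: bool):
            super().__init__(controller=controller, table=table, offline=offline)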
pychemstation/control/controllers/abc_tables/run.py (new file)
@@ -0,0 +1,228 @@
+ """
+ Abstract module containing shared logic for Method and Sequence tables.
+
+ Authors: Lucy Hao
+ """
+
+ from __future__ import annotations
+
+ import abc
+ import math
+ import os
+ import time
+ import warnings
+ from typing import Dict, List, Optional, Tuple, Union
+
+ import polling
+ import rainbow as rb
+ from result import Err, Result, Ok
+
+ from .table import ABCTableController
+ from ....analysis.process_report import (
+     AgilentReport,
+     CSVProcessor,
+     ReportType,
+     TXTProcessor,
+ )
+ from ....control.controllers.comm import CommunicationController
+ from pychemstation.analysis.chromatogram import (
+     AgilentChannelChromatogramData,
+     AgilentHPLCChromatogram,
+ )
+ from ....utils.macro import HPLCRunningStatus
+ from ....utils.method_types import MethodDetails
+ from ....utils.sequence_types import SequenceTable
+ from ....utils.table_types import Table, T
+
+ TableType = Union[MethodDetails, SequenceTable]
+
+
+ class RunController(ABCTableController, abc.ABC):
+     def __init__(
+         self,
+         controller: Optional[CommunicationController],
+         src: str,
+         data_dirs: List[str],
+         table: Table,
+         offline: bool = False,
+     ):
+         super().__init__(controller=controller, table=table)
+         warnings.warn(
+             "This abstract class is not meant to be initialized. Use MethodController or SequenceController."
+         )
+         self.table_state: Optional[TableType] = None
+         self.curr_run_starting_time: Optional[float] = None
+         self.timeout: Optional[float] = None
+
+         if not offline:
+             if src and not os.path.isdir(src):
+                 raise FileNotFoundError(f"dir: {src} not found.")
+
+             for d in data_dirs:
+                 if not os.path.isdir(d):
+                     raise FileNotFoundError(f"dir: {d} not found.")
+                 if r"\\" in d:
+                     raise ValueError("Data directories should not be raw strings!")
+             self.src: str = src
+             self.data_dirs: List[str] = data_dirs
+
+         self.spectra: dict[str, AgilentHPLCChromatogram] = {
+             "A": AgilentHPLCChromatogram(),
+             "B": AgilentHPLCChromatogram(),
+             "C": AgilentHPLCChromatogram(),
+             "D": AgilentHPLCChromatogram(),
+             "E": AgilentHPLCChromatogram(),
+             "F": AgilentHPLCChromatogram(),
+             "G": AgilentHPLCChromatogram(),
+             "H": AgilentHPLCChromatogram(),
+         }
+         self.uv: Dict[int, AgilentHPLCChromatogram] = {}
+         self.data_files: List = []
+
+     @abc.abstractmethod
+     def fuzzy_match_most_recent_folder(self, most_recent_folder: T) -> Result[T, str]:
+         pass
+
+     @abc.abstractmethod
+     def get_data(
+         self, custom_path: Optional[str] = None
+     ) -> Union[List[AgilentChannelChromatogramData], AgilentChannelChromatogramData]:
+         pass
+
+     @abc.abstractmethod
+     def get_data_uv(
+         self, custom_path: str | None = None
+     ) -> Dict[int, AgilentHPLCChromatogram]:
+         pass
+
+     @abc.abstractmethod
+     def get_report(
+         self, custom_path: str, report_type: ReportType = ReportType.TXT
+     ) -> List[AgilentReport]:
+         pass
+
+     def check_hplc_is_running(self) -> bool:
+         if self.controller:
+             try:
+                 started_running = polling.poll(
+                     lambda: isinstance(self.controller.get_status(), HPLCRunningStatus),
+                     step=1,
+                     max_tries=20,
+                 )
+             except Exception as e:
+                 print(e)
+                 return False
+             if started_running:
+                 self.curr_run_starting_time = time.time()
+             return started_running
+         else:
+             raise ValueError("Controller is offline")
+
+     def check_hplc_run_finished(self) -> Tuple[float, bool]:
+         if self.controller:
+             done_running = self.controller.check_if_not_running()
+             if self.curr_run_starting_time and self.timeout:
+                 time_passed = time.time() - self.curr_run_starting_time
+                 if time_passed > self.timeout:
+                     enough_time_passed = time_passed >= self.timeout
+                     run_finished = enough_time_passed and done_running
+                     if run_finished:
+                         self._reset_time()
+                         return 0, run_finished
+                 else:
+                     time_left = self.timeout - time_passed
+                     return time_left, self.controller.check_if_not_running()
+             return 0, self.controller.check_if_not_running()
+         raise ValueError("Controller is offline!")
+
+     def check_hplc_done_running(self) -> Ok[T] | Err[str]:
+         """
+         Checks if ChemStation has finished running and can read data back
+
+         :return: Data file object containing most recent run file information.
+         """
+         if self.timeout is not None:
+             finished_run = False
+             minutes = math.ceil(self.timeout / 60)
+             try:
+                 finished_run = not polling.poll(
+                     lambda: self.check_hplc_run_finished()[1],
+                     max_tries=minutes - 1,
+                     step=50,
+                 )
+             except (
+                 polling.TimeoutException,
+                 polling.PollingException,
+                 polling.MaxCallException,
+             ):
+                 try:
+                     finished_run = polling.poll(
+                         lambda: self.check_hplc_run_finished()[1],
+                         timeout=self.timeout / 2,
+                         step=1,
+                     )
+                 except (
+                     polling.TimeoutException,
+                     polling.PollingException,
+                     polling.MaxCallException,
+                 ):
+                     pass
+         else:
+             raise ValueError("Timeout value is None, no comparison can be made.")
+
+         check_folder = self.fuzzy_match_most_recent_folder(self.data_files[-1])
+         if check_folder.is_ok() and finished_run:
+             return check_folder
+         elif check_folder.is_ok():
+             try:
+                 finished_run = polling.poll(
+                     lambda: self.check_hplc_run_finished()[1], max_tries=10, step=50
+                 )
+                 if finished_run:
+                     return check_folder
+             except Exception:
+                 self._reset_time()
+                 return self.data_files[-1]
+         return Err("Run did not complete as expected")
+
+     def get_uv_spectrum(self, path: str):
+         data_uv = rb.agilent.chemstation.parse_file(os.path.join(path, "DAD1.UV"))
+         times = data_uv.xlabels
+         wavelengths = data_uv.ylabels
+         absorbances = data_uv.data.transpose()
+         for i, w in enumerate(wavelengths):
+             self.uv[w] = AgilentHPLCChromatogram()
+             self.uv[w].attach_spectrum(times, absorbances[i])
+
+     def get_report_details(
+         self, path: str, report_type: ReportType = ReportType.TXT
+     ) -> AgilentReport:
+         if report_type is ReportType.TXT:
+             txt_report = TXTProcessor(path).process_report()
+             if txt_report.is_ok():
+                 return txt_report.ok_value
+             elif txt_report.is_err():
+                 raise ValueError(txt_report.err_value)
+         if report_type is ReportType.CSV:
+             csv_report = CSVProcessor(path).process_report()
+             if csv_report.is_ok():
+                 return csv_report.ok_value
+             elif csv_report.is_err():
+                 raise ValueError(csv_report.err_value)
+         raise ValueError("Expected one of ReportType.TXT or ReportType.CSV")
+
+     def get_spectrum_at_channels(self, data_path: str):
+         """
+         Load chromatogram for any channel in spectra dictionary.
+         """
+         for channel, spec in self.spectra.items():
+             try:
+                 spec.load_spectrum(data_path=data_path, channel=channel)
+             except FileNotFoundError:
+                 self.spectra[channel] = AgilentHPLCChromatogram()
+                 warning = f"No data at channel: {channel}"
+                 warnings.warn(warning)
+
+     def _reset_time(self):
+         self.curr_run_starting_time = None
+         self.timeout = None
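RunController centralizes the run-monitoring logic shared by MethodController and SequenceController: confirm ChemStation entered a running state, poll until the run and its timeout have both elapsed, then fuzzy-match the most recent data folder. An illustrative sketch against a concrete subclass instance (here called runner; obtaining it and starting the run are outside this snippet, and the report path is hypothetical):

    from pychemstation.analysis.process_report import ReportType

    # `runner` is assumed to be a concrete RunController subclass (a method or
    # sequence controller) whose run has just been started and whose timeout is set.
    if runner.check_hplc_is_running():             # ChemStation reports a running status
        result = runner.check_hplc_done_running()  # polls until run + timeout complete
        if result.is_ok():
            print("most recent data folder:", result.ok_value)
        else:
            print("run did not complete as expected:", result.err_value)

    # Reports for a finished run folder can also be read directly:
    report = runner.get_report_details("C:\\Chem32\\1\\Data\\my-run.D", ReportType.CSV)
    print(report.vial_location, [s.wavelength for s in report.signals])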