pytrms 0.9.6__tar.gz → 0.9.7__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (27)
  1. {pytrms-0.9.6 → pytrms-0.9.7}/PKG-INFO +1 -1
  2. {pytrms-0.9.6 → pytrms-0.9.7}/pyproject.toml +1 -1
  3. {pytrms-0.9.6 → pytrms-0.9.7}/pytrms/__init__.py +2 -2
  4. {pytrms-0.9.6 → pytrms-0.9.7}/pytrms/_base/__init__.py +2 -2
  5. {pytrms-0.9.6 → pytrms-0.9.7}/pytrms/_base/ioniclient.py +20 -0
  6. {pytrms-0.9.6 → pytrms-0.9.7}/pytrms/_base/mqttclient.py +13 -5
  7. {pytrms-0.9.6 → pytrms-0.9.7}/pytrms/clients/__init__.py +5 -5
  8. {pytrms-0.9.6 → pytrms-0.9.7}/pytrms/clients/db_api.py +28 -16
  9. pytrms-0.9.7/pytrms/clients/dummy.py +43 -0
  10. {pytrms-0.9.6 → pytrms-0.9.7}/pytrms/clients/modbus.py +181 -73
  11. {pytrms-0.9.6 → pytrms-0.9.7}/pytrms/clients/mqtt.py +32 -19
  12. {pytrms-0.9.6 → pytrms-0.9.7}/pytrms/compose/composition.py +18 -18
  13. {pytrms-0.9.6 → pytrms-0.9.7}/pytrms/helpers.py +14 -14
  14. {pytrms-0.9.6 → pytrms-0.9.7}/pytrms/peaktable.py +15 -9
  15. {pytrms-0.9.6 → pytrms-0.9.7}/LICENSE +0 -0
  16. {pytrms-0.9.6 → pytrms-0.9.7}/pytrms/_version.py +0 -0
  17. {pytrms-0.9.6 → pytrms-0.9.7}/pytrms/clients/ioniclient.py +0 -0
  18. {pytrms-0.9.6 → pytrms-0.9.7}/pytrms/clients/ssevent.py +0 -0
  19. {pytrms-0.9.6 → pytrms-0.9.7}/pytrms/compose/__init__.py +0 -0
  20. {pytrms-0.9.6 → pytrms-0.9.7}/pytrms/data/IoniTofPrefs.ini +0 -0
  21. {pytrms-0.9.6 → pytrms-0.9.7}/pytrms/data/ParaIDs.csv +0 -0
  22. {pytrms-0.9.6 → pytrms-0.9.7}/pytrms/instrument.py +0 -0
  23. {pytrms-0.9.6 → pytrms-0.9.7}/pytrms/measurement.py +0 -0
  24. {pytrms-0.9.6 → pytrms-0.9.7}/pytrms/plotting/__init__.py +0 -0
  25. {pytrms-0.9.6 → pytrms-0.9.7}/pytrms/plotting/plotting.py +0 -0
  26. {pytrms-0.9.6 → pytrms-0.9.7}/pytrms/readers/__init__.py +0 -0
  27. {pytrms-0.9.6 → pytrms-0.9.7}/pytrms/readers/ionitof_reader.py +0 -0

{pytrms-0.9.6 → pytrms-0.9.7}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: pytrms
-Version: 0.9.6
+Version: 0.9.7
 Summary: Python bundle for proton-transfer reaction mass-spectrometry (PTR-MS).
 License: GPL-2.0
 Author: Moritz Koenemann

{pytrms-0.9.6 → pytrms-0.9.7}/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "pytrms"
-version = "0.9.6"
+version = "0.9.7"
 description = "Python bundle for proton-transfer reaction mass-spectrometry (PTR-MS)."
 authors = ["Moritz Koenemann <moritz.koenemann@ionicon.com>"]
 license = "GPL-2.0"

{pytrms-0.9.6 → pytrms-0.9.7}/pytrms/__init__.py
@@ -1,4 +1,4 @@
-_version = '0.9.6'
+_version = '0.9.7'
 
 __all__ = ['load', 'connect']
 
@@ -32,7 +32,7 @@ def connect(host='localhost', port=None, method='mqtt'):
     elif method.lower() == 'webapi':
         from .clients.ioniclient import IoniClient as _client
     elif method.lower() == 'modbus':
-        from .modbus import IoniconModbus as _client
+        from .clients.modbus import IoniconModbus as _client
     else:
         raise NotImplementedError(str(method))
 
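
The second hunk fixes the import path that `pytrms.connect()` resolves when the Modbus backend is requested; the old relative import pointed at a `pytrms.modbus` module that does not appear in this distribution's file list. A minimal usage sketch; the hostname is a placeholder and the object returned by `connect` is not shown in this diff:

    import pytrms

    # resolves to pytrms.clients.modbus.IoniconModbus as of 0.9.7
    instrument = pytrms.connect('localhost', method='modbus')
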

{pytrms-0.9.6 → pytrms-0.9.7}/pytrms/_base/__init__.py
@@ -1,7 +1,7 @@
 from collections import namedtuple
 
-from .mqttclient import MqttClientBase
-from .ioniclient import IoniClientBase
+from .mqttclient import MqttClientBase as _MqttClientBase
+from .ioniclient import IoniClientBase as _IoniClientBase
 
 class itype:
 

{pytrms-0.9.6 → pytrms-0.9.7}/pytrms/_base/ioniclient.py
@@ -1,6 +1,10 @@
 from abc import ABC, abstractmethod
 
+
 class IoniClientBase(ABC):
+    '''Abstract base-class that defines the common interface for clients.
+
+    '''
 
     @property
     @abstractmethod
@@ -22,6 +26,22 @@ class IoniClientBase(ABC):
     def disconnect(self):
         pass
 
+    @abstractmethod
+    def start_measurement(self, path=None):
+        '''Start a new measurement and block until the change is confirmed.
+
+        If 'path' is not None, write to the given .h5 file.
+        '''
+        pass
+
+    @abstractmethod
+    def stop_measurement(self, future_cycle=None):
+        '''Stop the current measurement and block until the change is confirmed.
+
+        If 'future_cycle' is not None and in the future, schedule the stop command.
+        '''
+        pass
+
     def __init__(self, host, port):
         # Note: circumvent (potentially sluggish) Windows DNS lookup:
         self.host = '127.0.0.1' if host == 'localhost' else str(host)
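
With `start_measurement` and `stop_measurement` declared abstract, every concrete client is now forced to provide the two blocking calls, and a subclass that omits them can no longer be instantiated. A small illustration (the class name and constructor arguments are made up):

    from pytrms._base.ioniclient import IoniClientBase

    class IncompleteClient(IoniClientBase):
        pass

    IncompleteClient('localhost', 5066)
    # TypeError: Can't instantiate abstract class IncompleteClient ...
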

{pytrms-0.9.6 → pytrms-0.9.7}/pytrms/_base/mqttclient.py
@@ -9,8 +9,6 @@ from datetime import datetime as dt
 
 import paho.mqtt.client
 
-from .ioniclient import IoniClientBase
-
 log = logging.getLogger(__name__)
 
 __all__ = ['MqttClientBase']
@@ -35,7 +33,11 @@ def _on_publish(client, self, mid):
     log.debug(f"[{self}] published {mid = }")
 
 
-class MqttClientBase(IoniClientBase):
+class MqttClientBase:
+    """Mix-in class that supplies basic MQTT-callback functions.
+
+    Implements part of the `IoniClientBase` interface.
+    """
 
     @property
     def is_connected(self):
@@ -50,8 +52,11 @@ class MqttClientBase(IoniClientBase):
     def __init__(self, host, port, subscriber_functions,
                  on_connect, on_subscribe, on_publish,
                  connect_timeout_s=10):
+        # Note: circumvent (potentially sluggish) Windows DNS lookup:
+        self.host = '127.0.0.1' if host == 'localhost' else str(host)
+        self.port = int(port)
+
         assert len(subscriber_functions) > 0, "no subscribers: for some unknown reason this causes disconnects"
-        super().__init__(host, port)
 
         # Note: Version 2.0 of paho-mqtt introduced versioning of the user-callback to fix
         # some inconsistency in callback arguments and to provide better support for MQTTv5.
@@ -90,7 +95,7 @@ class MqttClientBase(IoniClientBase):
         try:
             self.connect(connect_timeout_s)
         except TimeoutError as exc:
-            log.warn(f"{exc} (retry connecting when the Instrument is set up)")
+            log.warning(f"{exc} (retry connecting when the Instrument is set up)")
 
     def connect(self, timeout_s=10):
         log.info(f"[{self}] connecting to MQTT broker...")
@@ -117,3 +122,6 @@ class MqttClientBase(IoniClientBase):
         self.client.loop_stop()
         self.client.disconnect()
 
+    def __repr__(self):
+        return f"<{self.__class__.__name__} @ {self.host}[:{self.port}]>"
+

{pytrms-0.9.6 → pytrms-0.9.7}/pytrms/clients/__init__.py
@@ -11,23 +11,23 @@ _logging.TRACE = 5 # even more verbose than logging.DEBUG
 
 def enable_extended_logging(log_level=_logging.DEBUG):
     '''make output of http-requests more talkative.
-
-    set 'log_level=logging.TRACE' (defined as 0 in pytrms.__init__) for highest verbosity!
+
+    set 'log_level=_logging.TRACE' for highest verbosity!
     '''
     if log_level <= _logging.DEBUG:
         # enable logging of http request urls on the library, that is
         # underlying the 'requests'-package:
-        _logging.warn(f"enabling logging-output on 'urllib3' ({log_level = })")
+        _logging.warning(f"enabling logging-output on 'urllib3' ({log_level = })")
         requests_log = _logging.getLogger("urllib3")
         requests_log.setLevel(log_level)
         requests_log.propagate = True
-
+
     if log_level <= _logging.TRACE:
         # Enabling debugging at http.client level (requests->urllib3->http.client)
        # you will see the REQUEST, including HEADERS and DATA, and RESPONSE with
        # HEADERS but without DATA. the only thing missing will be the response.body,
        # which is not logged.
-        _logging.warn(f"enabling logging-output on 'HTTPConnection' ({log_level = })")
+        _logging.warning(f"enabling logging-output on 'HTTPConnection' ({log_level = })")
         from http.client import HTTPConnection
         HTTPConnection.debuglevel = 1
 

{pytrms-0.9.6 → pytrms-0.9.7}/pytrms/clients/db_api.py
@@ -5,16 +5,12 @@ import requests
 
 from . import _logging
 from .ssevent import SSEventListener
-from .._base import IoniClientBase
+from .._base import _IoniClientBase
 
 log = _logging.getLogger(__name__)
 
-# TODO :: sowas waer auch ganz cool: die DBAPI bietes sich geradezu an,
-# da mehr object-oriented zu arbeiten:
-#     currentVariable = get_component(currentComponentNameAction, ds)
-#     currentVariable.save_value({'value': currentValue})
 
-class IoniConnect(IoniClientBase):
+class IoniConnect(_IoniClientBase):
 
     @property
     def is_connected(self):
@@ -28,12 +24,31 @@ class IoniConnect(IoniClientBase):
     @property
     def is_running(self):
         '''Returns `True` if IoniTOF is currently acquiring data.'''
+        # TODO :: /api/meas/curretn {isRunning ?}
         raise NotImplementedError("is_running")
 
     def connect(self, timeout_s):
+        # TODO :: create session ?! (see __init__ ...)
         pass
 
     def disconnect(self):
+        # TODO :: del session ?!
+        pass
+
+    def start_measurement(self, path=None):
+        '''Start a new measurement and block until the change is confirmed.
+
+        If 'path' is not None, write to the given .h5 file.
+        '''
+        # TODO :: POST /api/measurement {recipeDirectory} / path = ?
+        pass
+
+    def stop_measurement(self, future_cycle=None):
+        '''Stop the current measurement and block until the change is confirmed.
+
+        If 'future_cycle' is not None and in the future, schedule the stop command.
+        '''
+        # TODO :: PUT /api/meas/current {isRunning = False}
         pass
 
     def __init__(self, host='127.0.0.1', port=5066, session=None):
@@ -106,9 +121,9 @@ class IoniConnect(IoniClientBase):
         from pytrms.peaktable import Peak, PeakTable
         from operator import attrgetter
 
-        # Note: a `Peak` is a hashable object that serves as a key that
-        # distinguishes between peaks as defined by PyTRMS:
-        make_key = lambda peak: Peak(center=peak['center'], label=peak['name'], shift=peak['shift'])
+        # Note: the DB-API distinguishes between peaks with
+        # different center *and* name, so this is our key:
+        make_key = lambda peak: (peak['center'], peak['name'])
 
         if isinstance(peaktable, str):
             log.info(f"loading peaktable '{peaktable}'...")
@@ -165,15 +180,12 @@ class IoniConnect(IoniClientBase):
         if len(peaktable.fitted):
             # Note: until now, we disregarded the peak-parent-relationship, so
             # make another request to the updated peak-table from the server...
-            db_peaks = {make_key(p): {
-                'payload': {k: p[k] for k in conv.keys()},
-                'self': p['_links']['self'],
-                'parent': p['_links'].get('parent'),
-                } for p in self.get('/api/peaks')['_embedded']['peaks']}
+            peak2href = {Peak(center=p["center"], label=p["name"]): p["_links"]["self"]["href"]
+                    for p in self.get('/api/peaks')['_embedded']['peaks']}
 
             for fitted in peaktable.fitted:
-                fitted_href = db_peaks[fitted]["self"]["href"]
-                parent_href = db_peaks[fitted.parent]["self"]["href"]
+                fitted_href = peak2href[fitted]
+                parent_href = peak2href[fitted.parent]
                 r = self.session.request('link', self.url + parent_href, headers={"location": fitted_href})
                 if not r.ok:
                     log.error(f"LINK {parent_href} to Location: {fitted_href} failed\n\n[{r.status_code}]: {r.content}")

pytrms-0.9.7/pytrms/clients/dummy.py (new file)
@@ -0,0 +1,43 @@
+from . import _logging
+from .._base import _IoniClientBase
+
+log = _logging.getLogger(__name__)
+
+
+class IoniDummy(_IoniClientBase):
+    '''A mock for any 'IoniClient' (modbus, mqtt, ...) that can be used
+    in places where no connection to the instrument is possible or desirable.
+    '''
+
+    @property
+    def is_connected(self):
+        return self.__is_connected
+
+    __is_connected = False
+
+    @property
+    def is_running(self):
+        return self.__is_running
+
+    __is_running = False
+
+    def connect(self, timeout_s):
+        log.info(f'pretending to connect to server')
+        self.__is_connected = True
+
+    def disconnect(self):
+        log.info(f'pretending to disconnect to server')
+        self.__is_connected = False
+
+    def start_measurement(self, path=None):
+        log.info(f'pretending to start measurement ({path = })')
+        self.__is_running = True
+
+    def stop_measurement(self, future_cycle=None):
+        log.info(f'pretending to stop measurement ({future_cycle = })')
+        self.__is_running = False
+
+    def __init__(self, host='localhost', port=5687):
+        super().__init__(host, port)
+        self.connect()
+

{pytrms-0.9.6 → pytrms-0.9.7}/pytrms/clients/modbus.py
@@ -6,13 +6,13 @@ import struct
 import time
 import logging
 from collections import namedtuple
-from functools import lru_cache
+from functools import lru_cache, partial
 from itertools import tee
 
 import pyModbusTCP.client
 
 from . import _par_id_file
-from .._base.ioniclient import IoniClientBase
+from .._base import _IoniClientBase
 
 log = logging.getLogger(__name__)
 
@@ -32,7 +32,7 @@ def _patch_is_open():
     if int(minor) < 2:
         return lambda mc: mc.is_open()
     else:
-        return lambda mc: mc.is_open
+        return lambda mc: bool(mc.is_open)
 
 _is_open = _patch_is_open()
 
@@ -48,11 +48,11 @@ with open(_par_id_file) as f:
 # some exceptions that are 'short' (alive_counter, n_parameters) or explicitly
 # marked to be 'int' (AME_RunNumber, et.c.):
 _fmts = dict([
-    ('float',  '>f'),
-    ('double', '>d'),
-    ('short',  '>h'),
-    ('int',    '>i'),
-    ('long',   '>q'),
+    ('float',  '>f'),
+    ('double', '>d'),
+    ('short',  '>h'),
+    ('int',    '>i'),
+    ('long',   '>q'),
 ])
 
 _register = namedtuple('register_info', ['n_registers', 'c_format', 'reg_format'])
@@ -106,40 +106,72 @@ def _pack(value, format='>f'):
 
     return struct.unpack(reg_format, struct.pack(c_format, value))
 
+_fast = 0        # MPV direction enum
+_DO_ON = 0x3F80  # digital output magick number
 
-class IoniconModbus(IoniClientBase):
+
+class IoniconModbus(_IoniClientBase):
 
     address = dict([
-        ('server_state',      (    0, '>f', True)),   # 0: Not ready, 1: Ready, 2: Startup
-        ('measure_state',     (    2, '>f', True)),   # 0: Not running | 1: running | 2: Just Started | 3: Just Stopped
-        ('instrument_state',  (    4, '>f', True)),   # 0: Not Ok, 1: Ok, 2: Error, 3: Warning
-        ('alive_counter',     (    6, '>H', True)),   # (updated every 500 ms)
-        ('n_parameters',      ( 2000, '>H', True)),
-        ('tc_raw',            ( 4000, '>f', True)),
-        ('tc_conc',           ( 6000, '>f', True)),
-        ('n_masses',          ( 8000, '>f', True)),
-        # ('n_corr',          ( 7000, '>i', True)),   # not implemented?
-        ('tc_components',     (10000, '>f', True)),
-        ('ame_alarms',        (12000, '>f', True)),
-        ('user_number',       (13900, '>i', True)),
-        ('step_number',       (13902, '>i', True)),
-        ('run_number',        (13904, '>i', True)),
-        ('use_mean',          (13906, '>i', True)),
-        ('action_number',     (13912, '>i', True)),
-        ('version_major',     (13918, '>h', True)),
-        ('version_minor',     (13919, '>h', True)),
-        ('version_patch',     (13920, '>h', True)),
-        ('ame_state',         (13914, '>i', True)),   # Running 0=Off; 1=On (not implemented!)
-        ('n_components',      (14000, '>f', True)),
-        ('component_names',   (14002, '>f', True)),
-        ('ame_mean_data',     (26000, '>f', True)),
-        ('n_ame_mean',        (26002, '>d', True)),
+        ('server_state',      (    0, '>f', False)),  # 0: Not ready, 1: Ready, 2: Startup
+        ('measure_state',     (    2, '>f', False)),  # 0: Not running | 1: running | 2: Just Started | 3: Just Stopped
+        ('instrument_state',  (    4, '>f', False)),  # 0: Not Ok, 1: Ok, 2: Error, 3: Warning
+        ('alive_counter',     (    6, '>H', False)),  # (updated every 500 ms)
+        ('n_parameters',      ( 2000, '>H', False)),
+        ('tc_raw',            ( 4000, '>f', False)),
+        ('tc_conc',           ( 6000, '>f', False)),
+        ('n_masses',          ( 8000, '>f', False)),
+        # ('n_corr',          ( 7000, '>i', False)),  # not implemented?
+        ('tc_components',     (10000, '>f', False)),
+        ('ame_alarms',        (12000, '>f', False)),
+        ('user_number',       (13900, '>i', False)),
+        ('step_number',       (13902, '>i', False)),
+        ('run_number',        (13904, '>i', False)),
+        ('use_mean',          (13906, '>i', False)),
+        ('action_number',     (13912, '>i', False)),
+        ('version_major',     (13918, '>h', False)),
+        ('version_minor',     (13919, '>h', False)),
+        ('version_patch',     (13920, '>h', False)),
+        ('ame_state',         (13914, '>i', False)),  # Running 0=Off; 1=On (not implemented!)
+        ('n_components',      (14000, '>f', False)),
+        ('component_names',   (14002, '>f', False)),
+        ('ame_mean_data',     (26000, '>f', False)),
+        ('n_ame_mean',        (26002, '>d', False)),
+    ])
+
+    _lookup_offset = dict([
+        # parID      offset   name in Modbus manual    special procedure
+        ( 42, (3 *  0, 'FC H2O',        partial(_pack, format='>f') )),
+        (  1, (3 *  1, 'PC',            partial(_pack, format='>f') )),
+        (  2, (3 *  2, 'FC inlet',      partial(_pack, format='>f') )),
+        (  3, (3 *  3, 'FC O2',         partial(_pack, format='>f') )),
+        (  4, (3 *  4, 'FC NO',         partial(_pack, format='>f') )),
+        (  5, (3 *  5, 'FC Dilution',   partial(_pack, format='>f') )),
+        (  6, (3 *  6, 'FC Krypton',    partial(_pack, format='>f') )),
+        (  7, (3 *  7, 'FC Xenon',      partial(_pack, format='>f') )),
+        (  8, (3 *  8, 'FC Purge',      partial(_pack, format='>f') )),
+        (  9, (3 *  9, 'FC FastGC',     partial(_pack, format='>f') )),
+        ( 10, (3 * 10, 'FC Custom 1',   partial(_pack, format='>f') )),
+        ( 11, (3 * 11, 'FC Custom 2',   partial(_pack, format='>f') )),
+        ( 12, (3 * 12, 'FC Custom 3',   partial(_pack, format='>f') )),
+        ( 13, (3 * 13, 'FC Custom 4',   partial(_pack, format='>f') )),
+        ( 14, (3 * 14, 'FC Custom 5',   partial(_pack, format='>f') )),
+        ( 15, (3 * 15, 'FC Custom 6',   partial(_pack, format='>f') )),
+        ( 16, (3 * 16, 'FC Custom 7',   partial(_pack, format='>f') )),
+        ( 17, (3 * 17, 'FC Custom 8',   partial(_pack, format='>f') )),
+        ( 18, (3 * 18, 'FC Custom 9',   partial(_pack, format='>f') )),
+        (556, (3 * 19, 'Measure Start', partial(_pack, format='>f') )),
+        (559, (3 * 20, 'Measure Stop',  partial(_pack, format='>f') )),
+        ( 70, (3 * 21, 'Set MPV1',      lambda v: [_fast, *_pack(v, '>h')] )),
+        ( 71, (3 * 22, 'Set MPV2',      lambda v: [_fast, *_pack(v, '>h')] )),
+        ( 72, (3 * 23, 'Set MPV3',      lambda v: [_fast, *_pack(v, '>h')] )),
+        (138, (3 * 24, 'DO 1',          lambda v: [_DO_ON if v else 0x0, 0] )),
+        (139, (3 * 25, 'DO 2',          lambda v: [_DO_ON if v else 0x0, 0] )),
     ])
 
     @classmethod
-    def use_legacy_input_registers(klaas, use_input_reg = True):
-        """Read from input- instead of holding-registers (legacy method to be compatible with AME1.0)."""
-        use_holding = not use_input_reg
+    def use_holding_registers(klaas, use_holding=True):
+        """Use Modbus HOLDING- instead of INPUT-registers (default: INPUT, compatible with AME1 and AME2)."""
        modded = dict()
        for key, vals in klaas.address.items():
            modded[key] = vals[0], vals[1], use_holding
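
Every entry in the rebuilt `address` table now carries `False` in its third slot, i.e. it is read from Modbus input-registers by default, and the renamed class-method switches the whole table over in one call. A short sketch; whether a given setup actually needs holding-registers is an assumption:

    from pytrms.clients.modbus import IoniconModbus

    IoniconModbus.use_holding_registers(True)    # read everything from holding-registers instead
    IoniconModbus.use_holding_registers(False)   # back to the 0.9.7 default (input-registers)
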
@@ -154,16 +186,21 @@ class IoniconModbus(IoniClientBase):
         if not _is_open(self.mc):
             return False
 
-        # wait for the IoniTOF alive-counter to change (1 second max)...
-        initial_count = self._alive_counter
-        timeout_s = 3  # counter should increase every 500 ms, approximately
-        started_at = time.monotonic()
-        while time.monotonic() < started_at + timeout_s:
-            if initial_count != self._alive_counter:
-                return True
-
-            time.sleep(10e-3)
-        return False
+        try:
+            # wait for the IoniTOF alive-counter to change (1 second max)...
+            initial_count = self._alive_counter
+            timeout_s = 3  # counter should increase every 500 ms, approximately
+            started_at = time.monotonic()
+            while time.monotonic() < started_at + timeout_s:
+                if initial_count != self._alive_counter:
+                    return True
+
+                time.sleep(10e-3)
+            return False
+        except IOError:
+            # bug-fix: failing to read _alive_counter closed the connection,
+            # even after checking .is_open! Don't let this property throw:
+            return False
 
     @property
     def is_running(self):
@@ -210,7 +247,7 @@ class IoniconModbus(IoniClientBase):
         try:
             self.connect()
         except TimeoutError as exc:
-            log.warn(f"{exc} (retry connecting when the Instrument is set up)")
+            log.warning(f"{exc} (retry connecting when the Instrument is set up)")
         self._addresses = {}
 
     def connect(self, timeout_s=10):
@@ -220,7 +257,7 @@ class IoniconModbus(IoniClientBase):
             self.mc.timeout(timeout_s)
         else:
             self.mc.timeout = timeout_s
-        if not self.mc.open():
+        if not (_is_open(self.mc) or self.mc.open()):
             raise TimeoutError(f"[{self}] no connection to modbus socket")
 
         started_at = time.monotonic()
@@ -237,6 +274,52 @@ class IoniconModbus(IoniClientBase):
         if _is_open(self.mc):
             self.mc.close()
 
+    def start_measurement(self, path=None):
+        '''Start a new measurement and block until the change is confirmed.
+
+        'path' is ignored!
+        '''
+        if path is not None:
+            log.warning(f'ignoring .h5-filepath in Modbus command and starting quick measurement')
+
+        # Note: let's assume one of them will be right and calling it twice don't hurt:
+        self.write_instrument_data('ACQ_SRV_Start_Meas_Quick', 1, oldschool=False, timeout_s=10)
+        self.write_instrument_data('ACQ_SRV_Start_Meas_Quick', 1, oldschool=True)
+
+        timeout_s = 10
+        started_at = time.monotonic()
+        while time.monotonic() < started_at + timeout_s:
+            if self.is_running:
+                break
+
+            time.sleep(10e-3)
+        else:
+            raise TimeoutError(f"[{self}] unable to start measurement after { timeout_s = }");
+
+    def stop_measurement(self, future_cycle=None):
+        '''Stop the current measurement and block until the change is confirmed.
+
+        'future_cycle' is ignored!
+        '''
+        if future_cycle is not None:
+            log.info(f'block until {future_cycle = } (current_cycle = {self.read_timecycle().abs_cycle})')
+            while self.read_timecycle().abs_cycle < int(future_cycle):
+                time.sleep(200e-3)
+
+        # Note: let's assume one of them will be right and calling it twice don't hurt:
+        self.write_instrument_data('ACQ_SRV_Stop_Meas', 1, oldschool=False, timeout_s=10)
+        self.write_instrument_data('ACQ_SRV_Stop_Meas', 1, oldschool=True)
+
+        timeout_s = 10
+        started_at = time.monotonic()
+        while time.monotonic() < started_at + timeout_s:
+            if not self.is_running:
+                break
+
+            time.sleep(10e-3)
+        else:
+            raise TimeoutError(f"[{self}] unable to stop measurement after { timeout_s = }");
+
     @property
     @lru_cache
     def n_parameters(self):
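
Together with the fall-back to the legacy write procedure, a remote start/stop over Modbus becomes two blocking calls. A hedged sketch only: the constructor arguments are placeholders (the defaults of `IoniconModbus.__init__` are not visible in this diff), and both calls raise `TimeoutError` if the state change is not confirmed within 10 s:

    from pytrms.clients.modbus import IoniconModbus

    im = IoniconModbus('192.168.127.10', 502)   # placeholder host/port
    im.start_measurement()                      # a .h5 path would be ignored over Modbus
    ...                                         # acquire for a while
    im.stop_measurement()
    im.disconnect()
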
@@ -301,16 +384,16 @@ class IoniconModbus(IoniClientBase):
 
         return rv
 
-    def write_instrument_data(self, par_id, new_value, timeout_s=10):
+    def write_instrument_data(self, par_id, new_value, oldschool=False, timeout_s=10):
+        '''Send a write command via the Modbus protocol.
 
-        # This command-structure is written as an array:
-        # Register 0: number of command-blocks to write
-        # Each command-block consists of 3 registers:
-        # Register 1: Parameter ID
-        # Register 2-3: Parameter Set Value as float(real)
-        start_register = 41000
-        blocksize = 3
+        See the Modbus manual for implementation details.
 
+        'par_id'    - (int or string) the parameter-ID or -descriptor from the parID-list
+        'new_value' - the value to write (will be converted to float32)
+        'oldschool' - use the legacy Modbus write procedure (Register 40000)
+        'timeout_s' - timeout in seconds
+        '''
         if isinstance(par_id, str):
             try:
                 par_id = next(k for k, v in _id_to_descr.items() if v == par_id)
@@ -320,11 +403,33 @@ class IoniconModbus(IoniClientBase):
         if par_id not in _id_to_descr:
             raise IndexError(str(par_id))
 
-        # Note: we use only the first command-block for writing:
+        # Each command-block consists of 3 registers:
+        #   Register 1: Parameter ID (newschool) / execute bit (oldschool)
+        #   Register 2-3: Parameter Set Value as float(real)
+        # Note: The newschool command-structure is written as an array:
+        #   Register 0: number of command-blocks to write
+        # This coincides with the oldschool protocol, where
+        #   Register 0: ready to write (execute bit) ~> 1
+        # We use only the first command-block for writing...
         n_blocks = 1
-        reg_values = list(_pack(n_blocks, '>h'))
-        # ...although we could add more command-blocks here:
-        reg_values += list(_pack(par_id, '>h')) + list(_pack(new_value, '>f'))
+        assert (n_blocks == 1 or not oldschool), "oldschool instrument protocol doesn't allow multiple writes"
+        reg_values = []
+        if oldschool:
+            offset, name, packer = IoniconModbus._lookup_offset[par_id]
+            start_register = 40_000 + offset
+            reg_values += list(_pack(1, '>h'))  # execute!
+            reg_values += list(packer(new_value))
+            log.debug(f'WRITE REG {start_register} ({name}) w/ oldschool protocol')
+        else:
+            start_register = 41_000
+            # ...although we could add more command-blocks here (newschool):
+            reg_values += list(_pack(n_blocks, '>h'))
+            # the parameter to write is written to the command block (newschool):
+            reg_values += list(_pack(par_id, '>h'))
+            reg_values += list(_pack(new_value, '>f'))
+            log.debug(f'WRITE REG {start_register} ({par_id = }) w/ newschool protocol')
+
+        assert len(reg_values) == (4 - bool(oldschool)), "invalid program: unexpected number of registers to write"
 
         # wait for instrument to receive...
         retry_time = 0
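
Both write paths in one sketch, continuing the `im` object from the sketch above; parameter-ID 596 is the one `write_ame_action` uses, and the string descriptor with `oldschool=True` mirrors what `start_measurement` does (the chosen values are only illustrative):

    # newschool (default): one command-block at register 41000 -> parameter-ID plus float value
    im.write_instrument_data(596, 3, oldschool=False)

    # oldschool: execute-bit plus packed value at the parameter's fixed offset above register 40000
    im.write_instrument_data('ACQ_SRV_Start_Meas_Quick', 1, oldschool=True)
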
@@ -332,8 +437,8 @@ class IoniconModbus(IoniClientBase):
             # a value of 0 indicates ready-to-write:
             if self.mc.read_holding_registers(start_register) == [0]:
                 break
-            retry_time += 0.5
-            time.sleep(0.5)
+            retry_time += 0.2
+            time.sleep(0.2)
         else:
             raise TimeoutError(f'instrument not ready for writing after ({timeout_s}) seconds')
 
@@ -419,9 +524,9 @@ class IoniconModbus(IoniClientBase):
     def read_ame_alarms(self):
         start_reg, c_fmt, _is_holding = self.address['ame_alarms']
         n_alarms = int(self._read_reg(start_reg, c_fmt, _is_holding))
-        values = self._read_reg_multi(start_reg + 2, c_fmt, n_alarms, _is_holding)
+        alarm_levels = self._read_reg_multi(start_reg + 2, c_fmt, n_alarms, _is_holding)
 
-        return dict(zip(self.read_component_names(), map(bool, values)))
+        return dict(zip(self.read_component_names(), alarm_levels))
 
     def read_ame_timecycle(self):
         return self.read_timecycle(kind='components')
@@ -453,7 +558,7 @@ class IoniconModbus(IoniClientBase):
         ])
 
     def write_ame_action(self, action_number):
-        self.write_instrument_data(596, action_number, timeout_s=10)
+        self.write_instrument_data(596, action_number, oldschool=False, timeout_s=10)
 
     def read_ame_mean(self, step_number=None):
         start_reg, c_fmt, _is_holding = self.address['ame_mean_data']
@@ -485,14 +590,16 @@ class IoniconModbus(IoniClientBase):
         )
 
     def _read_reg(self, addr, c_format, is_holding_register=False):
-        n_bytes, c_format, reg_format = _get_fmt(c_format)
+        if not _is_open(self.mc):
+            raise IOError("trying to read from closed Modbus-connection")
+
         _read = self.mc.read_holding_registers if is_holding_register else self.mc.read_input_registers
-
+
+        n_bytes, c_format, reg_format = _get_fmt(c_format)
+
         register = _read(addr, n_bytes)
-        if register is None and _is_open(self.mc):
+        if register is None:
             raise IOError(f"unable to read ({n_bytes}) registers at [{addr}] from connection")
-        elif register is None and not _is_open(self.mc):
-            raise IOError("trying to read from closed Modbus-connection")
 
         return _unpack(register, c_format)
 
@@ -501,6 +608,9 @@ class IoniconModbus(IoniClientBase):
         if not n_values > 0:
             return rv
 
+        if not _is_open(self.mc):
+            raise IOError("trying to read from closed Modbus-connection")
+
         _read = self.mc.read_holding_registers if is_holding_register else self.mc.read_input_registers
 
         n_bytes, c_format, reg_format = _get_fmt(c_format)
@@ -514,10 +624,8 @@ class IoniconModbus(IoniClientBase):
 
         for block in blocks:
             register = _read(*block)
-            if register is None and self.is_connected:
+            if register is None:
                 raise IOError(f"unable to read ({block[1]}) registers at [{block[0]}] from connection")
-            elif register is None and not self.is_connected:
-                raise IOError("trying to read from closed Modbus-connection")
 
             # group the register-values by n_bytes, e.g. [1,2,3,4,..] ~> [(1,2),(3,4),..]
             # this is a trick from the itertools-recipes, see

{pytrms-0.9.6 → pytrms-0.9.7}/pytrms/clients/mqtt.py
@@ -10,18 +10,18 @@ from threading import Condition, RLock
 
 from . import _logging
 from . import _par_id_file
-from .._base import itype, MqttClientBase
+from .._base import itype, _MqttClientBase, _IoniClientBase
 
 
 log = _logging.getLogger(__name__)
 
-__all__ = ['MqttClient', 'MqttClientBase']
+__all__ = ['MqttClient']
 
 
 with open(_par_id_file) as f:
     from pandas import read_csv, isna
 
-    _par_id_info = read_csv(f, sep='\t').drop(0).set_index('Name')
+    _par_id_info = read_csv(f, sep='\t').drop(0).set_index('Name').fillna('')
     if isna(_par_id_info.at['MPV_1', 'Access']):
         log.warning(f'filling in read-properties still missing in {os.path.basename(_par_id_file)}')
         _par_id_info.at['MPV_1', 'Access'] = 'RW'
@@ -34,9 +34,10 @@ with open(_par_id_file) as f:
 
 def _build_header():
     ts = datetime.now()
+    ts_isoformat = ts.astimezone().isoformat(timespec='milliseconds')
     header = {
         "TimeStamp": {
-            "Str": ts.isoformat(),
+            "Str": ts_isoformat[:-3] + ts_isoformat[-2:],  # TZ-info w/o the colon
             "sec": ts.timestamp() + 2082844800,  # convert to LabVIEW time
         },
     }
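
The slicing removes the colon from the UTC offset so the header timestamp takes the +HHMM form. A worked example with a made-up, timezone-aware datetime (the real code starts from a naive `datetime.now()` and calls `.astimezone()`):

    from datetime import datetime, timedelta, timezone

    ts = datetime(2024, 5, 13, 10, 2, 3, 456000, tzinfo=timezone(timedelta(hours=2)))
    s = ts.isoformat(timespec='milliseconds')   # '2024-05-13T10:02:03.456+02:00'
    s[:-3] + s[-2:]                             # '2024-05-13T10:02:03.456+0200'
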
@@ -133,7 +134,7 @@ def _parse_fullcycle(byte_string, need_add_data=False):
     performance (on a Intel Core i5, 8th Gen Ubuntu Linux):
       < 2 ms when `need_add_data=False` (default)
       6-7 ms when needing to parse the AddData-cluster (else)
-
+
     @returns a namedtuple ('timecycle', 'intensity', 'mass_cal', 'add_data')
     '''
     import numpy as np
@@ -152,7 +153,7 @@ def _parse_fullcycle(byte_string, need_add_data=False):
         _arr = np.frombuffer(byte_string, dtype=dtype, count=1, offset=offset)
         offset += _arr.nbytes
         return _arr[0]
-
+
     def rd_arr1d(dtype=_f32, count=None):
         nonlocal offset
         if count is None:
@@ -160,7 +161,7 @@ def _parse_fullcycle(byte_string, need_add_data=False):
         arr = np.frombuffer(byte_string, dtype=dtype, count=count, offset=offset)
         offset += arr.nbytes
         return arr
-
+
     def rd_arr2d(dtype=_f32):
         nonlocal offset
         n = rd_single()
@@ -172,14 +173,14 @@ def _parse_fullcycle(byte_string, need_add_data=False):
     def rd_string():
         nonlocal offset
         return rd_arr1d(dtype=_chr).tobytes().decode('latin-1').lstrip('\x00')
-
+
     tc_cluster = rd_arr1d(dtype=_f64, count=4)
     run__, cpx__ = rd_arr1d(dtype=_f64, count=2)  # (discarded)
     # SpecData #
     intensity = rd_arr1d(dtype=_f32)
     sum_inty = rd_arr1d(dtype=_f32)  # (discarded)
     mon_peaks = rd_arr2d(dtype=_f32)  # (discarded)
-
+
     if not need_add_data:
         # skip costly parsing of Trace- and Add-Data cluster:
         return itype.fullcycle_t(itype.timecycle_t(*tc_cluster), intensity, None, None)
@@ -283,7 +284,7 @@ def follow_schedule(client, self, msg):
 
     if msg.topic.endswith("SRV_Schedule"):
         if not msg.payload:
-            log.warn("empty ACQ_SRV_Schedule payload has cleared retained topic")
+            log.warning("empty ACQ_SRV_Schedule payload has cleared retained topic")
             self._sched_cmds.clear()
             return
 
@@ -413,7 +414,7 @@ _subscriber_functions = [fun for name, fun in list(vars().items())
 
 _NOT_INIT = object()
 
 
-class MqttClient(MqttClientBase):
+class MqttClient(_MqttClientBase, _IoniClientBase):
     """a simplified client for the Ionicon MQTT API.
 
     > mq = MqttClient()
@@ -433,7 +434,7 @@ class MqttClient(MqttClientBase):
     set_value_limit = {
         "TCP_MCP_B": 3200.0,
     }
-
+
     @property
     def is_connected(self):
         '''Returns `True` if connection to IoniTOF could be established.'''
@@ -476,7 +477,7 @@ class MqttClient(MqttClientBase):
     @property
     def current_sourcefile(self):
         '''Returns the path to the hdf5-file that is currently being written.
-
+
         Returns an empty string if no measurement is running.
         '''
         if not self.is_running:
@@ -492,7 +493,7 @@ class MqttClient(MqttClientBase):
         while time.monotonic() < started_at + timeout_s:
             if self._sf_filename[0] is not _NOT_INIT:
                 return self._sf_filename[0]
-
+
             time.sleep(10e-3)
         else:
             raise TimeoutError(f"[{self}] unable to retrieve source-file after ({timeout_s = })");
@@ -531,6 +532,10 @@ class MqttClient(MqttClientBase):
         if not self.is_connected:
             raise Exception(f"[{self}] no connection to instrument");
 
+        if parID == "FC_inlet":
+            log.warning(f"mapping 'FC_inlet' ~> 'FC_FC inlet' (with whitespace)")
+            parID = "FC_FC inlet"
+
         _lut = self.act_values if kind.lower() == "act" else self.set_values
         is_read_only = ('W' not in _par_id_info.loc[parID].Access)  # may raise KeyError!
         if _lut is self.set_values and is_read_only:
@@ -561,7 +566,7 @@ class MqttClient(MqttClientBase):
             # confirm change of state:
             if not self._calcconzinfo[0] is _NOT_INIT:
                 return self._calcconzinfo[0].tables[table_name]
-
+
             time.sleep(10e-3)
         else:
             raise TimeoutError(f"[{self}] unable to retrieve calc-conz-info from PTR server");
@@ -592,11 +597,15 @@ class MqttClient(MqttClientBase):
         if not self.is_connected:
             raise Exception(f"[{self}] no connection to instrument");
 
+        if parID == "FC_inlet":
+            log.warning(f"mapping 'FC_inlet' ~> 'FC_FC inlet' (with whitespace)")
+            parID = "FC_FC inlet"
+
         if not 'W' in _par_id_info.loc[parID].Access:  # may raise KeyError!
             raise ValueError(f"'{parID}' is read-only")
 
         if parID in __class__.set_value_limit and new_value > __class__.set_value_limit[parID]:
-            raise ValueError("set value limit of {__class__.set_value_limit[parID]} on '{parID}'")
+            raise ValueError(f"will not exceed set-value limit of {__class__.set_value_limit[parID]} on '{parID}'")
 
         topic, qos, retain = "IC_Command/Write/Direct", 1, False
         log.info(f"writing '{parID}' ~> [{new_value}]")
@@ -617,11 +626,15 @@ class MqttClient(MqttClientBase):
         if not self.is_connected:
             raise Exception(f"[{self}] no connection to instrument");
 
+        if parID == "FC_inlet":
+            log.warning(f"mapping 'FC_inlet' ~> 'FC_FC inlet' (with whitespace)")
+            parID = "FC_FC inlet"
+
         if not 'W' in _par_id_info.loc[parID].Access:  # may raise KeyError!
             raise ValueError(f"'{parID}' is read-only")
 
         if parID in __class__.set_value_limit and new_value > __class__.set_value_limit[parID]:
-            raise ValueError("set value limit of {__class__.set_value_limit[parID]} on '{parID}'")
+            raise ValueError(f"will not exceed set-value limit of {__class__.set_value_limit[parID]} on '{parID}'")
 
         if (future_cycle == 0 and not self.is_running):
             # Note: ioniTOF40 doesn't handle scheduling for the 0th cycle!
@@ -640,7 +653,7 @@ class MqttClient(MqttClientBase):
             return self.write(parID, new_value)
 
         if not future_cycle > self.current_cycle:
-            log.warn(f"attempting to schedule past cycle, hope you know what you're doing");
+            log.warning(f"attempting to schedule past cycle, hope you know what you're doing");
             pass  # and at least let's debug it in MQTT browser (see also doc-string above)!
 
         topic, qos, retain = "IC_Command/Write/Scheduled", 1, False
@@ -772,7 +785,7 @@ class MqttClient(MqttClientBase):
         # uninterruptible wait on an underlying lock. This means that no exceptions
         # can occur, and in particular a SIGINT will not trigger a KeyboardInterrupt!
         if timeout_s is None and not self.is_running:
-            log.warn(f"waiting indefinitely for measurement to run...")
+            log.warning(f"waiting indefinitely for measurement to run...")
 
         yield q.get(block=True, timeout=timeout_s)
 

{pytrms-0.9.6 → pytrms-0.9.7}/pytrms/compose/composition.py
@@ -35,17 +35,17 @@ class Step:
     >>> Step("H50", {'AUTO_UseMean': 0}, 10, start_delay=2)
     Traceback (most recent call last):
       ...
-    AssertionError: Automation numbers cannot be defined
+    AssertionError: a Step must not define AME-numbers
 
     ..and neither can a 'OP_Mode' alongside anything else:
     >>> Step("Odd2", {'DPS_Udrift': 345, 'OP_Mode': 2}, 10, start_delay=2)
     Traceback (most recent call last):
       ...
-    AssertionError: if 'OP_Mode' is specified, nothing else can be
+    AssertionError: if Step defines 'OP_Mode', nothing else can be!
 
     '''
     protected_keys = ['AME_RunNumber', 'AME_StepNumber', 'AUTO_UseMean']
-
+
     def __init__(self, name, set_values, duration, start_delay):
         self.name = str(name)
         self.set_values = dict(set_values)
@@ -58,9 +58,9 @@ class Step:
         assert self.start_delay < self.duration
 
        for key in self.set_values:
-            assert key not in Step.protected_keys, "Automation numbers cannot be defined"
+            assert key not in Step.protected_keys, "a Step must not define AME-numbers"
        if 'OP_Mode' in self.set_values:
-            assert len(self.set_values) == 1, "if 'OP_Mode' is specified, nothing else can be"
+            assert len(self.set_values) == 1, "if Step defines 'OP_Mode', nothing else can be!"
 
     def __repr__(self):
         return f"{self.name}: ({self.start_delay}/{self.duration}) sec ~> {self.set_values}"
@@ -87,7 +87,7 @@ class Composition(Iterable):
     ...without automation (default)...
     >>> list(co.sequence())
     [(8, {'Eins': 1}), (18, {'Zwei': 2})]
-
+
     ...with an action-number at the start (note, that AME-numbers are 1 cycle ahead)...
     >>> co.start_action = 7
     >>> list(co.sequence())
@@ -98,7 +98,7 @@ class Composition(Iterable):
     >>> seq = co.sequence()
     >>> next(seq)
     (9, {'AME_ActionNumber': 7})
-
+
     >>> next(seq)
     (8, {'Eins': 1})
 
@@ -116,14 +116,14 @@ class Composition(Iterable):
 
     >>> next(seq)
     (22, {'AUTO_UseMean': 1})
-
+
     '''
 
     STEP_MARKER = 'AME_StepNumber'
     RUN_MARKER = 'AME_RunNumber'
     USE_MARKER = 'AUTO_UseMean'
     ACTION_MARKER = 'AME_ActionNumber'
-
+
     @staticmethod
     def load(filename, **kwargs):
         with open(filename, 'r') as ifstream:
@@ -137,7 +137,7 @@ class Composition(Iterable):
         self.start_action = int(start_action) if start_action is not None else None
         self.generate_automation = bool(generate_automation)
         self.foresight_runs = int(foresight_runs) if self.max_runs < 0 else max(int(foresight_runs), self.max_runs)
-
+
         assert len(self.steps) > 0, "empty step list"
         assert self.max_runs != 0, "max_runs cannot be zero"
         assert self.foresight_runs > 0, "foresight_runs must be positive"
@@ -150,7 +150,7 @@ class Composition(Iterable):
         return self.max_runs > 0
 
     def dump(self, ofstream):
-        json.dump(self, ofstream, indent=2, default=vars)
+        json.dump(self.steps, ofstream, indent=2, default=vars)
 
     def translate_op_modes(self, preset_items, check=True):
         '''Given the `preset_items` (from a presets-file), compile a list of set_values.
@@ -210,7 +210,7 @@ class Composition(Iterable):
             entry.update(carry)
             if check:
                 assert all(key in entry for key in preset_keys), "reaction-data missing in presets"
-
+
         return set_values
 
     def sequence(self):
@@ -222,11 +222,11 @@ class Composition(Iterable):
         This generates AME_Run/Step-Number and AUTO_UseMean unless otherwise specified.
         '''
         _offset_ame = True  # whether ame-numbers should mark the *next* cycle, see [#2897]
-
+
         future_cycle = self.start_cycle
         if self.start_action is not None:
             yield future_cycle + int(_offset_ame), dict([(self.ACTION_MARKER, int(self.start_action))])
-
+
         for run, step, step_info in self:
             yield future_cycle, dict(step_info.set_values)
 
@@ -250,9 +250,9 @@ class Composition(Iterable):
     @coroutine
     def schedule_routine(self, schedule_fun):
         '''Create a coroutine that receives the current cycle and yields the last scheduled cycle.
-
+
         'schedule_fun' should be a callable taking three arguments '(parID, value, schedule_cycle)'
-
+
         >>> co = Composition([
         ...     Step("Oans", {"Eins": 1}, 10, start_delay=2),
         ...     Step("Zwoa", {"Zwei": 2}, 10, start_delay=3)
@@ -265,10 +265,10 @@ class Composition(Iterable):
         Eins 1 20
         Zwei 2 30
        Eins 1 40
-
+
         >>> wake_cycle  # should wake up in time before the last run has begun..
         30
-
+
         '''
         # feed all future updates for a given current cycle to the Dirigent
         log.debug("schedule_routine: initializing...")

{pytrms-0.9.6 → pytrms-0.9.7}/pytrms/helpers.py
@@ -6,7 +6,7 @@ common helper functions.
 def convert_labview_to_posix(lv_time_utc, utc_offset_sec):
     '''Create a `pandas.Timestamp` from LabView time.'''
     from pandas import Timestamp
-
+
     # change epoch from 01.01.1904 to 01.01.1970:
     posix_time = lv_time_utc - 2082844800
     # the tz must be specified in isoformat like '+02:30'..
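
The constant 2082844800 is the number of seconds between the LabVIEW epoch (1904-01-01) and the POSIX epoch (1970-01-01). A quick check of the epoch shift alone, with a made-up input value (the real helper additionally applies the UTC offset):

    from pandas import Timestamp

    lv_time_utc = 3782844800.0                 # made-up LabVIEW timestamp
    posix_time = lv_time_utc - 2082844800      # -> 1700000000.0
    Timestamp(posix_time, unit='s', tz='UTC')  # Timestamp('2023-11-14 22:13:20+0000', tz='UTC')
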
@@ -19,24 +19,24 @@ def convert_labview_to_posix(lv_time_utc, utc_offset_sec):
 
 def parse_presets_file(presets_file):
     '''Load a `presets_file` as XML-tree and interpret the "OP_Mode" of this `Composition`.
-
+
     The tricky thing is, that any OP_Mode may or may not override previous settings!
     Therefore, it depends on the order of modes in this Composition to be able to assign
     each OP_Mode its actual dictionary of set_values.
-
+
     Note, that the preset file uses its own naming convention that cannot neccessarily be
     translated into standard parID-names. You may choose whatever you like to do with it.
     '''
     import xml.etree.ElementTree as ET
     from collections import namedtuple, defaultdict
-
-    _key = namedtuple('preset_item', ['name', 'ads_path', 'dtype'])
+
+    _key = namedtuple('preset_item', ['name', 'ads_path', 'dtype'])
     _parse_value = {
         "FLOAT": float,
         "BOOL": bool,
         "BYTE": int,
         "ENUM": int,
-    }
+    }
     tree = ET.parse(presets_file)
     root = tree.getroot()
 
@@ -44,7 +44,7 @@ def parse_presets_file(presets_file):
     preset_items = defaultdict(dict)
     for index, preset in enumerate(root.iterfind('preset')):
         preset_names[index] = preset.find('name').text.strip()
-
+
         if preset.find('WritePrimIon').text.upper() == "TRUE":
             val = preset.find('IndexPrimIon').text
             preset_items[index][_key('PrimionIdx', '', 'INT')] = int(val)
@@ -52,7 +52,7 @@ def parse_presets_file(presets_file):
         if preset.find('WriteTransmission').text.upper() == "TRUE":
             val = preset.find('IndexTransmission').text
             preset_items[index][_key('TransmissionIdx', '', 'INT')] = int(val)
-
+
         for item in preset.iterfind('item'):
             if item.find('Write').text.upper() == "TRUE":
                 # device_index = item.find('DeviceIndex').text
@@ -61,9 +61,9 @@ def parse_presets_file(presets_file):
                 # page_name = item.find('PageName').text
                 name = item.find('Name').text
                 value_text = item.find('Value').text
-
+
                 key = _key(name, ads_path, data_type)
-                val = _parse_value[data_type](value_text)
+                val = _parse_value[data_type](value_text)
                 preset_items[index][key] = val
 
     return {index: (preset_names[index], preset_items[index]) for index in preset_names.keys()}
@@ -89,7 +89,7 @@ def setup_measurement_dir(config_dir=None, data_root_dir="D:/Data", suffix="",
     from datetime import datetime
     from itertools import chain
 
-    recipe = namedtuple('recipe', ['dirname', 'h5_file', 'pt_file', 'alarms_file'])
+    recipe = namedtuple('recipe', ['dirname', 'h5_file', 'pt_file', 'alarm_files'])
     _pt_formats = ['*.ionipt']
     _al_formats = ['*.alm']
     # make directory with current timestamp:
@@ -105,9 +105,9 @@ def setup_measurement_dir(config_dir=None, data_root_dir="D:/Data", suffix="",
         # we're done here..
         return recipe(new_recipe_dir, new_h5_file, '', '')
 
-    # find the *first* matching file or an empty string if no match...
+    # peaktable: find the *first* matching file or an empty string if no match:
     new_pt_file = next(chain.from_iterable(glob.iglob(config_dir + "/" + g) for g in _pt_formats), '')
-    new_al_file = next(chain.from_iterable(glob.iglob(config_dir + "/" + g) for g in _al_formats), '')
+    alm_files = sorted(chain.from_iterable(glob.iglob(config_dir + "/" + g) for g in _al_formats))
     # ...and copy all files from the master-recipe-dir:
     files2copy = glob.glob(config_dir + "/*")
     for file in files2copy:
@@ -122,5 +122,5 @@ def setup_measurement_dir(config_dir=None, data_root_dir="D:/Data", suffix="",
             # well, we can't set write permission
             pass
 
-    return recipe(new_recipe_dir, new_h5_file, new_pt_file, new_al_file)
+    return recipe(new_recipe_dir, new_h5_file, new_pt_file, alm_files)
 

{pytrms-0.9.6 → pytrms-0.9.7}/pytrms/peaktable.py
@@ -274,7 +274,7 @@ class PeakTable:
     @staticmethod
     def _parse_ionipt(file):
 
-        def _make_peak(ioni_p, borders, shift, parent=None):
+        def _make_peak(ioni_p, borders, shift, parent):
             return Peak(ioni_p["center"],
                         label=ioni_p["name"],
                         formula=ioni_p["ionic_isotope"],
@@ -292,23 +292,29 @@ class PeakTable:
             border_peak = item["border_peak"]
             borders = (item["low"], item["high"])
             shift = item["shift"]
-            parent = None
             MODE = int(item["mode"])
             IGNORE = 0b00
             INTEGRATE = 0b01
             FIT_PEAKS = 0b10
+            BOTH = 0b11
+
             if bool(MODE == IGNORE):
                 continue
-            if bool(MODE & INTEGRATE):
-                parent = _make_peak(border_peak, borders, shift)
+
+            if bool(MODE & FIT_PEAKS) or bool(MODE == BOTH):
+                # Note: this cannot be handled separately, because
+                # every fit-peak needs a parent in our context!
+                parent = _make_peak(border_peak, borders, shift, None)
                 peaks.append(parent)
-            if bool(MODE & FIT_PEAKS):
                 for ioni_peak in item["peak"]:
-                    if parent is None:
-                        # Note: we denote a peak w/ parent as a "fitted" peak..
-                        # as a workaround, use the first as (its own) parent:
-                        parent = ioni_peak["name"]
                     peaks.append(_make_peak(ioni_peak, borders, shift, parent))
+                continue
+
+            if bool(MODE & INTEGRATE):
+                # Note: MUST go last, since BOTH would apply and I forgot about
+                # bitwise-operations in Python and don't want to remember..
+                peaks.append(_make_peak(border_peak, borders, shift, None))
+                continue
 
         return PeakTable(peaks)
 
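
The reordered branches rely on the mode field being a two-bit flag, so the FIT_PEAKS case (with or without INTEGRATE) has to be handled before the pure-INTEGRATE case. A compact truth table as a sketch:

    IGNORE, INTEGRATE, FIT_PEAKS, BOTH = 0b00, 0b01, 0b10, 0b11

    for MODE in (IGNORE, INTEGRATE, FIT_PEAKS, BOTH):
        print(MODE, bool(MODE & FIT_PEAKS), bool(MODE & INTEGRATE))
    # 0 False False  -> item is skipped entirely
    # 1 False True   -> only the border peak is integrated (no parent)
    # 2 True  False  -> border peak becomes the parent of its fitted peaks
    # 3 True  True   -> handled by the FIT_PEAKS branch, which is why it must come first
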