keithley-tempcontrol 0.17.3__py3-none-any.whl → 0.18.1__py3-none-any.whl
This diff compares the contents of two publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the packages exactly as they appear in their respective public registries.
- egse/tempcontrol/keithley/__init__.py +0 -2
- egse/tempcontrol/keithley/daq6510.py +101 -255
- egse/tempcontrol/keithley/daq6510_adev.py +27 -35
- egse/tempcontrol/keithley/daq6510_amon.py +569 -0
- egse/tempcontrol/keithley/daq6510_cs.py +115 -44
- egse/tempcontrol/keithley/daq6510_dev.py +77 -210
- egse/tempcontrol/keithley/daq6510_mon.py +242 -453
- egse/tempcontrol/keithley/daq6510_protocol.py +43 -38
- egse/tempcontrol/keithley/daq6510_sim.py +119 -62
- keithley_tempcontrol/cgse_services.py +58 -7
- {keithley_tempcontrol-0.17.3.dist-info → keithley_tempcontrol-0.18.1.dist-info}/METADATA +1 -1
- keithley_tempcontrol-0.18.1.dist-info/RECORD +18 -0
- {keithley_tempcontrol-0.17.3.dist-info → keithley_tempcontrol-0.18.1.dist-info}/entry_points.txt +1 -0
- keithley_tempcontrol-0.17.3.dist-info/RECORD +0 -17
- {keithley_tempcontrol-0.17.3.dist-info → keithley_tempcontrol-0.18.1.dist-info}/WHEEL +0 -0
--- egse/tempcontrol/keithley/daq6510_mon.py (keithley_tempcontrol 0.17.3)
+++ egse/tempcontrol/keithley/daq6510_mon.py (keithley_tempcontrol 0.18.1)
@@ -1,524 +1,313 @@
-
-
-
-import logging
-import signal
-import time
-from asyncio import Task
-from pathlib import Path
-from typing import Any
-from typing import Optional
+"""
+The synchronous monitoring service is a small application that performs measurements on
+the DAQ6510.

-
-
+The service reads the configuration for the Keithley DAQ6510 from the
+Configuration Manager and then configures the device. When no Configuration Manager is
+available, the service can also be started with a filename to read the configuration from. The file
+should have the YAML format.

-
-
-
+```
+insert an excerpt of a sample YAML configuration file here...
+```

-
+The monitoring service can be started as follows:


-
-"""
-DAQ6510 temperature monitoring service with ZeroMQ command interface.
+"""

-
+import datetime
+import multiprocessing
+import os
+import sys
+import time
+from pathlib import Path
+from typing import Any

-
-
-
-        daq_port: int = 5025,
-        zmq_port: int = 5556,
-        log_file: str = "temperature_readings.log",
-        channels: list[str] = None,
-        poll_interval: float = 60.0,
-    ):
-        """Initialize the DAQ6510 monitoring service.
-
-        Args:
-            daq_hostname: Hostname or IP of the DAQ6510
-            daq_port: TCP port for DAQ6510 SCPI interface
-            zmq_port: Port for ZeroMQ command interface
-            log_file: Path to log file for temperature readings
-            channels: List of channels to monitor (e.g. ["101", "102"])
-            poll_interval: Initial polling interval in seconds
-        """
-        self.daq_hostname = daq_hostname
-        self.daq_port = daq_port
-        self.zmq_port = zmq_port
-        self.log_file = Path(log_file)
-        self.channels = channels or ["101", "102", "103", "104"]
-        self.poll_interval = poll_interval
-
-        # Setup ZeroMQ context
-        self.ctx = zmq.asyncio.Context()
-        self.socket = self.ctx.socket(zmq.ROUTER)
-        self.socket.bind(f"tcp://*:{zmq_port}")
-
-        # Service state
-        self.running = False
-        self.polling_active = False
-        self.daq_interface = None
-        self.command_handlers = {
-            "START_POLLING": self._handle_start_polling,
-            "STOP_POLLING": self._handle_stop_polling,
-            "SET_INTERVAL": self._handle_set_interval,
-            "SET_CHANNELS": self._handle_set_channels,
-            "GET_STATUS": self._handle_get_status,
-            "GET_READING": self._handle_get_reading,
-            "GET_LAST_READING": self._handle_get_last_reading,
-            "SHUTDOWN": self._handle_shutdown,
-        }
-
-        # Keep a record of the last measurement
-        self._last_reading: dict = {}
-
-        # Make sure the log directory exists
-        self.log_file.parent.mkdir(exist_ok=True, parents=True)
-
-        # Create DAQ interface
-        self.daq_interface = DAQ6510(hostname=daq_hostname, port=daq_port)
-
-    async def start(self):
-        """Start the monitoring service."""
-        logger.info(f"Starting DAQ6510 Monitoring Service on ZMQ port {self.zmq_port}")
-        self.running = True
-
-        # Register signal handlers for graceful shutdown
-        for sig in (signal.SIGINT, signal.SIGTERM):
-            asyncio.get_event_loop().add_signal_handler(sig, lambda: asyncio.create_task(self.shutdown()))
-
-        # Start the main service tasks
-        await asyncio.gather(self.command_listener(), self.connect_daq(), return_exceptions=True)
-
-    def done_polling(self, task: Task):
-        if task.exception():
-            logger.error(f"Polling loop ended unexpectedly: {task.exception()}")
-        logger.info(f"Done polling ({task.get_name()}).")
-        self.polling_active = False
-
-    async def connect_daq(self):
-        """Establish connection to the DAQ6510."""
-        while self.running:
-            try:
-                logger.info(f"Connecting to DAQ6510 at {self.daq_hostname}:{self.daq_port}")
-                await self.daq_interface.connect()
-                logger.info("Successfully connected to DAQ6510.")
-                await self.daq_interface.initialize()
-                logger.info("Successfully initialized DAQ6510 for measurements.")
-
-                # If we were polling before, restart it.
-                # The first time we enter this loop, we are not polling.
-                if self.polling_active:
-                    # QUESTION: Do we need to await here?
-                    polling_task = asyncio.create_task(self.polling_loop())
-
-                    # But we can add error handling for the task
-                    polling_task.add_done_callback(self.done_polling)
-
-                # Keep checking connection status periodically
-                while self.running and await self.daq_interface.is_connected():
-                    logger.info("Checking DAQ6510 connection...")
-                    await asyncio.sleep(10)
-
-                if self.running:
-                    logger.warning("Lost connection to DAQ6510")
-                    await self.daq_interface.disconnect()
-
-            except (DeviceConnectionError, DeviceTimeoutError) as exc:
-                logger.error(f"Failed to connect to DAQ6510: {exc}")
-                await asyncio.sleep(5)  # Wait before retrying
-
-    async def polling_loop(self):
-        """Main polling loop for temperature measurements."""
-        logger.info(f"Starting temperature polling loop (interval: {self.poll_interval}s, channels: {self.channels})")
-
-        # The next lines are a way to calculate the sleep time between two measurements, this takes the time of the
-        # measurement itself into account.
-        def interval():
-            next_time = time.perf_counter()
-            while True:
-                next_time += self.poll_interval
-                yield max(next_time - time.perf_counter(), 0)
-
-        g_interval = interval()
-
-        while self.running and self.polling_active:
-            try:
-                if not await self.daq_interface.is_connected():
-                    logger.warning("DAQ6510 not connected, skipping temperature reading")
-                    await asyncio.sleep(5)
-                    continue
+import rich
+import typer
+from urllib3.exceptions import NewConnectionError

-
-
+from egse.env import bool_env
+from egse.hk import read_conversion_dict
+from egse.log import logger
+from egse.logger import remote_logging
+from egse.metrics import get_metrics_repo
+from egse.response import Failure
+from egse.scpi import count_number_of_channels, get_channel_names
+from egse.settings import get_site_id
+from egse.setup import Setup, load_setup
+from egse.storage import StorageProxy, is_storage_manager_active
+from egse.storage.persistence import CSV
+from egse.system import SignalCatcher, flatten_dict, format_datetime, now, str_to_datetime, type_name
+from egse.tempcontrol.keithley.daq6510 import DAQ6510Proxy
+from egse.tempcontrol.keithley.daq6510_cs import is_daq6510_cs_active

-
-
-                try:
-                    # temp = random.random()
-                    temp = await self.daq_interface.get_measurement(channel)
-                    readings[channel] = temp
-                except (DeviceConnectionError, DeviceTimeoutError, ValueError) as exc:
-                    logger.error(f"Error reading channel {channel}: {exc}")
-                    readings[channel] = None
+VERBOSE_DEBUG = bool_env("VERBOSE_DEBUG")
+SITE_ID = get_site_id()

-                # Log the readings
-                log_entry = {"timestamp": timestamp, "readings": readings}

-
-
-
+def load_setup_from_input_file(input_file: str | Path) -> Setup | None:
+    """Loads a Setup YAML file from disk."""
+    input_file = Path(input_file).resolve()

-
+    if not input_file.exists():
+        logger.error(f"ERROR: Input file ({input_file}) doesn't exists.")
+        return None

-
+    return Setup.from_yaml_file(input_file)

-            except Exception as exc:
-                logger.exception(f"Error in polling loop: {exc}")

-
-
-
+def daq6510(count, interval, delay, channel_list, input_file: str):
+    """
+    Run the monitoring service for the DAQ6510.

-
+    Args:
+        count: Number of measurements to perform per acquisition [optional]
+        interval: Time interval between measurements in seconds [optional]
+        delay: Delay between acquisitions in seconds [optional]
+        channel_list: Comma-separated list of channels to acquire data from [optional]
+        input_file: YAML file containing the Setup for the DAQ6510 [optional]

-
-        """ZeroMQ command interface listener."""
-        logger.info("Command listener started")
+    """

-
-
-
-
+    if input_file:
+        setup = load_setup_from_input_file(input_file)
+    else:
+        setup = load_setup()

-
-
-
-                continue
+    if setup is None:
+        logger.error("ERROR: Could not load setup.")
+        sys.exit(1)

-
+    if VERBOSE_DEBUG:
+        logger.debug(f"Loaded setup: {setup}")

-
-
-
-                command = command_data.get("command")
-                params = command_data.get("params", {})
+    if not hasattr(setup, "gse"):
+        logger.error("ERROR: No GSE section in the loaded Setup.")
+        sys.exit(1)

-
+    try:
+        hk_conversion_table = read_conversion_dict("DAQ6510-MON", use_site=True, setup=setup)
+        column_names = list(hk_conversion_table.values())
+    except Exception as exc:
+        logger.warning(f"WARNING: Failed to read telemetry dictionary: {exc}")
+        hk_conversion_table = {"101": "PT100-4", "102": "PT100-2"}
+        column_names = list(hk_conversion_table.values())

-
-
-
-
-
+    if not is_daq6510_cs_active():
+        logger.error(
+            "The DAQ6510 Control Server is not running, start the 'daq6510_cs' command "
+            "before running the data acquisition."
+        )
+        return

-
-
-
-                logger.exception(f"Error processing command: {exc}")
-                response = {"status": "error", "message": str(exc)}
+    if not is_storage_manager_active():
+        logger.error("The storage manager is not running, start the core services before running the data acquisition.")
+        return

-
-
+    if "DAQ6510" not in setup.gse: # type: ignore
+        logger.error("ERROR: no DAQ6510 entry in the loaded Setup.")
+        sys.exit(1)

-
-
-            await asyncio.sleep(1)
+    if not channel_list:
+        channel_list = setup.gse.DAQ6510.channels # type: ignore

-
-
-        if not self.polling_active:
-            self.polling_active = True
+    if not count:
+        count = setup.gse.DAQ6510.route.scan.count.scan # type: ignore

-
-
-            self.channels = params["channels"]
+    if not interval:
+        interval = setup.gse.DAQ6510.route.scan.interval # type: ignore

-
-
-            self.poll_interval = float(params["interval"])
+    if not delay:
+        delay = setup.gse.DAQ6510.route.delay # type: ignore

-
-            polling_task = asyncio.create_task(self.polling_loop())
+    count, interval, delay = int(count), int(interval), int(delay)

-
-
-                lambda t: logger.error(f"Polling loop ended unexpectedly: {t.exception()}") if t.exception() else None
-            )
+    channel_count = count_number_of_channels(channel_list)
+    channel_names = get_channel_names(channel_list)

-
-                "status": "ok",
-                "message": f"Polling started with interval {self.poll_interval}s and channels {self.channels}",
-            }
-        else:
-            return {"status": "ok", "message": "Polling already active"}
+    metrics_client = setup_metrics_client()

-
-        """Stop temperature polling."""
-        if self.polling_active:
-            self.polling_active = False
-            return {"status": "ok", "message": "Polling stopped"}
-        else:
-            return {"status": "ok", "message": "Polling already stopped"}
+    # Initialize some variables that will be used for registration to the Storage Manager

-
-
-
-
-
-
-
-            if interval <= 0:
-                return {"status": "error", "message": "Interval must be positive"}
+    origin = "DAQ6510-MON"
+    persistence_class = CSV
+    prep = {
+        "mode": "a",
+        "ending": "\n",
+        "column_names": ["timestamp", *column_names],
+    }

-
-            self.poll_interval = interval
+    killer = SignalCatcher()

-
-
-            return {"status": "error", "message": "Invalid interval format"}
+    with DAQ6510Proxy() as daq, StorageProxy() as storage:
+        daq.reset()

-
-
-
-            return {"status": "error", "message": "Missing or invalid parameter: channels (should be a list)"}
+        dt = now()
+        daq.set_time(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second)
+        logger.info(f"DAQ6510 date and time set: {daq.get_time()}")

-
-        self.channels = params["channels"]
+        storage.register({"origin": origin, "persistence_class": persistence_class, "prep": prep})

-
+        # This will write a comment line to the CSV file with the column names. This might be useful when
+        # the sensors are reconfigured and the number or names of columns changes.
+        storage.save({"origin": origin, "data": f"# columns: {column_names}"})

-
-
+        for sensor in setup.gse.DAQ6510.sensors: # type: ignore
+            for function in setup.gse.DAQ6510.sensors[sensor]: # type: ignore
+                sense = {
+                    function.upper(): [
+                        (key, value)
+                        for key, value in flatten_dict(setup.gse.DAQ6510.sensors[sensor][function]).items() # type: ignore
+                        if key != "channels"
+                    ]
+                }
+                function_channel_list = setup.gse.DAQ6510.sensors[sensor][function].channels # type: ignore
+                if VERBOSE_DEBUG:
+                    logger.debug(f"{sense=}")
+                    logger.debug(f"{function_channel_list=}")
+                daq.configure_sensors(channel_list=function_channel_list, sense=sense)

-
-        """Get a reading for the given channel(s)."""
-        logger.info(f"GET_READING – {params = }")
+        logger.info(f"global: {channel_list=}, {channel_count=}")

-
+        daq.setup_measurements(channel_list=channel_list)

-
+        while True:
             try:
-
-                readings["data"][channel] = temp
-            except (DeviceConnectionError, DeviceTimeoutError, ValueError, RuntimeError) as exc:
-                logger.error(f"Error reading channel {channel}: {exc}")
-                readings["data"][channel] = None
-                readings.update({"status": "error", "message": f"Error reading channel {channel}"})
-
-        return readings
-
-    async def _handle_get_status(self, params: dict[str, Any]) -> dict[str, Any]:
-        """Get current service status."""
-        connected = False
-        try:
-            if self.daq_interface:
-                connected = await self.daq_interface.is_connected()
-        except Exception:
-            connected = False
-
-        return {
-            "status": "ok",
-            "data": {
-                "service_running": self.running,
-                "polling_active": self.polling_active,
-                "poll_interval": self.poll_interval,
-                "channels": self.channels,
-                "daq_connected": connected,
-                "daq_hostname": self.daq_hostname,
-                "daq_port": self.daq_port,
-            },
-        }
-
-    async def _handle_shutdown(self, params: dict[str, Any]) -> dict[str, Any]:
-        """Shutdown the service."""
-        # Schedule shutdown after sending response
-        _ = asyncio.create_task(self.shutdown())
-
-        return {"status": "ok", "message": "Service shutting down"}
-
-    async def shutdown(self):
-        """Gracefully shut down the service."""
-        logger.info("Shutting down DAQ Monitoring Service...")
-
-        # Stop the main loops
-        self.running = False
-        self.polling_active = False
-
-        # Disconnect DAQ
-        try:
-            logger.info("Disconnecting the DAQ6510...")
-            if self.daq_interface and await self.daq_interface.is_connected():
-                await self.daq_interface.disconnect()
-        except Exception as exc:
-            logger.error(f"Error disconnecting from DAQ: {exc}")
+                response = daq.perform_measurement(channel_list=channel_list, count=count, interval=interval)

-
-
-            logger.info("Closing ZeroMQ socket and terminate context...")
-            self.socket.close()
-            self.ctx.term()
-        except Exception as exc:
-            logger.error(f"Error closing ZeroMQ socket: {exc}")
-
-        logger.info("Service shutdown complete")
-
-
-class DAQMonitorClient:
-    """Simple client for interacting with the DAQ Monitor Service."""
-
-    def __init__(self, server_address: str = "localhost", port: int = 5556, timeout: float = 5.0):
-        """Initialize the client.
-
-        Args:
-            server_address: Address of the monitoring service
-            port: ZeroMQ port
-            timeout: Command timeout in seconds
-        """
-        self.server_address = server_address
-        self.port = port
-        self.timeout = timeout
-
-        self.ctx = zmq.Context().instance()
-        self.socket = None
-
-    def connect(self):
-        """Connect to the DAQ Monitoring service."""
-        self.socket = self.ctx.socket(zmq.DEALER)
-        self.socket.connect(f"tcp://{self.server_address}:{self.port}")
-        self.socket.setsockopt(zmq.RCVTIMEO, int(self.timeout * 1000))
-
-    def disconnect(self):
-        """Close the client connection."""
-        self.socket.close(linger=100)
-        self.ctx.term()
-
-    def __enter__(self):
-        self.connect()
-        return self
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        self.disconnect()
-        if exc_type:
-            logger.error(f"Caught {exc_type}: {exc_val}")
-
-    def _send_command(self, command: str, params: dict[str, Any] = None) -> dict[str, Any]:
-        """Send a command to the monitoring service.
-
-        Args:
-            command: Command name
-            params: Optional command parameters
-
-        Returns:
-            Response from the service
-        """
-        params = params or {}
-        message = {"command": command, "params": params}
+                if killer.term_signal_received:
+                    break

-
-
-
-
-
-            return {"status": "error", "message": f"ZMQ error: {exc}"}
-        except Exception as exc:
-            return {"status": "error", "message": f"Error: {exc}"}
+                if not response:
+                    logger.warning("Received an empty response from the DAQ6510, check the connection with the device.")
+                    logger.warning(f"Response: {response=}")
+                    time.sleep(1.0)
+                    continue

-
-
+                if isinstance(response, Failure):
+                    logger.warning("Received a Failure from the DAQ6510 Control Server:")
+                    logger.warning(f"Response: {response}")
+                    time.sleep(1.0)
+                    continue

-
-            channels: List of channels to monitor
-            interval: Polling interval in seconds
+                # Process and save the response

-
-
-        """
-        params = {}
-        if channels is not None:
-            params["channels"] = channels
-        if interval is not None:
-            params["interval"] = interval
+                if VERBOSE_DEBUG:
+                    logger.debug(f"{response=}")

-
+                dts = response[0][1].strip()
+                dt = datetime.datetime.strptime(dts[:-3], "%m/%d/%Y %H:%M:%S.%f")
+                datetime_string = format_datetime(dt.replace(tzinfo=datetime.timezone.utc))

-
-
+                data: dict[str, Any] = {hk_conversion_table[measure[0]]: float(measure[2]) for measure in response}
+                data.update({"timestamp": datetime_string})

-
-            Response from the service
-        """
-        return self._send_command("STOP_POLLING")
+                # FIXME: we probably need to do something with the units...

-
-        """Set polling interval.
+                units = [measure[3] for measure in response]

-
-
+                if VERBOSE_DEBUG:
+                    logger.debug(f"{data=}")

-
-            Response from the service
-        """
-        return self._send_command("SET_INTERVAL", {"interval": interval})
+                storage.save({"origin": origin, "data": data})

-
-        """Set channels to monitor.
+                # Now extract channels from the response to update the metrics

-
-
+                for channel in [measure[0] for measure in response]:
+                    if channel in hk_conversion_table:
+                        metrics_name = hk_conversion_table[channel]
+                        save_metrics(metrics_client, origin, data)

-
-            Response from the service
-        """
-        return self._send_command("SET_CHANNELS", {"channels": channels})
+                # wait for the next measurement to be done (delay)

-
-        """Get a reading from the given channel.
+                time.sleep(delay)

-
-
-
-
+            except KeyboardInterrupt:
+                logger.debug("Interrupt received, terminating...")
+                break
+            except Exception as exc:
+                logger.warning(f"{type_name(exc)}: {exc}", exc_info=True)
+                logger.warning("Got a corrupt response from the DAQ6510. Check log messages for 'DAS Exception'.")
+                time.sleep(1.0)
+                continue
+
+        storage.unregister({"origin": origin})
+
+    logger.info("DAQ6510 Data Acquisition System terminated.")
+
+
+def setup_metrics_client():
+    token = os.getenv("INFLUXDB3_AUTH_TOKEN")
+    project = os.getenv("PROJECT")
+
+    if project and token:
+        metrics_client = get_metrics_repo(
+            "influxdb", {"host": "http://localhost:8181", "database": project, "token": token}
+        )
+        metrics_client.connect()
+    else:
+        metrics_client = None
+        logger.warning(
+            "INFLUXDB3_AUTH_TOKEN and/or PROJECT environment variable is not set. "
+            "Metrics will not be propagated to InfluxDB."
+        )
+
+    return metrics_client
+
+
+def save_metrics(metrics_client, origin, data):
+    try:
+        if metrics_client:
+            point = {
+                "measurement": origin.lower(),
+                "tags": {"site_id": SITE_ID, "origin": origin},
+                "fields": {hk_name.lower(): data[hk_name] for hk_name in data if hk_name != "timestamp"},
+                "time": str_to_datetime(data["timestamp"]),
+            }
+            metrics_client.write(point)
+        else:
+            logger.warning(
+                f"Could not write {origin} metrics to the time series database (self.metrics_client is None)."
+            )
+    except NewConnectionError:
+        logger.warning(
+            f"No connection to the time series database could be established to propagate {origin} metrics. Check "
+            f"whether this service is (still) running."
+        )

-    def get_last_reading(self) -> dict:
-        return self._send_command("GET_LAST_READING")

-
-
+app = typer.Typer(
+    name="daq6510_mon",
+    help="DAQ6510 Data Acquisition Unit, Keithley, temperature monitoring (monitoring)",
+    no_args_is_help=True,
+)

-        Returns:
-            Status information
-        """
-        return self._send_command("GET_STATUS")

-
-
+@app.command()
+def start(input_file: str = typer.Option("", help="YAML file containing the Setup for the DAQ6510")):
+    """Starts the Keithley DAQ6510 Monitoring Service."""

-
-            Response from the service
-        """
-        return self._send_command("SHUTDOWN")
+    multiprocessing.current_process().name = "daq6510_mon (start)"

+    with remote_logging():
+        from egse.env import setup_env

-
-    monitor = DAQ6510Monitor(
-        daq_hostname="192.168.68.77",
-        daq_port=5025,
-        zmq_port=5556,
-        log_file="temperature_readings.log",
-        channels=["101", "102"],
-        poll_interval=10.0,
-    )
+        setup_env()

-
+        try:
+            daq6510(count=None, interval=None, delay=None, channel_list=None, input_file=input_file)
+        except KeyboardInterrupt:
+            logger.debug("Shutdown requested...exiting")
+        except SystemExit as exit_code:
+            logger.debug("System Exit with code {}.".format(exit_code))
+            sys.exit(exit_code.code)
+        except Exception:
+            msg = "Cannot start the DAQ6510 Monitoring Service"
+            logger.exception(msg)
+            rich.print(f"[red]{msg}.")


 if __name__ == "__main__":
-
-        level=logging.DEBUG,
-        format="[%(asctime)s] %(threadName)-12s %(levelname)-8s %(name)-12s %(lineno)5d:%(module)-20s %(message)s",
-    )
-
-    asyncio.run(main())
+    sys.exit(app())
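The rewritten `daq6510_mon.py` takes all of its acquisition parameters from the `gse.DAQ6510` section of the Setup: `channels`, `route.scan.count.scan`, `route.scan.interval`, `route.delay`, and a `sensors` tree whose per-function settings are flattened (minus the `channels` key) and passed to `configure_sensors()`, with the function name upper-cased into the SENSE key. The module docstring still carries a placeholder for the YAML excerpt, so the fragment below is only an illustrative sketch inferred from those attribute paths; the group name, function name, channel strings, and sense settings are assumptions, not the package's actual schema.

```yaml
# Hypothetical Setup excerpt, inferred from the attribute paths that daq6510_mon.py reads.
# All names and values below are illustrative assumptions.
gse:
  DAQ6510:
    channels: "(@101:106)"          # passed to setup_measurements()/perform_measurement()
    route:
      delay: 10                     # seconds to sleep between acquisitions (route.delay)
      scan:
        interval: 1                 # seconds between measurements in a scan (route.scan.interval)
        count:
          scan: 1                   # measurements per acquisition (route.scan.count.scan)
    sensors:
      PT100_GROUP:                  # sensor group name (illustrative)
        temperature:                # upper-cased to form the SENSE function key
          channels: "(@101:106)"    # stripped out before the remaining keys are flattened
          transducer: FRTD          # example per-function setting (illustrative)
```

With the DAQ6510 Control Server and the Storage Manager running, the service is started through the Typer app defined at the bottom of the module, e.g. `python -m egse.tempcontrol.keithley.daq6510_mon start --input-file setup.yaml`; the extra entry in `entry_points.txt` presumably exposes the same `start` command as a `daq6510_mon` console script.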