keithley-tempcontrol 0.17.4__py3-none-any.whl → 0.18.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,560 +1,313 @@
1
- import asyncio
2
- import datetime
3
- import json
4
- import signal
5
- import time
6
- from asyncio import Task
7
- from pathlib import Path
8
- from typing import Any
9
- from typing import Callable
10
- from typing import Optional
1
+ """
2
+ The synchronous monitoring service is a small application that performs measurements on
3
+ the DAQ6510.
11
4
 
12
- import typer
13
- import zmq
14
- import zmq.asyncio
15
-
16
- from egse.device import DeviceConnectionError
17
- from egse.device import DeviceTimeoutError
18
- from egse.log import logger
19
- from egse.settings import Settings
20
- from egse.system import TyperAsyncCommand
21
- from egse.tempcontrol.keithley.daq6510_adev import DAQ6510
5
+ The service reads the configuration for the Keithley DAQ6510 from the
6
+ Configuration Manager and then configures the device. When no Configuration Manager is
7
+ available, the service can also be started with the name of a file to read the configuration from.
8
+ This file must be in YAML format (a sketch of the expected structure follows the excerpt below).
22
9
 
23
- settings = Settings.load("Keithley DAQ6510")
10
+ ```
11
+ insert an excerpt of a sample YAML configuration file here...
12
+ ```
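The excerpt above is still a placeholder. Until it is filled in, the structure this service expects can be inferred from the attributes it reads from the loaded Setup. A minimal sketch, assuming a hypothetical file name and using only the attribute paths accessed in `daq6510()` below:

```python
from egse.setup import Setup

# Hypothetical file name; the attribute paths below mirror the accesses in daq6510().
setup = Setup.from_yaml_file("setup_daq6510.yaml")

daq_cfg = setup.gse.DAQ6510
channels = daq_cfg.channels              # default channel list when none is given explicitly
count = daq_cfg.route.scan.count.scan    # number of measurements per acquisition
interval = daq_cfg.route.scan.interval   # seconds between measurements
delay = daq_cfg.route.delay              # seconds between acquisitions
sensors = daq_cfg.sensors                # per-sensor, per-function settings, each with its own 'channels'
```

Arguments passed explicitly to `daq6510()` (count, interval, delay, channel_list) take precedence over these Setup entries, as handled further down in that function.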
24
13
 
25
- DAQ_DEV_HOST = settings.get("HOSTNAME")
26
- DAQ_DEV_PORT = settings.get("PORT")
14
+ The monitoring service is started from the command line, e.g. with `daq6510_mon start`, or with `daq6510_mon start --input-file <setup.yaml>` to read the configuration from a YAML file instead of the Configuration Manager (the exact console-script name depends on the package entry points).
27
15
 
28
- DAQ_MON_CMD_PORT = 5556
29
16
 
17
+ """
30
18
 
31
- class DAQ6510Monitor:
32
- """
33
- DAQ6510 temperature monitoring service with ZeroMQ command interface.
34
-
35
- """
36
-
37
- def __init__(
38
- self,
39
- daq_hostname: str,
40
- daq_port: int = DAQ_DEV_PORT,
41
- zmq_port: int = DAQ_MON_CMD_PORT,
42
- log_file: str = "temperature_readings.log",
43
- channels: list[str] = None,
44
- poll_interval: float = 60.0,
45
- ):
46
- """Initialize the DAQ6510 monitoring service.
47
-
48
- Args:
49
- daq_hostname: Hostname or IP of the DAQ6510
50
- daq_port: TCP port for DAQ6510 SCPI interface
51
- zmq_port: Port for ZeroMQ command interface
52
- log_file: Path to log file for temperature readings
53
- channels: List of channels to monitor (e.g. ["101", "102"])
54
- poll_interval: Initial polling interval in seconds
55
- """
56
- self.daq_hostname = daq_hostname
57
- self.daq_port = daq_port
58
- self.zmq_port = zmq_port
59
- self.log_file = Path(log_file)
60
- self.channels = channels or ["101", "102", "103", "104"]
61
- self.poll_interval = poll_interval
62
-
63
- # Setup ZeroMQ context
64
- self.ctx = zmq.asyncio.Context()
65
- self.socket = self.ctx.socket(zmq.ROUTER)
66
- self.socket.bind(f"tcp://*:{zmq_port}")
67
-
68
- # Service state
69
- self.running = False
70
- self.polling_active = False
71
- self.daq_interface = None
72
- self.command_handlers: dict[str, Callable] = {
73
- "START_POLLING": self._handle_start_polling,
74
- "STOP_POLLING": self._handle_stop_polling,
75
- "SET_INTERVAL": self._handle_set_interval,
76
- "SET_CHANNELS": self._handle_set_channels,
77
- "GET_STATUS": self._handle_get_status,
78
- "GET_READING": self._handle_get_reading,
79
- "GET_LAST_READING": self._handle_get_last_reading,
80
- "SHUTDOWN": self._handle_shutdown,
81
- }
82
-
83
- # Keep a record of the last measurement
84
- self._last_reading: dict = {}
85
-
86
- # Make sure the log directory exists
87
- self.log_file.parent.mkdir(exist_ok=True, parents=True)
88
-
89
- # Create DAQ interface
90
- # In this case we use the device itself, no control server. That means
91
- # the monitoring must be the only service connecting to the device.
92
- self.daq_interface = DAQ6510(hostname=daq_hostname, port=daq_port)
93
-
94
- async def start(self):
95
- """Start the monitoring service."""
96
- logger.info(f"Starting DAQ6510 Monitoring Service on ZMQ port {self.zmq_port}")
97
- self.running = True
98
-
99
- def handle_shutdown():
100
- asyncio.create_task(self.shutdown())
101
-
102
- # Register signal handlers for graceful shutdown
103
- for sig in (signal.SIGINT, signal.SIGTERM):
104
- asyncio.get_event_loop().add_signal_handler(sig, handle_shutdown)
105
-
106
- # Start the main service tasks
107
- await asyncio.gather(self.command_listener(), self.connect_daq(), return_exceptions=True)
108
-
109
- def done_polling(self, task: Task):
110
- if task.exception():
111
- logger.error(f"Polling loop ended unexpectedly: {task.exception()}")
112
- logger.info(f"Done polling ({task.get_name()}).")
113
- self.polling_active = False
114
-
115
- async def connect_daq(self):
116
- """Establish connection to the DAQ6510."""
117
- while self.running:
118
- init_commands = [
119
- ('TRAC:MAKE "test1", 1000', False), # create a new buffer
120
- # settings for channel 1 and 2 of slot 1
121
- ('SENS:FUNC "TEMP", (@101:102)', False), # set the function to temperature
122
- ("SENS:TEMP:TRAN FRTD, (@101)", False), # set the transducer to 4-wire RTD
123
- ("SENS:TEMP:RTD:FOUR PT100, (@101)", False), # set the type of the 4-wire RTD
124
- ("SENS:TEMP:TRAN RTD, (@102)", False), # set the transducer to 2-wire RTD
125
- ("SENS:TEMP:RTD:TWO PT100, (@102)", False), # set the type of the 2-wire RTD
126
- ('ROUT:SCAN:BUFF "test1"', False),
127
- ("ROUT:SCAN:CRE (@101:102)", False),
128
- ("ROUT:CHAN:OPEN (@101:102)", False),
129
- ("ROUT:STAT? (@101:102)", True),
130
- ("ROUT:SCAN:STAR:STIM NONE", False),
131
- # ("ROUT:SCAN:ADD:SING (@101, 102)", False), # not sure what this does, not really needed
132
- ("ROUT:SCAN:COUN:SCAN 1", False), # not sure if this is needed in this setting
133
- # ("ROUT:SCAN:INT 1", False),
134
- ]
135
-
136
- try:
137
- logger.info(f"Connecting to DAQ6510 at {self.daq_hostname}:{self.daq_port}")
138
- await self.daq_interface.connect()
139
- logger.info("Successfully connected to DAQ6510.")
140
- await self.daq_interface.initialize(commands=init_commands, reset_device=True)
141
- logger.info("Successfully initialized DAQ6510 for measurements.")
142
-
143
- # If we were polling before, restart it.
144
- # The first time we enter this loop, we are not polling.
145
- if self.polling_active:
146
- # QUESTION: Do we need to await here?
147
- polling_task = asyncio.create_task(self.polling_loop())
148
-
149
- # But we can add error handling for the task
150
- polling_task.add_done_callback(self.done_polling)
151
-
152
- # Keep checking connection status periodically
153
- while self.running and await self.daq_interface.is_connected():
154
- logger.info("Checking DAQ6510 connection...")
155
- await asyncio.sleep(10)
156
-
157
- if self.running:
158
- logger.warning("Lost connection to DAQ6510")
159
- await self.daq_interface.disconnect()
160
-
161
- except (DeviceConnectionError, DeviceTimeoutError) as exc:
162
- logger.error(f"Failed to connect to DAQ6510: {exc}")
163
- await asyncio.sleep(5) # Wait before retrying
164
-
165
- async def polling_loop(self):
166
- """Main polling loop for temperature measurements."""
167
- logger.info(f"Starting temperature polling loop (interval: {self.poll_interval}s, channels: {self.channels})")
168
-
169
- # The next lines are a way to calculate the sleep time between two measurements, this takes the time of the
170
- # measurement itself into account.
171
- def interval():
172
- next_time = time.perf_counter()
173
- while True:
174
- next_time += self.poll_interval
175
- yield max(next_time - time.perf_counter(), 0)
176
-
177
- g_interval = interval()
178
-
179
- while self.running and self.polling_active:
180
- try:
181
- if not await self.daq_interface.is_connected():
182
- logger.warning("DAQ6510 not connected, skipping temperature reading")
183
- await asyncio.sleep(5)
184
- continue
19
+ import datetime
20
+ import multiprocessing
21
+ import os
22
+ import sys
23
+ import time
24
+ from pathlib import Path
25
+ from typing import Any
185
26
 
186
- timestamp = datetime.datetime.now().isoformat()
187
- readings = {}
27
+ import rich
28
+ import typer
29
+ from urllib3.exceptions import NewConnectionError
188
30
 
189
- # Read temperature from each channel
190
- for channel in self.channels:
191
- try:
192
- # temp = random.random()
193
- temp = await self.daq_interface.get_measurement(channel)
194
- readings[channel] = temp
195
- except (DeviceConnectionError, DeviceTimeoutError, ValueError) as exc:
196
- logger.error(f"Error reading channel {channel}: {exc}")
197
- readings[channel] = None
31
+ from egse.env import bool_env
32
+ from egse.hk import read_conversion_dict
33
+ from egse.log import logger
34
+ from egse.logger import remote_logging
35
+ from egse.metrics import get_metrics_repo
36
+ from egse.response import Failure
37
+ from egse.scpi import count_number_of_channels, get_channel_names
38
+ from egse.settings import get_site_id
39
+ from egse.setup import Setup, load_setup
40
+ from egse.storage import StorageProxy, is_storage_manager_active
41
+ from egse.storage.persistence import CSV
42
+ from egse.system import SignalCatcher, flatten_dict, format_datetime, now, str_to_datetime, type_name
43
+ from egse.tempcontrol.keithley.daq6510 import DAQ6510Proxy
44
+ from egse.tempcontrol.keithley.daq6510_cs import is_daq6510_cs_active
198
45
 
199
- # Log the readings
200
- log_entry = {"timestamp": timestamp, "readings": readings}
46
+ VERBOSE_DEBUG = bool_env("VERBOSE_DEBUG")
47
+ SITE_ID = get_site_id()
201
48
 
202
- # Append to log file
203
- with open(self.log_file, "a") as fd:
204
- fd.write(json.dumps(log_entry) + "\n")
205
49
 
206
- self._last_reading.update({"timestamp": timestamp, "readings": readings})
50
+ def load_setup_from_input_file(input_file: str | Path) -> Setup | None:
51
+ """Loads a Setup YAML file from disk."""
52
+ input_file = Path(input_file).resolve()
207
53
 
208
- logger.info(f"Temperature readings: {readings}")
54
+ if not input_file.exists():
55
+ logger.error(f"ERROR: Input file ({input_file}) doesn't exists.")
56
+ return None
209
57
 
210
- except Exception as exc:
211
- logger.exception(f"Error in polling loop: {exc}")
58
+ return Setup.from_yaml_file(input_file)
212
59
 
213
- finally:
214
- # Wait for next polling interval, we account for the time needed to perform the measurement.
215
- await asyncio.sleep(next(g_interval))
216
60
 
217
- logger.info("Temperature polling loop stopped")
61
+ def daq6510(count, interval, delay, channel_list, input_file: str):
62
+ """
63
+ Run the monitoring service for the DAQ6510.
218
64
 
219
- async def command_listener(self):
220
- """ZeroMQ command interface listener."""
221
- logger.info("Command listener started")
65
+ Args:
66
+ count: Number of measurements to perform per acquisition [optional]
67
+ interval: Time interval between measurements in seconds [optional]
68
+ delay: Delay between acquisitions in seconds [optional]
69
+ channel_list: Comma-separated list of channels to acquire data from [optional]
70
+ input_file: YAML file containing the Setup for the DAQ6510 [optional]
222
71
 
223
- while self.running:
224
- try:
225
- # Wait for next message
226
- message = await self.socket.recv_multipart()
72
+ """
227
73
 
228
- # Parse the message
229
- if len(message) < 3:
230
- logger.warning(f"Received malformed message: {message}")
231
- continue
74
+ if input_file:
75
+ setup = load_setup_from_input_file(input_file)
76
+ else:
77
+ setup = load_setup()
232
78
 
233
- identity, empty, *payload = message
79
+ if setup is None:
80
+ logger.error("ERROR: Could not load setup.")
81
+ sys.exit(1)
234
82
 
235
- try:
236
- # Parse the command and parameters
237
- command_data = json.loads(payload[0].decode("utf-8"))
238
- command = command_data.get("command")
239
- params = command_data.get("params", {})
83
+ if VERBOSE_DEBUG:
84
+ logger.debug(f"Loaded setup: {setup}")
240
85
 
241
- logger.info(f"Received command: {command} from {identity}")
86
+ if not hasattr(setup, "gse"):
87
+ logger.error("ERROR: No GSE section in the loaded Setup.")
88
+ sys.exit(1)
242
89
 
243
- # Handle the command
244
- if command in self.command_handlers:
245
- response = await self.command_handlers[command](params)
246
- else:
247
- response = {"status": "error", "message": f"Unknown command: {command}"}
90
+ try:
91
+ hk_conversion_table = read_conversion_dict("DAQ6510-MON", use_site=True, setup=setup)
92
+ column_names = list(hk_conversion_table.values())
93
+ except Exception as exc:
94
+ logger.warning(f"WARNING: Failed to read telemetry dictionary: {exc}")
95
+ hk_conversion_table = {"101": "PT100-4", "102": "PT100-2"}
96
+ column_names = list(hk_conversion_table.values())
248
97
 
249
- except json.JSONDecodeError:
250
- response = {"status": "error", "message": "Invalid JSON format"}
251
- except Exception as exc:
252
- logger.exception(f"Error processing command: {exc}")
253
- response = {"status": "error", "message": str(exc)}
98
+ if not is_daq6510_cs_active():
99
+ logger.error(
100
+ "The DAQ6510 Control Server is not running, start the 'daq6510_cs' command "
101
+ "before running the data acquisition."
102
+ )
103
+ return
254
104
 
255
- # Send response
256
- await self.socket.send_multipart([identity, b"", json.dumps(response).encode("utf-8")])
105
+ if not is_storage_manager_active():
106
+ logger.error("The storage manager is not running, start the core services before running the data acquisition.")
107
+ return
257
108
 
258
- except Exception as exc:
259
- logger.exception(f"Error in command listener: {exc}")
260
- await asyncio.sleep(1)
109
+ if "DAQ6510" not in setup.gse: # type: ignore
110
+ logger.error("ERROR: no DAQ6510 entry in the loaded Setup.")
111
+ sys.exit(1)
261
112
 
262
- async def _handle_start_polling(self, params: dict[str, Any]) -> dict[str, Any]:
263
- """Start temperature polling."""
264
- if not self.polling_active:
265
- self.polling_active = True
113
+ if not channel_list:
114
+ channel_list = setup.gse.DAQ6510.channels # type: ignore
266
115
 
267
- # If channels provided, update them
268
- if "channels" in params:
269
- self.channels = params["channels"]
116
+ if not count:
117
+ count = setup.gse.DAQ6510.route.scan.count.scan # type: ignore
270
118
 
271
- # If interval provided, update it
272
- if "interval" in params:
273
- self.poll_interval = float(params["interval"])
119
+ if not interval:
120
+ interval = setup.gse.DAQ6510.route.scan.interval # type: ignore
274
121
 
275
- # Start polling loop
276
- polling_task = asyncio.create_task(self.polling_loop())
122
+ if not delay:
123
+ delay = setup.gse.DAQ6510.route.delay # type: ignore
277
124
 
278
- # But we can add error handling for the task
279
- polling_task.add_done_callback(
280
- lambda t: logger.error(f"Polling loop ended unexpectedly: {t.exception()}") if t.exception() else None
281
- )
125
+ count, interval, delay = int(count), int(interval), int(delay)
282
126
 
283
- return {
284
- "status": "ok",
285
- "message": f"Polling started with interval {self.poll_interval}s and channels {self.channels}",
286
- }
287
- else:
288
- return {"status": "ok", "message": "Polling already active"}
127
+ channel_count = count_number_of_channels(channel_list)
128
+ channel_names = get_channel_names(channel_list)
289
129
 
290
- async def _handle_stop_polling(self, params: dict[str, Any]) -> dict[str, Any]:
291
- """Stop temperature polling."""
292
- if self.polling_active:
293
- self.polling_active = False
294
- return {"status": "ok", "message": "Polling stopped"}
295
- else:
296
- return {"status": "ok", "message": "Polling already stopped"}
130
+ metrics_client = setup_metrics_client()
297
131
 
298
- async def _handle_set_interval(self, params: dict[str, Any]) -> dict[str, Any]:
299
- """Set polling interval."""
300
- if "interval" not in params:
301
- return {"status": "error", "message": "Missing required parameter: interval"}
132
+ # Initialize some variables that will be used for registration to the Storage Manager
302
133
 
303
- try:
304
- interval = float(params["interval"])
305
- if interval <= 0:
306
- return {"status": "error", "message": "Interval must be positive"}
134
+ origin = "DAQ6510-MON"
135
+ persistence_class = CSV
136
+ prep = {
137
+ "mode": "a",
138
+ "ending": "\n",
139
+ "column_names": ["timestamp", *column_names],
140
+ }
307
141
 
308
- old_interval = self.poll_interval
309
- self.poll_interval = interval
142
+ killer = SignalCatcher()
310
143
 
311
- return {"status": "ok", "message": f"Polling interval changed from {old_interval}s to {interval}s"}
312
- except ValueError:
313
- return {"status": "error", "message": "Invalid interval format"}
144
+ with DAQ6510Proxy() as daq, StorageProxy() as storage:
145
+ daq.reset()
314
146
 
315
- async def _handle_set_channels(self, params: dict[str, Any]) -> dict[str, Any]:
316
- """Set channels to monitor."""
317
- if "channels" not in params or not isinstance(params["channels"], list):
318
- return {"status": "error", "message": "Missing or invalid parameter: channels (should be a list)"}
147
+ dt = now()
148
+ daq.set_time(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second)
149
+ logger.info(f"DAQ6510 date and time set: {daq.get_time()}")
319
150
 
320
- old_channels = self.channels.copy()
321
- self.channels = params["channels"]
151
+ storage.register({"origin": origin, "persistence_class": persistence_class, "prep": prep})
322
152
 
323
- return {"status": "ok", "message": f"Monitoring channels changed from {old_channels} to {self.channels}"}
153
+ # This will write a comment line to the CSV file with the column names. This might be useful when
154
+ # the sensors are reconfigured and the number or names of the columns change.
155
+ storage.save({"origin": origin, "data": f"# columns: {column_names}"})
324
156
 
325
- async def _handle_get_last_reading(self, params: dict[str, Any]):
326
- return self._last_reading
157
+ for sensor in setup.gse.DAQ6510.sensors: # type: ignore
158
+ for function in setup.gse.DAQ6510.sensors[sensor]: # type: ignore
159
+ sense = {
160
+ function.upper(): [
161
+ (key, value)
162
+ for key, value in flatten_dict(setup.gse.DAQ6510.sensors[sensor][function]).items() # type: ignore
163
+ if key != "channels"
164
+ ]
165
+ }
166
+ function_channel_list = setup.gse.DAQ6510.sensors[sensor][function].channels # type: ignore
167
+ if VERBOSE_DEBUG:
168
+ logger.debug(f"{sense=}")
169
+ logger.debug(f"{function_channel_list=}")
170
+ daq.configure_sensors(channel_list=function_channel_list, sense=sense)
327
171
 
328
- async def _handle_get_reading(self, params: dict[str, Any]):
329
- """Get a reading for the given channel(s)."""
330
- logger.info(f"GET_READING – {params = }")
172
+ logger.info(f"global: {channel_list=}, {channel_count=}")
331
173
 
332
- readings = {"status": "ok", "data": {}}
174
+ daq.setup_measurements(channel_list=channel_list)
333
175
 
334
- for channel in params["channels"]:
176
+ while True:
335
177
  try:
336
- temp = await self.daq_interface.get_measurement(channel)
337
- readings["data"][channel] = temp
338
- except (DeviceConnectionError, DeviceTimeoutError, ValueError, RuntimeError) as exc:
339
- logger.error(f"Error reading channel {channel}: {exc}")
340
- readings["data"][channel] = None
341
- readings.update({"status": "error", "message": f"Error reading channel {channel}"})
342
-
343
- return readings
344
-
345
- async def _handle_get_status(self, params: dict[str, Any]) -> dict[str, Any]:
346
- """Get current service status."""
347
- connected = False
348
- try:
349
- if self.daq_interface:
350
- connected = await self.daq_interface.is_connected()
351
- except Exception:
352
- connected = False
353
-
354
- return {
355
- "status": "ok",
356
- "data": {
357
- "service_running": self.running,
358
- "polling_active": self.polling_active,
359
- "poll_interval": self.poll_interval,
360
- "channels": self.channels,
361
- "daq_connected": connected,
362
- "daq_hostname": self.daq_hostname,
363
- "daq_port": self.daq_port,
364
- },
365
- }
366
-
367
- async def _handle_shutdown(self, params: dict[str, Any]) -> dict[str, Any]:
368
- """Shutdown the service."""
369
- # Schedule shutdown after sending response
370
- _ = asyncio.create_task(self.shutdown())
371
-
372
- return {"status": "ok", "message": "Service shutting down"}
373
-
374
- async def shutdown(self):
375
- """Gracefully shut down the service."""
376
- logger.info("Shutting down DAQ Monitoring Service...")
377
-
378
- # Stop the main loops
379
- self.running = False
380
- self.polling_active = False
381
-
382
- # Disconnect DAQ
383
- try:
384
- logger.info("Disconnecting the DAQ6510...")
385
- if self.daq_interface and await self.daq_interface.is_connected():
386
- await self.daq_interface.disconnect()
387
- except Exception as exc:
388
- logger.error(f"Error disconnecting from DAQ: {exc}")
178
+ response = daq.perform_measurement(channel_list=channel_list, count=count, interval=interval)
389
179
 
390
- # Close ZeroMQ socket
391
- try:
392
- logger.info("Closing ZeroMQ socket and terminate context...")
393
- self.socket.close()
394
- self.ctx.term()
395
- except Exception as exc:
396
- logger.error(f"Error closing ZeroMQ socket: {exc}")
397
-
398
- logger.info("Service shutdown complete")
399
-
400
-
401
- class DAQMonitorClient:
402
- """A simple client for interacting with the DAQ Monitor Service."""
403
-
404
- def __init__(self, server_address: str = "localhost", port: int = DAQ_MON_CMD_PORT, timeout: float = 5.0):
405
- """Initialize the client.
406
-
407
- Args:
408
- server_address: Address of the monitoring service
409
- port: ZeroMQ port
410
- timeout: Command timeout in seconds
411
- """
412
- self.server_address = server_address
413
- self.port = port
414
- self.timeout = timeout
415
-
416
- self.ctx = zmq.Context().instance()
417
- self.socket = None
418
-
419
- def connect(self):
420
- """Connect to the DAQ Monitoring service."""
421
- self.socket = self.ctx.socket(zmq.DEALER)
422
- self.socket.connect(f"tcp://{self.server_address}:{self.port}")
423
- self.socket.setsockopt(zmq.RCVTIMEO, int(self.timeout * 1000))
424
-
425
- def disconnect(self):
426
- """Close the client connection."""
427
- self.socket.close(linger=100)
428
- self.ctx.term()
429
-
430
- def __enter__(self):
431
- self.connect()
432
- return self
433
-
434
- def __exit__(self, exc_type, exc_val, exc_tb):
435
- self.disconnect()
436
- if exc_type:
437
- logger.error(f"Caught {exc_type}: {exc_val}")
438
-
439
- def _send_command(self, command: str, params: dict[str, Any] = None) -> dict[str, Any]:
440
- """Send a command to the monitoring service.
441
-
442
- Args:
443
- command: Command name
444
- params: Optional command parameters
445
-
446
- Returns:
447
- Response from the service as a dictionary.
448
- """
449
- params = params or {}
450
- message = {"command": command, "params": params}
451
-
452
- try:
453
- self.socket.send_multipart([b"", json.dumps(message).encode("utf-8")])
454
- _, response_data = self.socket.recv_multipart()
455
- return json.loads(response_data.decode("utf-8"))
456
- except zmq.ZMQError as exc:
457
- return {"status": "error", "message": f"ZMQ error: {exc}"}
458
- except Exception as exc:
459
- return {"status": "error", "message": f"Error: {exc}"}
180
+ if killer.term_signal_received:
181
+ break
460
182
 
461
- def start_polling(self, channels: Optional[list[str]] = None, interval: Optional[float] = None) -> dict[str, Any]:
462
- """Start polling on specified channels.
463
-
464
- Args:
465
- channels: List of channels to monitor
466
- interval: Polling interval in seconds
467
-
468
- Returns:
469
- Response from the service
470
- """
471
- params = {}
472
- if channels is not None:
473
- params["channels"] = channels
474
- if interval is not None:
475
- params["interval"] = interval
183
+ if not response:
184
+ logger.warning("Received an empty response from the DAQ6510, check the connection with the device.")
185
+ logger.warning(f"Response: {response=}")
186
+ time.sleep(1.0)
187
+ continue
476
188
 
477
- return self._send_command("START_POLLING", params)
189
+ if isinstance(response, Failure):
190
+ logger.warning("Received a Failure from the DAQ6510 Control Server:")
191
+ logger.warning(f"Response: {response}")
192
+ time.sleep(1.0)
193
+ continue
478
194
 
479
- def stop_polling(self) -> dict[str, Any]:
480
- """Stop polling.
195
+ # Process and save the response
481
196
 
482
- Returns:
483
- Response from the service
484
- """
485
- return self._send_command("STOP_POLLING")
197
+ if VERBOSE_DEBUG:
198
+ logger.debug(f"{response=}")
486
199
 
487
- def set_interval(self, interval: float) -> dict[str, Any]:
488
- """Set polling interval.
200
+ dts = response[0][1].strip()
201
+ dt = datetime.datetime.strptime(dts[:-3], "%m/%d/%Y %H:%M:%S.%f")
202
+ datetime_string = format_datetime(dt.replace(tzinfo=datetime.timezone.utc))
489
203
 
490
- Args:
491
- interval: New polling interval in seconds
204
+ data: dict[str, Any] = {hk_conversion_table[measure[0]]: float(measure[2]) for measure in response}
205
+ data.update({"timestamp": datetime_string})
492
206
 
493
- Returns:
494
- Response from the service
495
- """
496
- return self._send_command("SET_INTERVAL", {"interval": interval})
207
+ # FIXME: we probably need to do something with the units...
497
208
 
498
- def set_channels(self, channels: list[str]) -> dict[str, Any]:
499
- """Set channels to monitor.
209
+ units = [measure[3] for measure in response]
500
210
 
501
- Args:
502
- channels: List of channel identifiers
211
+ if VERBOSE_DEBUG:
212
+ logger.debug(f"{data=}")
503
213
 
504
- Returns:
505
- Response from the service
506
- """
507
- return self._send_command("SET_CHANNELS", {"channels": channels})
214
+ storage.save({"origin": origin, "data": data})
508
215
 
509
- def get_reading(self, channels: list[str]) -> dict[str, float]:
510
- """Get a reading from the given channel.
216
+ # Now extract channels from the response to update the metrics
511
217
 
512
- Returns:
513
- A dictionary with the value of the measurement for the given channel.
514
- """
515
- return self._send_command("GET_READING", {"channels": channels})
218
+ # `data` already holds the values for all channels in the conversion table, so the
219
+ # metrics point only needs to be written once per acquisition.
220
+ if any(measure[0] in hk_conversion_table for measure in response):
221
+ save_metrics(metrics_client, origin, data)
516
222
 
517
- def get_last_reading(self) -> dict:
518
- return self._send_command("GET_LAST_READING")
223
+ # wait for the next measurement to be done (delay)
519
224
 
520
- def get_status(self) -> dict[str, Any]:
521
- """Get current service status.
225
+ time.sleep(delay)
522
226
 
523
- To confirm the status is 'ok', check the response for the key 'status'.
227
+ except KeyboardInterrupt:
228
+ logger.debug("Interrupt received, terminating...")
229
+ break
230
+ except Exception as exc:
231
+ logger.warning(f"{type_name(exc)}: {exc}", exc_info=True)
232
+ logger.warning("Got a corrupt response from the DAQ6510. Check log messages for 'DAS Exception'.")
233
+ time.sleep(1.0)
234
+ continue
235
+
236
+ storage.unregister({"origin": origin})
237
+
238
+ logger.info("DAQ6510 Data Acquisition System terminated.")
239
+
240
+
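The measurement loop above indexes each response row positionally: element 0 is the channel, element 1 the instrument timestamp, element 2 the reading, and element 3 the unit. A minimal sketch of that parsing, using a made-up row (the exact number of fractional-second digits reported by the instrument may differ):

```python
import datetime

# Made-up example row, not taken from a real device:
# (channel, instrument timestamp, reading, unit)
measure = ("101", "03/14/2025 12:34:56.789123456", "23.5678", "Celsius")

channel, dts, value, unit = measure[0], measure[1].strip(), float(measure[2]), measure[3]

# The last three characters are dropped before parsing, presumably because the instrument
# reports more fractional digits than "%f" (at most six) accepts.
dt = datetime.datetime.strptime(dts[:-3], "%m/%d/%Y %H:%M:%S.%f")
timestamp = dt.replace(tzinfo=datetime.timezone.utc)

print(channel, timestamp.isoformat(), value, unit)
```

In the service itself the reading is stored under the housekeeping name from `hk_conversion_table`, and the timestamp is reformatted with `format_datetime()` before the row is written to the CSV file and propagated to the metrics database.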
241
+ def setup_metrics_client():
242
+ token = os.getenv("INFLUXDB3_AUTH_TOKEN")
243
+ project = os.getenv("PROJECT")
244
+
245
+ if project and token:
246
+ metrics_client = get_metrics_repo(
247
+ "influxdb", {"host": "http://localhost:8181", "database": project, "token": token}
248
+ )
249
+ metrics_client.connect()
250
+ else:
251
+ metrics_client = None
252
+ logger.warning(
253
+ "INFLUXDB3_AUTH_TOKEN and/or PROJECT environment variable is not set. "
254
+ "Metrics will not be propagated to InfluxDB."
255
+ )
256
+
257
+ return metrics_client
258
+
259
+
260
+ def save_metrics(metrics_client, origin, data):
261
+ try:
262
+ if metrics_client:
263
+ point = {
264
+ "measurement": origin.lower(),
265
+ "tags": {"site_id": SITE_ID, "origin": origin},
266
+ "fields": {hk_name.lower(): data[hk_name] for hk_name in data if hk_name != "timestamp"},
267
+ "time": str_to_datetime(data["timestamp"]),
268
+ }
269
+ metrics_client.write(point)
270
+ else:
271
+ logger.warning(
272
+ f"Could not write {origin} metrics to the time series database (self.metrics_client is None)."
273
+ )
274
+ except NewConnectionError:
275
+ logger.warning(
276
+ f"No connection to the time series database could be established to propagate {origin} metrics. Check "
277
+ f"whether this service is (still) running."
278
+ )
524
279
 
525
- Returns:
526
- Status information as dictionary.
527
- """
528
- return self._send_command("GET_STATUS")
529
280
 
530
- def shutdown(self) -> dict[str, Any]:
531
- """Shutdown the service.
281
+ app = typer.Typer(
282
+ name="daq6510_mon",
283
+ help="DAQ6510 Data Acquisition Unit, Keithley, temperature monitoring (monitoring)",
284
+ no_args_is_help=True,
285
+ )
532
286
 
533
- Returns:
534
- Response from the service
535
- """
536
- return self._send_command("SHUTDOWN")
537
287
 
288
+ @app.command()
289
+ def start(input_file: str = typer.Option("", help="YAML file containing the Setup for the DAQ6510")):
290
+ """Starts the Keithley DAQ6510 Monitoring Service."""
538
291
 
539
- app = typer.Typer(name="daq6510_mon")
292
+ multiprocessing.current_process().name = "daq6510_mon (start)"
540
293
 
294
+ with remote_logging():
295
+ from egse.env import setup_env
541
296
 
542
- @app.command(cls=TyperAsyncCommand, name="monitor")
543
- async def main(log_file: str = "temperature_readings.log"):
544
- """
545
- Start the DAQ6510 monitoring app in the background.
546
- """
547
- monitor = DAQ6510Monitor(
548
- daq_hostname=DAQ_DEV_HOST,
549
- daq_port=DAQ_DEV_PORT,
550
- zmq_port=DAQ_MON_CMD_PORT,
551
- log_file=log_file,
552
- channels=["101", "102"],
553
- poll_interval=10.0,
554
- )
297
+ setup_env()
555
298
 
556
- await monitor.start()
299
+ try:
300
+ daq6510(count=None, interval=None, delay=None, channel_list=None, input_file=input_file)
301
+ except KeyboardInterrupt:
302
+ logger.debug("Shutdown requested...exiting")
303
+ except SystemExit as exit_code:
304
+ logger.debug("System Exit with code {}.".format(exit_code))
305
+ sys.exit(exit_code.code)
306
+ except Exception:
307
+ msg = "Cannot start the DAQ6510 Monitoring Service"
308
+ logger.exception(msg)
309
+ rich.print(f"[red]{msg}.")
557
310
 
558
311
 
559
312
  if __name__ == "__main__":
560
- asyncio.run(app())
313
+ sys.exit(app())