cgse 2023.38.0__py3-none-any.whl → 2024.1.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- README.md +27 -0
- bump.py +85 -0
- cgse-2024.1.4.dist-info/METADATA +38 -0
- cgse-2024.1.4.dist-info/RECORD +5 -0
- {cgse-2023.38.0.dist-info → cgse-2024.1.4.dist-info}/WHEEL +1 -2
- cgse-2023.38.0.dist-info/COPYING +0 -674
- cgse-2023.38.0.dist-info/COPYING.LESSER +0 -165
- cgse-2023.38.0.dist-info/METADATA +0 -144
- cgse-2023.38.0.dist-info/RECORD +0 -649
- cgse-2023.38.0.dist-info/entry_points.txt +0 -75
- cgse-2023.38.0.dist-info/top_level.txt +0 -2
- egse/__init__.py +0 -12
- egse/__main__.py +0 -32
- egse/aeu/aeu.py +0 -5235
- egse/aeu/aeu_awg.yaml +0 -265
- egse/aeu/aeu_crio.yaml +0 -273
- egse/aeu/aeu_cs.py +0 -626
- egse/aeu/aeu_devif.py +0 -321
- egse/aeu/aeu_main_ui.py +0 -912
- egse/aeu/aeu_metrics.py +0 -131
- egse/aeu/aeu_protocol.py +0 -463
- egse/aeu/aeu_psu.yaml +0 -204
- egse/aeu/aeu_ui.py +0 -873
- egse/aeu/arbdata/FccdRead.arb +0 -2
- egse/aeu/arbdata/FccdRead_min_points.arb +0 -2
- egse/aeu/arbdata/HeaterSync_FccdRead.arb +0 -2
- egse/aeu/arbdata/HeaterSync_ccdRead25.arb +0 -2
- egse/aeu/arbdata/HeaterSync_ccdRead31_25.arb +0 -2
- egse/aeu/arbdata/HeaterSync_ccdRead37_50.arb +0 -2
- egse/aeu/arbdata/HeaterSync_ccdRead43_75.arb +0 -2
- egse/aeu/arbdata/HeaterSync_ccdRead50.arb +0 -2
- egse/aeu/arbdata/Heater_FccdRead_min_points.arb +0 -2
- egse/aeu/arbdata/ccdRead25.arb +0 -2
- egse/aeu/arbdata/ccdRead25_150ms.arb +0 -2
- egse/aeu/arbdata/ccdRead31_25.arb +0 -2
- egse/aeu/arbdata/ccdRead31_25_150ms.arb +0 -2
- egse/aeu/arbdata/ccdRead37_50.arb +0 -2
- egse/aeu/arbdata/ccdRead37_50_150ms.arb +0 -2
- egse/aeu/arbdata/ccdRead43_75.arb +0 -2
- egse/aeu/arbdata/ccdRead43_75_150ms.arb +0 -2
- egse/aeu/arbdata/ccdRead50.arb +0 -2
- egse/aeu/arbdata/ccdRead50_150ms.arb +0 -2
- egse/alert/__init__.py +0 -1049
- egse/alert/alertman.yaml +0 -37
- egse/alert/alertman_cs.py +0 -234
- egse/alert/alertman_ui.py +0 -603
- egse/alert/gsm/beaglebone.py +0 -138
- egse/alert/gsm/beaglebone.yaml +0 -51
- egse/alert/gsm/beaglebone_cs.py +0 -108
- egse/alert/gsm/beaglebone_devif.py +0 -130
- egse/alert/gsm/beaglebone_protocol.py +0 -48
- egse/bits.py +0 -318
- egse/camera.py +0 -44
- egse/collimator/__init__.py +0 -0
- egse/collimator/fcul/__init__.py +0 -0
- egse/collimator/fcul/ogse.py +0 -1077
- egse/collimator/fcul/ogse.yaml +0 -14
- egse/collimator/fcul/ogse_cs.py +0 -154
- egse/collimator/fcul/ogse_devif.py +0 -358
- egse/collimator/fcul/ogse_protocol.py +0 -129
- egse/collimator/fcul/ogse_sim.py +0 -431
- egse/collimator/fcul/ogse_ui.py +0 -1108
- egse/command.py +0 -699
- egse/config.py +0 -410
- egse/confman/__init__.py +0 -1015
- egse/confman/confman.yaml +0 -67
- egse/confman/confman_cs.py +0 -239
- egse/confman/confman_ui.py +0 -381
- egse/confman/setup_ui.py +0 -565
- egse/control.py +0 -442
- egse/coordinates/__init__.py +0 -531
- egse/coordinates/avoidance.py +0 -103
- egse/coordinates/cslmodel.py +0 -127
- egse/coordinates/laser_tracker_to_dict.py +0 -120
- egse/coordinates/point.py +0 -707
- egse/coordinates/pyplot.py +0 -195
- egse/coordinates/referenceFrame.py +0 -1279
- egse/coordinates/refmodel.py +0 -737
- egse/coordinates/rotationMatrix.py +0 -85
- egse/coordinates/transform3d_addon.py +0 -419
- egse/csl/__init__.py +0 -50
- egse/csl/commanding.py +0 -78
- egse/csl/icons/hexapod-connected-selected.svg +0 -30
- egse/csl/icons/hexapod-connected.svg +0 -30
- egse/csl/icons/hexapod-homing-selected.svg +0 -68
- egse/csl/icons/hexapod-homing.svg +0 -68
- egse/csl/icons/hexapod-retract-selected.svg +0 -56
- egse/csl/icons/hexapod-retract.svg +0 -51
- egse/csl/icons/hexapod-zero-selected.svg +0 -56
- egse/csl/icons/hexapod-zero.svg +0 -56
- egse/csl/icons/logo-puna.svg +0 -92
- egse/csl/icons/stop.svg +0 -1
- egse/csl/initialisation.py +0 -102
- egse/csl/mech_pos_settings.yaml +0 -18
- egse/das.py +0 -1247
- egse/das.yaml +0 -7
- egse/data/conf/SETUP_CSL_00000_170620_150000.yaml +0 -5
- egse/data/conf/SETUP_CSL_00001_170620_151010.yaml +0 -69
- egse/data/conf/SETUP_CSL_00002_170620_151020.yaml +0 -69
- egse/data/conf/SETUP_CSL_00003_170620_151030.yaml +0 -69
- egse/data/conf/SETUP_CSL_00004_170620_151040.yaml +0 -69
- egse/data/conf/SETUP_CSL_00005_170620_151050.yaml +0 -69
- egse/data/conf/SETUP_CSL_00006_170620_151060.yaml +0 -69
- egse/data/conf/SETUP_CSL_00007_170620_151070.yaml +0 -69
- egse/data/conf/SETUP_CSL_00008_170620_151080.yaml +0 -75
- egse/data/conf/SETUP_CSL_00010_210308_083016.yaml +0 -138
- egse/data/conf/SETUP_INTA_00000_170620_150000.yaml +0 -4
- egse/data/conf/SETUP_SRON_00000_170620_150000.yaml +0 -4
- egse/decorators.py +0 -415
- egse/device.py +0 -269
- egse/dpu/__init__.py +0 -2681
- egse/dpu/ccd_ui.py +0 -508
- egse/dpu/dpu.py +0 -786
- egse/dpu/dpu.yaml +0 -153
- egse/dpu/dpu_cs.py +0 -272
- egse/dpu/dpu_ui.py +0 -668
- egse/dpu/fitsgen.py +0 -2077
- egse/dpu/fitsgen_test.py +0 -752
- egse/dpu/fitsgen_ui.py +0 -399
- egse/dpu/hdf5_model.py +0 -332
- egse/dpu/hdf5_ui.py +0 -277
- egse/dpu/hdf5_viewer.py +0 -506
- egse/dpu/hk_ui.py +0 -468
- egse/dpu_commands.py +0 -81
- egse/dsi/constants.py +0 -220
- egse/dsi/esl.py +0 -870
- egse/dsi/rmap.py +0 -1042
- egse/dsi/rmapci.py +0 -37
- egse/dsi/spw.py +0 -154
- egse/dsi/spw_state.py +0 -29
- egse/dummy.py +0 -258
- egse/dyndummy.py +0 -179
- egse/env.py +0 -278
- egse/exceptions.py +0 -88
- egse/fdir/__init__.py +0 -28
- egse/fdir/fdir_manager.py +0 -85
- egse/fdir/fdir_manager.yaml +0 -51
- egse/fdir/fdir_manager_controller.py +0 -228
- egse/fdir/fdir_manager_cs.py +0 -164
- egse/fdir/fdir_manager_interface.py +0 -25
- egse/fdir/fdir_remote.py +0 -73
- egse/fdir/fdir_remote.yaml +0 -37
- egse/fdir/fdir_remote_controller.py +0 -50
- egse/fdir/fdir_remote_cs.py +0 -97
- egse/fdir/fdir_remote_interface.py +0 -14
- egse/fdir/fdir_remote_popup.py +0 -31
- egse/fee/__init__.py +0 -114
- egse/fee/f_fee_register.yaml +0 -43
- egse/fee/fee.py +0 -631
- egse/fee/feesim.py +0 -750
- egse/fee/n_fee_hk.py +0 -761
- egse/fee/nfee.py +0 -187
- egse/filterwheel/__init__.py +0 -4
- egse/filterwheel/eksma/__init__.py +0 -24
- egse/filterwheel/eksma/fw8smc4.py +0 -661
- egse/filterwheel/eksma/fw8smc4.yaml +0 -121
- egse/filterwheel/eksma/fw8smc4_cs.py +0 -144
- egse/filterwheel/eksma/fw8smc4_devif.py +0 -473
- egse/filterwheel/eksma/fw8smc4_protocol.py +0 -81
- egse/filterwheel/eksma/fw8smc4_ui.py +0 -940
- egse/filterwheel/eksma/fw8smc5.py +0 -111
- egse/filterwheel/eksma/fw8smc5.yaml +0 -105
- egse/filterwheel/eksma/fw8smc5_controller.py +0 -307
- egse/filterwheel/eksma/fw8smc5_cs.py +0 -141
- egse/filterwheel/eksma/fw8smc5_interface.py +0 -65
- egse/filterwheel/eksma/fw8smc5_simulator.py +0 -29
- egse/filterwheel/eksma/fw8smc5_ui.py +0 -1068
- egse/filterwheel/eksma/testpythonfw.py +0 -215
- egse/fov/__init__.py +0 -65
- egse/fov/fov_hk.py +0 -712
- egse/fov/fov_ui.py +0 -861
- egse/fov/fov_ui_controller.py +0 -140
- egse/fov/fov_ui_model.py +0 -200
- egse/fov/fov_ui_view.py +0 -345
- egse/gimbal/__init__.py +0 -32
- egse/gimbal/symetrie/__init__.py +0 -26
- egse/gimbal/symetrie/alpha.py +0 -586
- egse/gimbal/symetrie/generic_gimbal_ui.py +0 -1521
- egse/gimbal/symetrie/gimbal.py +0 -877
- egse/gimbal/symetrie/gimbal.yaml +0 -168
- egse/gimbal/symetrie/gimbal_cs.py +0 -183
- egse/gimbal/symetrie/gimbal_protocol.py +0 -135
- egse/gimbal/symetrie/gimbal_ui.py +0 -361
- egse/gimbal/symetrie/pmac.py +0 -1006
- egse/gimbal/symetrie/pmac_regex.py +0 -83
- egse/graph.py +0 -132
- egse/gui/__init__.py +0 -47
- egse/gui/buttons.py +0 -378
- egse/gui/focalplane.py +0 -1281
- egse/gui/formatter.py +0 -10
- egse/gui/led.py +0 -162
- egse/gui/limitswitch.py +0 -143
- egse/gui/mechanisms.py +0 -588
- egse/gui/states.py +0 -148
- egse/gui/stripchart.py +0 -729
- egse/gui/switch.py +0 -112
- egse/h5.py +0 -274
- egse/help/__init__.py +0 -0
- egse/help/help_ui.py +0 -126
- egse/hexapod/__init__.py +0 -32
- egse/hexapod/symetrie/__init__.py +0 -138
- egse/hexapod/symetrie/alpha.py +0 -874
- egse/hexapod/symetrie/dynalpha.py +0 -1387
- egse/hexapod/symetrie/hexapod_ui.py +0 -1516
- egse/hexapod/symetrie/pmac.py +0 -1010
- egse/hexapod/symetrie/pmac_regex.py +0 -83
- egse/hexapod/symetrie/puna.py +0 -1167
- egse/hexapod/symetrie/puna.yaml +0 -193
- egse/hexapod/symetrie/puna_cs.py +0 -196
- egse/hexapod/symetrie/puna_protocol.py +0 -131
- egse/hexapod/symetrie/puna_ui.py +0 -434
- egse/hexapod/symetrie/punaplus.py +0 -107
- egse/hexapod/symetrie/zonda.py +0 -872
- egse/hexapod/symetrie/zonda.yaml +0 -337
- egse/hexapod/symetrie/zonda_cs.py +0 -172
- egse/hexapod/symetrie/zonda_devif.py +0 -415
- egse/hexapod/symetrie/zonda_protocol.py +0 -119
- egse/hexapod/symetrie/zonda_ui.py +0 -449
- egse/hk.py +0 -765
- egse/icons/aeu-cs-start.svg +0 -117
- egse/icons/aeu-cs-stop.svg +0 -118
- egse/icons/aeu-cs.svg +0 -107
- egse/icons/aeu_cs-started.svg +0 -112
- egse/icons/aeu_cs-stopped.svg +0 -112
- egse/icons/aeu_cs.svg +0 -55
- egse/icons/alert.svg +0 -1
- egse/icons/arrow-double-left.png +0 -0
- egse/icons/arrow-double-right.png +0 -0
- egse/icons/arrow-up.svg +0 -11
- egse/icons/backward.svg +0 -1
- egse/icons/busy.svg +0 -1
- egse/icons/cleaning.svg +0 -115
- egse/icons/color-scheme.svg +0 -1
- egse/icons/cs-connected-alert.svg +0 -91
- egse/icons/cs-connected-disabled.svg +0 -43
- egse/icons/cs-connected.svg +0 -89
- egse/icons/cs-not-connected.svg +0 -44
- egse/icons/double-left-arrow.svg +0 -1
- egse/icons/double-right-arrow.svg +0 -1
- egse/icons/erase-disabled.svg +0 -19
- egse/icons/erase.svg +0 -59
- egse/icons/fitsgen-start.svg +0 -47
- egse/icons/fitsgen-stop.svg +0 -48
- egse/icons/fitsgen.svg +0 -1
- egse/icons/forward.svg +0 -1
- egse/icons/fov-hk-start.svg +0 -33
- egse/icons/fov-hk-stop.svg +0 -37
- egse/icons/fov-hk.svg +0 -1
- egse/icons/front-desk.svg +0 -1
- egse/icons/home-actioned.svg +0 -15
- egse/icons/home-disabled.svg +0 -15
- egse/icons/home.svg +0 -13
- egse/icons/info.svg +0 -1
- egse/icons/invalid.png +0 -0
- egse/icons/led-green.svg +0 -20
- egse/icons/led-grey.svg +0 -20
- egse/icons/led-orange.svg +0 -20
- egse/icons/led-red.svg +0 -20
- egse/icons/led-square-green.svg +0 -134
- egse/icons/led-square-grey.svg +0 -134
- egse/icons/led-square-orange.svg +0 -134
- egse/icons/led-square-red.svg +0 -134
- egse/icons/limit-switch-all-green.svg +0 -115
- egse/icons/limit-switch-all-red.svg +0 -117
- egse/icons/limit-switch-el+.svg +0 -116
- egse/icons/limit-switch-el-.svg +0 -117
- egse/icons/location-marker.svg +0 -1
- egse/icons/logo-dpu.svg +0 -48
- egse/icons/logo-gimbal.svg +0 -112
- egse/icons/logo-huber.svg +0 -23
- egse/icons/logo-ogse.svg +0 -31
- egse/icons/logo-puna.svg +0 -92
- egse/icons/logo-tcs.svg +0 -29
- egse/icons/logo-zonda.svg +0 -66
- egse/icons/maximize.svg +0 -1
- egse/icons/meter.svg +0 -1
- egse/icons/more.svg +0 -45
- egse/icons/n-fee-hk-start.svg +0 -24
- egse/icons/n-fee-hk-stop.svg +0 -25
- egse/icons/n-fee-hk.svg +0 -83
- egse/icons/observing-off.svg +0 -46
- egse/icons/observing-on.svg +0 -46
- egse/icons/open-document-hdf5.png +0 -0
- egse/icons/open-document-hdf5.svg +0 -21
- egse/icons/ops-mode.svg +0 -1
- egse/icons/play-green.svg +0 -17
- egse/icons/plugged-disabled.svg +0 -27
- egse/icons/plugged.svg +0 -21
- egse/icons/pm_ui.svg +0 -1
- egse/icons/power-button-green.svg +0 -27
- egse/icons/power-button-red.svg +0 -27
- egse/icons/power-button.svg +0 -27
- egse/icons/radar.svg +0 -1
- egse/icons/radioactive.svg +0 -2
- egse/icons/reload.svg +0 -1
- egse/icons/remote-control-off.svg +0 -28
- egse/icons/remote-control-on.svg +0 -28
- egse/icons/repeat-blue.svg +0 -15
- egse/icons/repeat.svg +0 -1
- egse/icons/settings.svg +0 -1
- egse/icons/shrink.svg +0 -1
- egse/icons/shutter.svg +0 -1
- egse/icons/sign-off.svg +0 -1
- egse/icons/sign-on.svg +0 -1
- egse/icons/sim-mode.svg +0 -1
- egse/icons/small-buttons-go.svg +0 -20
- egse/icons/small-buttons-minus.svg +0 -51
- egse/icons/small-buttons-plus.svg +0 -51
- egse/icons/sponge.svg +0 -220
- egse/icons/start-button-disabled.svg +0 -84
- egse/icons/start-button.svg +0 -50
- egse/icons/stop-button-disabled.svg +0 -84
- egse/icons/stop-button.svg +0 -50
- egse/icons/stop-red.svg +0 -17
- egse/icons/stop.svg +0 -1
- egse/icons/switch-disabled-square.svg +0 -87
- egse/icons/switch-disabled.svg +0 -15
- egse/icons/switch-off-square.svg +0 -87
- egse/icons/switch-off.svg +0 -72
- egse/icons/switch-on-square.svg +0 -87
- egse/icons/switch-on.svg +0 -61
- egse/icons/temperature-control.svg +0 -44
- egse/icons/th_ui_logo.svg +0 -1
- egse/icons/unplugged.svg +0 -23
- egse/icons/unvalid.png +0 -0
- egse/icons/user-interface.svg +0 -1
- egse/icons/vacuum.svg +0 -1
- egse/icons/valid.png +0 -0
- egse/icons/zoom-to-pixel-dark.svg +0 -64
- egse/icons/zoom-to-pixel-white.svg +0 -36
- egse/images/big-rotation-stage.png +0 -0
- egse/images/connected-100.png +0 -0
- egse/images/cross.svg +0 -6
- egse/images/disconnected-100.png +0 -0
- egse/images/gui-icon.png +0 -0
- egse/images/home.svg +0 -6
- egse/images/info-icon.png +0 -0
- egse/images/led-black.svg +0 -89
- egse/images/led-green.svg +0 -85
- egse/images/led-orange.svg +0 -85
- egse/images/led-red.svg +0 -85
- egse/images/load-icon.png +0 -0
- egse/images/load-setup.png +0 -0
- egse/images/load.png +0 -0
- egse/images/pause.png +0 -0
- egse/images/play-button.svg +0 -8
- egse/images/play.png +0 -0
- egse/images/process-status.png +0 -0
- egse/images/restart.png +0 -0
- egse/images/search.png +0 -0
- egse/images/sma.png +0 -0
- egse/images/start.png +0 -0
- egse/images/stop-button.svg +0 -8
- egse/images/stop.png +0 -0
- egse/images/switch-off.svg +0 -48
- egse/images/switch-on.svg +0 -48
- egse/images/undo.png +0 -0
- egse/images/update-button.svg +0 -11
- egse/imageviewer/exposureselection.py +0 -475
- egse/imageviewer/imageviewer.py +0 -198
- egse/imageviewer/matchfocalplane.py +0 -179
- egse/imageviewer/subfieldposition.py +0 -133
- egse/lampcontrol/__init__.py +0 -4
- egse/lampcontrol/beaglebone/beaglebone.py +0 -178
- egse/lampcontrol/beaglebone/beaglebone.yaml +0 -62
- egse/lampcontrol/beaglebone/beaglebone_cs.py +0 -106
- egse/lampcontrol/beaglebone/beaglebone_devif.py +0 -150
- egse/lampcontrol/beaglebone/beaglebone_protocol.py +0 -73
- egse/lampcontrol/energetiq/__init__.py +0 -22
- egse/lampcontrol/energetiq/eq99.yaml +0 -98
- egse/lampcontrol/energetiq/lampEQ99.py +0 -283
- egse/lampcontrol/energetiq/lampEQ99_cs.py +0 -128
- egse/lampcontrol/energetiq/lampEQ99_devif.py +0 -158
- egse/lampcontrol/energetiq/lampEQ99_encode_decode_errors.py +0 -73
- egse/lampcontrol/energetiq/lampEQ99_protocol.py +0 -69
- egse/lampcontrol/energetiq/lampEQ99_ui.py +0 -465
- egse/lib/CentOS-7/EtherSpaceLink_v34_86.dylib +0 -0
- egse/lib/CentOS-8/ESL-RMAP_v34_86.dylib +0 -0
- egse/lib/CentOS-8/EtherSpaceLink_v34_86.dylib +0 -0
- egse/lib/Debian/ESL-RMAP_v34_86.dylib +0 -0
- egse/lib/Debian/EtherSpaceLink_v34_86.dylib +0 -0
- egse/lib/Debian/libetherspacelink_v35_21.dylib +0 -0
- egse/lib/Linux/ESL-RMAP_v34_86.dylib +0 -0
- egse/lib/Linux/EtherSpaceLink_v34_86.dylib +0 -0
- egse/lib/Ubuntu-20/ESL-RMAP_v34_86.dylib +0 -0
- egse/lib/Ubuntu-20/EtherSpaceLink_v34_86.dylib +0 -0
- egse/lib/gssw/python3-gssw_2.2.3+31f63c9f-1_all.deb +0 -0
- egse/lib/macOS/ESL-RMAP_v34_86.dylib +0 -0
- egse/lib/macOS/EtherSpaceLink_v34_86.dylib +0 -0
- egse/lib/ximc/__pycache__/pyximc.cpython-38 2.pyc +0 -0
- egse/lib/ximc/__pycache__/pyximc.cpython-38.pyc +0 -0
- egse/lib/ximc/libximc.framework/Frameworks/libbindy.dylib +0 -0
- egse/lib/ximc/libximc.framework/Frameworks/libxiwrapper.dylib +0 -0
- egse/lib/ximc/libximc.framework/Headers/ximc.h +0 -5510
- egse/lib/ximc/libximc.framework/Resources/Info.plist +0 -42
- egse/lib/ximc/libximc.framework/Resources/keyfile.sqlite +0 -0
- egse/lib/ximc/libximc.framework/libbindy.so +0 -0
- egse/lib/ximc/libximc.framework/libximc +0 -0
- egse/lib/ximc/libximc.framework/libximc.so +0 -0
- egse/lib/ximc/libximc.framework/libximc.so.7.0.0 +0 -0
- egse/lib/ximc/libximc.framework/libxiwrapper.so +0 -0
- egse/lib/ximc/pyximc.py +0 -922
- egse/listener.py +0 -73
- egse/logger/__init__.py +0 -243
- egse/logger/log_cs.py +0 -321
- egse/metrics.py +0 -98
- egse/mixin.py +0 -464
- egse/monitoring.py +0 -95
- egse/ni/alarms/__init__.py +0 -26
- egse/ni/alarms/cdaq9375.py +0 -300
- egse/ni/alarms/cdaq9375.yaml +0 -89
- egse/ni/alarms/cdaq9375_cs.py +0 -130
- egse/ni/alarms/cdaq9375_devif.py +0 -183
- egse/ni/alarms/cdaq9375_protocol.py +0 -48
- egse/obs_inspection.py +0 -163
- egse/observer.py +0 -41
- egse/obsid.py +0 -163
- egse/powermeter/__init__.py +0 -0
- egse/powermeter/ni/__init__.py +0 -38
- egse/powermeter/ni/cdaq9184.py +0 -224
- egse/powermeter/ni/cdaq9184.yaml +0 -73
- egse/powermeter/ni/cdaq9184_cs.py +0 -130
- egse/powermeter/ni/cdaq9184_devif.py +0 -201
- egse/powermeter/ni/cdaq9184_protocol.py +0 -48
- egse/powermeter/ni/cdaq9184_ui.py +0 -544
- egse/powermeter/thorlabs/__init__.py +0 -25
- egse/powermeter/thorlabs/pm100a.py +0 -380
- egse/powermeter/thorlabs/pm100a.yaml +0 -132
- egse/powermeter/thorlabs/pm100a_cs.py +0 -136
- egse/powermeter/thorlabs/pm100a_devif.py +0 -127
- egse/powermeter/thorlabs/pm100a_protocol.py +0 -80
- egse/powermeter/thorlabs/pm100a_ui.py +0 -725
- egse/process.py +0 -451
- egse/procman/__init__.py +0 -811
- egse/procman/cannot_start_process_popup.py +0 -43
- egse/procman/procman.yaml +0 -49
- egse/procman/procman_cs.py +0 -201
- egse/procman/procman_ui.py +0 -2081
- egse/protocol.py +0 -603
- egse/proxy.py +0 -522
- egse/randomwalk.py +0 -140
- egse/reg.py +0 -585
- egse/reload.py +0 -122
- egse/reprocess.py +0 -675
- egse/resource.py +0 -333
- egse/rst.py +0 -135
- egse/search.py +0 -182
- egse/serialdevice.py +0 -190
- egse/services.py +0 -212
- egse/services.yaml +0 -51
- egse/settings.py +0 -379
- egse/settings.yaml +0 -980
- egse/setup.py +0 -1180
- egse/shutter/__init__.py +0 -0
- egse/shutter/thorlabs/__init__.py +0 -19
- egse/shutter/thorlabs/ksc101.py +0 -205
- egse/shutter/thorlabs/ksc101.yaml +0 -105
- egse/shutter/thorlabs/ksc101_cs.py +0 -136
- egse/shutter/thorlabs/ksc101_devif.py +0 -201
- egse/shutter/thorlabs/ksc101_protocol.py +0 -69
- egse/shutter/thorlabs/ksc101_ui.py +0 -548
- egse/shutter/thorlabs/sc10.py +0 -82
- egse/shutter/thorlabs/sc10.yaml +0 -52
- egse/shutter/thorlabs/sc10_controller.py +0 -81
- egse/shutter/thorlabs/sc10_cs.py +0 -108
- egse/shutter/thorlabs/sc10_interface.py +0 -25
- egse/shutter/thorlabs/sc10_simulator.py +0 -30
- egse/simulator.py +0 -41
- egse/slack.py +0 -61
- egse/socketdevice.py +0 -218
- egse/sockets.py +0 -218
- egse/spw.py +0 -1479
- egse/stages/__init__.py +0 -12
- egse/stages/aerotech/ensemble.py +0 -247
- egse/stages/aerotech/ensemble.yaml +0 -205
- egse/stages/aerotech/ensemble_controller.py +0 -275
- egse/stages/aerotech/ensemble_cs.py +0 -110
- egse/stages/aerotech/ensemble_interface.py +0 -132
- egse/stages/aerotech/ensemble_parameters.py +0 -433
- egse/stages/aerotech/ensemble_simulator.py +0 -27
- egse/stages/aerotech/mgse_sim.py +0 -193
- egse/stages/arun/smd3.py +0 -111
- egse/stages/arun/smd3.yaml +0 -68
- egse/stages/arun/smd3_controller.py +0 -472
- egse/stages/arun/smd3_cs.py +0 -112
- egse/stages/arun/smd3_interface.py +0 -53
- egse/stages/arun/smd3_simulator.py +0 -27
- egse/stages/arun/smd3_stop.py +0 -16
- egse/stages/huber/__init__.py +0 -49
- egse/stages/huber/smc9300.py +0 -904
- egse/stages/huber/smc9300.yaml +0 -63
- egse/stages/huber/smc9300_cs.py +0 -178
- egse/stages/huber/smc9300_devif.py +0 -345
- egse/stages/huber/smc9300_protocol.py +0 -111
- egse/stages/huber/smc9300_sim.py +0 -547
- egse/stages/huber/smc9300_ui.py +0 -973
- egse/state.py +0 -173
- egse/statemachine.py +0 -274
- egse/storage/__init__.py +0 -1004
- egse/storage/persistence.py +0 -2295
- egse/storage/storage.yaml +0 -72
- egse/storage/storage_cs.py +0 -214
- egse/styles/dark.qss +0 -343
- egse/styles/default.qss +0 -48
- egse/synoptics/__init__.py +0 -412
- egse/synoptics/syn.yaml +0 -9
- egse/synoptics/syn_cs.py +0 -195
- egse/system.py +0 -1408
- egse/tcs/__init__.py +0 -14
- egse/tcs/tcs.py +0 -874
- egse/tcs/tcs.yaml +0 -14
- egse/tcs/tcs_cs.py +0 -202
- egse/tcs/tcs_devif.py +0 -292
- egse/tcs/tcs_protocol.py +0 -177
- egse/tcs/tcs_sim.py +0 -177
- egse/tcs/tcs_ui.py +0 -543
- egse/tdms.py +0 -171
- egse/tempcontrol/__init__.py +0 -23
- egse/tempcontrol/agilent/agilent34970.py +0 -109
- egse/tempcontrol/agilent/agilent34970.yaml +0 -44
- egse/tempcontrol/agilent/agilent34970_cs.py +0 -116
- egse/tempcontrol/agilent/agilent34970_devif.py +0 -182
- egse/tempcontrol/agilent/agilent34970_protocol.py +0 -99
- egse/tempcontrol/agilent/agilent34972.py +0 -111
- egse/tempcontrol/agilent/agilent34972.yaml +0 -44
- egse/tempcontrol/agilent/agilent34972_cs.py +0 -117
- egse/tempcontrol/agilent/agilent34972_devif.py +0 -189
- egse/tempcontrol/agilent/agilent34972_protocol.py +0 -101
- egse/tempcontrol/beaglebone/beaglebone.py +0 -342
- egse/tempcontrol/beaglebone/beaglebone.yaml +0 -110
- egse/tempcontrol/beaglebone/beaglebone_cs.py +0 -117
- egse/tempcontrol/beaglebone/beaglebone_protocol.py +0 -135
- egse/tempcontrol/beaglebone/beaglebone_ui.py +0 -681
- egse/tempcontrol/digalox/digalox.py +0 -107
- egse/tempcontrol/digalox/digalox.yaml +0 -36
- egse/tempcontrol/digalox/digalox_cs.py +0 -112
- egse/tempcontrol/digalox/digalox_protocol.py +0 -55
- egse/tempcontrol/keithley/__init__.py +0 -33
- egse/tempcontrol/keithley/daq6510.py +0 -662
- egse/tempcontrol/keithley/daq6510.yaml +0 -105
- egse/tempcontrol/keithley/daq6510_cs.py +0 -163
- egse/tempcontrol/keithley/daq6510_devif.py +0 -343
- egse/tempcontrol/keithley/daq6510_protocol.py +0 -78
- egse/tempcontrol/keithley/daq6510_sim.py +0 -186
- egse/tempcontrol/lakeshore/__init__.py +0 -33
- egse/tempcontrol/lakeshore/lsci.py +0 -361
- egse/tempcontrol/lakeshore/lsci.yaml +0 -162
- egse/tempcontrol/lakeshore/lsci_cs.py +0 -174
- egse/tempcontrol/lakeshore/lsci_devif.py +0 -292
- egse/tempcontrol/lakeshore/lsci_protocol.py +0 -73
- egse/tempcontrol/lakeshore/lsci_ui.py +0 -389
- egse/tempcontrol/ni/__init__.py +0 -0
- egse/tempcontrol/spid/spid.py +0 -109
- egse/tempcontrol/spid/spid.yaml +0 -81
- egse/tempcontrol/spid/spid_controller.py +0 -279
- egse/tempcontrol/spid/spid_cs.py +0 -136
- egse/tempcontrol/spid/spid_protocol.py +0 -107
- egse/tempcontrol/spid/spid_ui.py +0 -727
- egse/tempcontrol/srs/__init__.py +0 -22
- egse/tempcontrol/srs/ptc10.py +0 -875
- egse/tempcontrol/srs/ptc10.yaml +0 -227
- egse/tempcontrol/srs/ptc10_cs.py +0 -128
- egse/tempcontrol/srs/ptc10_devif.py +0 -118
- egse/tempcontrol/srs/ptc10_protocol.py +0 -42
- egse/tempcontrol/srs/ptc10_ui.py +0 -906
- egse/ups/apc/apc.py +0 -236
- egse/ups/apc/apc.yaml +0 -45
- egse/ups/apc/apc_cs.py +0 -101
- egse/ups/apc/apc_protocol.py +0 -125
- egse/user.yaml +0 -7
- egse/vacuum/beaglebone/beaglebone.py +0 -149
- egse/vacuum/beaglebone/beaglebone.yaml +0 -44
- egse/vacuum/beaglebone/beaglebone_cs.py +0 -108
- egse/vacuum/beaglebone/beaglebone_devif.py +0 -164
- egse/vacuum/beaglebone/beaglebone_protocol.py +0 -193
- egse/vacuum/beaglebone/beaglebone_ui.py +0 -638
- egse/vacuum/instrutech/igm402.py +0 -92
- egse/vacuum/instrutech/igm402.yaml +0 -90
- egse/vacuum/instrutech/igm402_controller.py +0 -128
- egse/vacuum/instrutech/igm402_cs.py +0 -108
- egse/vacuum/instrutech/igm402_interface.py +0 -49
- egse/vacuum/instrutech/igm402_simulator.py +0 -36
- egse/vacuum/keller/kellerBus.py +0 -256
- egse/vacuum/keller/leo3.py +0 -102
- egse/vacuum/keller/leo3.yaml +0 -38
- egse/vacuum/keller/leo3_controller.py +0 -83
- egse/vacuum/keller/leo3_cs.py +0 -101
- egse/vacuum/keller/leo3_interface.py +0 -33
- egse/vacuum/mks/evision.py +0 -86
- egse/vacuum/mks/evision.yaml +0 -75
- egse/vacuum/mks/evision_cs.py +0 -101
- egse/vacuum/mks/evision_devif.py +0 -316
- egse/vacuum/mks/evision_interface.py +0 -60
- egse/vacuum/mks/evision_simulator.py +0 -24
- egse/vacuum/mks/evision_ui.py +0 -704
- egse/vacuum/pfeiffer/acp40.py +0 -87
- egse/vacuum/pfeiffer/acp40.yaml +0 -60
- egse/vacuum/pfeiffer/acp40_controller.py +0 -117
- egse/vacuum/pfeiffer/acp40_cs.py +0 -109
- egse/vacuum/pfeiffer/acp40_interface.py +0 -40
- egse/vacuum/pfeiffer/acp40_simulator.py +0 -39
- egse/vacuum/pfeiffer/tc400.py +0 -113
- egse/vacuum/pfeiffer/tc400.yaml +0 -83
- egse/vacuum/pfeiffer/tc400_controller.py +0 -140
- egse/vacuum/pfeiffer/tc400_cs.py +0 -109
- egse/vacuum/pfeiffer/tc400_interface.py +0 -70
- egse/vacuum/pfeiffer/tc400_simulator.py +0 -24
- egse/vacuum/pfeiffer/tpg261.py +0 -81
- egse/vacuum/pfeiffer/tpg261.yaml +0 -66
- egse/vacuum/pfeiffer/tpg261_controller.py +0 -150
- egse/vacuum/pfeiffer/tpg261_cs.py +0 -109
- egse/vacuum/pfeiffer/tpg261_interface.py +0 -60
- egse/vacuum/pfeiffer/tpg261_simulator.py +0 -24
- egse/version.py +0 -174
- egse/visitedpositions.py +0 -398
- egse/windowing.py +0 -213
- egse/zmq/__init__.py +0 -28
- egse/zmq/spw.py +0 -160
- egse/zmq_ser.py +0 -41
- scripts/alerts/cold.yaml +0 -278
- scripts/alerts/example_alerts.yaml +0 -54
- scripts/alerts/transition.yaml +0 -14
- scripts/alerts/warm.yaml +0 -49
- scripts/analyse_n_fee_hk_data.py +0 -44
- scripts/check_hdf5_files.py +0 -192
- scripts/check_register_sync.py +0 -47
- scripts/create_hdf5_report.py +0 -295
- scripts/csl_model.py +0 -436
- scripts/csl_restore_setup.py +0 -230
- scripts/export-grafana-dashboards.py +0 -50
- scripts/fdir/cs_recovery/fdir_cs_recovery.py +0 -59
- scripts/fdir/fdir_table.yaml +0 -70
- scripts/fdir/fdir_test_recovery.py +0 -11
- scripts/fdir/hw_recovery/fdir_agilent_hw_recovery.py +0 -73
- scripts/fdir/limit_recovery/fdir_agilent_limit.py +0 -64
- scripts/fdir/limit_recovery/fdir_bb_heater_limit.py +0 -61
- scripts/fdir/limit_recovery/fdir_ensemble_limit.py +0 -33
- scripts/fdir/limit_recovery/fdir_pressure_limit_recovery.py +0 -71
- scripts/fix_csv.py +0 -80
- scripts/n_fee_supply_voltage_calculation.py +0 -92
- scripts/playground.py +0 -30
- scripts/print_hdf5_hk_data.py +0 -68
- scripts/print_register_map.py +0 -43
- scripts/sron/commanding/control_heaters.py +0 -44
- scripts/sron/commanding/pumpdown.py +0 -46
- scripts/sron/commanding/set_pid_setpoint.py +0 -19
- scripts/sron/commanding/shutdown_bbb_heaters.py +0 -10
- scripts/sron/commanding/shutdown_pumps.py +0 -33
- scripts/sron/tm_gen/tm_gen_agilent.py +0 -38
- scripts/sron/tm_gen/tm_gen_heaters.py +0 -4
- scripts/sron/tm_gen/tm_gen_spid.py +0 -13
- scripts/update_operational_cgse.py +0 -268
- scripts/update_operational_cgse_old.py +0 -273
egse/das.py
DELETED
|
@@ -1,1247 +0,0 @@
|
|
|
1
|
-
"""
|
|
2
|
-
The Data Acquisition System (DAS) is a small application that performs measurements on
|
|
3
|
-
different devices.
|
|
4
|
-
|
|
5
|
-
For the Keithley DAQ6510, the DAS reads the configuration for the Keithley DAQ6510 from the
|
|
6
|
-
Configuration Manager and then configures the device. When no Configuration Manager is
|
|
7
|
-
available, the DAS can also be started with a filename to read the configuration from. The file
|
|
8
|
-
should have the YAML format.
|
|
9
|
-
|
|
10
|
-
```
|
|
11
|
-
Setup:
|
|
12
|
-
DAQ6510:
|
|
13
|
-
Sensors:
|
|
14
|
-
Temperature:
|
|
15
|
-
TRANSDUCER: FRTD
|
|
16
|
-
"RTD:FOUR": PT100
|
|
17
|
-
UNIT: KELVIN
|
|
18
|
-
```
|
|
19
|
-
|
|
20
|
-
The Data Acquisition System can be started as follows:
|
|
21
|
-
|
|
22
|
-
```
|
|
23
|
-
$ das
|
|
24
|
-
Usage: das.py [OPTIONS] COMMAND [ARGS]...
|
|
25
|
-
|
|
26
|
-
Options:
|
|
27
|
-
--verbose print out more info to the terminal.
|
|
28
|
-
--debug set the logging output to DEBUG mode.
|
|
29
|
-
--help Show this message and exit.
|
|
30
|
-
|
|
31
|
-
Commands:
|
|
32
|
-
daq6510 Run the Data Acquisition System for the DAQ6510.
|
|
33
|
-
tcs Run the Data Acquisition System for the TCS EGSE.
|
|
34
|
-
cdaq Run the Data Acquisition System for the CDAQ EGSE
|
|
35
|
-
```
|
|
36
|
-
|
|
37
|
-
"""
|
|
38
|
-
import logging
|
|
39
|
-
import multiprocessing
|
|
40
|
-
from datetime import timezone
|
|
41
|
-
|
|
42
|
-
import itertools
|
|
43
|
-
import rich
|
|
44
|
-
|
|
45
|
-
from egse.hk import read_conversion_dict, convert_hk_names
|
|
46
|
-
from egse.state import GlobalState
|
|
47
|
-
|
|
48
|
-
import re
|
|
49
|
-
import sys
|
|
50
|
-
import time
|
|
51
|
-
from datetime import datetime
|
|
52
|
-
from pathlib import Path
|
|
53
|
-
from typing import List
|
|
54
|
-
|
|
55
|
-
import click
|
|
56
|
-
import invoke
|
|
57
|
-
from prometheus_client import Gauge
|
|
58
|
-
from prometheus_client import start_http_server
|
|
59
|
-
|
|
60
|
-
from egse.control import Failure
|
|
61
|
-
from egse.settings import Settings
|
|
62
|
-
from egse.setup import Setup
|
|
63
|
-
from egse.storage import StorageProxy
|
|
64
|
-
from egse.storage import is_storage_manager_active
|
|
65
|
-
from egse.storage.persistence import CSV
|
|
66
|
-
from egse.system import SignalCatcher
|
|
67
|
-
from egse.system import flatten_dict
|
|
68
|
-
from egse.tcs.tcs import TCSProxy
|
|
69
|
-
from egse.tcs.tcs import is_tcs_cs_active
|
|
70
|
-
from egse.tempcontrol.keithley.daq6510 import DAQ6510Proxy
|
|
71
|
-
from egse.tempcontrol.keithley.daq6510 import count_number_of_channels
|
|
72
|
-
from egse.tempcontrol.keithley.daq6510 import get_channel_names
|
|
73
|
-
from egse.tempcontrol.keithley.daq6510_cs import is_daq6510_cs_active
|
|
74
|
-
from egse.tempcontrol.srs.ptc10 import ptc10Proxy
|
|
75
|
-
from egse.tempcontrol.srs.ptc10_cs import is_ptc10_cs_active
|
|
76
|
-
from egse.aeu.aeu import CRIOProxy, OperatingMode
|
|
77
|
-
from egse.aeu.aeu import is_aeu_cs_active
|
|
78
|
-
|
|
79
|
-
from egse.powermeter.ni.cdaq9184 import cdaq9184Proxy
|
|
80
|
-
from egse.powermeter.ni.cdaq9184_cs import is_cdaq9184_cs_active
|
|
81
|
-
|
|
82
|
-
from egse.ni.alarms.cdaq9375 import cdaq9375Proxy
|
|
83
|
-
from egse.ni.alarms.cdaq9375_cs import is_cdaq9375_cs_active
|
|
84
|
-
|
|
85
|
-
from egse.metrics import define_metrics
|
|
86
|
-
from egse.synoptics import SynopticsManagerProxy
|
|
87
|
-
|
|
88
|
-
from egse.system import format_datetime
|
|
89
|
-
|
|
90
|
-
LOGGER = logging.getLogger(__name__)
|
|
91
|
-
|
|
92
|
-
DAS = Settings.load("Data Acquisition System")
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
def load_setup_from_input_file(input_file: str):
|
|
96
|
-
"""Loads a Setup YAML file from disk."""
|
|
97
|
-
input_file = Path(input_file).resolve()
|
|
98
|
-
|
|
99
|
-
if not input_file.exists():
|
|
100
|
-
click.echo(f"ERROR: Input file ({input_file}) doesn't exists.")
|
|
101
|
-
return None
|
|
102
|
-
|
|
103
|
-
return Setup.from_yaml_file(input_file)
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
def load_setup_from_configuration_manager():
|
|
107
|
-
"""Loads a Setup YAML file from the Configuration Manager."""
|
|
108
|
-
|
|
109
|
-
return GlobalState.setup
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
class Config:
|
|
113
|
-
def __init__(self):
|
|
114
|
-
self.verbose = False
|
|
115
|
-
self.debug = False
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
pass_config = click.make_pass_decorator(Config, ensure=True)
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
@click.group()
|
|
122
|
-
@click.option("--verbose", is_flag=True, help="print out more info to the terminal.")
|
|
123
|
-
@click.option("--debug", is_flag=True, help="set the logging output to DEBUG mode.")
|
|
124
|
-
@pass_config
|
|
125
|
-
def cli(config, verbose, debug):
|
|
126
|
-
config.verbose = verbose
|
|
127
|
-
config.debug = debug
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
@cli.command()
|
|
131
|
-
@click.option(
|
|
132
|
-
"--count", default=None, help="how many samples should be taken for each measurement"
|
|
133
|
-
)
|
|
134
|
-
@click.option(
|
|
135
|
-
"--interval", default=None, help="what is the time interval between scans [seconds]"
|
|
136
|
-
)
|
|
137
|
-
@click.option(
|
|
138
|
-
"--delay", default=None, help="what is the time delay between measurements [seconds]"
|
|
139
|
-
)
|
|
140
|
-
@click.option(
|
|
141
|
-
"--channel-list",
|
|
142
|
-
default=None,
|
|
143
|
-
help='a channel_list as understood by the device, e.g. "(@101:105)"',
|
|
144
|
-
)
|
|
145
|
-
@click.option(
|
|
146
|
-
"--background/--no-background", "-bg/-no-bg", default=False,
|
|
147
|
-
help="start the data acquisition in the background"
|
|
148
|
-
)
|
|
149
|
-
@click.argument("input_file", type=str, required=False)
|
|
150
|
-
@pass_config
|
|
151
|
-
def daq6510(config, count, interval, delay, channel_list, background: bool, input_file: str):
|
|
152
|
-
"""
|
|
153
|
-
Run the Data Acquisition System for the DAQ6510.
|
|
154
|
-
|
|
155
|
-
INPUT_FILE: YAML file containing the Setup for the DAQ6510 [optional]
|
|
156
|
-
|
|
157
|
-
Note: When this command runs in the background, send an INTERRUPT SIGNAL with the kill command
|
|
158
|
-
to terminate. Never send a KILL SIGNAL (9) because then the process will not properly be
|
|
159
|
-
unregistered from the storage manager.
|
|
160
|
-
|
|
161
|
-
$ kill -INT <PID>
|
|
162
|
-
|
|
163
|
-
"""
|
|
164
|
-
|
|
165
|
-
hk_conversion_table = read_conversion_dict("DAS-DAQ6510", use_site=True)
|
|
166
|
-
column_names = list(hk_conversion_table.values())
|
|
167
|
-
|
|
168
|
-
if background:
|
|
169
|
-
cmd = "das daq6510"
|
|
170
|
-
cmd += f" --count {count}"
|
|
171
|
-
cmd += f" --interval {interval}"
|
|
172
|
-
cmd += f" --delay {delay}"
|
|
173
|
-
cmd += f" --channel-list '{channel_list}'" if channel_list else ""
|
|
174
|
-
cmd += f" {input_file}" if input_file else ""
|
|
175
|
-
LOGGER.info(f"Invoking background command: {cmd}")
|
|
176
|
-
invoke.run(cmd, disown=True)
|
|
177
|
-
return
|
|
178
|
-
|
|
179
|
-
multiprocessing.current_process().name = "das-daq6510"
|
|
180
|
-
|
|
181
|
-
if config.debug:
|
|
182
|
-
logging.basicConfig(level=logging.DEBUG, format=Settings.LOG_FORMAT_FULL)
|
|
183
|
-
|
|
184
|
-
if not is_daq6510_cs_active():
|
|
185
|
-
LOGGER.error("The DAQ6510 Control Server is not running, start the 'daq6510_cs' command "
|
|
186
|
-
"before running the data acquisition.")
|
|
187
|
-
return
|
|
188
|
-
|
|
189
|
-
if not is_storage_manager_active():
|
|
190
|
-
LOGGER.error("The storage manager is not running, start the core services "
|
|
191
|
-
"before running the data acquisition.")
|
|
192
|
-
return
|
|
193
|
-
|
|
194
|
-
if input_file:
|
|
195
|
-
setup = load_setup_from_input_file(input_file)
|
|
196
|
-
else:
|
|
197
|
-
setup = load_setup_from_configuration_manager()
|
|
198
|
-
|
|
199
|
-
if setup is None:
|
|
200
|
-
LOGGER.error("ERROR: Could not load setup.")
|
|
201
|
-
sys.exit(1)
|
|
202
|
-
|
|
203
|
-
if config.verbose:
|
|
204
|
-
LOGGER.info(setup)
|
|
205
|
-
|
|
206
|
-
if "DAQ6510" not in setup.gse:
|
|
207
|
-
LOGGER.error("ERROR: no DAQ6510 entry in the loaded Setup.")
|
|
208
|
-
sys.exit(1)
|
|
209
|
-
|
|
210
|
-
if not channel_list:
|
|
211
|
-
channel_list = setup.gse.DAQ6510.channels
|
|
212
|
-
|
|
213
|
-
if not count:
|
|
214
|
-
count = setup.gse.DAQ6510.route.scan.COUNT.SCAN
|
|
215
|
-
|
|
216
|
-
if not interval:
|
|
217
|
-
interval = setup.gse.DAQ6510.route.scan.INTERVAL
|
|
218
|
-
|
|
219
|
-
if not delay:
|
|
220
|
-
delay = setup.gse.DAQ6510.route.delay
|
|
221
|
-
|
|
222
|
-
count, interval, delay = int(count), int(interval), int(delay)
|
|
223
|
-
|
|
224
|
-
channel_count = count_number_of_channels(channel_list)
|
|
225
|
-
channel_names = get_channel_names(channel_list)
|
|
226
|
-
|
|
227
|
-
DAQ_METRICS = {}
|
|
228
|
-
for channel in channel_names:
|
|
229
|
-
metrics_name = hk_conversion_table[channel]
|
|
230
|
-
DAQ_METRICS[metrics_name] = Gauge(f"{metrics_name}",
|
|
231
|
-
f"The current measure for the sensor connected to channel {channel} "
|
|
232
|
-
f"({metrics_name}) on the DAQ6510")
|
|
233
|
-
|
|
234
|
-
start_http_server(DAS.METRICS_PORT_DAQ6510)
|
|
235
|
-
|
|
236
|
-
# Initialize some variables that will be used for registration to the Storage Manager
|
|
237
|
-
|
|
238
|
-
origin = "DAS-DAQ6510"
|
|
239
|
-
persistence_class = CSV
|
|
240
|
-
prep = {
|
|
241
|
-
"mode": "a",
|
|
242
|
-
"ending": "\n",
|
|
243
|
-
"column_names": ["timestamp", *column_names],
|
|
244
|
-
}
|
|
245
|
-
|
|
246
|
-
killer = SignalCatcher()
|
|
247
|
-
|
|
248
|
-
with DAQ6510Proxy() as daq, StorageProxy() as storage:
|
|
249
|
-
daq.reset()
|
|
250
|
-
|
|
251
|
-
dt = datetime.now(tz=timezone.utc)
|
|
252
|
-
daq.set_time(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second)
|
|
253
|
-
LOGGER.info(f"DAQ6510 date and time set: {daq.get_time()}")
|
|
254
|
-
|
|
255
|
-
storage.register({"origin": origin, "persistence_class": persistence_class, "prep": prep})
|
|
256
|
-
|
|
257
|
-
storage.save({"origin": origin, "data": f"# columns: {column_names}"})
|
|
258
|
-
|
|
259
|
-
for sensor in setup.gse.DAQ6510.sensors:
|
|
260
|
-
for function in setup.gse.DAQ6510.sensors[sensor]:
|
|
261
|
-
sense = {
|
|
262
|
-
function.upper(): [
|
|
263
|
-
(key, value)
|
|
264
|
-
for key, value in flatten_dict(
|
|
265
|
-
setup.gse.DAQ6510.sensors[sensor][function]
|
|
266
|
-
).items()
|
|
267
|
-
if key != "channels"
|
|
268
|
-
]
|
|
269
|
-
}
|
|
270
|
-
function_channel_list = setup.gse.DAQ6510.sensors[sensor][function].channels
|
|
271
|
-
if config.verbose:
|
|
272
|
-
LOGGER.info(f"{sense=}")
|
|
273
|
-
LOGGER.info(f"{function_channel_list=}")
|
|
274
|
-
daq.configure_sensors(channel_list=function_channel_list, sense=sense)
|
|
275
|
-
|
|
276
|
-
LOGGER.info(f"global: {channel_list=}, {channel_count=}")
|
|
277
|
-
|
|
278
|
-
daq.setup_measurements(channel_list=channel_list)
|
|
279
|
-
|
|
280
|
-
while True:
|
|
281
|
-
try:
|
|
282
|
-
response = daq.perform_measurement(
|
|
283
|
-
channel_list=channel_list, count=count, interval=interval
|
|
284
|
-
)
|
|
285
|
-
|
|
286
|
-
if killer.term_signal_received:
|
|
287
|
-
break
|
|
288
|
-
|
|
289
|
-
if not response:
|
|
290
|
-
LOGGER.warning("Received an empty response from the DAQ6510, "
|
|
291
|
-
"check the connection with the device.")
|
|
292
|
-
LOGGER.warning(f"Response: {response=}")
|
|
293
|
-
time.sleep(1.0)
|
|
294
|
-
continue
|
|
295
|
-
|
|
296
|
-
if isinstance(response, Failure):
|
|
297
|
-
LOGGER.warning(f"Received a Failure from the DAQ6510 Control Server:")
|
|
298
|
-
LOGGER.warning(f"Response: {response}")
|
|
299
|
-
time.sleep(1.0)
|
|
300
|
-
continue
|
|
301
|
-
|
|
302
|
-
# Process and save the response
|
|
303
|
-
|
|
304
|
-
# LOGGER.debug(f"{response=}")
|
|
305
|
-
|
|
306
|
-
dts = response[0][1].strip()
|
|
307
|
-
dt = datetime.strptime(dts[:-3], "%m/%d/%Y %H:%M:%S.%f")
|
|
308
|
-
datetime_string = format_datetime(dt.replace(tzinfo=timezone.utc))
|
|
309
|
-
|
|
310
|
-
data = {hk_conversion_table[measure[0]]: float(measure[2]) for measure in response}
|
|
311
|
-
|
|
312
|
-
data.update({"timestamp": datetime_string})
|
|
313
|
-
|
|
314
|
-
# FIXME: we probably need to do something with the units...
|
|
315
|
-
|
|
316
|
-
units = [measure[3] for measure in response]
|
|
317
|
-
|
|
318
|
-
# LOGGER.debug(f"{data=}")
|
|
319
|
-
|
|
320
|
-
storage.save({"origin": origin, "data": data})
|
|
321
|
-
|
|
322
|
-
# Now extract channels from the response to update the metrics
|
|
323
|
-
|
|
324
|
-
for channel in [measure[0] for measure in response]:
|
|
325
|
-
metrics_name = hk_conversion_table[channel]
|
|
326
|
-
DAQ_METRICS[metrics_name].set(data[metrics_name])
|
|
327
|
-
|
|
328
|
-
# wait for the next measurement to be done (delay)
|
|
329
|
-
|
|
330
|
-
time.sleep(delay)
|
|
331
|
-
|
|
332
|
-
except KeyboardInterrupt:
|
|
333
|
-
LOGGER.debug("Interrupt received, terminating...")
|
|
334
|
-
break
|
|
335
|
-
except Exception as exc:
|
|
336
|
-
LOGGER.warning(f"DAS Exception: {exc}", exc_info=True)
|
|
337
|
-
LOGGER.warning("Got a corrupt response from the DAQ6510. "
|
|
338
|
-
"Check log messages for 'DAS Exception'.")
|
|
339
|
-
time.sleep(1.0)
|
|
340
|
-
continue
|
|
341
|
-
|
|
342
|
-
storage.unregister({"origin": origin})
|
|
343
|
-
|
|
344
|
-
# todo: start PTC10 automatically when when its CS starts. In click options below? background?
|
|
345
|
-
# todo: add disable_heater_error
|
|
346
|
-
@cli.command()
|
|
347
|
-
@click.option(
|
|
348
|
-
"--background/--no-background", "-bg/-no-bg", default=False,
|
|
349
|
-
help="start the data acquisition in the background"
|
|
350
|
-
)
|
|
351
|
-
@click.option(
|
|
352
|
-
"--user_regulation", default=None, help="activate ptc10 regulation with the given parameter as temperature setpoint (float)"
|
|
353
|
-
)
|
|
354
|
-
# todo: voir pour option auto_regulation is_fkag=True valeur par défaut
|
|
355
|
-
@click.option(
|
|
356
|
-
"--auto_regulation", is_flag=True, default=None, help="if this option is given, activate ptc10 regulation with survival mode (i.e. with T_min_NOP as temperature setpoint)"
|
|
357
|
-
)
|
|
358
|
-
@click.argument("input_file", type=str, required=False)
|
|
359
|
-
@pass_config
|
|
360
|
-
def ptc10(config, user_regulation: float, auto_regulation: int, background: bool, input_file: str):
|
|
361
|
-
"""
|
|
362
|
-
Run the Data Acquisition System for the DAQ6510.
|
|
363
|
-
|
|
364
|
-
INPUT_FILE: YAML file containing the Setup for the DAQ6510 [optional]
|
|
365
|
-
|
|
366
|
-
Note: When this command runs in the background, send an INTERRUPT SIGNAL with the kill command
|
|
367
|
-
to terminate. Never send a KILL SIGNAL (9) because then the process will not properly be
|
|
368
|
-
unregistered from the storage manager.
|
|
369
|
-
|
|
370
|
-
$ kill -INT <PID>
|
|
371
|
-
|
|
372
|
-
"""
|
|
373
|
-
|
|
374
|
-
if background:
|
|
375
|
-
cmd = "das ptc10"
|
|
376
|
-
cmd += f" --user_regulation {user_regulation}"
|
|
377
|
-
cmd += f" --auto_regulation {auto_regulation}"
|
|
378
|
-
cmd += f" {input_file}" if input_file else ""
|
|
379
|
-
LOGGER.info(f"Invoking background command: {cmd}")
|
|
380
|
-
invoke.run(cmd, disown=True)
|
|
381
|
-
return
|
|
382
|
-
|
|
383
|
-
multiprocessing.current_process().name = "das-ptc10"
|
|
384
|
-
|
|
385
|
-
if config.debug:
|
|
386
|
-
logging.basicConfig(level=logging.DEBUG, format=Settings.LOG_FORMAT_FULL)
|
|
387
|
-
|
|
388
|
-
if not is_ptc10_cs_active():
|
|
389
|
-
LOGGER.error("The PTC10 Control Server is not running, start the 'ptc10_cs' command "
|
|
390
|
-
"before running the data acquisition.")
|
|
391
|
-
return
|
|
392
|
-
|
|
393
|
-
if not is_storage_manager_active():
|
|
394
|
-
LOGGER.error("The storage manager is not running, start the core services "
|
|
395
|
-
"before running the data acquisition.")
|
|
396
|
-
return
|
|
397
|
-
|
|
398
|
-
if input_file:
|
|
399
|
-
setup = load_setup_from_input_file(input_file)
|
|
400
|
-
else:
|
|
401
|
-
setup = load_setup_from_configuration_manager()
|
|
402
|
-
|
|
403
|
-
if setup is None:
|
|
404
|
-
LOGGER.error("ERROR: Could not load setup.")
|
|
405
|
-
sys.exit(1)
|
|
406
|
-
|
|
407
|
-
if config.verbose:
|
|
408
|
-
LOGGER.info(setup)
|
|
409
|
-
|
|
410
|
-
if "PTC10" not in setup.gse:
|
|
411
|
-
LOGGER.error("ERROR: no PTC10 entry in the loaded Setup.")
|
|
412
|
-
sys.exit(1)
|
|
413
|
-
|
|
414
|
-
channel_names = list(setup.gse.PTC10.channel_names.values()) # defined in config file
|
|
415
|
-
|
|
416
|
-
# Creation of list HK_names used to store HK.
|
|
417
|
-
site_id = setup.site_id # Name of the TH
|
|
418
|
-
|
|
419
|
-
HK_names = ["G" + site_id + "_" + sensor_name for sensor_name in channel_names[:4]]
|
|
420
|
-
units = ["ampere", "watt", "volt"]
|
|
421
|
-
for unit in units:
|
|
422
|
-
HK_names.extend(["G" + site_id + "_" + name + "_" + unit for name in channel_names[-3:]])
|
|
423
|
-
HK_names.insert(0, HK_names.pop(2)) # I move GIAS_TTS_BiP_01 at the beginning of the list HK_names.
|
|
424
|
-
# HK_names is ['GIAS_TTS_BiP_01', 'GIAS_TRP2', 'GIAS_TRP3', 'GIAS_TRP4', 'GIAS_H1_ampere', 'GIAS_H2_ampere', 'GIAS_H3_ampere', 'GIAS_H1_watt', 'GIAS_H2_watt', 'GIAS_H3_watt', 'GIAS_H1_volt', 'GIAS_H2_volt', 'GIAS_H3_volt']
|
|
425
|
-
|
|
426
|
-
# Creation of Prometheus METRICS in a dictionnary from TM dictionnary
|
|
427
|
-
PTC_METRICS = define_metrics("DAS-PTC10")
|
|
428
|
-
|
|
429
|
-
if not list(PTC_METRICS.keys()) == HK_names: # Check if names in setup file (used for HK names) and names defined in TM dictionary for metrics names are same
|
|
430
|
-
LOGGER.error("The names of HK defined in the current setup file " + setup.get_id() + " are not the same than those in TM dictionary for metrics")
|
|
431
|
-
|
|
432
|
-
start_http_server(DAS.METRICS_PORT_PTC10)
|
|
433
|
-
|
|
434
|
-
# Initialize some variables that will be used for registration to the Storage Manager
|
|
435
|
-
|
|
436
|
-
origin = "DAS-PTC10"
|
|
437
|
-
persistence_class = CSV
|
|
438
|
-
prep = {
|
|
439
|
-
"mode": "a",
|
|
440
|
-
"ending": "\n",
|
|
441
|
-
"header": "PTC10 First Connection Tests",
|
|
442
|
-
"column_names": ["timestamp", *PTC_METRICS],
|
|
443
|
-
}
|
|
444
|
-
|
|
445
|
-
killer = SignalCatcher()
|
|
446
|
-
|
|
447
|
-
with ptc10Proxy() as ptc, StorageProxy() as storage:
|
|
448
|
-
|
|
449
|
-
storage.register({"origin": origin, "persistence_class": persistence_class, "prep": prep})
|
|
450
|
-
|
|
451
|
-
# Renaming the names of channels in PTC10 device with names defined in the setup file
|
|
452
|
-
old_channel_names = ptc.get_names() # old_channel_names is a tuple of 2 lists with the names of inputs (4 sensors)
|
|
453
|
-
# in the first list and the names of outputs (3 heaters) in the second list
|
|
454
|
-
old_channel_names = old_channel_names[0] + old_channel_names[1] # Now old_channel_names is a long list of 7 elements (4 inputs/sensors
|
|
455
|
-
# and 3 outputs/heaters)
|
|
456
|
-
for old_name, new_name in zip(old_channel_names, channel_names):
|
|
457
|
-
ptc.set_name(old_name,new_name)
|
|
458
|
-
|
|
459
|
-
# Setting the input for each heater
|
|
460
|
-
for output_ch, input in zip([1, 2, 3], setup.gse.PTC10.heater_input):
|
|
461
|
-
new_input = setup.gse.PTC10.heater_input[input]
|
|
462
|
-
ptc.set_heater_input(output_ch, new_input)
|
|
463
|
-
|
|
464
|
-
# Configure units before limits (because configuring units modifies limits)
|
|
465
|
-
ptc.output_unit(setup.gse.PTC10.heater_unit)
|
|
466
|
-
|
|
467
|
-
# Setting limits
|
|
468
|
-
low_limit = setup.gse.PTC10.heater_limit.low
|
|
469
|
-
high_limit = setup.gse.PTC10.heater_limit.high
|
|
470
|
-
for output_ch in [1, 2, 3]:
|
|
471
|
-
ptc.output_limit(output_ch, low_limit, high_limit)
|
|
472
|
-
|
|
473
|
-
# Setting PID coefficients
|
|
474
|
-
i = 0
|
|
475
|
-
for pid in setup.gse.PTC10.PID:
|
|
476
|
-
PID_list = list(setup.gse.PTC10.PID[pid].values())
|
|
477
|
-
i += 1
|
|
478
|
-
ptc.set_PID(input_ch=i, output_ch=i, PID=PID_list)
|
|
479
|
-
|
|
480
|
-
# Setting the resistance of the heater in Ohm (manually measured with ohmmeter and written in the setup file)
|
|
481
|
-
R = setup.gse.PTC10.heater_Ohm
|
|
482
|
-
|
|
483
|
-
# Time update from egse server
|
|
484
|
-
ptc.update_time()
|
|
485
|
-
is_on_time = True
|
|
486
|
-
|
|
487
|
-
while True:
|
|
488
|
-
try:
|
|
489
|
-
|
|
490
|
-
# Update time of PTC10 every first of the month to avoid a time lag after several days and a time desynchronisation.
|
|
491
|
-
day = time.gmtime()[2]
|
|
492
|
-
if day >= 2:
|
|
493
|
-
is_on_time = False
|
|
494
|
-
if day == 1 and not is_on_time:
|
|
495
|
-
ptc.update_time()
|
|
496
|
-
is_on_time = True
|
|
497
|
-
|
|
498
|
-
if user_regulation and auto_regulation:
|
|
499
|
-
LOGGER.warning("Only 1 option is expected (user_regulation OR auto_regulation) but both were given.\nTerminating... ")
|
|
500
|
-
break
|
|
501
|
-
|
|
502
|
-
if user_regulation: # If user_regulation is given, then run the function set_stable() with "user_regulation" as temperature setpoint
|
|
503
|
-
try:
|
|
504
|
-
ptc.set_stable(float(user_regulation))
|
|
505
|
-
except ValueError:
|
|
506
|
-
LOGGER.warning("ValueError: user_regulation must be a number.\nTerminating...")
|
|
507
|
-
break
|
|
508
|
-
if auto_regulation: # If auto_regulation is given, then run the function set_survival() (i.e. with T_min_NOP as temperature setpoint)
|
|
509
|
-
ptc.set_survival()
|
|
510
|
-
|
|
511
|
-
# Acquirement of PTC10 time for HK and PTC10 values (temperatures and heaters)
|
|
512
|
-
try:
|
|
513
|
-
response = [ptc.get_time()] + ptc.read_temperature() + ptc.read_heater()[0]
|
|
514
|
-
# Sometimes randomly PTC10 doesn't return the time and there is a Failure, so to avoid
|
|
515
|
-
# a bad type in the column timestamp, if there is a Failure, HK are not saved for this
|
|
516
|
-
# time and the code go back at the top of the while loop thanks to exceptions.
|
|
517
|
-
except TypeError:
|
|
518
|
-
LOGGER.warning("TYPE ERROR")
|
|
519
|
-
continue
|
|
520
|
-
except ValueError:
|
|
521
|
-
LOGGER.warning("VALUE ERROR")
|
|
522
|
-
continue
|
|
523
|
-
|
|
524
|
-
if killer.term_signal_received:
|
|
525
|
-
break
|
|
526
|
-
|
|
527
|
-
if not response:
|
|
528
|
-
LOGGER.warning("Received an empty response from the PTC10, "
|
|
529
|
-
"check the connection with the device.")
|
|
530
|
-
LOGGER.warning(f"Response: {response=}")
|
|
531
|
-
time.sleep(1.0)
|
|
532
|
-
continue
|
|
533
|
-
|
|
534
|
-
if isinstance(response, Failure):
|
|
535
|
-
LOGGER.warning(f"Received a Failure from the PTC10 Control Server:")
|
|
536
|
-
LOGGER.warning(f"Response: {response}")
|
|
537
|
-
time.sleep(1.0)
|
|
538
|
-
continue
|
|
539
|
-
|
|
540
|
-
# Process and save the response
|
|
541
|
-
|
|
542
|
-
response.insert(1, response.pop(3)) # I move the value of TTS_BiP_01 at the beginning of the list response.
|
|
543
|
-
|
|
544
|
-
# Calculation of power : P = RI²
|
|
545
|
-
for i in [5, 6, 7]:
|
|
546
|
-
response.append(R * response[i]**2)
|
|
547
|
-
|
|
548
|
-
# Calculation of voltage : U = RI
|
|
549
|
-
for i in [5, 6, 7]:
|
|
550
|
-
response.append(R * response[i])
|
|
551
|
-
|
|
552
|
-
hk_dict = {"timestamp": response[0]}
|
|
553
|
-
hk_dict.update({k: v for k, v in zip(HK_names, response[1:])})
|
|
554
|
-
|
|
555
|
-
LOGGER.debug(f"{response=}")
|
|
556
|
-
|
|
557
|
-
LOGGER.debug(hk_dict)
|
|
558
|
-
|
|
559
|
-
# Saving HK with PTC10 time as timestamp
|
|
560
|
-
storage.save({"origin": origin, "data": hk_dict})
|
|
561
|
-
|
|
562
|
-
# Now set the values in the metrics
|
|
563
|
-
hk_dict.pop("timestamp")
|
|
564
|
-
for key, value in hk_dict.items():
|
|
565
|
-
PTC_METRICS[key].set(value)
|
|
566
|
-
|
|
567
|
-
|
|
568
|
-
except KeyboardInterrupt:
|
|
569
|
-
LOGGER.debug("Interrupt received, terminating...")
|
|
570
|
-
break
|
|
571
|
-
except Exception as exc:
|
|
572
|
-
LOGGER.warning(f"DAS Exception: {exc}", exc_info=True)
|
|
573
|
-
LOGGER.warning("Got a corrupt response from the PTC10. "
|
|
574
|
-
"Check log messages for 'DAS Exception'.")
|
|
575
|
-
time.sleep(1.0)
|
|
576
|
-
continue
|
|
577
|
-
|
|
578
|
-
ptc.disable_all()
|
|
579
|
-
storage.unregister({"origin": origin})
|
|
580
|
-
|
|
581
|
-
|
|
582
|
-
@cli.command()
|
|
583
|
-
@click.option(
|
|
584
|
-
"--use-all-hk", is_flag=True,
|
|
585
|
-
help=("Use get_all_housekeeping() method to read telemetry from the TCS EGSE. "
|
|
586
|
-
"The device must not be in remote control mode for this.")
|
|
587
|
-
)
|
|
588
|
-
@click.option(
|
|
589
|
-
"--interval", default=10, help="what is the time delay between measurements [seconds]"
|
|
590
|
-
)
|
|
591
|
-
@click.option(
|
|
592
|
-
"--background/--no-background", "-bg/-no-bg", default=False,
|
|
593
|
-
help="start the data acquisition in the background"
|
|
594
|
-
)
|
|
595
|
-
@pass_config
|
|
596
|
-
def tcs(config, use_all_hk, interval, background):
|
|
597
|
-
"""
|
|
598
|
-
Run the Data Acquisition System for the TCS EGSE.
|
|
599
|
-
|
|
600
|
-
Note: When this command runs in the background, send an INTERRUPT SIGNAL with the kill command
|
|
601
|
-
to terminate. Never send a KILL SIGNAL (9) because then the process will not properly be
|
|
602
|
-
unregistered from the storage manager.
|
|
603
|
-
|
|
604
|
-
$ kill -INT <PID>
|
|
605
|
-
|
|
606
|
-
"""
|
|
607
|
-
|
|
608
|
-
rich.print(
|
|
609
|
-
"[red]WARNING[/red]: This function of the DAS has been deprecated and is replaced by the "
|
|
610
|
-
"TCSTelemetry process which is automatically started by the tcs_cs. The `das tcs` will be "
|
|
611
|
-
"removed shortly.")
|
|
612
|
-
|
|
613
|
-
return
|
|
614
|
-
|
|
615
|
-
if background:
|
|
616
|
-
cmd = "das tcs"
|
|
617
|
-
cmd += " --use-all-hk" if use_all_hk else ""
|
|
618
|
-
cmd += f" --interval {interval}"
|
|
619
|
-
LOGGER.info(f"Invoking background command: {cmd}")
|
|
620
|
-
invoke.run(cmd, disown=True)
|
|
621
|
-
return
|
|
622
|
-
|
|
623
|
-
multiprocessing.current_process().name = "das-tcs"
|
|
624
|
-
|
|
625
|
-
if config.debug:
|
|
626
|
-
logging.basicConfig(level=logging.DEBUG, format=Settings.LOG_FORMAT_FULL)
|
|
627
|
-
|
|
628
|
-
start_http_server(DAS.METRICS_PORT_TCS)
|
|
629
|
-
|
|
630
|
-
if not is_tcs_cs_active():
|
|
631
|
-
LOGGER.error("The TCS Control Server is not running, start the 'tcs_cs' command "
|
|
632
|
-
"before running the data acquisition.")
|
|
633
|
-
return
|
|
634
|
-
|
|
635
|
-
if not is_storage_manager_active():
|
|
636
|
-
LOGGER.error("The storage manager is not running, start the core services "
|
|
637
|
-
"before running the data acquisition.")
|
|
638
|
-
return
|
|
639
|
-
|
|
640
|
-
killer = SignalCatcher()
|
|
641
|
-
|
|
642
|
-
with TCSProxy() as tcs_proxy, StorageProxy() as storage:
|
|
643
|
-
|
|
644
|
-
# Not all HK parameters are always sent with the periodic telemetry, so we
|
|
645
|
-
# use the know parameters names. Since we further on pass the values as a dictionary
|
|
646
|
-
# to the storage, values which are not in `names` are silently ignored.
|
|
647
|
-
|
|
648
|
-
names = patterns.keys()
|
|
649
|
-
|
|
650
|
-
# For each of the names, create also a timestamp column for that name
|
|
651
|
-
|
|
652
|
-
columns = list(itertools.chain.from_iterable((x + '_ts', x) for x in names))
|
|
653
|
-
|
|
654
|
-
# Initialize some variables that will be used for registration to the Storage Manager
|
|
655
|
-
# Use the names in the header of the CSV file as column names.
|
|
656
|
-
|
|
657
|
-
origin = "DAS-TCS"
|
|
658
|
-
persistence_class = CSV
|
|
659
|
-
prep = {
|
|
660
|
-
"mode": "a",
|
|
661
|
-
"ending": "\n",
|
|
662
|
-
"header": "TCS EGSE First Connection Tests",
|
|
663
|
-
"column_names": columns,
|
|
664
|
-
}
|
|
665
|
-
|
|
666
|
-
storage.register({"origin": origin, "persistence_class": persistence_class, "prep": prep})
|
|
667
|
-
|
|
668
|
-
while True:
|
|
669
|
-
try:
|
|
670
|
-
data = tcs_proxy.get_all_housekeeping() if use_all_hk else tcs_proxy.get_data()
|
|
671
|
-
if killer.term_signal_received:
|
|
672
|
-
break
|
|
673
|
-
|
|
674
|
-
if isinstance(data, Failure):
|
|
675
|
-
LOGGER.warning(f"Received a Failure from the TCS EGSE Control Server:")
|
|
676
|
-
LOGGER.warning(f"Response: {data}")
|
|
677
|
-
time.sleep(1.0)
|
|
678
|
-
continue
|
|
679
|
-
|
|
680
|
-
LOGGER.debug(f"received {len(data or [])} data items")
|
|
681
|
-
|
|
682
|
-
if data is None:
|
|
683
|
-
continue
|
|
684
|
-
|
|
685
|
-
data = process_data(data)
|
|
686
|
-
storage.save({"origin": origin, "data": data})
|
|
687
|
-
|
|
688
|
-
time.sleep(interval)
|
|
689
|
-
|
|
690
|
-
except KeyboardInterrupt:
|
|
691
|
-
LOGGER.debug("Interrupt received, terminating...")
|
|
692
|
-
break
|
|
693
|
-
|
|
694
|
-
storage.unregister({"origin": origin})
|
|
695
|
-
|
|
696
|
-
|
|
697
|
-
TCS_AMBIENT_RTD = Gauge(
    "tcs_ambient_rtd", "The current ambient temperature on the TCS EGSE"
)
TCS_INTERNAL_RTD = Gauge(
    "tcs_internal_rtd", "The current internal temperature of the TCS EGSE"
)
TCS_FEE_RTD_1 = Gauge(
    "tcs_fee_rtd_1", "The TRP22 sensor for control channel 1"
)
TCS_FEE_RTD_2 = Gauge(
    "tcs_fee_rtd_2", "The TRP22 sensor for control channel 2"
)
TCS_FEE_RTD_3 = Gauge(
    "tcs_fee_rtd_3", "The TRP22 sensor for control channel 3"
)
TCS_TOU_RTD_1 = Gauge(
    "tcs_tou_rtd_1", "The TRP1 sensor for control channel 1"
)
TCS_TOU_RTD_2 = Gauge(
    "tcs_tou_rtd_2", "The TRP1 sensor for control channel 2"
)
TCS_TOU_RTD_3 = Gauge(
    "tcs_tou_rtd_3", "The TRP1 sensor for control channel 3"
)
TCS_CH1_IOUT = Gauge(
    "tcs_ch1_iout", "channel 1 iout"
)
TCS_CH1_POUT = Gauge(
    "tcs_ch1_pout", "channel 1 pout"
)
TCS_CH1_VOUT = Gauge(
    "tcs_ch1_vout", "channel 1 vout"
)
TCS_CH2_IOUT = Gauge(
    "tcs_ch2_iout", "channel 2 iout"
)
TCS_CH2_POUT = Gauge(
    "tcs_ch2_pout", "channel 2 pout"
)
TCS_CH2_VOUT = Gauge(
    "tcs_ch2_vout", "channel 2 vout"
)
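The Gauge objects above, together with the start_http_server() calls further down, appear to come from the prometheus_client package: each gauge is updated with .set() in process_data() and exposed over HTTP for Prometheus to scrape. A minimal sketch of that mechanism (the gauge name and port are illustrative, not the ones used by the DAS):

    from prometheus_client import Gauge, start_http_server

    demo_gauge = Gauge("demo_ambient_rtd", "Illustrative ambient temperature gauge")

    start_http_server(9999)   # metrics are then served on http://localhost:9999/metrics
    demo_gauge.set(23.45)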
def process_data(data: List) -> dict:
    """
    Process the output of the `get_data()` and the `get_all_housekeeping()` commands. Telemetry
    parameters can occur multiple times, only the last entry is retained.

    This function also updates the metrics that are requested by Prometheus.

    Args:
        data: the data as returned by get_data() and get_all_housekeeping().

    Returns:
        an up-to-date dictionary with the parameter values and their timestamps.
    """

    processed_data = {}

    # Create a proper dictionary with the last updated telemetry values.
    # We expect 3 entries: name, date, and value

    for item in data:

        if len(item) == 3:
            value = extract_value(item[0], item[2])
            processed_data.update({f"{item[0]}_ts": item[1], item[0]: value})
        else:
            click.echo("WARNING: incorrect format in data response from TCS EGSE.")

    # Fill in the metrics that will be monitored by Prometheus

    for name, metric in (
        ("ambient_rtd", TCS_AMBIENT_RTD),
        ("internal_rtd", TCS_INTERNAL_RTD),
        ("fee_rtd_1", TCS_FEE_RTD_1),
        ("fee_rtd_2", TCS_FEE_RTD_2),
        ("fee_rtd_3", TCS_FEE_RTD_3),
        ("tou_rtd_1", TCS_TOU_RTD_1),
        ("tou_rtd_2", TCS_TOU_RTD_2),
        ("tou_rtd_3", TCS_TOU_RTD_3),
        ("ch1_iout", TCS_CH1_IOUT),
        ("ch1_pout", TCS_CH1_POUT),
        ("ch1_vout", TCS_CH1_VOUT),
        ("ch2_iout", TCS_CH2_IOUT),
        ("ch2_pout", TCS_CH2_POUT),
        ("ch2_vout", TCS_CH2_VOUT),
    ):
        if name in processed_data:
            metric.set(processed_data[name])

    return processed_data
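A worked example of what process_data() builds from a single telemetry item (the timestamp and value are made up; the unit is stripped by extract_value() using the regex patterns defined below):

    item = ["ambient_rtd", "2023-11-07T12:00:00", "23.45 ºC"]
    # extract_value("ambient_rtd", "23.45 ºC") returns "23.45", so the loop adds:
    #   {"ambient_rtd_ts": "2023-11-07T12:00:00", "ambient_rtd": "23.45"}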
# Define different regex patterns, e.g. for temperature, time, power, etc.

temperature_pattern = re.compile(r'(.*) ºC')
seconds_pattern = re.compile(r'(.*) s')
milliseconds_pattern = re.compile(r'(.*) ms')
current_pattern = re.compile(r'(.*) A \[(.*) Apk\]')
voltage_pattern = re.compile(r'(.*) V')
voltage_peak_pattern = re.compile(r'(.*) V \[(.*) Vpk\]')
power_pattern = re.compile(r'(.*) mW \[(.*) mWavg\]')
storage_pattern = re.compile(r'\[(.*)\]')
match_all_pattern = re.compile(r'(.*)')

# Assign parsing patterns to each of the parameters that need specific parsing.

patterns = {
    'ambient_rtd': temperature_pattern,
    'ch1_clkheater_period': milliseconds_pattern,
    'ch1_clkheater_ticks': seconds_pattern,
    'ch1_iout': current_pattern,
    'ch1_pid_proctime': seconds_pattern,
    'ch1_pid_sp': temperature_pattern,
    'ch1_pid_ts': seconds_pattern,
    'ch1_pout': power_pattern,
    'ch1_pwm_ontime': milliseconds_pattern,
    'ch1_pwm_proctime': seconds_pattern,
    'ch1_tav': temperature_pattern,
    'ch1_vdc': voltage_pattern,
    'ch1_vout': voltage_peak_pattern,
    'ch2_clkheater_period': milliseconds_pattern,
    'ch2_clkheater_ticks': seconds_pattern,
    'ch2_iout': current_pattern,
    'ch2_pid_proctime': seconds_pattern,
    'ch2_pid_sp': temperature_pattern,
    'ch2_pid_ts': seconds_pattern,
    'ch2_pout': power_pattern,
    'ch2_pwm_ontime': milliseconds_pattern,
    'ch2_pwm_proctime': seconds_pattern,
    'ch2_tav': temperature_pattern,
    'ch2_vdc': voltage_pattern,
    'ch2_vout': voltage_peak_pattern,
    'fee_rtd_1': temperature_pattern,
    'fee_rtd_2': temperature_pattern,
    'fee_rtd_3': temperature_pattern,
    'fee_rtd_tav': temperature_pattern,
    'internal_rtd': temperature_pattern,
    'ni9401_external_clkheater_period': seconds_pattern,
    'ni9401_external_clkheater_timeout': seconds_pattern,
    'psu_vdc': voltage_pattern,
    'spare_rtd_1': temperature_pattern,
    'spare_rtd_2': temperature_pattern,
    'spare_rtd_3': temperature_pattern,
    'spare_rtd_tav': temperature_pattern,
    'storage_mmi': storage_pattern,
    'storage_realtime': storage_pattern,
    'tou_rtd_1': temperature_pattern,
    'tou_rtd_2': temperature_pattern,
    'tou_rtd_3': temperature_pattern,
    'tou_rtd_tav': temperature_pattern,
}


def extract_value(key, value):
    """
    Extract the actual value from the string containing the value and unit plus potential
    additional info. Parsing is done with dedicated regular expressions per parameter, e.g.
    parsing a temperature takes the 'ºC' into account when extracting the actual value.

    Args:
        key (str): name of the parameter
        value (str): the value as returned by the TCS EGSE
    """

    if key not in patterns:
        return value

    match = patterns[key].search(value)
    if match is not None:
        value = match.group(1)
    return value
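A few illustrative calls to extract_value() with the patterns above; the input strings only mimic the TCS EGSE formatting and are assumptions, not captured device output:

    extract_value("ch1_pid_sp", "-75.00 ºC")          # -> '-75.00'
    extract_value("ch1_iout", "0.123 A [0.456 Apk]")  # -> '0.123'
    extract_value("storage_mmi", "[remote]")          # -> 'remote'
    extract_value("some_other_key", "True")           # no pattern assigned, returned unchanged

Note that the extracted values remain strings; no numeric conversion is done at this point.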
@cli.command()
@click.option(
    "--use-all-hk", is_flag=True,
    help=("Use get_all_housekeeping() method to read telemetry from the CDAQ. "
          "The device must not be in remote control mode for this.")
)
@click.option(
    "--interval", default=1, help="the time delay between measurements [seconds]"
)
@click.option(
    "--background/--no-background", "-bg/-no-bg", default=False,
    help="start the data acquisition in the background"
)
@pass_config
def cdaq(config, use_all_hk, interval, background):
    """
    Run the Data Acquisition System for the CDAQ.

    INPUT_FILE: YAML file containing the Setup for the CDAQ [optional]

    Note: When this command runs in the background, send an INTERRUPT SIGNAL with the kill command
    to terminate. Never send a KILL SIGNAL (9) because then the process will not properly be
    unregistered from the storage manager.

        $ kill -INT <PID>

    """

    if background:
        cmd = "das cdaq-photo"
        cmd += " --use-all-hk" if use_all_hk else ""
        cmd += f" --interval {interval}"
        LOGGER.info(f"Invoking background command: {cmd}")
        invoke.run(cmd, disown=True)
        return

    multiprocessing.current_process().name = "das-cdaq-photodiodes"

    if config.debug:
        logging.basicConfig(level=logging.DEBUG, format=Settings.LOG_FORMAT_FULL)

    if not is_cdaq9184_cs_active():
        LOGGER.error("The cdaq Control Server is not running, start the 'cdaq_cs' command "
                     "before running the data acquisition.")
        return

    if not is_storage_manager_active():
        LOGGER.error("The storage manager is not running, start the core services "
                     "before running the data acquisition.")
        return

    metrics_cdaq = define_metrics("DAS-CDAQ-PHOTODIODES")
    hk_names = ["GIAS_OGSE2_PHOTOD_1", "GIAS_OGSE2_PHOTOD_2", "GIAS_OGSE2_TAMPLI_1", "GIAS_OGSE2_TAMPLI_2",
                "GIAS_OGSE2_TSPHERE", "GIAS_OGSE2_GAMPLI_1", "GIAS_OGSE2_GAMPLI_2"]

    # The unit is V for the photodiodes, °C for the temperatures, and the gains are dimensionless.

    start_http_server(DAS.METRICS_PORT_CDAQ)

    # Initialize some variables that will be used for registration to the Storage Manager

    origin = "DAS-CDAQ-PHOTODIODES"
    persistence_class = CSV
    prep = {
        "mode": "a",
        "ending": "\n",
        "header": "CDAQ First Connection Tests",
        "column_names": ["timestamp", *metrics_cdaq],
    }

    killer = SignalCatcher()

    with cdaq9184Proxy() as cdaq, StorageProxy() as storage:

        # Use the names in the header of the CSV file as column names.

        storage.register({"origin": origin, "persistence_class": persistence_class, "prep": prep})

        while True:
            try:
                response = cdaq.read_values()

                if killer.term_signal_received:
                    break
                if not response:
                    LOGGER.warning("Received an empty response from the CDAQ, "
                                   "check the connection with the device.")
                    LOGGER.warning(f"Response: {response=}")
                    time.sleep(1.0)
                    continue
                if isinstance(response, Failure):
                    LOGGER.warning(f"Received a Failure from the CDAQ Control Server:")
                    LOGGER.warning(f"Response: {response}")
                    time.sleep(1.0)
                    continue

                reformatted_response = reformat_cdaq_values(response)
                values = reformatted_response[2:]
                hk_dict = {key: value for key, value in zip(hk_names, values)}
                timestamp = format_datetime(datetime.strptime(reformatted_response[0] + '-' + reformatted_response[1], "%y/%m/%d-%H:%M:%S.%f"))
                data = [timestamp] + values
                storage.save({"origin": origin, "data": data})

                for key, value in hk_dict.items():
                    metrics_cdaq[key].set(value)

                # time.sleep(interval)

            except KeyboardInterrupt:
                LOGGER.debug("Interrupt received, terminating...")
                break

            except Exception as exc:
                LOGGER.warning(f"DAS Exception: {exc}", exc_info=True)
                LOGGER.warning("Got a corrupt response from the CDAQ. "
                               "Check log messages for 'DAS Exception'.")
                time.sleep(1.0)
                continue

        storage.unregister({"origin": origin})
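The timestamp handling above assumes the first two elements returned by the CDAQ are a date string and a time string which, joined with '-', match the strptime format used. An illustrative parse (the exact strings coming from LabVIEW are an assumption here):

    from datetime import datetime

    raw_date, raw_time = "24/01/15", "13:45:10.123456"
    dt = datetime.strptime(raw_date + '-' + raw_time, "%y/%m/%d-%H:%M:%S.%f")
    # dt == datetime(2024, 1, 15, 13, 45, 10, 123456); format_datetime() then renders it
    # before the row is written to the CSV file.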
def reformat_cdaq_values(data_to_be_reformatted: list) -> list:
    data = data_to_be_reformatted
    if len(data) != 35:
        LOGGER.error(f"Data received from LabVIEW does not have the expected length ({len(data)} instead of 35).")
    else:
        # The following elements (see the names in the headers) are not wanted, so they are removed:
        # 1) Filtre_Roue_1, Filtre_Roue_2, Shutter, FEMTO_1_BIAS, FEMTO_2_BIAS, FEMTO_1_Input_Current,
        #    FEMTO_2_Input_Current (the 7 last elements)
        # 2) Temp_4 to Temp_8 (elements from index 16 to 25)
        # 3) FEMTO_1_OVERLOAD and FEMTO_2_OVERLOAD (elements at indexes 4, 5, 8 and 9)

        # 1)
        del data[-7:]
        # 2)
        del data[16:26]
        # 3)
        for i in [9, 8, 5, 4]:
            del data[i]

        # Only the mean is wanted, not the deviation, so all deviation elements are removed.
        # They are at indexes [3, 5, 7, 9, 11] ( = list(range(3, 12, 2)) )
        for index_to_delete in sorted(list(range(3, 12, 2)), reverse=True):  # delete in reverse order
            del data[index_to_delete]

        # The 2 first elements of data are date strings and are left as str. The other elements
        # (the relevant values) are converted from str to float.
        data = data[:2] + [float(value) for value in data[2:]]

    return data
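As a sanity check on the deletions in reformat_cdaq_values(): 35 - 7 - 10 - 4 - 5 = 9 elements remain, i.e. the two date/time strings plus the 7 values that are zipped with hk_names in the cdaq command above. A quick sketch with dummy input:

    dummy = [str(i) for i in range(35)]            # stand-in for the 35 LabVIEW fields
    dummy[0], dummy[1] = "24/01/15", "13:45:10.123456"
    reformatted = reformat_cdaq_values(dummy)
    assert len(reformatted) == 9                   # 2 date/time strings + 7 float values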
@cli.command()
@click.option(
    "--background/--no-background", "-bg/-no-bg", default=False,
    help="start the data acquisition in the background"
)
@pass_config
def cdaq_alarms(config, background):
    """
    Run the Data Acquisition System for the CDAQ alarms (CDAQ9375).

    INPUT_FILE: YAML file containing the Setup for the CDAQ [optional]

    Note: When this command runs in the background, send an INTERRUPT SIGNAL with the kill command
    to terminate. Never send a KILL SIGNAL (9) because then the process will not properly be
    unregistered from the storage manager.

        $ kill -INT <PID>

    """

    if background:
        cmd = "das cdaq-alarms"
        LOGGER.info(f"Invoking background command: {cmd}")
        invoke.run(cmd, disown=True)
        return

    multiprocessing.current_process().name = "das-cdaq-alarms"

    if config.debug:
        logging.basicConfig(level=logging.DEBUG, format=Settings.LOG_FORMAT_FULL)

    if not is_cdaq9375_cs_active():
        LOGGER.error("The cdaq9375 Control Server is not running, start the 'cdaq9375_cs' command "
                     "before running the data acquisition.")
        return

    if not is_tcs_cs_active():
        LOGGER.error("The TCS Control Server is not running, start the 'tcs_cs' command "
                     "before running the data acquisition.")
        return

    if not is_aeu_cs_active(name="CRIO", timeout=1):
        LOGGER.error("The AEU Control Server is not running, start the 'aeu_cs' command "
                     "before running the data acquisition.")
        return

    if not is_storage_manager_active():
        LOGGER.error("The storage manager is not running, start the core services "
                     "before running the data acquisition.")
        return

    setup = load_setup_from_configuration_manager()

    metrics_cdaq_alarms = define_metrics("DAS-CDAQ-ALARMS")

    start_http_server(DAS.METRICS_PORT_CDAQ_ALARMS)

    # Initialize some variables that will be used for registration to the Storage Manager

    origin = "DAS-CDAQ-ALARMS"
    persistence_class = CSV
    prep = {
        "mode": "a",
        "ending": "\n",
        "header": "CDAQ-ALARMS First Connection Tests",
        "column_names": ["timestamp", *metrics_cdaq_alarms],
    }

    killer = SignalCatcher()

    with cdaq9375Proxy() as cdaq, StorageProxy() as storage, TCSProxy() as tcs_proxy, CRIOProxy() as aeu_crio:

        # Use the names in the header of the CSV file as column names.

        storage.register({"origin": origin, "persistence_class": persistence_class, "prep": prep})

        alarms_temperature = setup.gse.CDAQ_alarms.alarms_temperature

        trp1_min_op = alarms_temperature.trp1_min_op
        trp1_max_op = alarms_temperature.trp1_max_op
        trp1_min_nop = alarms_temperature.trp1_min_nop
        trp1_max_nop = alarms_temperature.trp1_max_nop
        trp22_min_op = alarms_temperature.trp22_min_op
        trp22_max_op = alarms_temperature.trp22_max_op
        trp22_min_nop = alarms_temperature.trp22_min_nop
        trp22_max_nop = alarms_temperature.trp22_max_nop

        alarm_exp1 = False  # Low Temp NOP
        alarm_exp2 = False  # High Temp NOP
        alarm_exp3 = False  # Low/High Temp OP
        alarm_exp4 = False  # UPS alarm (UPS_Arrakis_alarm_summary or UPS_Ix_alarm_summary)
        while True:
            try:
                response = cdaq.get_tvac_and_ups_state()

                if killer.term_signal_received:
                    break
                if not response:
                    LOGGER.warning("Received an empty response from the CDAQ9375, "
                                   "check the connection with the device.")
                    LOGGER.warning(f"Response: {response=}")
                    time.sleep(1.0)
                    continue
                if isinstance(response, Failure):
                    LOGGER.warning(f"Received a Failure from the CDAQ9375 Control Server:")
                    LOGGER.warning(f"Response: {response}")
                    time.sleep(1.0)
                    continue

                # EXP4
                if any([response["UPS_Ix_alarm_summary"], response["UPS_Ix_power_supply_absence"],
                        response["UPS_Arrakis_alarm_summary"], response["UPS_Arrakis_power_supply_absence"]]):
                    alarm_exp4 = True
                else:
                    alarm_exp4 = False

                try:
                    trp1_avg = tcs_proxy.get_housekeeping_value("tou_rtd_tav").value

                    if not trp1_avg:
                        LOGGER.warning("Received an empty response from the TCS, "
                                       "check the connection with the device.")
                        LOGGER.warning(f"Response: {trp1_avg=}")
                        time.sleep(1.0)
                        continue
                    if isinstance(trp1_avg, Failure):
                        LOGGER.warning(f"Received a Failure from the TCS EGSE Control Server:")
                        LOGGER.warning(f"Response: {trp1_avg}")
                        time.sleep(1.0)
                        continue

                    trp1_avg = float(trp1_avg)

                except ValueError:
                    LOGGER.warning(f"TRP1 ValueError in cdaq alarms: trp1_avg should be a number, got {trp1_avg}."
                                   f"\nTerminating...")
                    if trp1_avg == "tbd":
                        LOGGER.warning("Got TBD for TRP1_AVG.\nCheck that the task is running properly.\nTerminating...")
                    break

                try:
                    trp22_avg = tcs_proxy.get_housekeeping_value("fee_rtd_tav").value

                    if not trp22_avg:
                        LOGGER.warning("Received an empty response from the TCS, "
                                       "check the connection with the device.")
                        LOGGER.warning(f"Response: {trp22_avg=}")
                        time.sleep(1.0)
                        continue
                    if isinstance(trp22_avg, Failure):
                        LOGGER.warning(f"Received a Failure from the TCS EGSE Control Server:")
                        LOGGER.warning(f"Response: {trp22_avg}")
                        time.sleep(1.0)
                        continue

                    trp22_avg = float(trp22_avg)

                except ValueError:
                    LOGGER.warning(f"TRP22_AVG ValueError: trp22_avg should be a number, got {trp22_avg}."
                                   f"\nTerminating...")
                    if trp22_avg == "tbd":
                        LOGGER.warning("Got TBD for TRP22_AVG.\nCheck that the task is running properly.\nTerminating...")
                    break

                aeu_standby = aeu_crio.get_operating_mode() == OperatingMode.STANDBY  # True means the AEU is in standby mode

                if aeu_standby:
                    alarm_exp3 = False
                    # EXP1
                    if trp1_avg < trp1_min_nop or trp22_avg < trp22_min_nop:
                        alarm_exp1 = True
                    else:
                        alarm_exp1 = False

                    # EXP2
                    if trp1_avg > trp1_max_nop or trp22_avg > trp22_max_nop:
                        alarm_exp2 = True
                    else:
                        alarm_exp2 = False

                # EXP3
                else:
                    if any([trp1_avg < trp1_min_op, trp1_avg > trp1_max_op,
                            trp22_avg < trp22_min_op, trp22_avg > trp22_max_op]):
                        alarm_exp3 = True
                    else:
                        alarm_exp3 = False

                alarm_exp_dict = {"alarm_EXP1": int(alarm_exp1),
                                  "alarm_EXP2": int(alarm_exp2),
                                  "alarm_EXP3": int(alarm_exp3),
                                  "alarm_EXP4": int(alarm_exp4)}
                response.update(alarm_exp_dict)
                hk_conversion_table = read_conversion_dict("DAS-CDAQ-ALARMS")
                hk_dict = convert_hk_names(response, hk_conversion_table)
                storage.save({"origin": origin, "data": hk_dict})

                hk_dict.pop("timestamp")
                for key, value in hk_dict.items():
                    metrics_cdaq_alarms[key].set(value)

                cdaq.send_egse_state_to_tvac(alarm_exp1, alarm_exp2, alarm_exp3, alarm_exp4)

            except KeyboardInterrupt:
                LOGGER.debug("Interrupt received, terminating...")
                break

            except Exception as exc:
                LOGGER.warning(f"DAS Exception: {exc}", exc_info=True)
                LOGGER.warning("Got a corrupt hk_dict from the CDAQ. "
                               "Check log messages for 'DAS Exception'.")
                time.sleep(1.0)
                continue

        storage.unregister({"origin": origin})
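A worked example of the alarm expressions above; all limits and readings here are hypothetical, the real limits come from setup.gse.CDAQ_alarms.alarms_temperature:

    trp1_min_nop, trp1_max_nop = -125.0, 45.0      # hypothetical non-operational limits
    trp22_min_nop, trp22_max_nop = -125.0, 45.0
    trp1_avg, trp22_avg = -130.0, 20.0             # hypothetical readings

    aeu_standby = True                             # AEU in STANDBY, so the NOP limits apply

    if aeu_standby:
        alarm_exp1 = trp1_avg < trp1_min_nop or trp22_avg < trp22_min_nop  # True: TRP1 below the low NOP limit
        alarm_exp2 = trp1_avg > trp1_max_nop or trp22_avg > trp22_max_nop  # False
        alarm_exp3 = False                         # the OP-range alarm is only evaluated when not in standby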
if __name__ == "__main__":
    cli()