barsukov-0.0.0.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,71 @@
+ # This workflow will upload a Python Package to PyPI when a release is created
+ # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python#publishing-to-package-registries
+
+ # This workflow uses actions that are not certified by GitHub.
+ # They are provided by a third-party and are governed by
+ # separate terms of service, privacy policy, and support
+ # documentation.
+
+ name: Upload Python Package to PyPI
+
+ on:
+   push:
+     tags:
+       - 'v*.*.*'
+
+ jobs:
+   build:
+     name: Build Distribution
+     runs-on: ubuntu-latest
+
+     steps:
+       - name: Checkout code
+         uses: actions/checkout@v4
+
+       - name: Set up Python
+         uses: actions/setup-python@v5
+         with:
+           python-version: "3.x"
+
+       - name: Build release distribution
+         run: |
+           python -m pip install --upgrade pip
+           python -m pip install build
+           python -m build
+
+       - name: Upload distributions
+         uses: actions/upload-artifact@v4
+         with:
+           name: push-dists
+           path: dist/
+
+   pypi-publish:
+     name: Publish to PyPI
+     if: startsWith(github.ref, 'refs/tags/')
+     needs: build
+     runs-on: ubuntu-latest
+
+     permissions:
+       id-token: write
+
+     environment:
+       name: pypi
+       url: https://pypi.org/p/barsukov
+       #
+       # ALTERNATIVE: if your GitHub Release name is the PyPI project version string
+       # ALTERNATIVE: exactly, uncomment the following line instead:
+       # url: https://pypi.org/project/YOURPROJECT/${{ github.event.release.name }}
+
+     steps:
+       - name: Retrieve release distributions
+         uses: actions/download-artifact@v4
+         with:
+           name: push-dists
+           path: dist/
+
+       - name: Publish push-distributions to PyPI
+         uses: pypa/gh-action-pypi-publish@release/v1
+         with:
+           user: ${{ secrets.PYPI_USERNAME }}
+           password: ${{ secrets.PYPI_PASSWORD }}
+           packages-dir: dist/
@@ -0,0 +1,244 @@
+ #logger files
+ *_.txt
+
+ # Byte-compiled / optimized / DLL files
+ __pycache__/
+ *.py[cod]
+ *$py.class
+
+ # C extensions
+ *.so
+
+ # Distribution / packaging
+ .Python
+ build/
+ develop-eggs/
+ dist/
+ downloads/
+ eggs/
+ .eggs/
+ lib/
+ lib64/
+ parts/
+ sdist/
+ var/
+ wheels/
+ share/python-wheels/
+ *.egg-info/
+ .installed.cfg
+ *.egg
+
+ # PyInstaller
+ # Usually these files are written by a python script from a template
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
+ *.manifest
+ *.spec
+
+ # Installer logs
+ pip-log.txt
+ pip-delete-this-directory.txt
+
+ # Unit test / coverage reports
+ htmlcov/
+ .tox/
+ .nox/
+ .coverage
+ .coverage.*
+ .cache
+ nosetests.xml
+ coverage.xml
+ *.cover
+ *.py,cover
+ .hypothesis/
+ .pytest_cache/
+ cover/
+
+ # Translations
+ *.mo
+ *.pot
+
+ # Django stuff:
+ *.log
+ local_settings.py
+ db.sqlite3
+ db.sqlite3-journal
+
+ # Flask stuff:
+ instance/
+ .webassets-cache
+
+ # Scrapy stuff:
+ .scrapy
+
+ # Sphinx documentation
+ docs/_build/
+
+ # PyBuilder
+ .pybuilder/
+ target/
+
+ # Jupyter Notebook
+ *.ipynb
+ .ipynb_checkpoints
+
+ # IPython
+ profile_default/
+ ipython_config.py
+
+ # pyenv
+ # For a library or package, you might want to ignore these files since the code is
+ # intended to run in multiple environments; otherwise, check them in:
+ # .python-version
+
+ # pipenv
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
+ # install all needed dependencies.
+ #Pipfile.lock
+
+ # poetry
+ # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+ # This is especially recommended for binary packages to ensure reproducibility, and is more
+ # commonly ignored for libraries.
+ # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+ #poetry.lock
+
+ # pdm
+ # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+ #pdm.lock
+ # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
+ # in version control.
+ # https://pdm.fming.dev/latest/usage/project/#working-with-version-control
+ .pdm.toml
+ .pdm-python
+ .pdm-build/
+
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+ __pypackages__/
+
+ # Celery stuff
+ celerybeat-schedule
+ celerybeat.pid
+
+ # SageMath parsed files
+ *.sage.py
+
+ # Environments
+ .env
+ .venv
+ env/
+ venv/
+ ENV/
+ env.bak/
+ venv.bak/
+
+ # Spyder project settings
+ .spyderproject
+ .spyproject
+
+ # Rope project settings
+ .ropeproject
+
+ # mkdocs documentation
+ /site
+
+ # mypy
+ .mypy_cache/
+ .dmypy.json
+ dmypy.json
+
+ # Pyre type checker
+ .pyre/
+
+ # pytype static type analyzer
+ .pytype/
+
+ # Cython debug symbols
+ cython_debug/
+
+ # PyCharm
+ # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+ # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+ # and can be added to the global gitignore or merged into this file. For a more nuclear
+ # option (not recommended) you can uncomment the following to ignore the entire idea folder.
+ .idea/
+
+ #adding jetbrains .idea stuff
+ # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
+ # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
+
+ # User-specific stuff
+ .idea/**/workspace.xml
+ .idea/**/tasks.xml
+ .idea/**/usage.statistics.xml
+ .idea/**/dictionaries
+ .idea/**/shelf
+
+ # AWS User-specific
+ .idea/**/aws.xml
+
+ # Generated files
+ .idea/**/contentModel.xml
+
+ # Sensitive or high-churn files
+ .idea/**/dataSources/
+ .idea/**/dataSources.ids
+ .idea/**/dataSources.local.xml
+ .idea/**/sqlDataSources.xml
+ .idea/**/dynamic.xml
+ .idea/**/uiDesigner.xml
+ .idea/**/dbnavigator.xml
+
+ # Gradle
+ .idea/**/gradle.xml
+ .idea/**/libraries
+
+ # Gradle and Maven with auto-import
+ # When using Gradle or Maven with auto-import, you should exclude module files,
+ # since they will be recreated, and may cause churn. Uncomment if using
+ # auto-import.
+ # .idea/artifacts
+ # .idea/compiler.xml
+ # .idea/jarRepositories.xml
+ # .idea/modules.xml
+ # .idea/*.iml
+ # .idea/modules
+ # *.iml
+ # *.ipr
+
+ # CMake
+ cmake-build-*/
+
+ # Mongo Explorer plugin
+ .idea/**/mongoSettings.xml
+
+ # File-based project format
+ *.iws
+
+ # IntelliJ
+ out/
+
+ # mpeltonen/sbt-idea plugin
+ .idea_modules/
+
+ # JIRA plugin
+ atlassian-ide-plugin.xml
+
+ # Cursive Clojure plugin
+ .idea/replstate.xml
+
+ # SonarLint plugin
+ .idea/sonarlint/
+
+ # Crashlytics plugin (for Android Studio and IntelliJ)
+ com_crashlytics_export_strings.xml
+ crashlytics.properties
+ crashlytics-build.properties
+ fabric.properties
+
+ # Editor-based Rest Client
+ .idea/httpRequests
+
+ # Android studio 3.1+ serialized cache file
+ .idea/caches/build_file_checksums.ser
@@ -0,0 +1,3 @@
+ include README.md
+
+ recursive-include src *.py
@@ -0,0 +1,47 @@
+ Metadata-Version: 2.2
+ Name: barsukov
+ Version: 0.0.0
+ Summary: Experiment Automation Package
+ Author-email: Igor Barsukov <igorb@ucr.edu>, Steven Castaneda <scast206@ucr.edu>
+ Project-URL: Homepage, https://barsukov.ucr.edu
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Operating System :: OS Independent
+ Requires-Python: >=3.6
+ Description-Content-Type: text/markdown
+ Requires-Dist: pytz>=2014.10
+ Requires-Dist: numpy>=1.0.0
+ Requires-Dist: scipy>=0.9.0
+
+ # Barsukov
+
+ Barsukov is a Python library for experiment automation.
+
+ ## Installation
+
+ Use the package manager [pip](https://pip.pypa.io/en/stable/) to install barsukov.
+
+ ```bash
+ pip install barsukov
+ ```
+
+ ## Usage
+
+ ```python
+ #
+ #
+ #
+ #
+ #
+ #
+ #
+ ```
+
+ ## Contributing
+
+ -
+ -
+ -
+
+ ## License
+
+ [MIT](https://choosealicense.com/licenses/mit/)
@@ -0,0 +1,33 @@
+ # Barsukov
+
+ Barsukov is a Python library for experiment automation.
+
+ ## Installation
+
+ Use the package manager [pip](https://pip.pypa.io/en/stable/) to install barsukov.
+
+ ```bash
+ pip install barsukov
+ ```
+
+ ## Usage
+
+ ```python
+ #
+ #
+ #
+ #
+ #
+ #
+ #
+ ```
+
+ ## Contributing
+
+ -
+ -
+ -
+
+ ## License
+
+ [MIT](https://choosealicense.com/licenses/mit/)
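
The Usage section of this README is still a placeholder in the released version. Based on the names exported by src/barsukov/__init__.py further down in this diff (Script, Logger, mwHP, and the barsukov.data FFT helpers), a minimal session could look like the sketch below; the numeric values are illustrative only, and the snippet assumes the package and its pyvisa-based equipment modules import cleanly.

```python
# Hypothetical usage sketch -- values are illustrative, not taken from this release.
import numpy as np
from barsukov.data import fft   # re-exported by src/barsukov/data/__init__.py

# The top-level package also exposes Script, Logger, and the mwHP equipment class:
# from barsukov import Script, Logger, mwHP

t = np.linspace(0.0, 1.0, 1001)          # equidistant time axis, in seconds
y = np.sin(2.0 * np.pi * 50.0 * t)       # 50 Hz test signal
freqs, spectrum, msg = fft(t, y)         # frequencies, normalized FFT, and a log message
```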
@@ -0,0 +1,30 @@
+ [build-system]
+ requires = ["setuptools>=61", "setuptools-scm"]
+ build-backend = "setuptools.build_meta"
+
+ [project]
+ name = "barsukov"
+ dynamic = ["version"]
+ dependencies = [
+     "pytz>=2014.10",
+     "numpy>=1.0.0",
+     "scipy>=0.9.0",
+ ]
+ authors = [
+     { name = "Igor Barsukov", email = "igorb@ucr.edu" },
+     { name = "Steven Castaneda", email = "scast206@ucr.edu" },
+ ]
+ requires-python = ">=3.6"
+ description = "Experiment Automation Package"
+ readme = "README.md"
+ classifiers = [
+     "Programming Language :: Python :: 3",
+     "Operating System :: OS Independent",
+ ]
+
+ [project.urls]
+ Homepage = "https://barsukov.ucr.edu"
+ #Repository = <githublink>
+
+ [tool.setuptools.packages.find]
+ where = ["src"]
@@ -0,0 +1,4 @@
+ [egg_info]
+ tag_build =
+ tag_date = 0
+
@@ -0,0 +1,18 @@
1
+ # Modules:
2
+ from . import time
3
+ from . import data
4
+
5
+
6
+ # Objects/Functions:
7
+ from .script import Script
8
+ from .logger import Logger
9
+
10
+ from .obj2file import *
11
+
12
+
13
+ # Equipment Objects:
14
+ from .exp.mwHP import mwHP
15
+
16
+ __all__ = ["time", "data", "save_object", "load_object", "Script", "Logger", "mwHP"]
17
+
18
+
@@ -0,0 +1 @@
+ from .fft import *
@@ -0,0 +1,87 @@
+ ### BEGIN Dependencies ###
+ import numpy as np
+ import scipy as sp
+ from barsukov.logger import debug # Un-comment before implementing in pip
+ ### END Dependencies ###
+
+
+ def fft(x, y, equidistant_check=True, equidistant_rel_error=1e-4, remove_negative_f=False, inverse=False):
+     ### Takes: x=list of real floats, y=list of data or list of lists of data. Data can be real or complex.
+     ### Returns: freqs, fft_y_norm, msg
+     ### This fft used to give the wrong sign of the imaginary component because of the "wrong" definition of fft in np and sp.
+     ### It has now been corrected to the Mathematica definition of fft = int ... exp(2pi f t)
+     msg = ''
+     if equidistant_check:
+         diffs = np.diff(x)
+         if not np.allclose(diffs, diffs[0], rtol=equidistant_rel_error):
+             # x is not equidistant, must start interpolating
+             x, y = make_equidistant(x, y, step=None)
+             y = y.T
+             msg += debug('fft(x,y) made x,y equidistant.')
+
+     y = np.array(y)
+     #print(y)
+
+     if y.ndim == 1:
+         # y is 1D, treat it as a single column
+         n = len(y)  # Number of points in the column
+         if inverse is False: fft_y = np.fft.ifft(y) * n  # np fft has the "wrong" imag sign
+         else: fft_y = np.fft.fft(y)  # That's why fft and ifft are inverted in this code
+     else:
+         # y is 2D, treat it as multiple columns
+         n = y.shape[1]  # Number of points in each column
+         if inverse is False: fft_y = np.fft.ifft(y, axis=1) * n  # np fft has the "wrong" imag sign
+         else: fft_y = np.fft.fft(y, axis=1)  # That's why fft and ifft are inverted in this code
+
+     sample_spacing = (x[-1] - x[0]) / (n - 1)
+     #print(n, sample_spacing, x[1] - x[0])
+     fft_y_norm = fft_y * sample_spacing  # This normalizes the FFT to the mathematically correct integral
+     freqs = np.fft.fftfreq(n, d=sample_spacing)
+
+     if remove_negative_f is False:
+         sorted_indices = np.argsort(freqs)
+         freqs = freqs[sorted_indices]
+         if isinstance(fft_y_norm[0], (list, np.ndarray)):  # If fft_y_norm contains multiple columns
+             fft_y_norm = [col[sorted_indices] for col in fft_y_norm]
+         else:  # If fft_y_norm is a single column
+             fft_y_norm = fft_y_norm[sorted_indices]
+         msg += debug('fft(x,y) sorted negative and positive frequencies.')
+     else:
+         mask = freqs >= 0  # Boolean array with Falses for negative frequencies, effectively removing them
+         freqs = freqs[mask]
+         # If fft_y_norm contains multiple columns:
+         if isinstance(fft_y_norm[0], (list, np.ndarray)):
+             fft_y_norm = [col[mask] for col in fft_y_norm]
+         else:  # If fft_y_norm is a single column
+             fft_y_norm = fft_y_norm[mask]
+         msg += debug('fft(x,y) removed negative frequencies.')
+
+     msg += debug('freqs, fft_y_norm, msg = fft(x,y) is done.\nThe forward fft approximates the mathematically correct integral over ...exp(+i2pift).\nNow do not forget to apply np.abs(fft_y_norm), np.angle(fft_y_norm), fft_y_norm.real, fft_y_norm.imag')
+     return freqs, fft_y_norm, msg
+
+ def ifft(x, y, equidistant_check=True, equidistant_rel_error=1e-4, remove_negative_f=False):
+     return fft(x, y, equidistant_check=equidistant_check, equidistant_rel_error=equidistant_rel_error, remove_negative_f=remove_negative_f, inverse=True)
+
+ def make_equidistant(x, y, step=None):
+     ### Takes one column x and one or more columns y and makes them equidistant in x
+     ### Returns new_x, new_y. The number of points will likely change.
+     if step is None:
+         # Calculate the smallest difference between consecutive elements
+         min_step = np.min(np.diff(x))
+     else:
+         min_step = step
+
+     # Generate the new equidistant x array
+     new_x = np.arange(x[0], x[-1] + min_step, min_step)
+
+     if isinstance(y[0], (list, np.ndarray)):  # If y contains multiple columns
+         new_y = []
+         for y_column in y:
+             interpolation_function = sp.interpolate.interp1d(x, y_column, kind='linear', fill_value='extrapolate')
+             new_y.append(interpolation_function(new_x))
+         new_y = np.array(new_y).T  # Transpose to match the original structure
+     else:  # If y is a single column
+         interpolation_function = sp.interpolate.interp1d(x, y, kind='linear', fill_value='extrapolate')
+         new_y = interpolation_function(new_x)
+
+     return new_x, new_y
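
The comments in this file pin down the convention: the forward transform approximates the integral of y(t)·exp(+i2πft) dt, normalized by the sample spacing, with np.fft.ifft and np.fft.fft swapped to get the sign right. That convention can be spot-checked against a Gaussian pulse, whose transform magnitude is known in closed form. A small verification sketch, assuming barsukov.data.fft imports cleanly; the grid and width below are illustrative:

```python
# Check fft() against the analytic transform of a Gaussian pulse.
# Under the exp(+i*2*pi*f*t) convention, |FT of exp(-t^2/(2*sigma^2))| = sigma*sqrt(2*pi)*exp(-2*pi^2*sigma^2*f^2).
import numpy as np
from barsukov.data.fft import fft

sigma = 0.05
t = np.linspace(-1.0, 1.0, 2001)                 # equidistant grid, wide enough to contain the pulse
y = np.exp(-t**2 / (2.0 * sigma**2))             # Gaussian centered at t = 0

freqs, spectrum, msg = fft(t, y)                 # negative frequencies kept and sorted by default
analytic = sigma * np.sqrt(2.0 * np.pi) * np.exp(-2.0 * np.pi**2 * sigma**2 * freqs**2)

# Magnitudes should agree to within discretization error (the phase carries an
# extra factor from the grid starting at t = -1, so compare absolute values).
print(np.max(np.abs(np.abs(spectrum) - analytic)))
```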
@@ -0,0 +1 @@
+ from . import mwHP
@@ -0,0 +1,136 @@
+ ### BEGIN Dependencies ###
+ import numpy as np
+ import sys
+ from barsukov.time import *
+ ### END Dependencies
+
+
+ ### BEGIN Helper functions
+
+ def log_in_eq(eq_obj, msg, log='default'):  # FINISHED 2024/10/26
+     decorated_msg = str(eq_obj.msg_deco) + ' ' + msg
+     if eq_obj.logger is None:
+         if log == 'no': return
+         else: print(time_stamp() + ' ' + decorated_msg)
+     else:
+         eq_obj.logger.log(decorated_msg, log)
+
+
+ def initialize_gpib(eq_obj):
+     # Initializes a visa.open_resource(). Returns rm.open_resource(). Exits if error.
+     if eq_obj.rm is None:  # eq_obj has no ResourceManager
+         if eq_obj.script is None:  # If there is no Script
+             eq_obj.log('Visa ResourceManager and Script have not been passed to me. I will attempt to initialize visa myself.', log='important')
+             try:
+                 import pyvisa as visa
+                 eq_obj.rm = visa.ResourceManager()
+                 eq_obj.log('I just set my self.rm = visa.ResourceManager.', log='screen')
+             except:
+                 eq_obj.log('I failed to set my self.rm = visa.ResourceManager. I am sys-exiting.', log='important')
+                 sys.exit()
+         else:  # If there is a Script
+             if eq_obj.script.rm is None:  # If Script has no ResourceManager
+                 eq_obj.log('I see Script but it does not have rm. I am asking Script to initialize visa.ResourceManager and pass it to me.', log='important')
+                 try:
+                     eq_obj.rm = eq_obj.script.init_rm()  # Script will try to init ResourceManager
+                     #print('eq_obj.rm initialized as ', eq_obj.rm)
+                 except:
+                     eq_obj.log('Error while Script was initializing visa.ResourceManager. I am sys-exiting.', log='important')
+                     sys.exit()
+             else:
+                 eq_obj.log('Script has visa.ResourceManager. I am grabbing it.', log='screen')
+                 eq_obj.rm = eq_obj.script.rm
+     if eq_obj.rm is None:  # Just to double check if rm is in fact there.
+         eq_obj.log('My last check showed that my self.rm is still None. I am sys-exiting.', log='important')
+         sys.exit()
+     # Now, we assume there is a resource manager
+     if (eq_obj.gpib is None) or (eq_obj.gpib_card is None):
+         eq_obj.log('GPIB card number or GPIB address is not set.', log='important')
+         sys.exit()
+     try:
+         #print(eq_obj.rm)
+         eq_obj.log('I am trying to rm.open_resource().', log='screen')
+         y = eq_obj.rm.open_resource(f'GPIB{eq_obj.gpib_card}::{eq_obj.gpib}')
+         #print('y=',y)
+         return y
+     except:
+         eq_obj.log(f'I could not initialize rm.open_resource() for GPIB {eq_obj.gpib_card}::{eq_obj.gpib}. Also check visa_rm, just in case.', log='important')
+         sys.exit()
+
+
+ def eq_disconnect(eq_obj):
+     try:
+         eq_obj.rm.close()
+         eq_obj.rm.visalib._registry.clear()
+         eq_obj.log(f'Successfully disconnected GPIB resource for {eq_obj.gpib_card}::{eq_obj.gpib}.', log='screen')
+     except:
+         eq_obj.log(f'Failed to disconnect GPIB resource for {eq_obj.gpib_card}::{eq_obj.gpib}.', log='screen')
+
+
+ def eq_reconnect(eq_obj):
+     try:
+         import pyvisa as visa
+         eq_obj.rm = visa.ResourceManager()
+         eq_obj.eq = initialize_gpib(eq_obj)
+         eq_obj.log(f'Successfully reconnected GPIB resource for {eq_obj.gpib_card}::{eq_obj.gpib}.', log='screen')
+         eq_obj.log(f'Initialized: {eq_obj.identify()}', log='important')
+     except:
+         eq_obj.log(f'Failed to reconnect GPIB resource for {eq_obj.gpib_card}::{eq_obj.gpib}.', log='important')
+
+ ### END Helper functions
+
+
+
+ ### BEGIN Functions that are likely unnecessary
+
+ def query_float(eq_obj, cmd):
+     ### Returns float or np.nan
+     try:
+         q = eq_obj.eq.query(cmd)
+         q = float(q)  # Will it work with all equipment?
+         return q
+     except:
+         eq_obj.log(f'Error while querying: \"{cmd}\".', log='important')
+         return np.nan
+
+ def write_float(eq_obj, cmd, value, digits, limits):
+     value_differs = ''
+     try:
+         value = float(value)
+         value_round = round(value, digits)
+         if value_round < limits[0]:
+             value_round = limits[0]
+         if value_round > limits[1]:
+             value_round = limits[1]
+         if value_round != value:
+             value_differs = f' But requested {value}.'
+         cmd = ''.join([str(value_round) if el is None else el for el in cmd])
+         eq_obj.eq.write(cmd)
+         write_error = False
+         return write_error, value_differs
+     except:
+         write_error = True
+         eq_obj.log(f'Error while writing: \"{cmd}\".', log='important')
+         return write_error, value_differs
+
+ ### END Functions that are likely unnecessary
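
These helpers are written against a duck-typed equipment object rather than a shared base class: they read and write eq_obj.msg_deco, .logger, .script, .rm, .eq, .gpib, and .gpib_card, and call eq_obj.log() and eq_obj.identify(). The stub below is a hypothetical illustration of that interface, not part of the package; the actual mwHP class presumably follows the same pattern. The helpers' module path and the GPIB address used here are placeholders.

```python
# Hypothetical stub showing the attributes the GPIB helpers above expect.
# The helpers' module path is not visible in this diff, so the import is left as a placeholder:
# from barsukov.exp.<helpers_module> import log_in_eq, initialize_gpib, eq_disconnect

class FakeEquipment:
    """Bare-bones equipment object exposing the duck-typed interface used by the helpers."""

    def __init__(self, gpib=19, gpib_card=0, script=None, logger=None):
        self.msg_deco = '[FakeEquipment]'   # prefix that log_in_eq() puts in front of every message
        self.logger = logger                # Logger instance, or None to fall back to print()
        self.script = script                # Script instance that may already own a visa.ResourceManager
        self.rm = None                      # set by initialize_gpib()
        self.eq = None                      # pyvisa resource returned by initialize_gpib()
        self.gpib = gpib                    # GPIB address of the instrument
        self.gpib_card = gpib_card          # GPIB interface (card) number

    def log(self, msg, log='default'):
        # The real classes delegate to log_in_eq(self, msg, log); print keeps this stub self-contained.
        print(self.msg_deco, msg)

    def identify(self):
        return self.eq.query('*IDN?')       # standard SCPI identification query

# Typical wiring (requires pyvisa and a connected instrument):
# dev = FakeEquipment(gpib=19, gpib_card=0)
# dev.eq = initialize_gpib(dev)            # opens f'GPIB{dev.gpib_card}::{dev.gpib}'
```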