cubevis 0.5.14.tar.gz → 0.5.15.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
This version of cubevis has been flagged as a potentially problematic release.
- {cubevis-0.5.14 → cubevis-0.5.15}/PKG-INFO +1 -1
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/private/apps/__init__.py +5 -1
- {cubevis-0.5.14 → cubevis-0.5.15}/pyproject.toml +1 -1
- cubevis-0.5.14/cubevis/data/measurement_set/__init__.py +0 -7
- cubevis-0.5.14/cubevis/data/measurement_set/_ms_data.py +0 -178
- cubevis-0.5.14/cubevis/data/measurement_set/processing_set/__init__.py +0 -30
- cubevis-0.5.14/cubevis/data/measurement_set/processing_set/_ps_concat.py +0 -98
- cubevis-0.5.14/cubevis/data/measurement_set/processing_set/_ps_coords.py +0 -78
- cubevis-0.5.14/cubevis/data/measurement_set/processing_set/_ps_data.py +0 -213
- cubevis-0.5.14/cubevis/data/measurement_set/processing_set/_ps_io.py +0 -55
- cubevis-0.5.14/cubevis/data/measurement_set/processing_set/_ps_raster_data.py +0 -154
- cubevis-0.5.14/cubevis/data/measurement_set/processing_set/_ps_select.py +0 -91
- cubevis-0.5.14/cubevis/data/measurement_set/processing_set/_ps_stats.py +0 -218
- cubevis-0.5.14/cubevis/data/measurement_set/processing_set/_xds_data.py +0 -149
- cubevis-0.5.14/cubevis/plot/__init__.py +0 -1
- cubevis-0.5.14/cubevis/plot/ms_plot/__init__.py +0 -29
- cubevis-0.5.14/cubevis/plot/ms_plot/_ms_plot.py +0 -242
- cubevis-0.5.14/cubevis/plot/ms_plot/_ms_plot_constants.py +0 -22
- cubevis-0.5.14/cubevis/plot/ms_plot/_ms_plot_selectors.py +0 -348
- cubevis-0.5.14/cubevis/plot/ms_plot/_raster_plot.py +0 -292
- cubevis-0.5.14/cubevis/plot/ms_plot/_raster_plot_inputs.py +0 -116
- cubevis-0.5.14/cubevis/plot/ms_plot/_xds_plot_axes.py +0 -110
- cubevis-0.5.14/cubevis/private/apps/_ms_raster.py +0 -815
- {cubevis-0.5.14 → cubevis-0.5.15}/LICENSE +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/LICENSE.rst +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/__icons__/20px/fast-backward.svg +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/__icons__/20px/fast-forward.svg +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/__icons__/20px/step-backward.svg +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/__icons__/20px/step-forward.svg +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/__icons__/add-chan.png +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/__icons__/add-chan.svg +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/__icons__/add-cube.png +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/__icons__/add-cube.svg +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/__icons__/drag.png +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/__icons__/drag.svg +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/__icons__/mask-selected.png +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/__icons__/mask.png +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/__icons__/mask.svg +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/__icons__/new-layer-sm-selected.png +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/__icons__/new-layer-sm-selected.svg +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/__icons__/new-layer-sm.png +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/__icons__/new-layer-sm.svg +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/__icons__/reset.png +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/__icons__/reset.svg +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/__icons__/sub-chan.png +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/__icons__/sub-chan.svg +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/__icons__/sub-cube.png +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/__icons__/sub-cube.svg +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/__icons__/zoom-to-fit.png +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/__icons__/zoom-to-fit.svg +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/__init__.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/__js__/bokeh-3.6.1.min.js +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/__js__/bokeh-tables-3.6.1.min.js +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/__js__/bokeh-widgets-3.6.1.min.js +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/__js__/casalib.min.js +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/__js__/cubevisjs.min.js +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/bokeh/__init__.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/bokeh/annotations/__init__.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/bokeh/annotations/_ev_poly_annotation.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/bokeh/components/__init__.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/bokeh/format/__init__.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/bokeh/format/_time_ticks.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/bokeh/format/_wcs_ticks.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/bokeh/models/__init__.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/bokeh/models/_edit_span.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/bokeh/models/_ev_text_input.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/bokeh/models/_tip.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/bokeh/models/_tip_button.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/bokeh/sources/__init__.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/bokeh/sources/_data_pipe.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/bokeh/sources/_image_data_source.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/bokeh/sources/_image_pipe.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/bokeh/sources/_spectra_data_source.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/bokeh/sources/_updatable_data_source.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/bokeh/state/__init__.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/bokeh/state/_initialize.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/bokeh/state/_javascript.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/bokeh/state/_palette.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/bokeh/state/_session.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/bokeh/state/js/bokeh-2.4.1.min.js +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/bokeh/state/js/bokeh-gl-2.4.1.min.js +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/bokeh/state/js/bokeh-tables-2.4.1.min.js +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/bokeh/state/js/bokeh-widgets-2.4.1.min.js +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/bokeh/state/js/casaguijs-v0.0.4.0-b2.4.min.js +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/bokeh/state/js/casaguijs-v0.0.5.0-b2.4.min.js +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/bokeh/state/js/casaguijs-v0.0.6.0-b2.4.min.js +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/bokeh/state/js/casalib-v0.0.1.min.js +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/bokeh/tools/__init__.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/bokeh/tools/_cbreset_tool.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/bokeh/tools/_drag_tool.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/bokeh/utils/__init__.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/bokeh/utils/_axes_labels.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/bokeh/utils/_svg_icon.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/data/__init__.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/data/casaimage/__init__.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/private/_gclean.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/private/apps/_createmask.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/private/apps/_createregion.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/private/apps/_interactiveclean.mustache +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/private/apps/_interactiveclean.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/private/apps/_interactiveclean_wrappers.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/private/apps/_plotants.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/private/apps/_plotbandpass.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/private/casashell/createmask.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/private/casashell/iclean.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/private/casatasks/__init__.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/private/casatasks/createmask.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/private/casatasks/createregion.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/private/casatasks/iclean.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/readme.rst +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/remote/__init__.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/remote/_gclean.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/remote/_local.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/remote/_remote_kernel.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/toolbox/__init__.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/toolbox/_app_context.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/toolbox/_cube.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/toolbox/_region_list.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/utils/_ResourceManager.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/utils/__init__.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/utils/_contextmgrchain.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/utils/_conversion.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/utils/_copydoc.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/utils/_docenum.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/utils/_import_protected_module.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/utils/_logging.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/utils/_pkgs.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/utils/_regions.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/utils/_static.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/cubevis/utils/_tiles.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/readme.rst +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/tests/manual/alma-many-chan/alma-many-chan.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/tests/manual/basic-websockets-demo/client.html +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/tests/manual/basic-websockets-demo/client.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/tests/manual/basic-websockets-demo/server.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/tests/manual/createmask-demo/run-createmask.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/tests/manual/createregion-demo/run-createregion.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/tests/manual/cubemask-demo/image-slider-spectra-done-stats.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/tests/manual/cubemask-demo/image-slider-spectra-done.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/tests/manual/cubemask-demo/image-slider-spectra.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/tests/manual/cubemask-demo/image-slider.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/tests/manual/cubemask-demo/image.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/tests/manual/iclean-demo/m100_interactive.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/tests/manual/iclean-demo/mask0-iclean.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/tests/manual/iclean-demo/run-gclean.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/tests/manual/iclean-demo/run-iclean-obj.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/tests/manual/iclean-demo/run-iclean.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/tests/manual/iclean-demo/vla-sim-jet-iclean.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/tests/manual/iclean-first-look/run-fl-cont.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/tests/manual/iclean-first-look/run-fl-line.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/tests/manual/iclean-outlier/run-iclean.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/tests/manual/iclean-outlier/test_outlier.txt +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/tests/manual/iclean-remote/iclean_remote_webserver.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/tests/manual/large-cube/run-largecube.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/tests/manual/svg-test.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/tests/manual/updatable-data-source/direct-plot.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/tests/manual/updatable-data-source/simple-update.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/tests/manual/updatable-data-source/updated-plot.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/tests/manual/uranus-demo/uranus-iclean.py +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/tests/manual/websocket-reconnect/client.html +0 -0
- {cubevis-0.5.14 → cubevis-0.5.15}/tests/manual/websocket-reconnect/server.py +0 -0
{cubevis-0.5.14 → cubevis-0.5.15}/cubevis/private/apps/__init__.py
@@ -32,6 +32,11 @@ from cubevis.utils import copydoc, ImportProtectedModule
 from cubevis.bokeh.state import initialize_session
 initialize_session()

+###
+### This import roundabout was introduced when astroviper and casa6 apps were in the same
+### package to avoid forcing the user to install BOTH astroviper and casa6 even if they
+### were only going to use one of the applications. Now it could be removed...
+###
 sys.modules[__name__].__class__ = ImportProtectedModule( __name__, { 'plotants': '._plotants',
                                                                      'plotbandpass': '._plotbandpass',
                                                                      'CreateMask': '._createmask',
@@ -40,5 +45,4 @@ sys.modules[__name__].__class__ = ImportProtectedModule( __name__, { 'plotants':
                                                                      'iclean': '..casatasks.iclean',
                                                                      'createmask': '..casatasks.createmask',
                                                                      'createregion': '..casatasks.createregion',
-                                                                     'MsRaster': '._ms_raster',
                                                                      } )
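The table above is consumed by ImportProtectedModule, whose internals are not part of this diff; the change simply drops the 'MsRaster' entry along with the removed _ms_raster app. As a rough, generic sketch only, not cubevis's implementation, the same deferred-import idea can be written with a PEP 562 module-level __getattr__ in a package __init__.py; the two mapping entries below are copied from the diff, while the attribute-resolution step is an assumption:

    # Rough, generic sketch of a lazy-import table for a package __init__.py,
    # using PEP 562 module-level __getattr__. This is NOT cubevis's
    # ImportProtectedModule; resolving the attribute from the submodule of the
    # same name is an assumption made for illustration.
    import importlib

    _LAZY = {
        'plotants': '._plotants',          # entries copied from the diff above
        'plotbandpass': '._plotbandpass',
    }

    def __getattr__(name):
        if name in _LAZY:
            # Import the submodule on first access, then return the attribute.
            module = importlib.import_module(_LAZY[name], __package__)
            return getattr(module, name)
        raise AttributeError(f"module {__name__!r} has no attribute {name!r}")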
cubevis-0.5.14/cubevis/data/measurement_set/_ms_data.py (removed in 0.5.15)
@@ -1,178 +0,0 @@
-'''
-Class for accessing and selecting MeasurementSet data using a data backend.
-'''
-
-from cubevis.data.measurement_set.processing_set._ps_data import PsData
-
-class MsData:
-    '''
-    Access and select MeasurementSet data.
-    Current backend implementation is PsData using xradio Processing Set.
-    '''
-
-    def __init__(self, ms_path, logger):
-        self._ms_path = ms_path
-        self._logger = logger
-        self._data = None
-        self._data_initialized = False
-        self._init_data(ms_path)
-
-    def is_valid(self):
-        ''' Returns whether MS path has been set so data can be accessed. '''
-        return self._data_initialized
-
-    def is_ms_path(self, path):
-        ''' Check if input path matches input ms path or zarr path '''
-        return path == self.get_path() or path == self._ms_path
-
-    def get_path(self):
-        ''' Returns path of MS/zarr file or None if not set. '''
-        if self._data_initialized:
-            return self._data.get_path() # path to zarr file
-        if self._ms_path:
-            return self._ms_path # path to ms v2
-        return None
-
-    def summary(self, data_group='base', columns=None):
-        ''' Print summary of Processing Set data.
-            columns (None, str, list): type of metadata to list.
-                None: Print all summary columns in ProcessingSet.
-                'by_msv4': Print formatted summary metadata by MSv4.
-                str, list: Print a subset of summary columns in ProcessingSet.
-                    Options: 'name', 'intents', 'shape', 'polarization', 'scan_number', 'spw_name',
-                    'field_name', 'source_name', 'field_coords', 'start_frequency', 'end_frequency'
-        '''
-        # ProcessingSet function
-        if self._data_initialized:
-            self._data.summary(data_group, columns)
-        else:
-            self._log_no_ms()
-
-    def get_ps_summary(self):
-        ''' Return Pandas DataFrame summary of ProcessingSet '''
-        if self._data_initialized:
-            return self._data.get_summary()
-        self._log_no_ms()
-        return None
-
-    def data_groups(self):
-        ''' Returns set of data group names in Processing Set data. '''
-        # ProcessingSet function
-        if self._data_initialized:
-            return self._data.get_data_groups()
-        self._log_no_ms()
-        return None
-
-    def get_antennas(self, plot_positions=False, label_antennas=False):
-        ''' Returns list of antenna names in data.
-            plot_positions (bool): show plot of antenna positions.
-            label_antennas (bool): label positions with antenna names.
-        '''
-        # Antenna positions plot is ProcessingSet function
-        if self._data_initialized:
-            return self._data.get_antennas(plot_positions, label_antennas)
-        self._log_no_ms()
-        return None
-
-    def plot_phase_centers(self, data_group='base', label_all_fields=False):
-        ''' Plot the phase center locations of all fields in the Processing Set (original or selected) and label central field.
-            label_all_fields (bool); label all fields on the plot
-            data_group (str); data group to use for processing.
-        '''
-        # ProcessingSet function
-        if self._data_initialized:
-            self._data.plot_phase_centers(label_all_fields, data_group)
-        else:
-            self._log_no_ms()
-
-    def get_num_ms(self):
-        ''' Returns number of MeasurementSets in data. '''
-        if self._data_initialized:
-            return self._data.get_ps_len()
-        self._log_no_ms()
-        return None
-
-    def get_max_data_dims(self):
-        ''' Returns maximum length of dimensions in data. '''
-        if self._data_initialized:
-            return self._data.get_max_dims()
-        self._log_no_ms()
-        return None
-
-    def get_data_dimensions(self):
-        ''' Returns names of data dimensions. '''
-        if self._data_initialized:
-            return self._data.get_data_dimensions()
-        self._log_no_ms()
-        return None
-
-    def get_dimension_values(self, dim):
-        ''' Return values for dimension in current data.
-            dim (str): dimension name
-        '''
-        if self._data_initialized:
-            return self._data.get_dimension_values(dim)
-        self._log_no_ms()
-        return None
-
-    def get_dimension_attrs(self, dim):
-        ''' Return dict of data attributes for dimension.
-            dim (str): dimension name
-        '''
-        if self._data_initialized:
-            return self._data.get_dimension_attrs(dim)
-        self._log_no_ms()
-        return None
-
-    def get_first_spw(self):
-        ''' Returns name of first spw by id. '''
-        if self._data_initialized:
-            return self._data.get_first_spw()
-        self._log_no_ms()
-        return None
-
-    def select_data(self, selection):
-        ''' Apply selection in data.
-            selection (dict): fields and values to select
-        '''
-        if self._data_initialized:
-            self._data.select_data(selection)
-        else:
-            self._log_no_ms()
-
-    def clear_selection(self):
-        ''' Clears selection dict and selected data. '''
-        if self._data_initialized:
-            self._data.clear_selection()
-
-    def get_vis_stats(self, selection, vis_axis):
-        ''' Returns statistics (min, max, mean, std) for data selected by selection.
-            selection (dict): fields and values to select
-        '''
-        if self._data_initialized:
-            return self._data.get_vis_stats(selection, vis_axis)
-        self._log_no_ms()
-        return None
-
-    def get_correlated_data(self, data_group):
-        ''' Returns name of correlated data variable in Processing Set data group '''
-        if self._data_initialized:
-            return self._data.get_correlated_data(data_group)
-        self._log_no_ms()
-        return None
-
-    def get_raster_data(self, plot_inputs):
-        ''' Returns xarray Dataset after applying plot inputs and raster plane selection '''
-        if self._data_initialized:
-            return self._data.get_raster_data(plot_inputs)
-        self._log_no_ms()
-        return None
-
-    def _log_no_ms(self):
-        self._logger.info("No MS path set, cannot access data")
-
-    def _init_data(self, ms_path):
-        ''' Data backend for MeasurementSet; currently xradio ProcessingSet '''
-        if ms_path:
-            self._data = PsData(ms_path, self._logger)
-            self._data_initialized = True
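MsData, removed here, is a thin facade that forwards every call to a PsData backend and logs a message when no MS path was given. A minimal, hypothetical usage sketch of the removed class is shown below; the import location is assumed from the removed cubevis.data.measurement_set __init__, the MS path is a placeholder, and the selection key is an assumption:

    # Hypothetical usage of the removed MsData facade (cubevis 0.5.14).
    # "my_observation.ms" is a placeholder path; the import location and the
    # 'spw_name' selection key are assumptions made for illustration.
    import logging

    from cubevis.data.measurement_set import MsData  # removed in 0.5.15

    logger = logging.getLogger("cubevis")
    ms_data = MsData("my_observation.ms", logger)

    if ms_data.is_valid():
        ms_data.summary()                                            # print ProcessingSet metadata
        ms_data.select_data({'spw_name': ms_data.get_first_spw()})   # assumed selection key
        print(ms_data.get_data_dimensions())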
cubevis-0.5.14/cubevis/data/measurement_set/processing_set/__init__.py (removed in 0.5.15)
@@ -1,30 +0,0 @@
-'''
-Module to access MeasurementSet data using xradio ProcessingSetXdt and Xarray objects.
-'''
-
-from ._ps_concat import (
-    concat_ps_xdt,
-)
-
-from ._ps_coords import (
-    set_coordinates,
-    set_datetime_coordinate,
-    set_index_coordinates,
-)
-
-from ._ps_data import (
-    PsData,
-)
-
-from ._ps_io import (
-    get_processing_set,
-)
-
-from ._ps_raster_data import (
-    raster_data,
-)
-
-from ._xds_data import (
-    get_axis_data,
-    get_correlated_data,
-)
cubevis-0.5.14/cubevis/data/measurement_set/processing_set/_ps_concat.py (removed in 0.5.15)
@@ -1,98 +0,0 @@
-'''
-Concat ProcessingSet xarray DataSets into single xds by time dimension (in order)
-'''
-
-import xarray as xr
-
-from cubevis.data.measurement_set.processing_set._ps_coords import set_coordinates
-
-def concat_ps_xdt(ps_xdt, logger):
-    ''' Concatenate xarray Datasets in ProcessingSet by time dimension.
-        Return concat xds. '''
-    if len(ps_xdt) == 0:
-        raise RuntimeError("Processing set empty after selection.")
-
-    ps = {}
-    for name, ms_xdt in ps_xdt.items():
-        # Set units to str not list and set baseline coordinate. Returns xarray.Dataset
-        ps[name] = set_coordinates(ms_xdt)
-
-    if len(ps) == 1:
-        logger.debug("Processing set contains one dataset, nothing to concat.")
-        return list(ps.values())[0]
-
-    # Split xds by time gaps
-    sorted_times = _get_sorted_times(ps)
-    xds_list = []
-    time_list = []
-    for xds in ps.values():
-        xdss, times = _split_xds_by_time_gap(xds, sorted_times)
-        xds_list.extend(xdss)
-        time_list.extend(times)
-
-    if len(xds_list) > len(ps_xdt):
-        logger.debug(f"Split {len(ps_xdt)} datasets by time gap into {len(xds_list)} datasets.")
-
-    # Create sorted xds list using sorted times
-    time_list.sort()
-    sorted_xds = [None] * len(time_list)
-
-    for xds in xds_list:
-        try:
-            first_xds_time = xds.time.values[0]
-        except IndexError: # only one value
-            first_xds_time = xds.time.values
-
-        for idx, value in enumerate(time_list):
-            if value == first_xds_time and sorted_xds[idx] is None:
-                if "baseline" in xds.coords:
-                    # Cannot concat with non-dim string coord
-                    xds = xds.drop("baseline_antenna1_name")
-                    xds = xds.drop("baseline_antenna2_name")
-                # Convert MeasurementSetXds to xr Dataset for concat
-                # (TypeError: MeasurementSetXds.__init__() got an unexpected keyword argument 'coords')
-                sorted_xds[idx] = xr.Dataset(xds.data_vars, xds.coords, xds.attrs)
-                break
-    return xr.concat(sorted_xds, dim='time')
-
-def _get_sorted_times(ps):
-    values = []
-    for key in ps:
-        time_values = ps[key].time.values
-        if time_values.size > 1:
-            values.extend(time_values.tolist())
-        else:
-            values.append(time_values)
-    return sorted(values)
-
-def _split_xds_by_time_gap(xds, sorted_times):
-    ''' Split xds where there is a gap in sorted times.
-        Return list of xds and first time in each one. '''
-    times = xds.time.values.ravel()
-    xds_list = []
-    first_times = [times[0]]
-
-    if len(times) == 1:
-        xds_list.append(xds)
-    else:
-        sorted_time_idx = sorted_times.index(times[0])
-        idx = xds_start_idx = 0 # start for iselection
-
-        for idx, time in enumerate(times):
-            if time == sorted_times[sorted_time_idx]:
-                # No time gap, go to next time
-                sorted_time_idx += 1
-                continue
-
-            # Found time gap, select xds
-            xds_list.append(xds.isel(time=slice(xds_start_idx, idx)))
-
-            # start next xds with new first time, find idx in sorted times (skip ahead)
-            xds_start_idx = idx
-            first_times.append(times[idx])
-            sorted_time_idx = sorted_times.index(time) + 1 # for next time
-
-        # add last time range xds
-        xds_list.append(xds.isel(time=slice(xds_start_idx, idx + 1)))
-
-    return xds_list, first_times
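The removed concat_ps_xdt splits each MSv4 dataset wherever the globally sorted time axis has a gap, then concatenates the pieces back along time. The xarray operations it leans on can be seen in isolation in the standalone toy example below; the data is synthetic and this is not cubevis code:

    # Standalone toy example of the xarray operations the removed concat code
    # relies on: slicing a Dataset along 'time' and concatenating the pieces
    # back in time order. Synthetic data, not cubevis's ProcessingSet handling.
    import numpy as np
    import xarray as xr

    times = np.array([0.0, 1.0, 2.0, 10.0, 11.0])   # a "gap" between 2.0 and 10.0
    ds = xr.Dataset(
        {"vis": ("time", np.arange(5, dtype=float))},
        coords={"time": times},
    )

    # Split at the gap, mirroring _split_xds_by_time_gap's isel(time=slice(...)) calls.
    before_gap = ds.isel(time=slice(0, 3))
    after_gap = ds.isel(time=slice(3, 5))

    # Re-assemble the pieces in time order, as concat_ps_xdt does for the whole set.
    combined = xr.concat([before_gap, after_gap], dim="time")
    assert (combined.time.values == times).all()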
cubevis-0.5.14/cubevis/data/measurement_set/processing_set/_ps_coords.py (removed in 0.5.15)
@@ -1,78 +0,0 @@
-'''
-Modify/add xarray Dataset coordinates for plotting.
-'''
-
-import numpy as np
-from pandas import to_datetime
-
-def set_coordinates(ms_xdt):
-    ''' Convert coordinate units and add baseline coordinate for plotting.
-        Returns xarray.Dataset
-    '''
-    _set_coordinate_unit(ms_xdt)
-    _set_frequency_unit(ms_xdt)
-    return _add_baseline_coordinate(ms_xdt)
-
-def set_datetime_coordinate(ms_xds):
-    ''' Convert float time to datetime for plotting. '''
-    time_attrs = ms_xds.time.attrs
-    try:
-        ms_xds.coords['time'] = to_datetime(ms_xds.time, unit=time_attrs['units'], origin=time_attrs['format'])
-    except TypeError:
-        ms_xds.coords['time'] = to_datetime(ms_xds.time, unit=time_attrs['units'][0], origin=time_attrs['format'])
-    ms_xds.time.attrs = time_attrs
-
-def _set_coordinate_unit(ms_xdt):
-    ''' Set coordinate units attribute as string not list for plotting. '''
-    for coord in ms_xdt.coords:
-        # Plots need unit to be string not list
-        if 'units' in ms_xdt.coords[coord].attrs:
-            units = ms_xdt.coords[coord].units
-            if isinstance(units, list) and len(units) == 1:
-                ms_xdt.coords[coord].attrs['units'] = units[0]
-
-def set_index_coordinates(ms_xds, coordinates):
-    ''' Return ms_xds with new coordinate for string values (name) then replace coordinate with numerical index. '''
-    for coordinate in coordinates:
-        if coordinate == "polarization":
-            ms_xds = ms_xds.assign_coords({"polarization_name": (ms_xds.polarization.dims, ms_xds.polarization.values)})
-            ms_xds["polarization"] = np.array(range(ms_xds.polarization.size))
-        elif coordinate == "baseline":
-            ms_xds = ms_xds.assign_coords({"baseline_name": (ms_xds.baseline.dims, ms_xds.baseline.values)})
-            ms_xds["baseline"] = np.array(range(ms_xds.baseline.size))
-        elif coordinate == "antenna_name":
-            ms_xds = ms_xds.assign_coords({"antenna": (ms_xds.antenna_name.dims, ms_xds.antenna_name.values)})
-            ms_xds["antenna_name"] = np.array(range(ms_xds.antenna_name.size))
-    return ms_xds
-
-def _set_frequency_unit(ms_xdt):
-    ''' Convert frequency to GHz. Note attrs (channel_width, reference_frequency) still have Hz units in dict '''
-    if ms_xdt.frequency.attrs['units'] == "Hz":
-        frequency_xda = ms_xdt.frequency / 1e9
-        frequency_attrs = ms_xdt.frequency.attrs
-        frequency_attrs['units'] = "GHz"
-        frequency_xda = frequency_xda.assign_attrs(frequency_attrs)
-        ms_xdt.coords['frequency'] = frequency_xda
-
-def _add_baseline_coordinate(ms_xdt):
-    '''
-    Replace "baseline_id" (int) with "baseline" (string) coordinate "ant1 & ant2".
-    Baseline ids are not consistent across ms_xdts.
-    '''
-    # Cannot assign coords to DataTree.
-    baseline_ms_xdt = ms_xdt.to_dataset() # mutable Dataset
-
-    if 'baseline_id' not in baseline_ms_xdt.coords:
-        return baseline_ms_xdt
-
-    ant1_names = ms_xdt.baseline_antenna1_name.values
-    ant2_names = ms_xdt.baseline_antenna2_name.values
-    if ant1_names.size == 1:
-        baseline_names = f"{ant1_names.item()} & {ant2_names.item()}"
-        baseline_ms_xdt = baseline_ms_xdt.assign_coords({"baseline": np.array(baseline_names)})
-    else:
-        baseline_names = [f"{ant1_names[idx]} & {ant2_names[idx]}" for idx in range(len(ant1_names))]
-        baseline_ms_xdt = baseline_ms_xdt.assign_coords({"baseline": ("baseline_id", np.array(baseline_names))})
-        baseline_ms_xdt = baseline_ms_xdt.swap_dims({"baseline_id": "baseline"})
-        baseline_ms_xdt = baseline_ms_xdt.drop("baseline_id")
-    return baseline_ms_xdt
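The removed _add_baseline_coordinate swaps the integer baseline_id dimension for a human-readable "ant1 & ant2" string coordinate. On synthetic data, the assign_coords/swap_dims pattern it uses looks like the sketch below; this is an illustration only (it calls drop_vars where the removed code uses the older drop, and the antenna names are placeholders):

    # Toy illustration (synthetic data) of the assign_coords/swap_dims pattern
    # used by the removed _add_baseline_coordinate: replace an integer
    # 'baseline_id' dimension with a 'baseline' string coordinate.
    import numpy as np
    import xarray as xr

    ds = xr.Dataset(
        {"vis": (("baseline_id",), np.zeros(3))},
        coords={
            "baseline_id": [0, 1, 2],
            "baseline_antenna1_name": ("baseline_id", ["DA41", "DA41", "DA42"]),  # placeholder names
            "baseline_antenna2_name": ("baseline_id", ["DA42", "DA43", "DA43"]),
        },
    )

    names = [
        f"{a1} & {a2}"
        for a1, a2 in zip(ds.baseline_antenna1_name.values, ds.baseline_antenna2_name.values)
    ]
    ds = ds.assign_coords({"baseline": ("baseline_id", np.array(names))})
    ds = ds.swap_dims({"baseline_id": "baseline"}).drop_vars("baseline_id")
    print(ds.baseline.values)   # ['DA41 & DA42' 'DA41 & DA43' 'DA42 & DA43']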
cubevis-0.5.14/cubevis/data/measurement_set/processing_set/_ps_data.py (removed in 0.5.15)
@@ -1,213 +0,0 @@
-'''
-MeasurementSet data backend using xradio Processing Set.
-'''
-
-import numpy as np
-import pandas as pd
-
-try:
-    from cubevis.data.measurement_set.processing_set._ps_io import get_processing_set
-    _HAVE_XRADIO = True
-    from cubevis.data.measurement_set.processing_set._ps_select import select_ps
-    from cubevis.data.measurement_set.processing_set._ps_stats import calculate_ps_stats
-    from cubevis.data.measurement_set.processing_set._ps_raster_data import raster_data
-    from cubevis.data.measurement_set.processing_set._xds_data import get_correlated_data
-except ImportError as e:
-    _HAVE_XRADIO = False
-
-
-class PsData:
-    '''
-    Class implementing data backend using xradio Processing Set for accessing and selecting MeasurementSet data.
-    '''
-
-    def __init__(self, ms, logger):
-        if not _HAVE_XRADIO:
-            raise RuntimeError("xradio package not available for reading MeasurementSet")
-
-        if not ms:
-            raise RuntimeError("MS path not available for reading MeasurementSet")
-
-        # Open processing set from zarr
-        # Converts msv2 if ms path is not zarr
-        self._ps_xdt, self._zarr_path = get_processing_set(ms, logger)
-
-        self._logger = logger
-        self._selection = {}
-        self._selected_ps_xdt = None # cumulative selection
-
-    def get_path(self):
-        ''' Return path to zarr file (input or converted from msv2) '''
-        return self._zarr_path
-
-    def summary(self, data_group='base', columns=None):
-        ''' Print full or selected summary of Processing Set metadata, optionally by ms '''
-        ps_summary = self._ps_xdt.xr_ps.summary(data_group=data_group)
-        pd.set_option("display.max_rows", len(self._ps_xdt))
-        pd.set_option("display.max_columns", len(ps_summary.columns))
-        pd.set_option("display.max_colwidth", None)
-
-        if columns is None:
-            print(ps_summary)
-        elif columns == "by_ms":
-            for row in ps_summary.itertuples(index=False):
-                print(f"MSv4 name: {row[0]}")
-                print(f"intent: {row[1]}")
-                shape = row[2]
-                print(f"shape: {shape[0]} times, {shape[1]} baselines, {shape[2]} channels, {shape[3]} polarizations")
-                print(f"polarization: {row[3]}")
-                scans = [str(scan) for scan in row[4]]
-                print(f"scan_name: {scans}")
-                print(f"spw_name: {row[5]}")
-                fields = [str(field) for field in row[6]]
-                print(f"field_name: {fields}")
-                sources = [str(source) for source in row[7]]
-                print(f"source_name: {sources}")
-                lines = [str(line) for line in row[8]]
-                print(f"line_name: {lines}")
-                field_coords = row[9]
-                print(f"field_coords: ({field_coords[0]}) {field_coords[1]} {field_coords[2]}")
-                print(f"frequency range: {row[10]:e} - {row[11]:e}")
-                print("-----")
-        else:
-            if isinstance(columns, str):
-                columns = [columns]
-            col_df = ps_summary[columns]
-            print(col_df)
-
-    def get_summary(self):
-        ''' Return summary of original ps '''
-        return self._ps_xdt.xr_ps.summary()
-
-    def get_data_groups(self):
-        ''' Returns set of data group names in Processing Set data. '''
-        data_groups = []
-        for ms_xdt_name in self._ps_xdt:
-            data_groups.extend(list(self._ps_xdt[ms_xdt_name].data_groups))
-        return set(data_groups)
-
-    def get_antennas(self, plot_positions=False, label_antennas=False):
-        ''' Returns list of antenna names in ProcessingSet antenna_xds.
-            plot_positions (bool): show plot of antenna positions.
-            label_antennas (bool): label positions with antenna names.
-        '''
-        if plot_positions:
-            self._ps_xdt.xr_ps.plot_antenna_positions(label_antennas)
-        return self._ps_xdt.xr_ps.get_combined_antenna_xds().antenna_name.values.tolist()
-
-    def plot_phase_centers(self, label_all_fields=False, data_group='base'):
-        ''' Plot the phase center locations of all fields in the Processing Set (original or selected) and label central field.
-            label_all_fields (bool); label all fields on the plot
-            data_group (str); data group to use for processing.
-        '''
-        self._ps_xdt.xr_ps.plot_phase_centers(label_all_fields, data_group)
-
-    def get_ps_len(self):
-        ''' Returns number of ms_xdt in selected ps_xdt (if selected) '''
-        return len(self._get_ps_xdt())
-
-    def get_max_dims(self):
-        ''' Returns maximum length of data dimensions in selected ps_xdt (if selected) '''
-        ps_xdt = self._get_ps_xdt()
-        return ps_xdt.xr_ps.get_max_dims()
-
-    def get_data_dimensions(self):
-        ''' Return the maximum dimensions in selected ps_xdt (if selected) '''
-        dims = list(self.get_max_dims().keys())
-        if 'uvw_label' in dims:
-            dims.remove('uvw_label') # not a VISIBILITY/SPECTRUM data dim
-        return dims
-
-    def get_dimension_values(self, dimension):
-        ''' Return sorted list of unique values for input dimension in ProcessingSet. '''
-        ps_xdt = self._get_ps_xdt()
-        dim_values = []
-        for ms_xdt in ps_xdt.values():
-            if dimension == 'baseline':
-                ant1_names = ms_xdt.baseline_antenna1_name.values
-                ant2_names = ms_xdt.baseline_antenna2_name.values
-                for baseline_id in ms_xdt.baseline_id:
-                    dim_values.append(f"{ant1_names[baseline_id]} & {ant2_names[baseline_id]}")
-            else:
-                try:
-                    dim_values.extend([value.item() for value in ms_xdt[dimension].values])
-                except TypeError:
-                    dim_values.append(ms_xdt[dimension].values.item())
-        return sorted(set(dim_values))
-
-    def get_dimension_attrs(self, dim):
-        ''' Return attributes dict for input dimension in ProcessingSet. '''
-        ps_xdt = self._get_ps_xdt()
-        return ps_xdt.get(0)[dim].attrs
-
-    def get_first_spw(self):
-        ''' Return first spw name by id '''
-        spw_id_names = {}
-        ps_xdt = self._get_ps_xdt()
-        for ms_xdt in ps_xdt.values():
-            freq_xds = ms_xdt.frequency
-            spw_id_names[freq_xds.spectral_window_id] = freq_xds.spectral_window_name
-
-        first_spw_id = min(spw_id_names)
-        first_spw_name = spw_id_names[first_spw_id]
-
-        summary = self.get_summary()
-        spw_df = summary[summary['spw_name'] == first_spw_name]
-        start_freq = spw_df.at[spw_df.index[0], 'start_frequency']
-        end_freq = spw_df.at[spw_df.index[0], 'end_frequency']
-        self._logger.info(f"Selecting first spw {first_spw_name} (id {first_spw_id}) with frequency range {start_freq:e} - {end_freq:e}")
-        return first_spw_name
-
-    def select_data(self, selection):
-        ''' Apply selection dict to ProcessingSet to create selected ps_xdt.
-            If previous selection done, apply to selected ps_xdt.
-            Add selection to previous selections. '''
-        ps_xdt = self._get_ps_xdt()
-        self._selected_ps_xdt = select_ps(ps_xdt, selection, self._logger)
-        if self._selection:
-            self._selection |= selection
-        else:
-            self._selection = selection
-
-    def clear_selection(self):
-        ''' Clear previous selections and use original ps_xdt '''
-        self._selection = None
-        self._selected_ps_xdt = None
-
-    def get_vis_stats(self, selection, vis_axis):
-        ''' Returns statistics (min, max, mean, std) for data selected by selection.
-            selection (dict): fields and values to select
-        '''
-        stats_ps_xdt = select_ps(self._ps_xdt, selection, self._logger)
-        data_group = selection['data_group'] if 'data_group' in selection else 'base'
-        return calculate_ps_stats(stats_ps_xdt, self._zarr_path, vis_axis, data_group, self._logger)
-
-    def get_correlated_data(self, data_group):
-        ''' Returns name of 'correlated_data' in Processing Set data_group '''
-        ps_xdt = self._get_ps_xdt()
-        for ms_xdt in ps_xdt.values():
-            if data_group in ms_xdt.attrs['data_groups']:
-                return get_correlated_data(ms_xdt.ds, data_group)
-        raise RuntimeError(f"No correlated data for data group {data_group}")
-
-    def get_raster_data(self, plot_inputs):
-        ''' Returns xarray Dataset after applying plot inputs and raster plane selection '''
-        return raster_data(self._get_ps_xdt(),
-                           plot_inputs,
-                           self._logger
-                           )
-
-    def _get_ps_xdt(self):
-        ''' Returns selected ps_xdt if selection has been done, else original ps_xdt '''
-        return self._selected_ps_xdt if self._selected_ps_xdt else self._ps_xdt
-
-    def _get_unique_values(self, df_col):
-        ''' Return unique values in pandas Dataframe column, for summary '''
-        values = df_col.to_numpy()
-        try:
-            # numeric arrays
-            return np.unique(np.concatenate(values))
-        except ValueError:
-            # string arrays
-            all_values = [row[0] for row in values]
-            return np.unique(np.concatenate(all_values))