xradio 0.0.23__py3-none-any.whl → 0.0.24__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- xradio/_utils/zarr/common.py +45 -0
- xradio/image/_util/_zarr/zarr_low_level.py +3 -7
- xradio/image/_util/zarr.py +3 -3
- xradio/vis/_processing_set.py +42 -23
- xradio/vis/load_processing_set.py +92 -92
- xradio/vis/read_processing_set.py +74 -32
- {xradio-0.0.23.dist-info → xradio-0.0.24.dist-info}/METADATA +1 -1
- {xradio-0.0.23.dist-info → xradio-0.0.24.dist-info}/RECORD +11 -11
- {xradio-0.0.23.dist-info → xradio-0.0.24.dist-info}/LICENSE.txt +0 -0
- {xradio-0.0.23.dist-info → xradio-0.0.24.dist-info}/WHEEL +0 -0
- {xradio-0.0.23.dist-info → xradio-0.0.24.dist-info}/top_level.txt +0 -0
xradio/_utils/zarr/common.py
CHANGED
@@ -3,6 +3,47 @@ import xarray as xr
 import zarr


+def _open_dataset(store, xds_isel=None, data_variables=None, load=False):
+    """
+
+    Parameters
+    ----------
+    store : _type_
+        _description_
+    xds_isel : _type_, optional
+        Example {'time':slice(0,10), 'frequency':slice(5,7)}, by default None
+    data_variables : _type_, optional
+        Example ['VISIBILITY','WEIGHT'], by default None
+    load : bool, optional
+        _description_, by default False
+
+    Returns
+    -------
+    _type_
+        _description_
+    """
+
+    import dask
+
+    xds = xr.open_zarr(store)
+
+    if xds_isel is not None:
+        xds = xds.isel(xds_isel)
+
+    if data_variables is not None:
+        xds_sub = xr.Dataset()
+        for dv in data_variables:
+            xds_sub[dv] = xds[dv]
+        xds_sub.attrs = xds.attrs
+        xds = xds_sub
+
+    if load:
+        with dask.config.set(scheduler="synchronous"):
+            xds = xds.load()
+    return xds
+
+
+# Code to depricate:
 def _get_attrs(zarr_obj):
     """
     get attributes of zarr obj (groups or arrays)
@@ -33,6 +74,7 @@ def _load_no_dask_zarr(zarr_name, slice_dict={}):
     coords = {}
     xds = xr.Dataset()
     for var_name, var in zarr_group.arrays():
+        print("Hallo 3", var_name, var.shape)
         var_attrs = _get_attrs(var)

         for dim in var_attrs[DIMENSION_KEY]:
@@ -54,6 +96,9 @@ def _load_no_dask_zarr(zarr_name, slice_dict={}):
        for dim in var_attrs[DIMENSION_KEY]:
            slicing_list.append(slice_dict_complete[dim])
        slicing_tuple = tuple(slicing_list)
+
+        print(var_attrs[DIMENSION_KEY])
+
        xds[var_name] = xr.DataArray(
            var[slicing_tuple], dims=var_attrs[DIMENSION_KEY]
        )
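For context, the new private `_open_dataset` helper centralizes how both the image and vis readers open a zarr store, optionally subset it, and optionally pull it into memory with the synchronous Dask scheduler. A minimal usage sketch (the store path below is hypothetical and `_open_dataset` is internal API, so this is illustration only):

    from xradio._utils.zarr.common import _open_dataset

    # Hypothetical zarr store written by xradio; the path is an assumption for illustration.
    store = "/tmp/example_ms_v4/MAIN"

    # Lazily open, take a slice along time/frequency, keep only two data variables,
    # then force everything into memory on the synchronous scheduler.
    xds = _open_dataset(
        store,
        xds_isel={"time": slice(0, 10), "frequency": slice(5, 7)},
        data_variables=["VISIBILITY", "WEIGHT"],
        load=True,
    )
    print(xds)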
xradio/image/_util/_zarr/zarr_low_level.py
CHANGED
@@ -70,7 +70,6 @@ image_data_variables_and_dims_single_precision = {
 }


-
 def pad_array_with_nans(input_array, output_shape, dtype):
     """
     Pad an integer array with NaN values to match the specified output shape.
@@ -277,8 +276,7 @@ def create_data_variable_meta_data_on_disk(
     return zarr_meta


-
-def write_chunk(img_xds,meta,parallel_dims_chunk_id,compressor,image_file):
+def write_chunk(img_xds, meta, parallel_dims_chunk_id, compressor, image_file):
     dims = meta["dims"]
     dtype = meta["dtype"]
     data_varaible_name = meta["name"]
@@ -304,8 +302,6 @@ def write_chunk(img_xds,meta,parallel_dims_chunk_id,compressor,image_file):

     write_binary_blob_to_disk(
         array,
-        file_path=os.path.join(
-            image_file, data_varaible_name, chunk_name
-        ),
+        file_path=os.path.join(image_file, data_varaible_name, chunk_name),
         compressor=compressor,
-    )
+    )
xradio/image/_util/zarr.py
CHANGED
@@ -3,7 +3,7 @@ from ._zarr.xds_from_zarr import _read_zarr
 import numpy as np
 import os
 import xarray as xr
-from ..._utils.zarr.common import
+from ..._utils.zarr.common import _open_dataset


 def _xds_to_zarr(xds: xr.Dataset, zarr_store: str):
@@ -25,11 +25,11 @@ def _xds_from_zarr(


 def _load_image_from_zarr_no_dask(zarr_file: str, selection: dict) -> xr.Dataset:
-    image_xds =
+    image_xds = _open_dataset(zarr_file, selection, load=True)
     for h in ["HISTORY", "_attrs_xds_history"]:
        history = os.sep.join([zarr_file, h])
        if os.path.isdir(history):
-            image_xds.attrs["history"] =
+            image_xds.attrs["history"] = _open_dataset(history, load=True)
            break
     _iter_dict(image_xds.attrs)
     return image_xds
xradio/vis/_processing_set.py
CHANGED
@@ -4,7 +4,8 @@ import pandas as pd
 class processing_set(dict):
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
-        self.meta = {
+        self.meta = {"summary": {}}
+
         # generate_meta(self)

     # def generate_meta(self):
@@ -12,18 +13,25 @@ class processing_set(dict):
     #     self.meta['max_dims'] = _get_ps_max_dims(self)

     def summary(self, data_group="base"):
-        if data_group in self.meta[
-            return self.meta[
+        if data_group in self.meta["summary"]:
+            return self.meta["summary"][data_group]
         else:
-            self.meta[
-            return self.meta[
-
+            self.meta["summary"][data_group] = self._summary(data_group)
+            return self.meta["summary"][data_group]
+
     def get_ps_max_dims(self):
-        if
-            return self.meta[
+        if "max_dims" in self.meta:
+            return self.meta["max_dims"]
+        else:
+            self.meta["max_dims"] = self._get_ps_max_dims()
+            return self.meta["max_dims"]
+
+    def get_ps_freq_axis(self):
+        if "freq_axis" in self.meta:
+            return self.meta["freq_axis"]
         else:
-            self.meta[
-            return self.meta[
+            self.meta["freq_axis"] = self._get_ps_freq_axis()
+            return self.meta["freq_axis"]

     def _summary(self, data_group="base"):
         summary_data = {
@@ -34,7 +42,7 @@ class processing_set(dict):
             "field_name": [],
             "start_frequency": [],
             "end_frequency": [],
-            "shape": []
+            "shape": [],
         }
         for key, value in self.items():
             summary_data["name"].append(key)
@@ -47,25 +55,36 @@ class processing_set(dict):
             if "spectrum" in value.attrs["data_groups"][data_group]:
                 data_name = value.attrs["data_groups"][data_group]["spectrum"]

-            summary_data["shape"].append(
-                value[data_name].shape
-            )
+            summary_data["shape"].append(value[data_name].shape)

             summary_data["field_id"].append(
-                value[data_name].attrs[
-                    "field_info"
-                ]["field_id"]
+                value[data_name].attrs["field_info"]["field_id"]
             )
             summary_data["field_name"].append(
-                value[data_name].attrs[
-                    "field_info"
-                ]["name"]
+                value[data_name].attrs["field_info"]["name"]
             )
             summary_data["start_frequency"].append(value["frequency"].values[0])
             summary_data["end_frequency"].append(value["frequency"].values[-1])
         summary_df = pd.DataFrame(summary_data)
         return summary_df
-
+
+    def _get_ps_freq_axis(self):
+        import xarray as xr
+
+        spw_ids = []
+        freq_axis_list = []
+        frame = self.get(0).frequency.attrs["frame"]
+        for ms_xds in self.values():
+            assert (
+                frame == ms_xds.frequency.attrs["frame"]
+            ), "Frequency reference frame not consistent in processing set."
+            if ms_xds.frequency.attrs["spw_id"] not in spw_ids:
+                spw_ids.append(ms_xds.frequency.attrs["spw_id"])
+                freq_axis_list.append(ms_xds.frequency)
+
+        freq_axis = xr.concat(freq_axis_list, dim="frequency").sortby("frequency")
+        return freq_axis
+
     def _get_ps_max_dims(self):
         max_dims = None
         for ms_xds in self.values():
@@ -79,6 +98,6 @@ class processing_set(dict):
                     else:
                         max_dims[dim_name] = size
         return max_dims
-
+
     def get(self, id):
-        return self[list(self.keys())[id]]
+        return self[list(self.keys())[id]]
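The processing_set changes add simple memoization: summary(), get_ps_max_dims(), and the new get_ps_freq_axis() compute their result once and cache it in self.meta. A minimal sketch of the new accessors (the store path is hypothetical):

    from xradio.vis.read_processing_set import read_processing_set

    ps = read_processing_set("/tmp/example.vis.zarr")  # hypothetical path

    df = ps.summary()             # pandas DataFrame, computed once and cached in ps.meta["summary"]
    dims = ps.get_ps_max_dims()   # per-dimension maximum sizes across all MS v4 datasets
    freq = ps.get_ps_freq_axis()  # frequency axes of unique spw_ids, concatenated and sorted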
xradio/vis/load_processing_set.py
CHANGED
@@ -3,107 +3,105 @@ import zarr
 import copy
 import os
 from ._processing_set import processing_set
-from
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+from typing import Dict, Union
+
+
+def load_processing_set(
+    ps_store: str,
+    sel_parms: dict,
+    data_variables: Union[list, None] = None,
+    load_sub_datasets: bool = True,
+)->processing_set:
+    """Loads a processing set into memory.
+
+    Parameters
+    ----------
+    ps_store : str
+        String of the path and name of the processing set. For example '/users/user_1/uid___A002_Xf07bba_Xbe5c_target.lsrk.vis.zarr'.
+    sel_parms : dict
+        A dictionary where the keys are the names of the ms_xds's and the values are slice_dicts.
+        slice_dicts: A dictionary where the keys are the dimension names and the values are slices.
+        For example::
+
+            {
+                'ms_v4_name_1': {'frequency': slice(0, 160, None),'time':slice(0,100)},
+                ...
+                'ms_v4_name_n': {'frequency': slice(0, 160, None),'time':slice(0,100)},
+            }
+
+    data_variables : Union[list, None], optional
+        The list of data variables to load into memory for example ['VISIBILITY', 'WEIGHT, 'FLAGS']. By default None which will load all data variables into memory.
+    load_sub_datasets : bool, optional
+        If true sub-datasets (for example weather_xds, antenna_xds, pointing_xds, ...) will be loaded into memory, by default True.
+
+    Returns
+    -------
+    processing_set
+        In memory representation of processing set (data is represented by Dask.arrays).
+    """
+    from xradio._utils.zarr.common import _open_dataset

-
-
-
-
-
-
-
-        )
-    write_ms_xds(ms_xds, xds_cached_name)
-
-    found_in_cache = False
-    return xds, found_in_cache
-    else:
-        found_in_cache = None
-        ms_xds = _load_ms_xds_core(
-            ms_xds_name=os.path.join(ps_name, ms_xds_name), slice_dict=slice_dict
-        )
-        return ms_xds, found_in_cache
-
-
-def _write_ms_xds(ms_xds, ms_xds_name):
-    ms_xds_temp = ms_xds
-    xr.Dataset.to_zarr(
-        ms_xds.attrs["ANTENNA"],
-        os.path.join(xds_cached_name, "ANTENNA"),
-        consolidated=True,
-    )
-    ms_xds_temp = ms_xds
-    ms_xds_temp.attrs["ANTENNA"] = {}
-    xr.Dataset.to_zarr(
-        ms_xds_temp, os.path.join(xds_cached_name, "MAIN"), consolidated=True
-    )
-
-
-def _load_ms_xds_core(ms_xds_name, slice_dict):
-    ms_xds = _load_no_dask_zarr(
-        zarr_name=os.path.join(ms_xds_name, "MAIN"), slice_dict=slice_dict
-    )
-    ms_xds.attrs["antenna_xds"] = _load_no_dask_zarr(
-        zarr_name=os.path.join(ms_xds_name, "ANTENNA")
-    )
-    sub_xds = {
-        "antenna_xds": "ANTENNA",
-    }
-    for sub_xds_key, sub_xds_name in sub_xds.items():
-        ms_xds.attrs[sub_xds_key] = _load_no_dask_zarr(
-            zarr_name=os.path.join(ms_xds_name, sub_xds_name)
+    ps = processing_set()
+    for ms_dir_name, ms_xds_isel in sel_parms.items():
+        xds = _open_dataset(
+            os.path.join(ps_store, ms_dir_name, "MAIN"),
+            ms_xds_isel,
+            data_variables,
+            load=True,
         )
-    optional_sub_xds = {
-        "weather_xds": "WEATHER",
-        "pointing_xds": "POINTING",
-    }
-    for sub_xds_key, sub_xds_name in sub_xds.items():
-        sub_xds_path = os.path.join(ms_xds_name, sub_xds_name)
-        if os.path.isdir(sub_xds_path):
-            ms_xds.attrs[sub_xds_key] = _load_no_dask_zarr(
-                zarr_name=os.path.join(ms_xds_name, sub_xds_name)
-            )

-
+        if load_sub_datasets:
+            from xradio.vis.read_processing_set import _read_sub_xds

+            xds.attrs = {
+                **xds.attrs,
+                **_read_sub_xds(os.path.join(ps_store, ms_dir_name), load=True),
+            }

-
-    """
-    sel_parms
-        A dictionary where the keys are the names of the ms_xds's and the values are slice_dicts.
-        slice_dicts: A dictionary where the keys are the dimension names and the values are slices.
-    """
-    ps = processing_set()
-    for name_ms_xds, ms_xds_sel_parms in sel_parms.items():
-        ps[name_ms_xds] = _load_ms_xds(ps_name, name_ms_xds, ms_xds_sel_parms)[0]
+        ps[ms_dir_name] = xds
     return ps


 class processing_set_iterator:

-    def __init__(
+    def __init__(
+        self,
+        sel_parms: dict,
+        input_data_store: str,
+        input_data: Union[Dict, processing_set, None] = None,
+        data_variables: list = None,
+        load_sub_datasets: bool = True,
+    ):
+        """An iterator that will go through a processing set one MS v4 at a time.
+
+        Parameters
+        ----------
+        sel_parms : dict
+            A dictionary where the keys are the names of the ms_xds's and the values are slice_dicts.
+            slice_dicts: A dictionary where the keys are the dimension names and the values are slices.
+            For example::
+
+                {
+                    'ms_v4_name_1': {'frequency': slice(0, 160, None),'time':slice(0,100)},
+                    ...
+                    'ms_v4_name_n': {'frequency': slice(0, 160, None),'time':slice(0,100)},
+                }
+        input_data_store : str
+            String of the path and name of the processing set. For example '/users/user_1/uid___A002_Xf07bba_Xbe5c_target.lsrk.vis.zarr'.
+        input_data : Union[Dict, processing_set, None], optional
+            If the processing set is in memory already it can be supplied here. By default None which will make the iterator load data using the supplied input_data_store.
+        data_variables : list, optional
+            The list of data variables to load into memory for example ['VISIBILITY', 'WEIGHT, 'FLAGS']. By default None which will load all data variables into memory.
+        load_sub_datasets : bool, optional
+            If true sub-datasets (for example weather_xds, antenna_xds, pointing_xds, ...) will be loaded into memory, by default True.
+        """
+
         self.input_data = input_data
         self.input_data_store = input_data_store
-        self.
-        self.xds_name_iter = iter(
+        self.sel_parms = sel_parms
+        self.xds_name_iter = iter(sel_parms.keys())
+        self.data_variables = data_variables
+        self.load_sub_datasets = load_sub_datasets

     def __iter__(self):
         return self
@@ -115,13 +113,15 @@ class processing_set_iterator:
             raise StopIteration

        if self.input_data is None:
-            slice_description = self.
+            slice_description = self.sel_parms[xds_name]
            ps = load_processing_set(
-
+                ps_store=self.input_data_store,
                sel_parms={xds_name: slice_description},
+                data_variables=self.data_variables,
+                load_sub_datasets=self.load_sub_datasets,
            )
            xds = ps.get(0)
        else:
            xds = self.input_data[xds_name]  # In memory

-        return xds
+        return xds
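load_processing_set now takes the selection as sel_parms plus optional data_variables and load_sub_datasets arguments, and processing_set_iterator forwards the same options when it loads one MS v4 at a time. A minimal sketch under assumed names (the store path and the 'ms_v4_name_1' key are hypothetical):

    from xradio.vis.load_processing_set import load_processing_set, processing_set_iterator

    # Keys are MS v4 directory names inside the zarr store; names here are placeholders.
    sel_parms = {
        "ms_v4_name_1": {"frequency": slice(0, 160), "time": slice(0, 100)},
    }

    # Load the selected slices fully into memory, restricted to two data variables.
    ps = load_processing_set(
        ps_store="/tmp/example.vis.zarr",  # hypothetical path
        sel_parms=sel_parms,
        data_variables=["VISIBILITY", "WEIGHT"],
        load_sub_datasets=True,
    )

    # Or iterate one MS v4 at a time without holding the whole set in memory.
    for xds in processing_set_iterator(sel_parms, input_data_store="/tmp/example.vis.zarr"):
        print(xds.dims)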
xradio/vis/read_processing_set.py
CHANGED
@@ -1,43 +1,85 @@
 import os
-
 import xarray as xr
-
 from ._processing_set import processing_set
+import graphviper.utils.logger as logger
+from xradio._utils.zarr.common import _open_dataset


-def read_processing_set(
-
+def read_processing_set(
+    ps_store: str, intents: list = None, fields: str = None
+)->processing_set:
+    """Creates a lazy representation of a Processing Set (only meta-data is loaded into memory).
+
+    Parameters
+    ----------
+    ps_store : str
+        String of the path and name of the processing set. For example '/users/user_1/uid___A002_Xf07bba_Xbe5c_target.lsrk.vis.zarr'.
+    intents : list, optional
+        A list of the intents to be read for example ['OBSERVE_TARGET#ON_SOURCE']. The intents in a processing set can be seem by calling processing_set.summary().
+        By default None, which will read all intents.
+    fields : str, optional
+        The list of field names that will be read, by default None which will read all fields.
+
+    Returns
+    -------
+    processing_set
+        Lazy representation of processing set (data is represented by Dask.arrays).
+    """
+    items = os.listdir(ps_store)
     ms_xds = xr.Dataset()
     ps = processing_set()
-
-
-
-
+    data_group = 'base'
+    for ms_dir_name in items:
+        if "ddi" in ms_dir_name:
+            xds = _open_dataset(os.path.join(ps_store, ms_dir_name, "MAIN"))
            if (intents is None) or (xds.attrs["intent"] in intents):
-
-                if "visibility" in xds.attrs["data_groups"][data_group]:
-                    data_name = xds.attrs["data_groups"][data_group]["visibility"]
+                data_name = _get_data_name(xds, data_group)

-                if
-                    data_name
-
-
-
-
-                    "antenna_xds": "ANTENNA",
-                }
-                for sub_xds_key, sub_xds_name in sub_xds.items():
-                    ps[i].attrs[sub_xds_key] = xr.open_zarr(
-                        ps_name + "/" + i + "/" + sub_xds_name
-                    )
-
-                optional_sub_xds = {
-                    "weather_xds": "WEATHER",
-                    "pointing_xds": "POINTING",
+                if (fields is None) or (
+                    xds[data_name].attrs["field_info"]["name"] in fields
+                ):
+                    xds.attrs = {
+                        **xds.attrs,
+                        **_read_sub_xds(os.path.join(ps_store, ms_dir_name)),
                    }
-
-                sub_xds_path = ps_name + "/" + i + "/" + sub_xds_name
-                if os.path.isdir(sub_xds_path):
-                    ps[i].attrs[sub_xds_key] = xr.open_zarr(sub_xds_path)
-
+                    ps[ms_dir_name] = xds
     return ps
+
+
+def _read_sub_xds(ms_store, load=False):
+    sub_xds_dict = {}
+
+    sub_xds = {
+        "antenna_xds": "ANTENNA",
+    }
+    for sub_xds_key, sub_xds_name in sub_xds.items():
+        sub_xds_dict[sub_xds_key] = _open_dataset(
+            os.path.join(ms_store, sub_xds_name), load=load
+        )
+
+    optional_sub_xds = {
+        "weather_xds": "WEATHER",
+        "pointing_xds": "POINTING",
+    }
+    for sub_xds_key, sub_xds_name in optional_sub_xds.items():
+        sub_xds_path = os.path.join(ms_store, sub_xds_name)
+        if os.path.isdir(sub_xds_path):
+            sub_xds_dict[sub_xds_key] = _open_dataset(sub_xds_path, load=load)
+
+    return sub_xds_dict
+
+
+def _get_data_name(xds, data_group):
+    if "visibility" in xds.attrs["data_groups"][data_group]:
+        data_name = xds.attrs["data_groups"][data_group]["visibility"]
+    elif "spectrum" in xds.attrs["data_groups"][data_group]:
+        data_name = xds.attrs["data_groups"][data_group]["spectrum"]
+    else:
+        error_message = (
+            "No Visibility or Spectrum data variable found in data_group "
+            + data_group
+            + "."
+        )
+        logger.exception(error_message)
+        raise ValueError(error_message)
+    return data_name
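read_processing_set now exposes intents and fields filters and gains the shared _read_sub_xds / _get_data_name helpers. A minimal usage sketch (the store path and intent string are illustrative, taken from the docstring's examples):

    from xradio.vis.read_processing_set import read_processing_set

    # Lazily open only the on-source target scans of a hypothetical processing set.
    ps = read_processing_set(
        "/tmp/example.vis.zarr",
        intents=["OBSERVE_TARGET#ON_SOURCE"],
    )
    print(ps.summary())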
{xradio-0.0.23.dist-info → xradio-0.0.24.dist-info}/RECORD
CHANGED
@@ -3,7 +3,7 @@ xradio/_utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 xradio/_utils/common.py,sha256=bjUZfZZrYTOt0i_XVfmQ2kvgr3egoYPWKGgnr4vKe-Y,46
 xradio/_utils/_casacore/tables.py,sha256=aq6E_4RRAHdTBCwMKrVil1cWhFU2O980DNH9IlRKXLw,1280
 xradio/_utils/zarr/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-xradio/_utils/zarr/common.py,sha256=
+xradio/_utils/zarr/common.py,sha256=HzZOky2XNuKkpRi6M4ZQCsTpl9ngiPtH9a6Pw63s4EI,3291
 xradio/image/__init__.py,sha256=HAD0GfopIbhdxOYckyW6S9US_dSWmZrwIl3FHUzZwrE,435
 xradio/image/image.py,sha256=QoJ_BTLoMfeXJzU1yvtidBIhaMmjNA5_-6C3FWJRUeI,15635
 xradio/image/_util/__init__.py,sha256=M9lxD1Gc7kv0ucDEDbjLRuIEuESev-IG8j9EaCKUAkA,77
@@ -11,7 +11,7 @@ xradio/image/_util/casacore.py,sha256=Ozuq_VgseU1pcX230078TzOSsdYdAbwbjHCkwU79m0
 xradio/image/_util/common.py,sha256=uH2Po1SycsOK_KZqyLWllNPmRtjTSEr0r2YF4oPRZas,8793
 xradio/image/_util/fits.py,sha256=gyGm06fuCKqVGK7uv-ObvQNfFawUDsIOa_nQyklM3Aw,329
 xradio/image/_util/image_factory.py,sha256=6tPzs20FTm2wEshHc1xqtTV7D0TbKxGLUKAVtvOc68I,10506
-xradio/image/_util/zarr.py,sha256=
+xradio/image/_util/zarr.py,sha256=xTjg-KY-T4vuyua8pvuZZjCL-rI_wAsPjPUOYd5PZr4,1512
 xradio/image/_util/_casacore/__init__.py,sha256=OlsiRE40o1jSbBI4khgQQzgfDYbAlOMKIhO4UFlbGhg,41
 xradio/image/_util/_casacore/common.py,sha256=ky999eTCWta8w-uIs-7P7rPhZRLuh9yTuQXAxPvaPm4,1579
 xradio/image/_util/_casacore/xds_from_casacore.py,sha256=Ubt5PCVLFX2t9YpCEH9jS0g0hMI56Owfr5Ibup7-Hg4,42562
@@ -20,7 +20,7 @@ xradio/image/_util/_fits/xds_from_fits.py,sha256=SIYyl4dAoCdiB4CSv0_wV_hY7jhLZZE
 xradio/image/_util/_zarr/common.py,sha256=apMX_bF4Hr3pFGjnDFpp36KgmhTYAPBZquNkjBHrsXk,307
 xradio/image/_util/_zarr/xds_from_zarr.py,sha256=hz6lHlpybfr_r8pn_uObDHOFmN5h75F11bkBv8KCuP0,3192
 xradio/image/_util/_zarr/xds_to_zarr.py,sha256=wogXbwX8n3Sl9PHoc3_Y_LBowQsQ-94HZQFZ5NcxUZA,1624
-xradio/image/_util/_zarr/zarr_low_level.py,sha256=
+xradio/image/_util/_zarr/zarr_low_level.py,sha256=DuAD1kVUbHjOxVH9jRagk9GdPA5uH2fIMzA0djxM6Sc,9537
 xradio/schema/__init__.py,sha256=UpejQegOaCLrsbcR4MLudR0RxeE0sN3zxFXM8rzyJPo,444
 xradio/schema/bases.py,sha256=vcW47jZWpJ0mJdni7eFVY7zJoper2sy2VjX8LE3pUqc,150
 xradio/schema/check.py,sha256=3u79hRL3pGF6dvQE0LF21nGdAVnRXWwgnbMmStGBSSA,16310
@@ -28,11 +28,11 @@ xradio/schema/dataclass.py,sha256=vkc2cqLjGV5QN8j70GbBaNfslT_KLWmebsPGeBEuGcs,88
 xradio/schema/metamodel.py,sha256=RHrihyaetinu7_lGTTZ31Rlv-_Db_EgQCXzk56H004o,3476
 xradio/schema/typing.py,sha256=coF3LuKOlCUJGKTUUH81EcjePZ86koOYzm8qzsAw-HU,9983
 xradio/vis/__init__.py,sha256=AV2WG26NzFB1LEEtFaq1ULQKz9VnluEAjg0Qb5Ju7m8,358
-xradio/vis/_processing_set.py,sha256=
+xradio/vis/_processing_set.py,sha256=3cm-WAuBjzX0_XPUjSR1i2i5_PeXiGwW12XioVji2Wk,3666
 xradio/vis/convert_msv2_to_processing_set.py,sha256=7vTjqtWFEBOySnLVoadceKCA4VLgOig7eCs1dxrdYQA,3966
-xradio/vis/load_processing_set.py,sha256=
+xradio/vis/load_processing_set.py,sha256=DsKjizvcJxXNn5BuXH2fye-dzMoB-BDK2f-8jsvK2QU,4776
 xradio/vis/model.py,sha256=uBjvvhYEY1p-3H3NStrt1ZKMQACLGLo93OiEBvDVId8,17083
-xradio/vis/read_processing_set.py,sha256=
+xradio/vis/read_processing_set.py,sha256=ljQVi8njdsAU--V-3QaZ4_EhWXqjnW3mdLr0VE1N4cA,3043
 xradio/vis/vis_io.py,sha256=rCSOt4Max37uFzF3_Ck4U4xWzzYcipdbYcxbhBhQ_Qs,5278
 xradio/vis/_vis_utils/__init__.py,sha256=Scu6rKJ2SpO8aG7F-xdTZcYfyWx0viV8gFh8E8ur_gI,93
 xradio/vis/_vis_utils/ms.py,sha256=0uycYCDmeQku16TdPcnZEBJMdfb_i6xPoieYPhPoVIg,5258
@@ -64,8 +64,8 @@ xradio/vis/_vis_utils/_utils/xds_helper.py,sha256=UhtAZV5DyYzVVBkXzwDAOz6TICxotQ
 xradio/vis/_vis_utils/_zarr/encoding.py,sha256=GENIlThV6a9CUCL6gIGlu9c6NR3OFWNos6mpxZjMwDc,536
 xradio/vis/_vis_utils/_zarr/read.py,sha256=ikNGlOdHuZ_cgWpPAZ4iHzeLdU44I0iBLcqSEiM_hCk,7111
 xradio/vis/_vis_utils/_zarr/write.py,sha256=exvrqNVnVKk6LzbDPm_fm142YzX-7lHGqklXTQB9wh0,8864
-xradio-0.0.
-xradio-0.0.
-xradio-0.0.
-xradio-0.0.
-xradio-0.0.
+xradio-0.0.24.dist-info/LICENSE.txt,sha256=dvACd-5O67yjSZlnEKcWmu3DqwzBtbC922iPv0KOeAw,1516
+xradio-0.0.24.dist-info/METADATA,sha256=yJW0v_5BpEInE5IVWDbELOpAHGGzgT0a0PmrbLnWYZ8,4070
+xradio-0.0.24.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+xradio-0.0.24.dist-info/top_level.txt,sha256=dQu27fGBZJ2Yk-gW5XeD-dZ76Xa4Xcvk60Vz-dwXp7k,7
+xradio-0.0.24.dist-info/RECORD,,
{xradio-0.0.23.dist-info → xradio-0.0.24.dist-info}/LICENSE.txt
File without changes
{xradio-0.0.23.dist-info → xradio-0.0.24.dist-info}/WHEEL
File without changes
{xradio-0.0.23.dist-info → xradio-0.0.24.dist-info}/top_level.txt
File without changes