xradio 0.0.23__tar.gz → 0.0.25__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {xradio-0.0.23/src/xradio.egg-info → xradio-0.0.25}/PKG-INFO +3 -3
- {xradio-0.0.23 → xradio-0.0.25}/pyproject.toml +3 -3
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/_utils/zarr/common.py +45 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/image/_util/_zarr/zarr_low_level.py +42 -10
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/image/_util/zarr.py +3 -3
- xradio-0.0.25/src/xradio/vis/_processing_set.py +112 -0
- xradio-0.0.25/src/xradio/vis/load_processing_set.py +127 -0
- xradio-0.0.25/src/xradio/vis/read_processing_set.py +85 -0
- {xradio-0.0.23 → xradio-0.0.25/src/xradio.egg-info}/PKG-INFO +3 -3
- xradio-0.0.23/src/xradio/vis/_processing_set.py +0 -84
- xradio-0.0.23/src/xradio/vis/load_processing_set.py +0 -127
- xradio-0.0.23/src/xradio/vis/read_processing_set.py +0 -43
- {xradio-0.0.23 → xradio-0.0.25}/LICENSE.txt +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/MANIFEST.in +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/README.md +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/setup.cfg +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/__init__.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/_utils/__init__.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/_utils/_casacore/tables.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/_utils/common.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/_utils/zarr/__init__.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/image/__init__.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/image/_util/__init__.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/image/_util/_casacore/__init__.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/image/_util/_casacore/common.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/image/_util/_casacore/xds_from_casacore.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/image/_util/_casacore/xds_to_casacore.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/image/_util/_fits/xds_from_fits.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/image/_util/_zarr/common.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/image/_util/_zarr/xds_from_zarr.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/image/_util/_zarr/xds_to_zarr.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/image/_util/casacore.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/image/_util/common.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/image/_util/fits.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/image/_util/image_factory.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/image/image.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/schema/__init__.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/schema/bases.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/schema/check.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/schema/dataclass.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/schema/metamodel.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/schema/typing.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/vis/__init__.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/vis/_vis_utils/__init__.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/vis/_vis_utils/_ms/_tables/load.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/vis/_vis_utils/_ms/_tables/load_main_table.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/vis/_vis_utils/_ms/_tables/read.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/vis/_vis_utils/_ms/_tables/read_main_table.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/vis/_vis_utils/_ms/_tables/read_subtables.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/vis/_vis_utils/_ms/_tables/table_query.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/vis/_vis_utils/_ms/_tables/write.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/vis/_vis_utils/_ms/_tables/write_exp_api.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/vis/_vis_utils/_ms/chunks.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/vis/_vis_utils/_ms/conversion.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/vis/_vis_utils/_ms/descr.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/vis/_vis_utils/_ms/msv2_msv3.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/vis/_vis_utils/_ms/msv2_to_msv4_meta.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/vis/_vis_utils/_ms/msv4_infos.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/vis/_vis_utils/_ms/msv4_sub_xdss.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/vis/_vis_utils/_ms/optimised_functions.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/vis/_vis_utils/_ms/partition_queries.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/vis/_vis_utils/_ms/partitions.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/vis/_vis_utils/_ms/subtables.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/vis/_vis_utils/_utils/cds.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/vis/_vis_utils/_utils/partition_attrs.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/vis/_vis_utils/_utils/stokes_types.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/vis/_vis_utils/_utils/xds_helper.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/vis/_vis_utils/_zarr/encoding.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/vis/_vis_utils/_zarr/read.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/vis/_vis_utils/_zarr/write.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/vis/_vis_utils/ms.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/vis/_vis_utils/ms_column_descriptions_dicts.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/vis/_vis_utils/zarr.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/vis/convert_msv2_to_processing_set.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/vis/model.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio/vis/vis_io.py +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio.egg-info/SOURCES.txt +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio.egg-info/dependency_links.txt +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio.egg-info/requires.txt +0 -0
- {xradio-0.0.23 → xradio-0.0.25}/src/xradio.egg-info/top_level.txt +0 -0
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
Metadata-Version: 2.1
|
|
2
2
|
Name: xradio
|
|
3
|
-
Version: 0.0.
|
|
4
|
-
Summary: Xarray Radio Astronomy Data IO
|
|
3
|
+
Version: 0.0.25
|
|
4
|
+
Summary: Xarray Radio Astronomy Data IO
|
|
5
5
|
Author-email: Jan-Willem Steeb <jsteeb@nrao.edu>
|
|
6
6
|
License: BSD 3-Clause License
|
|
7
7
|
|
|
@@ -32,7 +32,7 @@ License: BSD 3-Clause License
|
|
|
32
32
|
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|
33
33
|
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
34
34
|
|
|
35
|
-
Requires-Python: <3.12,>=3.
|
|
35
|
+
Requires-Python: <3.12,>=3.9
|
|
36
36
|
Description-Content-Type: text/markdown
|
|
37
37
|
License-File: LICENSE.txt
|
|
38
38
|
Requires-Dist: astropy
|
|
@@ -1,13 +1,13 @@
|
|
|
1
1
|
[project]
|
|
2
2
|
name = "xradio"
|
|
3
|
-
version = "0.0.
|
|
4
|
-
description = "Xarray Radio Astronomy Data IO
|
|
3
|
+
version = "0.0.25"
|
|
4
|
+
description = "Xarray Radio Astronomy Data IO"
|
|
5
5
|
authors = [
|
|
6
6
|
{name = "Jan-Willem Steeb", email="jsteeb@nrao.edu"},
|
|
7
7
|
]
|
|
8
8
|
license = {file = "LICENSE.txt"}
|
|
9
9
|
readme = "README.md"
|
|
10
|
-
requires-python = ">= 3.
|
|
10
|
+
requires-python = ">= 3.9, < 3.12"
|
|
11
11
|
|
|
12
12
|
dependencies = [
|
|
13
13
|
'astropy',
|
|
@@ -3,6 +3,47 @@ import xarray as xr
|
|
|
3
3
|
import zarr
|
|
4
4
|
|
|
5
5
|
|
|
6
|
+
def _open_dataset(store, xds_isel=None, data_variables=None, load=False):
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
Parameters
|
|
10
|
+
----------
|
|
11
|
+
store : _type_
|
|
12
|
+
_description_
|
|
13
|
+
xds_isel : _type_, optional
|
|
14
|
+
Example {'time':slice(0,10), 'frequency':slice(5,7)}, by default None
|
|
15
|
+
data_variables : _type_, optional
|
|
16
|
+
Example ['VISIBILITY','WEIGHT'], by default None
|
|
17
|
+
load : bool, optional
|
|
18
|
+
_description_, by default False
|
|
19
|
+
|
|
20
|
+
Returns
|
|
21
|
+
-------
|
|
22
|
+
_type_
|
|
23
|
+
_description_
|
|
24
|
+
"""
|
|
25
|
+
|
|
26
|
+
import dask
|
|
27
|
+
|
|
28
|
+
xds = xr.open_zarr(store)
|
|
29
|
+
|
|
30
|
+
if xds_isel is not None:
|
|
31
|
+
xds = xds.isel(xds_isel)
|
|
32
|
+
|
|
33
|
+
if data_variables is not None:
|
|
34
|
+
xds_sub = xr.Dataset()
|
|
35
|
+
for dv in data_variables:
|
|
36
|
+
xds_sub[dv] = xds[dv]
|
|
37
|
+
xds_sub.attrs = xds.attrs
|
|
38
|
+
xds = xds_sub
|
|
39
|
+
|
|
40
|
+
if load:
|
|
41
|
+
with dask.config.set(scheduler="synchronous"):
|
|
42
|
+
xds = xds.load()
|
|
43
|
+
return xds
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
# Code to depricate:
|
|
6
47
|
def _get_attrs(zarr_obj):
|
|
7
48
|
"""
|
|
8
49
|
get attributes of zarr obj (groups or arrays)
|
|
@@ -33,6 +74,7 @@ def _load_no_dask_zarr(zarr_name, slice_dict={}):
|
|
|
33
74
|
coords = {}
|
|
34
75
|
xds = xr.Dataset()
|
|
35
76
|
for var_name, var in zarr_group.arrays():
|
|
77
|
+
print("Hallo 3", var_name, var.shape)
|
|
36
78
|
var_attrs = _get_attrs(var)
|
|
37
79
|
|
|
38
80
|
for dim in var_attrs[DIMENSION_KEY]:
|
|
@@ -54,6 +96,9 @@ def _load_no_dask_zarr(zarr_name, slice_dict={}):
|
|
|
54
96
|
for dim in var_attrs[DIMENSION_KEY]:
|
|
55
97
|
slicing_list.append(slice_dict_complete[dim])
|
|
56
98
|
slicing_tuple = tuple(slicing_list)
|
|
99
|
+
|
|
100
|
+
print(var_attrs[DIMENSION_KEY])
|
|
101
|
+
|
|
57
102
|
xds[var_name] = xr.DataArray(
|
|
58
103
|
var[slicing_tuple], dims=var_attrs[DIMENSION_KEY]
|
|
59
104
|
)
|
|
@@ -70,7 +70,6 @@ image_data_variables_and_dims_single_precision = {
|
|
|
70
70
|
}
|
|
71
71
|
|
|
72
72
|
|
|
73
|
-
|
|
74
73
|
def pad_array_with_nans(input_array, output_shape, dtype):
|
|
75
74
|
"""
|
|
76
75
|
Pad an integer array with NaN values to match the specified output shape.
|
|
@@ -110,19 +109,55 @@ def write_binary_blob_to_disk(arr, file_path, compressor):
|
|
|
110
109
|
Returns:
|
|
111
110
|
- None
|
|
112
111
|
"""
|
|
112
|
+
import graphviper.utils.logger as logger
|
|
113
113
|
# Encode the NumPy array using the codec
|
|
114
|
+
logger.debug('1. Before compressor ' + file_path)
|
|
114
115
|
compressed_arr = compressor.encode(np.ascontiguousarray(arr))
|
|
115
116
|
|
|
117
|
+
logger.debug('2. Before makedir')
|
|
116
118
|
# Ensure the directory exists before saving the file
|
|
117
119
|
os.makedirs(os.path.dirname(file_path), exist_ok=True)
|
|
118
120
|
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
121
|
+
arr_len = len(compressed_arr)
|
|
122
|
+
logger.debug('3. Before write the len is: ' + str(arr_len))
|
|
123
|
+
#Save the compressed array to disk
|
|
124
|
+
# with open(file_path, "wb") as file:
|
|
125
|
+
# file.write(compressed_arr)
|
|
126
|
+
|
|
127
|
+
logger.debug('4. Using new writer: ' + str(arr_len))
|
|
128
|
+
write_to_lustre_chunked(file_path, compressed_arr)
|
|
129
|
+
|
|
130
|
+
# /.lustre/aoc/sciops/pford/CHILES/cube_image/uid___A002_Xee7674_X2844_Cube_3.img.zarr/SKY/0.0.110.0.0
|
|
131
|
+
# 348192501 bytes
|
|
132
|
+
# 332.0622453689575 M
|
|
133
|
+
|
|
134
|
+
# from io import BufferedWriter
|
|
135
|
+
# # Calculate buffer size based on compressed_arr size (adjust multiplier)
|
|
136
|
+
# buffer_size = min(len(compressed_arr), 1024 * 1024 * 4) # Max 4 MB buffer
|
|
137
|
+
# with BufferedWriter(open(file_path, "wb"), buffer_size) as f:
|
|
138
|
+
# f.write(compressed_arr)
|
|
139
|
+
# f.flush() # Ensure data gets written to disk
|
|
140
|
+
|
|
141
|
+
|
|
142
|
+
logger.debug('4. Write completed')
|
|
122
143
|
|
|
123
144
|
# print(f"Compressed array saved to {file_path}")
|
|
124
145
|
|
|
125
146
|
|
|
147
|
+
def write_to_lustre_chunked(file_path, compressed_arr, chunk_size=1024 * 1024 * 128): # 128 MiB chunks
|
|
148
|
+
"""
|
|
149
|
+
Writes compressed data to a Lustre file path with chunking.
|
|
150
|
+
|
|
151
|
+
Args:
|
|
152
|
+
file_path: Path to the file for writing.
|
|
153
|
+
compressed_arr: Compressed data array to write.
|
|
154
|
+
chunk_size: Size of each data chunk in bytes (default: 128 MiB).
|
|
155
|
+
"""
|
|
156
|
+
with open(file_path, "wb") as f:
|
|
157
|
+
for i in range(0, len(compressed_arr), chunk_size):
|
|
158
|
+
chunk = compressed_arr[i:i + chunk_size]
|
|
159
|
+
f.write(chunk)
|
|
160
|
+
|
|
126
161
|
def read_binary_blob_from_disk(file_path, compressor, dtype=np.float64):
|
|
127
162
|
"""
|
|
128
163
|
Read a compressed binary blob from disk and decode it using Blosc.
|
|
@@ -277,8 +312,7 @@ def create_data_variable_meta_data_on_disk(
|
|
|
277
312
|
return zarr_meta
|
|
278
313
|
|
|
279
314
|
|
|
280
|
-
|
|
281
|
-
def write_chunk(img_xds,meta,parallel_dims_chunk_id,compressor,image_file):
|
|
315
|
+
def write_chunk(img_xds, meta, parallel_dims_chunk_id, compressor, image_file):
|
|
282
316
|
dims = meta["dims"]
|
|
283
317
|
dtype = meta["dtype"]
|
|
284
318
|
data_varaible_name = meta["name"]
|
|
@@ -304,8 +338,6 @@ def write_chunk(img_xds,meta,parallel_dims_chunk_id,compressor,image_file):
|
|
|
304
338
|
|
|
305
339
|
write_binary_blob_to_disk(
|
|
306
340
|
array,
|
|
307
|
-
file_path=os.path.join(
|
|
308
|
-
image_file, data_varaible_name, chunk_name
|
|
309
|
-
),
|
|
341
|
+
file_path=os.path.join(image_file, data_varaible_name, chunk_name),
|
|
310
342
|
compressor=compressor,
|
|
311
|
-
)
|
|
343
|
+
)
|
|
@@ -3,7 +3,7 @@ from ._zarr.xds_from_zarr import _read_zarr
|
|
|
3
3
|
import numpy as np
|
|
4
4
|
import os
|
|
5
5
|
import xarray as xr
|
|
6
|
-
from ..._utils.zarr.common import
|
|
6
|
+
from ..._utils.zarr.common import _open_dataset
|
|
7
7
|
|
|
8
8
|
|
|
9
9
|
def _xds_to_zarr(xds: xr.Dataset, zarr_store: str):
|
|
@@ -25,11 +25,11 @@ def _xds_from_zarr(
|
|
|
25
25
|
|
|
26
26
|
|
|
27
27
|
def _load_image_from_zarr_no_dask(zarr_file: str, selection: dict) -> xr.Dataset:
|
|
28
|
-
image_xds =
|
|
28
|
+
image_xds = _open_dataset(zarr_file, selection, load=True)
|
|
29
29
|
for h in ["HISTORY", "_attrs_xds_history"]:
|
|
30
30
|
history = os.sep.join([zarr_file, h])
|
|
31
31
|
if os.path.isdir(history):
|
|
32
|
-
image_xds.attrs["history"] =
|
|
32
|
+
image_xds.attrs["history"] = _open_dataset(history, load=True)
|
|
33
33
|
break
|
|
34
34
|
_iter_dict(image_xds.attrs)
|
|
35
35
|
return image_xds
|
|
@@ -0,0 +1,112 @@
|
|
|
1
|
+
import pandas as pd
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
class processing_set(dict):
|
|
5
|
+
def __init__(self, *args, **kwargs):
|
|
6
|
+
super().__init__(*args, **kwargs)
|
|
7
|
+
self.meta = {"summary": {}}
|
|
8
|
+
|
|
9
|
+
# generate_meta(self)
|
|
10
|
+
|
|
11
|
+
# def generate_meta(self):
|
|
12
|
+
# self.meta['summary'] = {"base": _summary(self)}
|
|
13
|
+
# self.meta['max_dims'] = _get_ps_max_dims(self)
|
|
14
|
+
|
|
15
|
+
def summary(self, data_group="base"):
|
|
16
|
+
if data_group in self.meta["summary"]:
|
|
17
|
+
return self.meta["summary"][data_group]
|
|
18
|
+
else:
|
|
19
|
+
self.meta["summary"][data_group] = self._summary(data_group)
|
|
20
|
+
return self.meta["summary"][data_group]
|
|
21
|
+
|
|
22
|
+
def get_ps_max_dims(self):
|
|
23
|
+
if "max_dims" in self.meta:
|
|
24
|
+
return self.meta["max_dims"]
|
|
25
|
+
else:
|
|
26
|
+
self.meta["max_dims"] = self._get_ps_max_dims()
|
|
27
|
+
return self.meta["max_dims"]
|
|
28
|
+
|
|
29
|
+
def get_ps_freq_axis(self):
|
|
30
|
+
if "freq_axis" in self.meta:
|
|
31
|
+
return self.meta["freq_axis"]
|
|
32
|
+
else:
|
|
33
|
+
self.meta["freq_axis"] = self._get_ps_freq_axis()
|
|
34
|
+
return self.meta["freq_axis"]
|
|
35
|
+
|
|
36
|
+
def _summary(self, data_group="base"):
|
|
37
|
+
summary_data = {
|
|
38
|
+
"name": [],
|
|
39
|
+
"ddi": [],
|
|
40
|
+
"intent": [],
|
|
41
|
+
"field_id": [],
|
|
42
|
+
"field_name": [],
|
|
43
|
+
"start_frequency": [],
|
|
44
|
+
"end_frequency": [],
|
|
45
|
+
"shape": [],
|
|
46
|
+
"field_coords": []
|
|
47
|
+
}
|
|
48
|
+
from astropy.coordinates import SkyCoord
|
|
49
|
+
import astropy.units as u
|
|
50
|
+
for key, value in self.items():
|
|
51
|
+
summary_data["name"].append(key)
|
|
52
|
+
summary_data["ddi"].append(value.attrs["ddi"])
|
|
53
|
+
summary_data["intent"].append(value.attrs["intent"])
|
|
54
|
+
|
|
55
|
+
if "visibility" in value.attrs["data_groups"][data_group]:
|
|
56
|
+
data_name = value.attrs["data_groups"][data_group]["visibility"]
|
|
57
|
+
|
|
58
|
+
if "spectrum" in value.attrs["data_groups"][data_group]:
|
|
59
|
+
data_name = value.attrs["data_groups"][data_group]["spectrum"]
|
|
60
|
+
|
|
61
|
+
summary_data["shape"].append(value[data_name].shape)
|
|
62
|
+
|
|
63
|
+
summary_data["field_id"].append(
|
|
64
|
+
value[data_name].attrs["field_info"]["field_id"]
|
|
65
|
+
)
|
|
66
|
+
summary_data["field_name"].append(
|
|
67
|
+
value[data_name].attrs["field_info"]["name"]
|
|
68
|
+
)
|
|
69
|
+
summary_data["start_frequency"].append(value["frequency"].values[0])
|
|
70
|
+
summary_data["end_frequency"].append(value["frequency"].values[-1])
|
|
71
|
+
|
|
72
|
+
ra_dec_rad = value[data_name].attrs["field_info"]['phase_direction']['data']
|
|
73
|
+
frame = value[data_name].attrs["field_info"]['phase_direction']['attrs']['frame'].lower()
|
|
74
|
+
coord = SkyCoord(ra=ra_dec_rad[0]*u.rad, dec=ra_dec_rad[1]*u.rad, frame=frame)
|
|
75
|
+
|
|
76
|
+
summary_data["field_coords"].append([frame, coord.ra.to_string(unit=u.hour), coord.dec.to_string(unit=u.deg)])
|
|
77
|
+
summary_df = pd.DataFrame(summary_data)
|
|
78
|
+
return summary_df
|
|
79
|
+
|
|
80
|
+
def _get_ps_freq_axis(self):
|
|
81
|
+
import xarray as xr
|
|
82
|
+
|
|
83
|
+
spw_ids = []
|
|
84
|
+
freq_axis_list = []
|
|
85
|
+
frame = self.get(0).frequency.attrs["frame"]
|
|
86
|
+
for ms_xds in self.values():
|
|
87
|
+
assert (
|
|
88
|
+
frame == ms_xds.frequency.attrs["frame"]
|
|
89
|
+
), "Frequency reference frame not consistent in processing set."
|
|
90
|
+
if ms_xds.frequency.attrs["spw_id"] not in spw_ids:
|
|
91
|
+
spw_ids.append(ms_xds.frequency.attrs["spw_id"])
|
|
92
|
+
freq_axis_list.append(ms_xds.frequency)
|
|
93
|
+
|
|
94
|
+
freq_axis = xr.concat(freq_axis_list, dim="frequency").sortby("frequency")
|
|
95
|
+
return freq_axis
|
|
96
|
+
|
|
97
|
+
def _get_ps_max_dims(self):
|
|
98
|
+
max_dims = None
|
|
99
|
+
for ms_xds in self.values():
|
|
100
|
+
if max_dims is None:
|
|
101
|
+
max_dims = dict(ms_xds.sizes)
|
|
102
|
+
else:
|
|
103
|
+
for dim_name, size in ms_xds.sizes.items():
|
|
104
|
+
if dim_name in max_dims:
|
|
105
|
+
if max_dims[dim_name] < size:
|
|
106
|
+
max_dims[dim_name] = size
|
|
107
|
+
else:
|
|
108
|
+
max_dims[dim_name] = size
|
|
109
|
+
return max_dims
|
|
110
|
+
|
|
111
|
+
def get(self, id):
|
|
112
|
+
return self[list(self.keys())[id]]
|
|
@@ -0,0 +1,127 @@
|
|
|
1
|
+
import xarray as xr
|
|
2
|
+
import zarr
|
|
3
|
+
import copy
|
|
4
|
+
import os
|
|
5
|
+
from ._processing_set import processing_set
|
|
6
|
+
from typing import Dict, Union
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
def load_processing_set(
|
|
10
|
+
ps_store: str,
|
|
11
|
+
sel_parms: dict,
|
|
12
|
+
data_variables: Union[list, None] = None,
|
|
13
|
+
load_sub_datasets: bool = True,
|
|
14
|
+
)->processing_set:
|
|
15
|
+
"""Loads a processing set into memory.
|
|
16
|
+
|
|
17
|
+
Parameters
|
|
18
|
+
----------
|
|
19
|
+
ps_store : str
|
|
20
|
+
String of the path and name of the processing set. For example '/users/user_1/uid___A002_Xf07bba_Xbe5c_target.lsrk.vis.zarr'.
|
|
21
|
+
sel_parms : dict
|
|
22
|
+
A dictionary where the keys are the names of the ms_xds's and the values are slice_dicts.
|
|
23
|
+
slice_dicts: A dictionary where the keys are the dimension names and the values are slices.
|
|
24
|
+
For example::
|
|
25
|
+
|
|
26
|
+
{
|
|
27
|
+
'ms_v4_name_1': {'frequency': slice(0, 160, None),'time':slice(0,100)},
|
|
28
|
+
...
|
|
29
|
+
'ms_v4_name_n': {'frequency': slice(0, 160, None),'time':slice(0,100)},
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
data_variables : Union[list, None], optional
|
|
33
|
+
The list of data variables to load into memory for example ['VISIBILITY', 'WEIGHT, 'FLAGS']. By default None which will load all data variables into memory.
|
|
34
|
+
load_sub_datasets : bool, optional
|
|
35
|
+
If true sub-datasets (for example weather_xds, antenna_xds, pointing_xds, ...) will be loaded into memory, by default True.
|
|
36
|
+
|
|
37
|
+
Returns
|
|
38
|
+
-------
|
|
39
|
+
processing_set
|
|
40
|
+
In memory representation of processing set (data is represented by Dask.arrays).
|
|
41
|
+
"""
|
|
42
|
+
from xradio._utils.zarr.common import _open_dataset
|
|
43
|
+
|
|
44
|
+
ps = processing_set()
|
|
45
|
+
for ms_dir_name, ms_xds_isel in sel_parms.items():
|
|
46
|
+
xds = _open_dataset(
|
|
47
|
+
os.path.join(ps_store, ms_dir_name, "MAIN"),
|
|
48
|
+
ms_xds_isel,
|
|
49
|
+
data_variables,
|
|
50
|
+
load=True,
|
|
51
|
+
)
|
|
52
|
+
|
|
53
|
+
if load_sub_datasets:
|
|
54
|
+
from xradio.vis.read_processing_set import _read_sub_xds
|
|
55
|
+
|
|
56
|
+
xds.attrs = {
|
|
57
|
+
**xds.attrs,
|
|
58
|
+
**_read_sub_xds(os.path.join(ps_store, ms_dir_name), load=True),
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
ps[ms_dir_name] = xds
|
|
62
|
+
return ps
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
class processing_set_iterator:
|
|
66
|
+
|
|
67
|
+
def __init__(
|
|
68
|
+
self,
|
|
69
|
+
sel_parms: dict,
|
|
70
|
+
input_data_store: str,
|
|
71
|
+
input_data: Union[Dict, processing_set, None] = None,
|
|
72
|
+
data_variables: list = None,
|
|
73
|
+
load_sub_datasets: bool = True,
|
|
74
|
+
):
|
|
75
|
+
"""An iterator that will go through a processing set one MS v4 at a time.
|
|
76
|
+
|
|
77
|
+
Parameters
|
|
78
|
+
----------
|
|
79
|
+
sel_parms : dict
|
|
80
|
+
A dictionary where the keys are the names of the ms_xds's and the values are slice_dicts.
|
|
81
|
+
slice_dicts: A dictionary where the keys are the dimension names and the values are slices.
|
|
82
|
+
For example::
|
|
83
|
+
|
|
84
|
+
{
|
|
85
|
+
'ms_v4_name_1': {'frequency': slice(0, 160, None),'time':slice(0,100)},
|
|
86
|
+
...
|
|
87
|
+
'ms_v4_name_n': {'frequency': slice(0, 160, None),'time':slice(0,100)},
|
|
88
|
+
}
|
|
89
|
+
input_data_store : str
|
|
90
|
+
String of the path and name of the processing set. For example '/users/user_1/uid___A002_Xf07bba_Xbe5c_target.lsrk.vis.zarr'.
|
|
91
|
+
input_data : Union[Dict, processing_set, None], optional
|
|
92
|
+
If the processing set is in memory already it can be supplied here. By default None which will make the iterator load data using the supplied input_data_store.
|
|
93
|
+
data_variables : list, optional
|
|
94
|
+
The list of data variables to load into memory for example ['VISIBILITY', 'WEIGHT, 'FLAGS']. By default None which will load all data variables into memory.
|
|
95
|
+
load_sub_datasets : bool, optional
|
|
96
|
+
If true sub-datasets (for example weather_xds, antenna_xds, pointing_xds, ...) will be loaded into memory, by default True.
|
|
97
|
+
"""
|
|
98
|
+
|
|
99
|
+
self.input_data = input_data
|
|
100
|
+
self.input_data_store = input_data_store
|
|
101
|
+
self.sel_parms = sel_parms
|
|
102
|
+
self.xds_name_iter = iter(sel_parms.keys())
|
|
103
|
+
self.data_variables = data_variables
|
|
104
|
+
self.load_sub_datasets = load_sub_datasets
|
|
105
|
+
|
|
106
|
+
def __iter__(self):
|
|
107
|
+
return self
|
|
108
|
+
|
|
109
|
+
def __next__(self):
|
|
110
|
+
try:
|
|
111
|
+
xds_name = next(self.xds_name_iter)
|
|
112
|
+
except Exception as e:
|
|
113
|
+
raise StopIteration
|
|
114
|
+
|
|
115
|
+
if self.input_data is None:
|
|
116
|
+
slice_description = self.sel_parms[xds_name]
|
|
117
|
+
ps = load_processing_set(
|
|
118
|
+
ps_store=self.input_data_store,
|
|
119
|
+
sel_parms={xds_name: slice_description},
|
|
120
|
+
data_variables=self.data_variables,
|
|
121
|
+
load_sub_datasets=self.load_sub_datasets,
|
|
122
|
+
)
|
|
123
|
+
xds = ps.get(0)
|
|
124
|
+
else:
|
|
125
|
+
xds = self.input_data[xds_name] # In memory
|
|
126
|
+
|
|
127
|
+
return xds
|
|
@@ -0,0 +1,85 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import xarray as xr
|
|
3
|
+
from ._processing_set import processing_set
|
|
4
|
+
import graphviper.utils.logger as logger
|
|
5
|
+
from xradio._utils.zarr.common import _open_dataset
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
def read_processing_set(
|
|
9
|
+
ps_store: str, intents: list = None, fields: str = None
|
|
10
|
+
)->processing_set:
|
|
11
|
+
"""Creates a lazy representation of a Processing Set (only meta-data is loaded into memory).
|
|
12
|
+
|
|
13
|
+
Parameters
|
|
14
|
+
----------
|
|
15
|
+
ps_store : str
|
|
16
|
+
String of the path and name of the processing set. For example '/users/user_1/uid___A002_Xf07bba_Xbe5c_target.lsrk.vis.zarr'.
|
|
17
|
+
intents : list, optional
|
|
18
|
+
A list of the intents to be read for example ['OBSERVE_TARGET#ON_SOURCE']. The intents in a processing set can be seem by calling processing_set.summary().
|
|
19
|
+
By default None, which will read all intents.
|
|
20
|
+
fields : str, optional
|
|
21
|
+
The list of field names that will be read, by default None which will read all fields.
|
|
22
|
+
|
|
23
|
+
Returns
|
|
24
|
+
-------
|
|
25
|
+
processing_set
|
|
26
|
+
Lazy representation of processing set (data is represented by Dask.arrays).
|
|
27
|
+
"""
|
|
28
|
+
items = os.listdir(ps_store)
|
|
29
|
+
ms_xds = xr.Dataset()
|
|
30
|
+
ps = processing_set()
|
|
31
|
+
data_group = 'base'
|
|
32
|
+
for ms_dir_name in items:
|
|
33
|
+
if "ddi" in ms_dir_name:
|
|
34
|
+
xds = _open_dataset(os.path.join(ps_store, ms_dir_name, "MAIN"))
|
|
35
|
+
if (intents is None) or (xds.attrs["intent"] in intents):
|
|
36
|
+
data_name = _get_data_name(xds, data_group)
|
|
37
|
+
|
|
38
|
+
if (fields is None) or (
|
|
39
|
+
xds[data_name].attrs["field_info"]["name"] in fields
|
|
40
|
+
):
|
|
41
|
+
xds.attrs = {
|
|
42
|
+
**xds.attrs,
|
|
43
|
+
**_read_sub_xds(os.path.join(ps_store, ms_dir_name)),
|
|
44
|
+
}
|
|
45
|
+
ps[ms_dir_name] = xds
|
|
46
|
+
return ps
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
def _read_sub_xds(ms_store, load=False):
|
|
50
|
+
sub_xds_dict = {}
|
|
51
|
+
|
|
52
|
+
sub_xds = {
|
|
53
|
+
"antenna_xds": "ANTENNA",
|
|
54
|
+
}
|
|
55
|
+
for sub_xds_key, sub_xds_name in sub_xds.items():
|
|
56
|
+
sub_xds_dict[sub_xds_key] = _open_dataset(
|
|
57
|
+
os.path.join(ms_store, sub_xds_name), load=load
|
|
58
|
+
)
|
|
59
|
+
|
|
60
|
+
optional_sub_xds = {
|
|
61
|
+
"weather_xds": "WEATHER",
|
|
62
|
+
"pointing_xds": "POINTING",
|
|
63
|
+
}
|
|
64
|
+
for sub_xds_key, sub_xds_name in optional_sub_xds.items():
|
|
65
|
+
sub_xds_path = os.path.join(ms_store, sub_xds_name)
|
|
66
|
+
if os.path.isdir(sub_xds_path):
|
|
67
|
+
sub_xds_dict[sub_xds_key] = _open_dataset(sub_xds_path, load=load)
|
|
68
|
+
|
|
69
|
+
return sub_xds_dict
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
def _get_data_name(xds, data_group):
|
|
73
|
+
if "visibility" in xds.attrs["data_groups"][data_group]:
|
|
74
|
+
data_name = xds.attrs["data_groups"][data_group]["visibility"]
|
|
75
|
+
elif "spectrum" in xds.attrs["data_groups"][data_group]:
|
|
76
|
+
data_name = xds.attrs["data_groups"][data_group]["spectrum"]
|
|
77
|
+
else:
|
|
78
|
+
error_message = (
|
|
79
|
+
"No Visibility or Spectrum data variable found in data_group "
|
|
80
|
+
+ data_group
|
|
81
|
+
+ "."
|
|
82
|
+
)
|
|
83
|
+
logger.exception(error_message)
|
|
84
|
+
raise ValueError(error_message)
|
|
85
|
+
return data_name
|
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
Metadata-Version: 2.1
|
|
2
2
|
Name: xradio
|
|
3
|
-
Version: 0.0.
|
|
4
|
-
Summary: Xarray Radio Astronomy Data IO
|
|
3
|
+
Version: 0.0.25
|
|
4
|
+
Summary: Xarray Radio Astronomy Data IO
|
|
5
5
|
Author-email: Jan-Willem Steeb <jsteeb@nrao.edu>
|
|
6
6
|
License: BSD 3-Clause License
|
|
7
7
|
|
|
@@ -32,7 +32,7 @@ License: BSD 3-Clause License
|
|
|
32
32
|
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|
33
33
|
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
34
34
|
|
|
35
|
-
Requires-Python: <3.12,>=3.
|
|
35
|
+
Requires-Python: <3.12,>=3.9
|
|
36
36
|
Description-Content-Type: text/markdown
|
|
37
37
|
License-File: LICENSE.txt
|
|
38
38
|
Requires-Dist: astropy
|
|
@@ -1,84 +0,0 @@
|
|
|
1
|
-
import pandas as pd
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
class processing_set(dict):
|
|
5
|
-
def __init__(self, *args, **kwargs):
|
|
6
|
-
super().__init__(*args, **kwargs)
|
|
7
|
-
self.meta = {'summary':{}}
|
|
8
|
-
# generate_meta(self)
|
|
9
|
-
|
|
10
|
-
# def generate_meta(self):
|
|
11
|
-
# self.meta['summary'] = {"base": _summary(self)}
|
|
12
|
-
# self.meta['max_dims'] = _get_ps_max_dims(self)
|
|
13
|
-
|
|
14
|
-
def summary(self, data_group="base"):
|
|
15
|
-
if data_group in self.meta['summary']:
|
|
16
|
-
return self.meta['summary'][data_group]
|
|
17
|
-
else:
|
|
18
|
-
self.meta['summary'][data_group] = self._summary(data_group)
|
|
19
|
-
return self.meta['summary'][data_group]
|
|
20
|
-
|
|
21
|
-
def get_ps_max_dims(self):
|
|
22
|
-
if 'max_dims' in self.meta:
|
|
23
|
-
return self.meta['max_dims']
|
|
24
|
-
else:
|
|
25
|
-
self.meta['max_dims'] = self._get_ps_max_dims()
|
|
26
|
-
return self.meta['max_dims']
|
|
27
|
-
|
|
28
|
-
def _summary(self, data_group="base"):
|
|
29
|
-
summary_data = {
|
|
30
|
-
"name": [],
|
|
31
|
-
"ddi": [],
|
|
32
|
-
"intent": [],
|
|
33
|
-
"field_id": [],
|
|
34
|
-
"field_name": [],
|
|
35
|
-
"start_frequency": [],
|
|
36
|
-
"end_frequency": [],
|
|
37
|
-
"shape": []
|
|
38
|
-
}
|
|
39
|
-
for key, value in self.items():
|
|
40
|
-
summary_data["name"].append(key)
|
|
41
|
-
summary_data["ddi"].append(value.attrs["ddi"])
|
|
42
|
-
summary_data["intent"].append(value.attrs["intent"])
|
|
43
|
-
|
|
44
|
-
if "visibility" in value.attrs["data_groups"][data_group]:
|
|
45
|
-
data_name = value.attrs["data_groups"][data_group]["visibility"]
|
|
46
|
-
|
|
47
|
-
if "spectrum" in value.attrs["data_groups"][data_group]:
|
|
48
|
-
data_name = value.attrs["data_groups"][data_group]["spectrum"]
|
|
49
|
-
|
|
50
|
-
summary_data["shape"].append(
|
|
51
|
-
value[data_name].shape
|
|
52
|
-
)
|
|
53
|
-
|
|
54
|
-
summary_data["field_id"].append(
|
|
55
|
-
value[data_name].attrs[
|
|
56
|
-
"field_info"
|
|
57
|
-
]["field_id"]
|
|
58
|
-
)
|
|
59
|
-
summary_data["field_name"].append(
|
|
60
|
-
value[data_name].attrs[
|
|
61
|
-
"field_info"
|
|
62
|
-
]["name"]
|
|
63
|
-
)
|
|
64
|
-
summary_data["start_frequency"].append(value["frequency"].values[0])
|
|
65
|
-
summary_data["end_frequency"].append(value["frequency"].values[-1])
|
|
66
|
-
summary_df = pd.DataFrame(summary_data)
|
|
67
|
-
return summary_df
|
|
68
|
-
|
|
69
|
-
def _get_ps_max_dims(self):
|
|
70
|
-
max_dims = None
|
|
71
|
-
for ms_xds in self.values():
|
|
72
|
-
if max_dims is None:
|
|
73
|
-
max_dims = dict(ms_xds.sizes)
|
|
74
|
-
else:
|
|
75
|
-
for dim_name, size in ms_xds.sizes.items():
|
|
76
|
-
if dim_name in max_dims:
|
|
77
|
-
if max_dims[dim_name] < size:
|
|
78
|
-
max_dims[dim_name] = size
|
|
79
|
-
else:
|
|
80
|
-
max_dims[dim_name] = size
|
|
81
|
-
return max_dims
|
|
82
|
-
|
|
83
|
-
def get(self, id):
|
|
84
|
-
return self[list(self.keys())[id]]
|
|
@@ -1,127 +0,0 @@
|
|
|
1
|
-
import xarray as xr
|
|
2
|
-
import zarr
|
|
3
|
-
import copy
|
|
4
|
-
import os
|
|
5
|
-
from ._processing_set import processing_set
|
|
6
|
-
from .._utils.zarr.common import _load_no_dask_zarr
|
|
7
|
-
|
|
8
|
-
# from xradio._utils._logger import _get_logger
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
def _load_ms_xds(
    ps_name, ms_xds_name, slice_dict=None, cache_dir=None, chunk_id=None, date_time=""
):
    """Load one ms_xds chunk into memory, optionally through an on-disk cache.

    Parameters
    ----------
    ps_name : str
        Path of the processing set on disk.
    ms_xds_name : str
        Name of the ms_xds within the processing set.
    slice_dict : dict, optional
        Maps dimension names to slices; selects the chunk to load.
    cache_dir : str, optional
        When given, look the chunk up in this cache first and write it back
        on a miss.
    chunk_id, date_time :
        Only used to build the cache entry name.

    Returns
    -------
    tuple
        (ms_xds, found_in_cache) where found_in_cache is True/False when a
        cache_dir was given and None otherwise.
    """
    # logger = _get_logger()
    if slice_dict is None:  # avoid a shared mutable default argument
        slice_dict = {}

    if not cache_dir:
        ms_xds = _load_ms_xds_core(
            ms_xds_name=os.path.join(ps_name, ms_xds_name), slice_dict=slice_dict
        )
        return ms_xds, None

    xds_cached_name = (
        os.path.join(cache_dir, ms_xds_name) + "_" + str(chunk_id) + "_" + date_time
    )
    try:
        # Cache hit: the chunk was written by a previous call.
        ms_xds = _load_ms_xds_core(ms_xds_name=xds_cached_name, slice_dict=slice_dict)
        # Fix: original returned the undefined name `xds` here (NameError on hit).
        return ms_xds, True
    except Exception:
        # Cache miss (or unreadable entry): load from the processing set and
        # populate the cache. Broad catch is deliberate best-effort caching,
        # but narrowed from the original bare `except:`.
        ms_xds = _load_ms_xds_core(
            ms_xds_name=os.path.join(ps_name, ms_xds_name), slice_dict=slice_dict
        )
        # Fix: original called the undefined name `write_ms_xds`.
        _write_ms_xds(ms_xds, xds_cached_name)
        return ms_xds, False
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
def _write_ms_xds(ms_xds, ms_xds_name):
    """Persist an ms_xds to zarr at *ms_xds_name* as ANTENNA and MAIN groups.

    Counterpart of _load_ms_xds_core; used to populate the chunk cache.
    """
    # Fix: original wrote to the undefined name `xds_cached_name` instead of
    # the `ms_xds_name` parameter, so this function always raised NameError.
    # NOTE(review): _load_ms_xds_core stores the antenna dataset under the
    # attr key "antenna_xds"; the original read attrs["ANTENNA"], which is
    # never set on a loaded xds — confirm against the writer that produced it.
    xr.Dataset.to_zarr(
        ms_xds.attrs["antenna_xds"],
        os.path.join(ms_xds_name, "ANTENNA"),
        consolidated=True,
    )
    # Work on a copy with its own attrs dict so blanking the antenna attr
    # does not mutate the caller's dataset (original aliased ms_xds_temp to
    # ms_xds and clobbered its attrs).
    ms_xds_temp = ms_xds.copy()
    ms_xds_temp.attrs = dict(ms_xds.attrs)
    ms_xds_temp.attrs["antenna_xds"] = {}
    xr.Dataset.to_zarr(
        ms_xds_temp, os.path.join(ms_xds_name, "MAIN"), consolidated=True
    )
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
def _load_ms_xds_core(ms_xds_name, slice_dict):
    """Read one ms_xds from zarr without dask, attaching sub-datasets as attrs.

    Parameters
    ----------
    ms_xds_name : str
        Path to the ms_xds zarr store (containing MAIN, ANTENNA, ...).
    slice_dict : dict
        Maps dimension names to slices applied when loading MAIN.

    Returns
    -------
    xarray.Dataset
        The MAIN dataset with "antenna_xds" (and, when present on disk,
        "weather_xds"/"pointing_xds") attached in .attrs.
    """
    ms_xds = _load_no_dask_zarr(
        zarr_name=os.path.join(ms_xds_name, "MAIN"), slice_dict=slice_dict
    )

    # Required sub-datasets, always expected next to MAIN.
    # Fix: original also loaded ANTENNA a second, redundant time before this loop.
    sub_xds = {
        "antenna_xds": "ANTENNA",
    }
    for sub_xds_key, sub_xds_name in sub_xds.items():
        ms_xds.attrs[sub_xds_key] = _load_no_dask_zarr(
            zarr_name=os.path.join(ms_xds_name, sub_xds_name)
        )

    # Optional sub-datasets, loaded only when the zarr group exists on disk.
    # Fix: original iterated sub_xds.items() here, so WEATHER and POINTING
    # were never loaded.
    optional_sub_xds = {
        "weather_xds": "WEATHER",
        "pointing_xds": "POINTING",
    }
    for sub_xds_key, sub_xds_name in optional_sub_xds.items():
        sub_xds_path = os.path.join(ms_xds_name, sub_xds_name)
        if os.path.isdir(sub_xds_path):
            ms_xds.attrs[sub_xds_key] = _load_no_dask_zarr(zarr_name=sub_xds_path)

    return ms_xds
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
def load_processing_set(ps_name, sel_parms):
    """Load selected ms_xds chunks of a stored processing set into memory.

    Parameters
    ----------
    ps_name : str
        Path of the processing set on disk.
    sel_parms : dict
        Maps each ms_xds name to a slice_dict, where a slice_dict maps
        dimension names to slices.

    Returns
    -------
    processing_set
        The loaded ms_xds's keyed by name.
    """
    ps = processing_set()
    for xds_name, xds_slice_dict in sel_parms.items():
        ms_xds, _found_in_cache = _load_ms_xds(ps_name, xds_name, xds_slice_dict)
        ps[xds_name] = ms_xds
    return ps
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
class processing_set_iterator:
    """Iterate over the ms_xds chunks named in a data selection.

    Chunks are either looked up in an in-memory mapping (*input_data*) or
    loaded on demand from the on-disk store (*input_data_store*).
    """

    def __init__(self, data_selection, input_data_store, input_data=None):
        """
        Parameters
        ----------
        data_selection : dict
            Maps ms_xds names to slice descriptions.
        input_data_store : str
            Path of the processing set on disk (used when input_data is None).
        input_data : dict, optional
            Already-loaded ms_xds's keyed by name; when given, no disk access
            is performed.
        """
        self.input_data = input_data
        self.input_data_store = input_data_store
        self.data_selection = data_selection
        self.xds_name_iter = iter(data_selection.keys())

    def __iter__(self):
        return self

    def __next__(self):
        # next() raises StopIteration itself when exhausted. The original
        # wrapped this in `except Exception: raise StopIteration`, which
        # silently ended iteration on any unrelated error.
        xds_name = next(self.xds_name_iter)

        if self.input_data is None:
            slice_description = self.data_selection[xds_name]
            ps = load_processing_set(
                ps_name=self.input_data_store,
                sel_parms={xds_name: slice_description},
            )
            xds = ps.get(0)
        else:
            xds = self.input_data[xds_name]  # In memory

        return xds
|
|
@@ -1,43 +0,0 @@
|
|
|
1
|
-
import os
|
|
2
|
-
|
|
3
|
-
import xarray as xr
|
|
4
|
-
|
|
5
|
-
from ._processing_set import processing_set
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
def read_processing_set(ps_name, intents=None, data_group="base", fields=None):
    """Lazily open the ms_xds's of an on-disk processing set (zarr-backed).

    Parameters
    ----------
    ps_name : str
        Path of the processing set on disk.
    intents : container of str, optional
        Keep only ms_xds's whose "intent" attr is in this collection.
    data_group : str
        Name of the data group used to resolve the main data variable.
    fields : container of str, optional
        Keep only ms_xds's whose field name is in this collection.

    Returns
    -------
    processing_set
        Lazily-opened ms_xds's keyed by their directory name.
    """
    # Fix: original created an unused `ms_xds = xr.Dataset()`.
    ps = processing_set()
    for item in os.listdir(ps_name):
        # Only ms_xds directories carry "ddi" in their name.
        if "ddi" not in item:
            continue
        xds = xr.open_zarr(os.path.join(ps_name, item, "MAIN"))

        if (intents is not None) and (xds.attrs["intent"] not in intents):
            continue

        # Resolve the main data variable: visibility or spectrum.
        # NOTE(review): assumes every data group has at least one of the two;
        # data_name is unbound otherwise (same as the original).
        group = xds.attrs["data_groups"][data_group]
        if "visibility" in group:
            data_name = group["visibility"]
        if "spectrum" in group:
            data_name = group["spectrum"]

        if (fields is None) or (xds[data_name].attrs["field_info"]["name"] in fields):
            ps[item] = xds

            # Required sub-datasets, always expected next to MAIN.
            sub_xds = {
                "antenna_xds": "ANTENNA",
            }
            for sub_xds_key, sub_xds_name in sub_xds.items():
                ps[item].attrs[sub_xds_key] = xr.open_zarr(
                    os.path.join(ps_name, item, sub_xds_name)
                )

            # Optional sub-datasets, opened only when present on disk.
            optional_sub_xds = {
                "weather_xds": "WEATHER",
                "pointing_xds": "POINTING",
            }
            for sub_xds_key, sub_xds_name in optional_sub_xds.items():
                sub_xds_path = os.path.join(ps_name, item, sub_xds_name)
                if os.path.isdir(sub_xds_path):
                    ps[item].attrs[sub_xds_key] = xr.open_zarr(sub_xds_path)

    return ps
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|