xradio 0.0.24__py3-none-any.whl → 0.0.26__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- xradio/_utils/zarr/common.py +7 -2
- xradio/image/_util/_casacore/xds_from_casacore.py +1 -1
- xradio/image/_util/_casacore/xds_to_casacore.py +6 -6
- xradio/image/_util/_fits/xds_from_fits.py +1 -1
- xradio/image/_util/_zarr/zarr_low_level.py +39 -3
- xradio/image/_util/casacore.py +2 -2
- xradio/image/_util/common.py +4 -0
- xradio/vis/_processing_set.py +9 -0
- xradio/vis/_vis_utils/_ms/_tables/read_main_table.py +2 -4
- xradio/vis/load_processing_set.py +63 -20
- xradio/vis/read_processing_set.py +82 -17
- {xradio-0.0.24.dist-info → xradio-0.0.26.dist-info}/METADATA +4 -3
- {xradio-0.0.24.dist-info → xradio-0.0.26.dist-info}/RECORD +16 -16
- {xradio-0.0.24.dist-info → xradio-0.0.26.dist-info}/LICENSE.txt +0 -0
- {xradio-0.0.24.dist-info → xradio-0.0.26.dist-info}/WHEEL +0 -0
- {xradio-0.0.24.dist-info → xradio-0.0.26.dist-info}/top_level.txt +0 -0
xradio/_utils/zarr/common.py
CHANGED
@@ -1,9 +1,10 @@
 import copy
 import xarray as xr
 import zarr
+import s3fs


-def _open_dataset(store, xds_isel=None, data_variables=None, load=False):
+def _open_dataset(store, xds_isel=None, data_variables=None, load=False, **kwargs):
     """

     Parameters
@@ -25,7 +26,11 @@ def _open_dataset(store, xds_isel=None, data_variables=None, load=False):

     import dask

-
+    if "s3" in kwargs.keys():
+        mapping = s3fs.S3Map(root=store, s3=kwargs["s3"], check=False)
+        xds = xr.open_zarr(store=mapping)
+    else:
+        xds = xr.open_zarr(store)

     if xds_isel is not None:
         xds = xds.isel(xds_isel)
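The new `s3` keyword lets `_open_dataset` read a zarr store through an `s3fs` file system instead of the local disk. Below is a minimal sketch of that same pattern outside xradio, assuming a hypothetical public bucket path; `s3fs.S3FileSystem`, `s3fs.S3Map`, and `xarray.open_zarr` are the only real APIs relied on here.

```python
import s3fs
import xarray as xr

# Anonymous access works for public, read-only buckets; the path below is a placeholder.
s3 = s3fs.S3FileSystem(anon=True)

# S3Map presents the bucket prefix as a key/value mapping that xarray's zarr backend can read.
mapping = s3fs.S3Map(
    root="s3://example-bucket/example.vis.zarr/msv4_000/MAIN", s3=s3, check=False
)
xds = xr.open_zarr(store=mapping)  # lazy open; data stays in dask arrays until computed
```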
xradio/image/_util/_casacore/xds_from_casacore.py
CHANGED

@@ -106,7 +106,7 @@ def _add_sky_or_aperture(
     unit = casa_image.unit()
     xda.attrs[_image_type] = image_type
     xda.attrs["units"] = unit
-    name = "
+    name = "SKY" if has_sph_dims else "APERTURE"
     xda = xda.rename(name)
     xds[xda.name] = xda
     return xds
xradio/image/_util/_casacore/xds_to_casacore.py
CHANGED

@@ -8,7 +8,7 @@ from astropy.coordinates import Angle
 from casacore import tables

 from .common import _active_mask, _create_new_image, _object_name, _pointing_center
-from ..common import _compute_sky_reference_pixel, _doppler_types
+from ..common import _aperture_or_sky, _compute_sky_reference_pixel, _doppler_types
 from ...._utils._casacore.tables import open_table_rw


@@ -168,7 +168,7 @@ def _history_from_xds(xds: xr.Dataset, image: str) -> None:

 def _imageinfo_dict_from_xds(xds: xr.Dataset) -> dict:
     ii = {}
-    ap_sky =
+    ap_sky = _aperture_or_sky(xds)
     ii["image_type"] = (
         xds[ap_sky].attrs["image_type"] if "image_type" in xds[ap_sky].attrs else ""
     )
@@ -208,12 +208,12 @@ def _imageinfo_dict_from_xds(xds: xr.Dataset) -> dict:


 def _write_casa_data(xds: xr.Dataset, image_full_path: str) -> None:
-    sky_ap =
+    sky_ap = _aperture_or_sky(xds)
     if xds[sky_ap].shape[0] != 1:
         raise RuntimeError("XDS can only be converted if it has exactly one time plane")
     trans_coords = (
         ("frequency", "polarization", "m", "l")
-        if sky_ap == "
+        if sky_ap == "SKY"
         else ("frequency", "polarization", "v", "u")
     )
     casa_image_shape = xds[sky_ap].isel(time=0).transpose(*trans_coords).shape[::-1]
@@ -312,7 +312,7 @@ def _write_initial_image(
 ) -> None:
     if not maskname:
         maskname = ""
-    for dv in ["
+    for dv in ["SKY", "APERTURE"]:
         if dv in xds.data_vars:
             value = xds[dv][0, 0, 0, 0, 0].values.item()
             if xds[dv][0, 0, 0, 0, 0].values.dtype == "float32":
@@ -352,7 +352,7 @@ def _write_pixels(
     value: xr.DataArray = None,
 ) -> None:
     flip = False
-    if v == "
+    if v == "SKY" or v == "APERTURE":
         filename = image_full_path
     else:
         # mask
xradio/image/_util/_fits/xds_from_fits.py
CHANGED

@@ -685,7 +685,7 @@ def _add_sky_or_aperture(
     unit = helpers["bunit"]
     xda.attrs[_image_type] = image_type
     xda.attrs["units"] = unit
-    name = "
+    name = "SKY" if has_sph_dims else "APERTURE"
     xda = xda.rename(name)
     xds[xda.name] = xda
     if helpers["has_mask"]:
xradio/image/_util/_zarr/zarr_low_level.py
CHANGED

@@ -109,19 +109,55 @@ def write_binary_blob_to_disk(arr, file_path, compressor):
     Returns:
     - None
     """
+    import graphviper.utils.logger as logger
     # Encode the NumPy array using the codec
+    logger.debug('1. Before compressor ' + file_path)
     compressed_arr = compressor.encode(np.ascontiguousarray(arr))

+    logger.debug('2. Before makedir')
     # Ensure the directory exists before saving the file
     os.makedirs(os.path.dirname(file_path), exist_ok=True)

-
-
-
+    arr_len = len(compressed_arr)
+    logger.debug('3. Before write the len is: ' + str(arr_len))
+    #Save the compressed array to disk
+    # with open(file_path, "wb") as file:
+    #     file.write(compressed_arr)
+
+    logger.debug('4. Using new writer: ' + str(arr_len))
+    write_to_lustre_chunked(file_path, compressed_arr)
+
+    # /.lustre/aoc/sciops/pford/CHILES/cube_image/uid___A002_Xee7674_X2844_Cube_3.img.zarr/SKY/0.0.110.0.0
+    # 348192501 bytes
+    # 332.0622453689575 M
+
+    # from io import BufferedWriter
+    # # Calculate buffer size based on compressed_arr size (adjust multiplier)
+    # buffer_size = min(len(compressed_arr), 1024 * 1024 * 4)  # Max 4 MB buffer
+    # with BufferedWriter(open(file_path, "wb"), buffer_size) as f:
+    #     f.write(compressed_arr)
+    #     f.flush()  # Ensure data gets written to disk
+
+
+    logger.debug('4. Write completed')

     # print(f"Compressed array saved to {file_path}")


+def write_to_lustre_chunked(file_path, compressed_arr, chunk_size=1024 * 1024 * 128):  # 128 MiB chunks
+    """
+    Writes compressed data to a Lustre file path with chunking.
+
+    Args:
+        file_path: Path to the file for writing.
+        compressed_arr: Compressed data array to write.
+        chunk_size: Size of each data chunk in bytes (default: 128 MiB).
+    """
+    with open(file_path, "wb") as f:
+        for i in range(0, len(compressed_arr), chunk_size):
+            chunk = compressed_arr[i:i + chunk_size]
+            f.write(chunk)
+
 def read_binary_blob_from_disk(file_path, compressor, dtype=np.float64):
     """
     Read a compressed binary blob from disk and decode it using Blosc.
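The new `write_to_lustre_chunked` helper simply slices the compressed byte string and writes it in fixed-size pieces, avoiding one very large write call (the commented-out 348 MB SKY chunk is the motivating case). A small round-trip sketch of the same idea, using a numcodecs Blosc compressor and a placeholder file name:

```python
import numpy as np
from numcodecs import Blosc

compressor = Blosc(cname="zstd", clevel=2, shuffle=Blosc.SHUFFLE)
arr = np.arange(1_000_000, dtype=np.float64)
compressed = compressor.encode(np.ascontiguousarray(arr))

# Write the compressed blob in 128 MiB slices instead of one large write call.
chunk_size = 1024 * 1024 * 128
with open("blob.bin", "wb") as f:
    for i in range(0, len(compressed), chunk_size):
        f.write(compressed[i:i + chunk_size])

# Read back and decode to verify the round trip.
with open("blob.bin", "rb") as f:
    restored = np.frombuffer(compressor.decode(f.read()), dtype=np.float64)
assert np.array_equal(arr, restored)
```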
xradio/image/_util/casacore.py
CHANGED
@@ -30,7 +30,7 @@ from ._casacore.xds_to_casacore import (
     _imageinfo_dict_from_xds,
     _write_casa_data,
 )
-from .common import _get_xds_dim_order, _dask_arrayize_dv
+from .common import _aperture_or_sky, _get_xds_dim_order, _dask_arrayize_dv

 warnings.filterwarnings("ignore", category=FutureWarning)

@@ -104,7 +104,7 @@ def _xds_to_casa_image(xds: xr.Dataset, imagename: str) -> None:
     image_full_path = os.path.expanduser(imagename)
     _write_casa_data(xds, image_full_path)
     # create coordinates
-    ap_sky =
+    ap_sky = _aperture_or_sky(xds)
     coord = _coord_dict_from_xds(xds)
     ii = _imageinfo_dict_from_xds(xds)
     units = xds[ap_sky].attrs["units"] if "units" in xds[ap_sky].attrs else None
xradio/image/_util/common.py
CHANGED
@@ -13,6 +13,10 @@ _doppler_types = ["RADIO", "Z", "RATIO", "BETA", "GAMMA"]
 _image_type = "image_type"


+def _aperture_or_sky(xds: xr.Dataset) -> str:
+    return "SKY" if "SKY" in xds.data_vars or "l" in xds.coords else "APERTURE"
+
+
 def _get_xds_dim_order(has_sph: bool) -> list:
     dimorder = ["time", "polarization", "frequency"]
     dir_lin = ["l", "m"] if has_sph else ["u", "v"]
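The new `_aperture_or_sky` helper picks the main data variable name from the dataset contents: anything with a `SKY` variable or an `l` sky coordinate is treated as a sky image, everything else as an aperture image. A tiny illustration of the same expression on toy datasets (the shapes are placeholders, not xradio image schemas):

```python
import numpy as np
import xarray as xr

sky_xds = xr.Dataset({"SKY": (("l", "m"), np.zeros((4, 4)))})
ap_xds = xr.Dataset({"APERTURE": (("u", "v"), np.zeros((4, 4)))})

# Mirrors the helper's rule: SKY data variable or an "l" coordinate means a sky image.
for xds in (sky_xds, ap_xds):
    name = "SKY" if "SKY" in xds.data_vars or "l" in xds.coords else "APERTURE"
    print(name)  # prints SKY, then APERTURE
```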
xradio/vis/_processing_set.py
CHANGED
@@ -43,7 +43,10 @@ class processing_set(dict):
             "start_frequency": [],
             "end_frequency": [],
             "shape": [],
+            "field_coords": []
         }
+        from astropy.coordinates import SkyCoord
+        import astropy.units as u
         for key, value in self.items():
             summary_data["name"].append(key)
             summary_data["ddi"].append(value.attrs["ddi"])
@@ -65,6 +68,12 @@ class processing_set(dict):
             )
             summary_data["start_frequency"].append(value["frequency"].values[0])
             summary_data["end_frequency"].append(value["frequency"].values[-1])
+
+            ra_dec_rad = value[data_name].attrs["field_info"]['phase_direction']['data']
+            frame = value[data_name].attrs["field_info"]['phase_direction']['attrs']['frame'].lower()
+            coord = SkyCoord(ra=ra_dec_rad[0]*u.rad, dec=ra_dec_rad[1]*u.rad, frame=frame)
+
+            summary_data["field_coords"].append([frame, coord.ra.to_string(unit=u.hour), coord.dec.to_string(unit=u.deg)])
         summary_df = pd.DataFrame(summary_data)
         return summary_df
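The summary table now also reports each field's phase direction, converting the stored radian pair into hour-angle and degree strings via astropy. The same conversion in isolation, with made-up coordinate values:

```python
import astropy.units as u
from astropy.coordinates import SkyCoord

# Placeholder phase direction in radians and frame name, as stored in field_info.
ra_dec_rad = [1.2921, -0.8311]
frame = "ICRS".lower()

coord = SkyCoord(ra=ra_dec_rad[0] * u.rad, dec=ra_dec_rad[1] * u.rad, frame=frame)
field_coords = [frame, coord.ra.to_string(unit=u.hour), coord.dec.to_string(unit=u.deg)]
print(field_coords)  # ['icrs', <RA as an hour string>, <Dec as a degree string>]
```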
xradio/vis/_vis_utils/_ms/_tables/read_main_table.py
CHANGED

@@ -195,10 +195,8 @@ def read_main_table_chunks(
     n_baseline_chunks = chunks[1]
     # loop over time chunks
     for time_chunk in range(0, n_unique_times, n_time_chunks):
-        time_start =
-        time_end = (
-            unique_times[min(n_unique_times, time_chunk + n_time_chunks) - 1] + tol
-        )
+        time_start = unique_times[time_chunk] - tol
+        time_end = unique_times[min(n_unique_times, time_chunk + n_time_chunks) - 1] + tol

         # chunk time length
         ctlen = min(n_unique_times, time_chunk + n_time_chunks) - time_chunk
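The fix restores the start-of-chunk bound: each chunk now spans from the first unique time in the chunk minus `tol` to the last unique time in the chunk plus `tol`. A small worked sketch of how those bounds fall out; the times, chunk size, and tolerance are invented, and `n_time_chunks` keeps its meaning from the code above (number of unique times per chunk):

```python
import numpy as np

unique_times = np.array([10.0, 20.0, 30.0, 40.0, 50.0])
n_unique_times = len(unique_times)
n_time_chunks = 2  # unique times per chunk
tol = 0.5

for time_chunk in range(0, n_unique_times, n_time_chunks):
    time_start = unique_times[time_chunk] - tol
    time_end = unique_times[min(n_unique_times, time_chunk + n_time_chunks) - 1] + tol
    ctlen = min(n_unique_times, time_chunk + n_time_chunks) - time_chunk
    print(time_chunk, time_start, time_end, ctlen)
# 0  9.5 20.5 2
# 2 29.5 40.5 2
# 4 49.5 50.5 1
```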
xradio/vis/load_processing_set.py
CHANGED

@@ -11,13 +11,13 @@ def load_processing_set(
     sel_parms: dict,
     data_variables: Union[list, None] = None,
     load_sub_datasets: bool = True,
-)->processing_set:
+) -> processing_set:
     """Loads a processing set into memory.

     Parameters
     ----------
     ps_store : str
-        String of the path and name of the processing set. For example '/users/user_1/uid___A002_Xf07bba_Xbe5c_target.lsrk.vis.zarr'.
+        String of the path and name of the processing set. For example '/users/user_1/uid___A002_Xf07bba_Xbe5c_target.lsrk.vis.zarr' for a file stored on a local file system, or 's3://viper-test-data/Antennae_North.cal.lsrk.split.vis.zarr/' for a file in AWS object storage.
     sel_parms : dict
         A dictionary where the keys are the names of the ms_xds's and the values are slice_dicts.
         slice_dicts: A dictionary where the keys are the dimension names and the values are slices.
@@ -37,26 +37,69 @@ def load_processing_set(
     Returns
     -------
     processing_set
-        In memory representation of processing set (data is represented by Dask.arrays).
-    """
+        In memory representation of processing set (data is represented by Dask.arrays).
+    """
     from xradio._utils.zarr.common import _open_dataset
+    import s3fs
+    from botocore.exceptions import NoCredentialsError

+    s3 = None
     ps = processing_set()
     for ms_dir_name, ms_xds_isel in sel_parms.items():
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+        # before the _open_dataset call, check if dealing with an S3 bucket URL
+        if ps_store.startswith("s3"):
+            if not ps_store.endswith("/"):
+                # just for consistency, as there is no os.path equivalent in s3fs
+                ps_store = ps_store + "/"
+
+            try:
+                s3 = s3fs.S3FileSystem(anon=False, requester_pays=False)
+
+                main_xds = ps_store + ms_dir_name + "/MAIN"
+                xds = _open_dataset(
+                    main_xds, ms_xds_isel, data_variables, load=True, s3=s3
+                )
+
+                if load_sub_datasets:
+                    from xradio.vis.read_processing_set import _read_sub_xds
+
+                    xds.attrs = {
+                        **xds.attrs,
+                        **_read_sub_xds(
+                            os.path.join(ps_store, ms_dir_name), load=True, s3=s3
+                        ),
+                    }
+
+            except (NoCredentialsError, PermissionError) as e:
+                # only public, read-only buckets will be accessible
+                s3 = s3fs.S3FileSystem(anon=True)
+
+                main_xds = ps_store + ms_dir_name + "/MAIN"
+                xds = _open_dataset(
+                    main_xds, ms_xds_isel, data_variables, load=True, s3=s3
+                )
+
+                if load_sub_datasets:
+                    from xradio.vis.read_processing_set import _read_sub_xds
+
+                    xds.attrs = {
+                        **xds.attrs,
+                        **_read_sub_xds(
+                            os.path.join(ps_store, ms_dir_name), load=True, s3=s3
+                        ),
+                    }
+        else:
+            # fall back to the default case of assuming the files are on local disk
+            main_xds = os.path.join(ps_store, ms_dir_name, "MAIN")
+            xds = _open_dataset(main_xds, ms_xds_isel, data_variables, load=True)
+            if load_sub_datasets:
+                from xradio.vis.read_processing_set import _read_sub_xds
+
+                xds.attrs = {
+                    **xds.attrs,
+                    **_read_sub_xds(os.path.join(ps_store, ms_dir_name), load=True),
+                }

         ps[ms_dir_name] = xds
     return ps
@@ -94,7 +137,7 @@ class processing_set_iterator:
             The list of data variables to load into memory for example ['VISIBILITY', 'WEIGHT, 'FLAGS']. By default None which will load all data variables into memory.
         load_sub_datasets : bool, optional
             If true sub-datasets (for example weather_xds, antenna_xds, pointing_xds, ...) will be loaded into memory, by default True.
-        """
+        """

         self.input_data = input_data
         self.input_data_store = input_data_store
@@ -124,4 +167,4 @@ class processing_set_iterator:
         else:
             xds = self.input_data[xds_name]  # In memory

-        return xds
+        return xds
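With these changes `load_processing_set` accepts either a local path or an `s3://` URL, trying credentialed access first and falling back to anonymous access for public buckets. A hedged usage sketch follows; the ms_xds name and slice sizes are placeholders (they must match what is actually inside the store), while the bucket path and data variable names are taken from the docstring examples above:

```python
from xradio.vis.load_processing_set import load_processing_set

# Keys are ms_xds names inside the processing set; values are dicts of dimension slices.
sel_parms = {
    "example_msv4_ddi_0": {"time": slice(0, 10), "frequency": slice(0, 64)},
}

ps = load_processing_set(
    ps_store="s3://viper-test-data/Antennae_North.cal.lsrk.split.vis.zarr/",
    sel_parms=sel_parms,
    data_variables=["VISIBILITY", "WEIGHT"],
    load_sub_datasets=True,
)
```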
xradio/vis/read_processing_set.py
CHANGED

@@ -3,11 +3,13 @@ import xarray as xr
 from ._processing_set import processing_set
 import graphviper.utils.logger as logger
 from xradio._utils.zarr.common import _open_dataset
+import s3fs
+from botocore.exceptions import NoCredentialsError


 def read_processing_set(
     ps_store: str, intents: list = None, fields: str = None
-)->processing_set:
+) -> processing_set:
     """Creates a lazy representation of a Processing Set (only meta-data is loaded into memory).

     Parameters
@@ -18,44 +20,101 @@ def read_processing_set(
         A list of the intents to be read for example ['OBSERVE_TARGET#ON_SOURCE']. The intents in a processing set can be seem by calling processing_set.summary().
         By default None, which will read all intents.
     fields : str, optional
-        The list of field names that will be read, by default None which will read all fields.
-
+        The list of field names that will be read, by default None which will read all fields.
+
     Returns
     -------
     processing_set
-        Lazy representation of processing set (data is represented by Dask.arrays).
-    """
-
+        Lazy representation of processing set (data is represented by Dask.arrays).
+    """
+    s3 = None
+    ps_store_is_s3dir = None
+
+    if os.path.isdir(ps_store):
+        ps_store_is_s3dir = False
+        # default to assuming the data are accessible on local file system
+        items = os.listdir(ps_store)
+
+    elif ps_store.startswith("s3"):
+        # only if not found locally, check if dealing with an S3 bucket URL
+        ps_store_is_s3dir = True
+        if not ps_store.endswith("/"):
+            # just for consistency, as there is no os.path equivalent in s3fs
+            ps_store = ps_store + "/"
+
+        try:
+            # initialize the S3 "file system", first attempting to use pre-configured credentials
+            s3 = s3fs.S3FileSystem(anon=False, requester_pays=False)
+
+            items = [bd.split(sep="/")[-1] for bd in s3.listdir(ps_store, detail=False)]
+
+        except (NoCredentialsError, PermissionError) as e:
+            # only public, read-only buckets will be accessible
+            # we will want to add messaging and error handling here
+            s3 = s3fs.S3FileSystem(anon=True)
+
+            items = [bd.split(sep="/")[-1] for bd in s3.listdir(ps_store, detail=False)]
+
+    else:
+        raise (
+            FileNotFoundError,
+            f"Could not find {ps_store} either locally or in the cloud.",
+        )
+
     ms_xds = xr.Dataset()
     ps = processing_set()
-    data_group =
+    data_group = "base"
     for ms_dir_name in items:
         if "ddi" in ms_dir_name:
-
+            if ps_store_is_s3dir:
+                store_path = ps_store + ms_dir_name
+                store_path_main = store_path + "/MAIN"
+            else:
+                store_path_main = os.path.join(ps_store, ms_dir_name, "MAIN")
+                store_path = os.path.split(store_path_main)[0]
+            if s3 is not None:
+                xds = _open_dataset(store_path_main, s3=s3)
+            else:
+                xds = _open_dataset(store_path_main)

             if (intents is None) or (xds.attrs["intent"] in intents):
                 data_name = _get_data_name(xds, data_group)

                 if (fields is None) or (
                     xds[data_name].attrs["field_info"]["name"] in fields
                 ):
-
-
-
-
-
+                    if s3 is not None:
+                        xds.attrs = {
+                            **xds.attrs,
+                            **_read_sub_xds(store_path, s3=s3),
+                        }
+                        ps[ms_dir_name] = xds
+                    else:
+                        xds.attrs = {
+                            **xds.attrs,
+                            **_read_sub_xds(store_path),
+                        }
+                        ps[ms_dir_name] = xds
+
     return ps


-def _read_sub_xds(ms_store, load=False):
+def _read_sub_xds(ms_store, load=False, **kwargs):
     sub_xds_dict = {}

     sub_xds = {
         "antenna_xds": "ANTENNA",
     }
     for sub_xds_key, sub_xds_name in sub_xds.items():
-
-
-
+        if "s3" in kwargs.keys():
+            joined_store = ms_store + "/" + sub_xds_name
+            sub_xds_dict[sub_xds_key] = _open_dataset(
+                joined_store, load=load, s3=kwargs["s3"]
+            )
+        else:
+            sub_xds_dict[sub_xds_key] = _open_dataset(
+                os.path.join(ms_store, sub_xds_name), load=load
+            )

     optional_sub_xds = {
         "weather_xds": "WEATHER",
@@ -65,6 +124,12 @@ def _read_sub_xds(ms_store, load=False):
         sub_xds_path = os.path.join(ms_store, sub_xds_name)
         if os.path.isdir(sub_xds_path):
             sub_xds_dict[sub_xds_key] = _open_dataset(sub_xds_path, load=load)
+        elif "s3" in kwargs.keys():
+            joined_store = ms_store + "/" + sub_xds_name
+            if kwargs["s3"].isdir(joined_store):
+                sub_xds_dict[sub_xds_key] = _open_dataset(
+                    joined_store, load=load, s3=kwargs["s3"]
+                )

     return sub_xds_dict
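`read_processing_set` now resolves the store in three steps: a local directory first, then an `s3://` URL (credentialed access with an anonymous fallback), and otherwise a FileNotFoundError. A short usage sketch; the bucket path comes from the load_processing_set docstring and the intent string is the docstring example, while the printed summary is only a lazy metadata view:

```python
from xradio.vis.read_processing_set import read_processing_set

# Lazy open: only metadata is read; data variables stay as dask arrays.
ps = read_processing_set(
    ps_store="s3://viper-test-data/Antennae_North.cal.lsrk.split.vis.zarr/",
    intents=["OBSERVE_TARGET#ON_SOURCE"],
)
print(ps.summary())  # includes the new field_coords column
```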
{xradio-0.0.24.dist-info → xradio-0.0.26.dist-info}/METADATA
CHANGED

@@ -1,7 +1,7 @@
 Metadata-Version: 2.1
 Name: xradio
-Version: 0.0.24
-Summary: Xarray Radio Astronomy Data IO
+Version: 0.0.26
+Summary: Xarray Radio Astronomy Data IO
 Author-email: Jan-Willem Steeb <jsteeb@nrao.edu>
 License: BSD 3-Clause License

@@ -32,7 +32,7 @@ License: BSD 3-Clause License
 OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

-Requires-Python: <3.12,>=3.
+Requires-Python: <3.12,>=3.9
 Description-Content-Type: text/markdown
 License-File: LICENSE.txt
 Requires-Dist: astropy
@@ -47,6 +47,7 @@ Requires-Dist: prettytable
 Requires-Dist: pytest
 Requires-Dist: pytest-cov
 Requires-Dist: pytest-html
+Requires-Dist: s3fs
 Requires-Dist: scipy
 Requires-Dist: tqdm
 Requires-Dist: xarray
{xradio-0.0.24.dist-info → xradio-0.0.26.dist-info}/RECORD
CHANGED

@@ -3,24 +3,24 @@ xradio/_utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 xradio/_utils/common.py,sha256=bjUZfZZrYTOt0i_XVfmQ2kvgr3egoYPWKGgnr4vKe-Y,46
 xradio/_utils/_casacore/tables.py,sha256=aq6E_4RRAHdTBCwMKrVil1cWhFU2O980DNH9IlRKXLw,1280
 xradio/_utils/zarr/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-xradio/_utils/zarr/common.py,sha256=
+xradio/_utils/zarr/common.py,sha256=w_orsJ7-nh8nXFllkYPwbEYFS5B-KeMVWnhO3elX_PQ,3470
 xradio/image/__init__.py,sha256=HAD0GfopIbhdxOYckyW6S9US_dSWmZrwIl3FHUzZwrE,435
 xradio/image/image.py,sha256=QoJ_BTLoMfeXJzU1yvtidBIhaMmjNA5_-6C3FWJRUeI,15635
 xradio/image/_util/__init__.py,sha256=M9lxD1Gc7kv0ucDEDbjLRuIEuESev-IG8j9EaCKUAkA,77
-xradio/image/_util/casacore.py,sha256=
-xradio/image/_util/common.py,sha256=
+xradio/image/_util/casacore.py,sha256=DmBTHUQ6870N5ARuFnYSfjZSLniJYgsjrsICUlCREYM,4234
+xradio/image/_util/common.py,sha256=hzVcnq4pYhxvkbMAIBzhWKdLN4nrqRGlmFwABxkww7A,8921
 xradio/image/_util/fits.py,sha256=gyGm06fuCKqVGK7uv-ObvQNfFawUDsIOa_nQyklM3Aw,329
 xradio/image/_util/image_factory.py,sha256=6tPzs20FTm2wEshHc1xqtTV7D0TbKxGLUKAVtvOc68I,10506
 xradio/image/_util/zarr.py,sha256=xTjg-KY-T4vuyua8pvuZZjCL-rI_wAsPjPUOYd5PZr4,1512
 xradio/image/_util/_casacore/__init__.py,sha256=OlsiRE40o1jSbBI4khgQQzgfDYbAlOMKIhO4UFlbGhg,41
 xradio/image/_util/_casacore/common.py,sha256=ky999eTCWta8w-uIs-7P7rPhZRLuh9yTuQXAxPvaPm4,1579
-xradio/image/_util/_casacore/xds_from_casacore.py,sha256=
-xradio/image/_util/_casacore/xds_to_casacore.py,sha256=
-xradio/image/_util/_fits/xds_from_fits.py,sha256=
+xradio/image/_util/_casacore/xds_from_casacore.py,sha256=Rht4A32QLAQ7uizwKfOsZ3Z819shvlUbsZfAuTIVerU,42562
+xradio/image/_util/_casacore/xds_to_casacore.py,sha256=P6c-yoOjuVQkm07ApA7FFKfje4aPwV-MsRFKaRaPq9I,15338
+xradio/image/_util/_fits/xds_from_fits.py,sha256=rsV2OMK3g4U_GYKgj68cZXWIlXi0M0oU2M9FtAUeCk8,28339
 xradio/image/_util/_zarr/common.py,sha256=apMX_bF4Hr3pFGjnDFpp36KgmhTYAPBZquNkjBHrsXk,307
 xradio/image/_util/_zarr/xds_from_zarr.py,sha256=hz6lHlpybfr_r8pn_uObDHOFmN5h75F11bkBv8KCuP0,3192
 xradio/image/_util/_zarr/xds_to_zarr.py,sha256=wogXbwX8n3Sl9PHoc3_Y_LBowQsQ-94HZQFZ5NcxUZA,1624
-xradio/image/_util/_zarr/zarr_low_level.py,sha256=
+xradio/image/_util/_zarr/zarr_low_level.py,sha256=5D8Vu8QZD8CpKSPMkxHhnKpdk3vhUStW3kQFHUQUQns,10984
 xradio/schema/__init__.py,sha256=UpejQegOaCLrsbcR4MLudR0RxeE0sN3zxFXM8rzyJPo,444
 xradio/schema/bases.py,sha256=vcW47jZWpJ0mJdni7eFVY7zJoper2sy2VjX8LE3pUqc,150
 xradio/schema/check.py,sha256=3u79hRL3pGF6dvQE0LF21nGdAVnRXWwgnbMmStGBSSA,16310
@@ -28,11 +28,11 @@ xradio/schema/dataclass.py,sha256=vkc2cqLjGV5QN8j70GbBaNfslT_KLWmebsPGeBEuGcs,88
 xradio/schema/metamodel.py,sha256=RHrihyaetinu7_lGTTZ31Rlv-_Db_EgQCXzk56H004o,3476
 xradio/schema/typing.py,sha256=coF3LuKOlCUJGKTUUH81EcjePZ86koOYzm8qzsAw-HU,9983
 xradio/vis/__init__.py,sha256=AV2WG26NzFB1LEEtFaq1ULQKz9VnluEAjg0Qb5Ju7m8,358
-xradio/vis/_processing_set.py,sha256=
+xradio/vis/_processing_set.py,sha256=zYCswmWjsRN_8M61IkbMo6zpI-4ABMWpKZkL8f20KGo,4187
 xradio/vis/convert_msv2_to_processing_set.py,sha256=7vTjqtWFEBOySnLVoadceKCA4VLgOig7eCs1dxrdYQA,3966
-xradio/vis/load_processing_set.py,sha256=
+xradio/vis/load_processing_set.py,sha256=ldgg0GqFwYuNc2fPkRlc_lW2Kt7naq2wFGloALl0J4Y,6753
 xradio/vis/model.py,sha256=uBjvvhYEY1p-3H3NStrt1ZKMQACLGLo93OiEBvDVId8,17083
-xradio/vis/read_processing_set.py,sha256=
+xradio/vis/read_processing_set.py,sha256=mhBcwrNEVlu5_dMfRaJQHzD-Yc3n2dtVqhNxvaoq09U,5522
 xradio/vis/vis_io.py,sha256=rCSOt4Max37uFzF3_Ck4U4xWzzYcipdbYcxbhBhQ_Qs,5278
 xradio/vis/_vis_utils/__init__.py,sha256=Scu6rKJ2SpO8aG7F-xdTZcYfyWx0viV8gFh8E8ur_gI,93
 xradio/vis/_vis_utils/ms.py,sha256=0uycYCDmeQku16TdPcnZEBJMdfb_i6xPoieYPhPoVIg,5258
@@ -52,7 +52,7 @@ xradio/vis/_vis_utils/_ms/subtables.py,sha256=hkSa3sXUaSRVEv2CNK65Svx5Uyp3opZHaH
 xradio/vis/_vis_utils/_ms/_tables/load.py,sha256=TXrEf7fKfsP0Wp7fB7DLWeOS_Z8A1OLKGWGNSoiYHOM,1665
 xradio/vis/_vis_utils/_ms/_tables/load_main_table.py,sha256=HyIfjaQXfFA30YRUzU2H_ugRnArQ1XOiVGKxC9PbiOo,13497
 xradio/vis/_vis_utils/_ms/_tables/read.py,sha256=lt3JOre-7WyzTprUuZIowi6aEcXc8R367oLZUB0vdhk,21781
-xradio/vis/_vis_utils/_ms/_tables/read_main_table.py,sha256=
+xradio/vis/_vis_utils/_ms/_tables/read_main_table.py,sha256=9XasWazrGSkQ6C4jSrItIGidaTUzh-bdSRYkYR5j0Dg,23958
 xradio/vis/_vis_utils/_ms/_tables/read_subtables.py,sha256=_3AlxlLJwdu__a8sfG8a7-wz-yTo2S7JyVit17XaxKs,11741
 xradio/vis/_vis_utils/_ms/_tables/table_query.py,sha256=q8EGFf_zIwHcHnvFJOn8hPh8zFZQ3f7BGbXvL3bHad4,555
 xradio/vis/_vis_utils/_ms/_tables/write.py,sha256=Uuk4QGib_QOc8i6g7OYB9E6SxMUDPlXDYXlHqkzPX5Q,8475
@@ -64,8 +64,8 @@ xradio/vis/_vis_utils/_utils/xds_helper.py,sha256=UhtAZV5DyYzVVBkXzwDAOz6TICxotQ
 xradio/vis/_vis_utils/_zarr/encoding.py,sha256=GENIlThV6a9CUCL6gIGlu9c6NR3OFWNos6mpxZjMwDc,536
 xradio/vis/_vis_utils/_zarr/read.py,sha256=ikNGlOdHuZ_cgWpPAZ4iHzeLdU44I0iBLcqSEiM_hCk,7111
 xradio/vis/_vis_utils/_zarr/write.py,sha256=exvrqNVnVKk6LzbDPm_fm142YzX-7lHGqklXTQB9wh0,8864
-xradio-0.0.
-xradio-0.0.
-xradio-0.0.
-xradio-0.0.
-xradio-0.0.
+xradio-0.0.26.dist-info/LICENSE.txt,sha256=dvACd-5O67yjSZlnEKcWmu3DqwzBtbC922iPv0KOeAw,1516
+xradio-0.0.26.dist-info/METADATA,sha256=Z3bKRq6dhmg8EIvgddHYafA9TSFoWd1PDerkpkkf8Gw,4089
+xradio-0.0.26.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+xradio-0.0.26.dist-info/top_level.txt,sha256=dQu27fGBZJ2Yk-gW5XeD-dZ76Xa4Xcvk60Vz-dwXp7k,7
+xradio-0.0.26.dist-info/RECORD,,
File without changes
|
|
File without changes
|
|
File without changes
|