xradio 0.0.40__py3-none-any.whl → 0.0.41__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- xradio/{vis → correlated_data}/__init__.py +3 -2
- xradio/{vis/_vis_utils → correlated_data/_utils}/_ms/_tables/load_main_table.py +1 -1
- xradio/{vis/_vis_utils → correlated_data/_utils}/_ms/_tables/read.py +14 -0
- xradio/{vis/_vis_utils → correlated_data/_utils}/_ms/conversion.py +117 -58
- xradio/{vis/_vis_utils → correlated_data/_utils}/_ms/create_antenna_xds.py +195 -167
- xradio/{vis/_vis_utils → correlated_data/_utils}/_ms/create_field_and_source_xds.py +40 -39
- xradio/correlated_data/_utils/_ms/msv4_info_dicts.py +203 -0
- xradio/correlated_data/_utils/_ms/msv4_sub_xdss.py +516 -0
- xradio/{vis/_vis_utils → correlated_data/_utils}/_ms/subtables.py +1 -1
- xradio/{vis/_vis_utils → correlated_data/_utils}/zarr.py +3 -3
- xradio/{vis → correlated_data}/convert_msv2_to_processing_set.py +9 -2
- xradio/correlated_data/correlated_xds.py +13 -0
- xradio/{vis → correlated_data}/load_processing_set.py +13 -17
- xradio/{vis/read_processing_set.py → correlated_data/open_processing_set.py} +20 -22
- xradio/{vis/_processing_set.py → correlated_data/processing_set.py} +11 -12
- xradio/{vis → correlated_data}/schema.py +572 -186
- xradio/correlated_data/test__processing_set.py +74 -0
- {xradio-0.0.40.dist-info → xradio-0.0.41.dist-info}/METADATA +9 -10
- xradio-0.0.41.dist-info/RECORD +75 -0
- {xradio-0.0.40.dist-info → xradio-0.0.41.dist-info}/WHEEL +1 -1
- xradio/vis/_vis_utils/_ms/msv4_infos.py +0 -0
- xradio/vis/_vis_utils/_ms/msv4_sub_xdss.py +0 -306
- xradio-0.0.40.dist-info/RECORD +0 -73
- /xradio/{vis/_vis_utils → correlated_data/_utils}/__init__.py +0 -0
- /xradio/{vis/_vis_utils → correlated_data/_utils}/_ms/_tables/load.py +0 -0
- /xradio/{vis/_vis_utils → correlated_data/_utils}/_ms/_tables/read_main_table.py +0 -0
- /xradio/{vis/_vis_utils → correlated_data/_utils}/_ms/_tables/read_subtables.py +0 -0
- /xradio/{vis/_vis_utils → correlated_data/_utils}/_ms/_tables/table_query.py +0 -0
- /xradio/{vis/_vis_utils → correlated_data/_utils}/_ms/_tables/write.py +0 -0
- /xradio/{vis/_vis_utils → correlated_data/_utils}/_ms/_tables/write_exp_api.py +0 -0
- /xradio/{vis/_vis_utils → correlated_data/_utils}/_ms/chunks.py +0 -0
- /xradio/{vis/_vis_utils → correlated_data/_utils}/_ms/descr.py +0 -0
- /xradio/{vis/_vis_utils → correlated_data/_utils}/_ms/msv2_msv3.py +0 -0
- /xradio/{vis/_vis_utils → correlated_data/_utils}/_ms/msv2_to_msv4_meta.py +0 -0
- /xradio/{vis/_vis_utils → correlated_data/_utils}/_ms/optimised_functions.py +0 -0
- /xradio/{vis/_vis_utils → correlated_data/_utils}/_ms/partition_queries.py +0 -0
- /xradio/{vis/_vis_utils → correlated_data/_utils}/_ms/partitions.py +0 -0
- /xradio/{vis/_vis_utils → correlated_data/_utils}/_utils/cds.py +0 -0
- /xradio/{vis/_vis_utils → correlated_data/_utils}/_utils/partition_attrs.py +0 -0
- /xradio/{vis/_vis_utils → correlated_data/_utils}/_utils/stokes_types.py +0 -0
- /xradio/{vis/_vis_utils → correlated_data/_utils}/_utils/xds_helper.py +0 -0
- /xradio/{vis/_vis_utils → correlated_data/_utils}/_zarr/encoding.py +0 -0
- /xradio/{vis/_vis_utils → correlated_data/_utils}/_zarr/read.py +0 -0
- /xradio/{vis/_vis_utils → correlated_data/_utils}/_zarr/write.py +0 -0
- /xradio/{vis/_vis_utils → correlated_data/_utils}/ms.py +0 -0
- {xradio-0.0.40.dist-info → xradio-0.0.41.dist-info}/LICENSE.txt +0 -0
- {xradio-0.0.40.dist-info → xradio-0.0.41.dist-info}/top_level.txt +0 -0
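
Note on downstream imports: with the `vis` → `correlated_data` package rename above, user code that imported from `xradio.vis` has to switch to the new module path. A minimal sketch of the likely change, assuming the public function tracks the renamed file (`read_processing_set.py` → `open_processing_set.py`); the names below are inferred from the file list, not verified against the 0.0.41 API:

    # xradio 0.0.40 (old layout)
    from xradio.vis import read_processing_set

    # xradio 0.0.41 (new layout, assuming the function name follows the new file name)
    from xradio.correlated_data import open_processing_set
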
xradio/correlated_data/_utils/_ms/msv4_info_dicts.py
@@ -0,0 +1,203 @@
+import numpy as np
+import xarray as xr
+
+from casacore import tables
+import toolviper.utils.logger as logger
+
+from .subtables import subt_rename_ids
+from ._tables.read import load_generic_table, convert_casacore_time
+from xradio._utils.list_and_array import check_if_consistent, unique_1d, to_list
+
+
+def create_info_dicts(
+    in_file: str,
+    xds: xr.Dataset,
+    field_and_source_xds: xr.Dataset,
+    partition_info_misc_fields: dict,
+    tb_tool: tables.table,
+) -> dict:
+    """
+    For an MSv4, produces several info dicts (partition_info, processor_info,
+    observation_info). The info dicts are returned in a dictionary that
+    contains them indexed by their corresponding keys, which can be used
+    directly to update the attrs dict of an MSv4.
+
+    Parameters:
+    -----------
+    in_file: str
+        path to the input MSv2
+    xds: xr.Dataset
+        main xds of the MSv4 being converted
+    field_and_source_xds: xr.Dataset
+        field_and_source_xds subdataset
+    partition_info_misc_fields: dict
+        dict with several scalar fields for the partition_info dict that are
+        collected while processing the main MSv4 table. Expected: scan_id,
+        intents, taql_where
+    tb_tool: tables.table
+        table (query) on the main table with an MSv4 query
+
+    Returns:
+    --------
+    info_dicts: dict
+        info dicts ready to be used to update the attrs of the MSv4
+    """
+
+    if "line_name" in field_and_source_xds.coords:
+        line_name = to_list(unique_1d(np.ravel(field_and_source_xds.line_name.values)))
+    else:
+        line_name = []
+
+    info_dicts = {}
+    info_dicts["partition_info"] = {
+        # "spectral_window_id": xds.frequency.attrs["spectral_window_id"],
+        "spectral_window_name": xds.frequency.attrs["spectral_window_name"],
+        # "field_id": to_list(unique_1d(field_id)),
+        "field_name": to_list(np.unique(field_and_source_xds.field_name.values)),
+        "polarization_setup": to_list(xds.polarization.values),
+        "scan_number": to_list(np.unique(partition_info_misc_fields["scan_id"])),
+        "source_name": to_list(np.unique(field_and_source_xds.source_name.values)),
+        # "source_id": to_list(unique_1d(source_id)),
+        "intents": partition_info_misc_fields["intents"].split(","),
+        "taql": partition_info_misc_fields["taql_where"],
+        "line_name": line_name,
+    }
+
+    observation_id = check_if_consistent(
+        tb_tool.getcol("OBSERVATION_ID"), "OBSERVATION_ID"
+    )
+    info_dicts["observation_info"] = create_observation_info(in_file, observation_id)
+
+    processor_id = check_if_consistent(tb_tool.getcol("PROCESSOR_ID"), "PROCESSOR_ID")
+    info_dicts["processor_info"] = create_processor_info(in_file, processor_id)
+
+    return info_dicts
+
+
+def create_observation_info(in_file: str, observation_id: int):
+    """
+    Makes a dict with the observation info extracted from the OBSERVATION subtable.
+
+    Parameters
+    ----------
+    in_file: str
+        path to an input MSv2
+    observation_id: int
+        observation ID for one MSv4 dataset
+
+    Returns:
+    --------
+    observation_info: dict
+        observation description ready for the MSv4 observation_info attr
+    """
+
+    generic_observation_xds = load_generic_table(
+        in_file,
+        "OBSERVATION",
+        rename_ids=subt_rename_ids["OBSERVATION"],
+        taql_where=f" where ROWID() = {observation_id}",
+    )
+
+    observation_info = {
+        "observer": [generic_observation_xds["OBSERVER"].values[0]],
+        "release_date": str(
+            convert_casacore_time(generic_observation_xds["RELEASE_DATE"].values)[0]
+        ),
+    }
+    # could just assume lower:upper case but keeping explicit dict for now
+    mandatory_fields = {"project": "PROJECT"}
+    for field_msv4, row_msv2 in mandatory_fields.items():
+        observation_info[field_msv4] = generic_observation_xds[row_msv2].values[0]
+
+    exec_block_xds = None
+    try:
+        exec_block_xds = load_generic_table(in_file, "ASDM_EXECBLOCK")
+    except ValueError as exc:
+        logger.debug(
+            "Did not find the ASDM_EXECBLOCK subtable, not loading optional fields in observation_info"
+        )
+    if exec_block_xds:
+        exec_block_info = extract_exec_block_info(exec_block_xds)
+        observation_info.update(exec_block_info)
+
+    return observation_info
+
+
+def extract_exec_block_info(exec_block_xds: xr.Dataset) -> dict:
+    """
+    Get the (optional) fields of the observation_info that come from the
+    ASDM_EXECBLOCK subtable.
+
+    Note this does not parse strings like 'session_reference':
+    '<EntityRef entityId="uid://A001/X133d/X169f" partId="X00000000" entityTypeName="OUSStatus"'
+    We might want to simplify that to 'uid://A001/X133d/X169f', but keeping the
+    full string for now, as it has additional information such as the type.
+
+    Parameters
+    ----------
+    exec_block_xds: xr.Dataset
+        raw xds read from subtable ASDM_EXECBLOCK
+
+    Returns:
+    --------
+    exec_block_info: dict
+        Execution block description ready for the MSv4 observation_info dict
+    """
+
+    optional_fields = {
+        "execution_block_id": "execBlockId",
+        "execution_block_number": "execBlockNum",
+        "execution_block_UID": "execBlockUID",
+        "session_reference": "sessionReference",
+        "observing_script": "observingScript",
+        "observing_script_UID": "observingScriptUID",
+        "observing_log": "observingLog",
+    }
+
+    exec_block_info = {}
+    for field_msv4, row_msv2 in optional_fields.items():
+        if row_msv2 in exec_block_xds.data_vars:
+            msv2_value = exec_block_xds[row_msv2].values[0]
+            if isinstance(msv2_value, np.ndarray):
+                exec_block_info[field_msv4] = ",".join([log for log in msv2_value])
+            else:
+                exec_block_info[field_msv4] = msv2_value
+
+    return exec_block_info
+
+
+def create_processor_info(in_file: str, processor_id: int):
+    """
+    Makes a dict with the processor info extracted from the PROCESSOR subtable.
+
+    Parameters
+    ----------
+    in_file: str
+        path to an input MSv2
+    processor_id: int
+        processor ID for one MSv4 dataset
+
+    Returns:
+    --------
+    processor_info: dict
+        processor description ready for the MSv4 processor_info attr
+    """
+
+    generic_processor_xds = load_generic_table(
+        in_file,
+        "PROCESSOR",
+        rename_ids=subt_rename_ids["PROCESSOR"],
+        taql_where=f" where ROWID() = {processor_id}",
+    )
+
+    # Many telescopes (ASKAP, MeerKAT, SKA-Mid, VLBI, VLBA, ngEHT) seem to
+    # produce an empty PROCESSOR subtable
+    if len(generic_processor_xds.data_vars) <= 0:
+        processor_info = {"type": "", "sub_type": ""}
+    else:
+        processor_info = {
+            "type": generic_processor_xds["TYPE"].values[0],
+            "sub_type": generic_processor_xds["SUB_TYPE"].values[0],
+        }
+
+    return processor_info
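
For orientation on how the new `msv4_info_dicts.py` module is used during MSv2 → MSv4 conversion: `create_info_dicts` returns a dict keyed by `partition_info`, `observation_info` and `processor_info`, matching the MSv4 attr names, so it can be applied with `attrs.update`. A minimal sketch, assuming an MSv2 at `obs.ms` and already-built `msv4_xds` / `field_and_source_xds` datasets (these names and the example field values are illustrative, not from the package):

    from casacore import tables
    from xradio.correlated_data._utils._ms.msv4_info_dicts import create_info_dicts

    # tb_tool is expected to be a (TaQL-restricted) query on the MSv2 main table;
    # the whole main table is opened here just for illustration.
    tb_tool = tables.table("obs.ms")
    info_dicts = create_info_dicts(
        in_file="obs.ms",
        xds=msv4_xds,
        field_and_source_xds=field_and_source_xds,
        partition_info_misc_fields={
            "scan_id": tb_tool.getcol("SCAN_NUMBER"),
            "intents": "OBSERVE_TARGET#ON_SOURCE",
            "taql_where": "",
        },
        tb_tool=tb_tool,
    )
    tb_tool.close()

    # The keys match the MSv4 attr names, so the result can be applied directly:
    msv4_xds.attrs.update(info_dicts)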
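
Similarly, `extract_exec_block_info` copies only the camelCase ASDM_EXECBLOCK columns listed in `optional_fields`, renames them to snake_case, and joins array-valued columns into comma-separated strings. A small self-contained sketch with made-up values:

    import numpy as np
    import xarray as xr
    from xradio.correlated_data._utils._ms.msv4_info_dicts import extract_exec_block_info

    fake_exec_block = xr.Dataset(
        data_vars={
            "execBlockUID": ("row", np.array(["uid://A002/X123/X45"])),
            "sessionReference": ("row", np.array(["uid://A001/X133d/X169f"])),
            "unrelated": ("row", np.array([1])),  # ignored: not in optional_fields
        }
    )
    info = extract_exec_block_info(fake_exec_block)
    # -> keys "execution_block_UID" and "session_reference", values taken from row 0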