xradio 0.0.38__py3-none-any.whl → 0.0.40__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- xradio/__init__.py +1 -1
- xradio/_utils/common.py +41 -0
- xradio/_utils/schema.py +14 -3
- xradio/image/_util/_casacore/xds_from_casacore.py +1 -1
- xradio/image/_util/_zarr/xds_from_zarr.py +1 -1
- xradio/image/_util/_zarr/zarr_low_level.py +1 -1
- xradio/vis/_vis_utils/_ms/_tables/load.py +5 -6
- xradio/vis/_vis_utils/_ms/_tables/load_main_table.py +1 -1
- xradio/vis/_vis_utils/_ms/_tables/read.py +58 -52
- xradio/vis/_vis_utils/_ms/_tables/read_main_table.py +1 -1
- xradio/vis/_vis_utils/_ms/_tables/read_subtables.py +1 -1
- xradio/vis/_vis_utils/_ms/_tables/write.py +1 -1
- xradio/vis/_vis_utils/_ms/conversion.py +1 -1
- xradio/vis/_vis_utils/_ms/create_antenna_xds.py +1 -1
- xradio/vis/_vis_utils/_ms/create_field_and_source_xds.py +208 -175
- xradio/vis/_vis_utils/_ms/msv2_to_msv4_meta.py +1 -1
- xradio/vis/_vis_utils/_ms/msv4_sub_xdss.py +133 -178
- xradio/vis/_vis_utils/_ms/partition_queries.py +1 -1
- xradio/vis/_vis_utils/_ms/partitions.py +1 -1
- xradio/vis/_vis_utils/_ms/subtables.py +1 -1
- xradio/vis/_vis_utils/_utils/xds_helper.py +10 -12
- xradio/vis/_vis_utils/_zarr/read.py +1 -1
- xradio/vis/_vis_utils/_zarr/write.py +1 -1
- xradio/vis/_vis_utils/ms.py +1 -1
- xradio/vis/_vis_utils/zarr.py +1 -1
- xradio/vis/convert_msv2_to_processing_set.py +1 -1
- xradio/vis/read_processing_set.py +1 -1
- {xradio-0.0.38.dist-info → xradio-0.0.40.dist-info}/METADATA +3 -2
- {xradio-0.0.38.dist-info → xradio-0.0.40.dist-info}/RECORD +32 -32
- {xradio-0.0.38.dist-info → xradio-0.0.40.dist-info}/WHEEL +1 -1
- {xradio-0.0.38.dist-info → xradio-0.0.40.dist-info}/LICENSE.txt +0 -0
- {xradio-0.0.38.dist-info → xradio-0.0.40.dist-info}/top_level.txt +0 -0
xradio/vis/_vis_utils/_ms/create_field_and_source_xds.py
@@ -5,7 +5,7 @@ from typing import Tuple, Union
 import numpy as np
 import xarray as xr
 
-
+import toolviper.utils.logger as logger
 from xradio.vis._vis_utils._ms.msv4_sub_xdss import interpolate_to_time
 from xradio.vis._vis_utils._ms.subtables import subt_rename_ids
 from xradio.vis._vis_utils._ms._tables.read import (
@@ -13,13 +13,16 @@ from xradio.vis._vis_utils._ms._tables.read import (
     make_taql_where_between_min_max,
     load_generic_table,
 )
-
+from xradio._utils.common import cast_to_str, convert_to_si_units, add_position_offsets
 from xradio._utils.list_and_array import (
     check_if_consistent,
     unique_1d,
     to_np_array,
 )
-from xradio._utils.
+from xradio._utils.schema import (
+    column_description_casacore_to_msv4_measure,
+    convert_generic_xds_to_xradio_schema,
+)
 
 
 def create_field_and_source_xds(
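The functional change behind most of the hunks below is that 0.0.40 routes column conversion through the `convert_generic_xds_to_xradio_schema` helper imported above, instead of building each `xr.DataArray` by hand. The helper itself lives in `xradio/_utils/schema.py` (also changed in this release); the snippet here is only a hand-rolled sketch of the mapping convention it is called with, using a made-up toy dataset, not the actual implementation:

```python
import numpy as np
import xarray as xr

# Mapping convention used throughout create_field_and_source_xds.py in 0.0.40:
# generic (MSv2/ephemeris) column name -> [MSv4 variable name, output dims].
to_new_data_variables = {
    "RadVel": ["SOURCE_RADIAL_VELOCITY", ["time_ephemeris_axis"]],
    "NP_ang": ["NORTH_POLE_POSITION_ANGLE", ["time_ephemeris_axis"]],
}

# Toy stand-in for the generic dataset returned by load_generic_table().
generic_xds = xr.Dataset(
    {"RadVel": ("row", np.zeros(3)), "NP_ang": ("row", np.zeros(3))}
)

# Hand-rolled sketch of the rename/re-dimension step only; the real helper in
# xradio/_utils/schema.py also accepts a coordinate mapping as its fourth argument.
msv4_xds = xr.Dataset()
for generic_name, (msv4_name, dims) in to_new_data_variables.items():
    msv4_xds[msv4_name] = xr.DataArray(generic_xds[generic_name].data, dims=dims)

print(list(msv4_xds.data_vars))
```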
@@ -176,16 +179,29 @@ def extract_ephemeris_info(
     else:
         unit_keyword = "QuantumUnits"
 
-    # We are using the "time_ephemeris_axis" label because it might not match the optional time axis of the source and field info. If ephemeris_interpolate=True then rename it to time.
-    coords = {
-        "ellipsoid_pos_label": ["lon", "lat", "dist"],
-        "time_ephemeris_axis": ephemeris_xds["time"].data,
-        "sky_pos_label": ["ra", "dec", "dist"],
-    }
-
     temp_xds = xr.Dataset()
 
-    # Add mandatory data:
+    # Add mandatory data: OBSERVATION_POSITION
+    observation_position = [
+        ephemeris_meta["GeoLong"],
+        ephemeris_meta["GeoLat"],
+        ephemeris_meta["GeoDist"],
+    ]
+    temp_xds["OBSERVATION_POSITION"] = xr.DataArray(
+        observation_position, dims=["ellipsoid_pos_label"]
+    )
+    temp_xds["OBSERVATION_POSITION"].attrs.update(
+        {
+            "type": "location",
+            "units": ["deg", "deg", "m"],
+            "data": observation_position,
+            "ellipsoid": "WGS84",
+            "origin_object_name": "Earth",
+            "coordinate_system": ephemeris_meta["obsloc"].lower(),
+        }
+    )  # I think the units are ['deg','deg','m'] and 'WGS84'.
+
+    # Add (optional) data: SOURCE_LOCATION (POSITION / sky_pos_label)
     temp_xds["SOURCE_LOCATION"] = xr.DataArray(
         np.column_stack(
             (
@@ -206,71 +222,42 @@ def extract_ephemeris_info(
         {"type": "sky_coord", "frame": sky_coord_frame, "units": sky_coord_units}
     )
 
-    #
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        ephemeris_meta["GeoLat"],
-        ephemeris_meta["GeoDist"],
-    ]
-    temp_xds["OBSERVATION_POSITION"] = xr.DataArray(
-        observation_position, dims=["ellipsoid_pos_label"]
+    # Convert a few columns/variables that can be converted with standard
+    # convert_generic_xds_to_xradio_schema().
+    # Metadata has to be fixed manually. Alternatively, issues like
+    # UNIT/QuantumUnits issue could be handled in convert_generic_xds_to_xradio_schema,
+    # but for now preferring not to pollute that function.
+    to_new_data_variables = {
+        # mandatory: SOURCE_RADIAL_VELOCITY
+        "RadVel": ["SOURCE_RADIAL_VELOCITY", ["time_ephemeris_axis"]],
+        # optional: data NORTH_POLE_POSITION_ANGLE and NORTH_POLE_ANGULAR_DISTANCE
+        "NP_ang": ["NORTH_POLE_POSITION_ANGLE", ["time_ephemeris_axis"]],
+        "NP_dist": ["NORTH_POLE_ANGULAR_DISTANCE", ["time_ephemeris_axis"]],
+        # optional: HELIOCENTRIC_RADIAL_VELOCITY
+        "rdot": ["HELIOCENTRIC_RADIAL_VELOCITY", ["time_ephemeris_axis"]],
+        # optional: OBSERVER_PHASE_ANGLE
+        "phang": ["OBSERVER_PHASE_ANGLE", ["time_ephemeris_axis"]],
+    }
+    convert_generic_xds_to_xradio_schema(
+        ephemeris_xds, temp_xds, to_new_data_variables, {}
     )
-    temp_xds["OBSERVATION_POSITION"].attrs.update(
-        {
-            "type": "location",
-            "units": ["deg", "deg", "m"],
-            "data": observation_position,
-            "ellipsoid": "WGS84",
-            "origin_object_name": "Earth",
-            "coordinate_system": ephemeris_meta["obsloc"].lower(),
-        }
-    )  # I think the units are ['deg','deg','m'] and 'WGS84'.
 
-    #
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    if "NP_dist" in ephemeris_xds.data_vars:
-        temp_xds["NORTH_POLE_ANGULAR_DISTANCE"] = xr.DataArray(
-            ephemeris_xds["NP_dist"].data, dims=["time_ephemeris_axis"]
-        )
-        temp_xds["NORTH_POLE_ANGULAR_DISTANCE"].attrs.update(
-            {
-                "type": "quantity",
-                "units": [
-                    cast_to_str(
-                        ephemris_column_description["NP_dist"]["keywords"][unit_keyword]
-                    )
-                ],
-            }
-        )
+    # Adjust metadata:
+    for generic_var_name, msv4_variable_def in to_new_data_variables.items():
+        msv4_var_name = msv4_variable_def[0]
+        if msv4_var_name in temp_xds:
+            temp_xds[msv4_var_name].attrs.update(
+                {
+                    "type": "quantity",
+                    "units": [
+                        cast_to_str(
+                            ephemris_column_description[generic_var_name]["keywords"][
+                                unit_keyword
+                            ]
+                        )
+                    ],
+                }
+            )
 
     # Add optional data: SUB_OBSERVER_POSITION and SUB_SOLAR_POSITION
     if "DiskLong" in ephemris_column_description:
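The "Adjust metadata:" loop above replaces several near-identical per-column blocks from 0.0.38: after the bulk conversion it stamps each converted variable with a `quantity` type and the unit string taken from the casacore column description (under either the `UNIT` or `QuantumUnits` keyword, whichever the ephemeris table uses). A small stand-alone sketch of that lookup, with a made-up column description and plain `str` in place of xradio's `cast_to_str`:

```python
import numpy as np
import xarray as xr

# Made-up casacore-style column description; real ones come from the ephemeris
# table attributes, and the unit keyword is either "UNIT" or "QuantumUnits".
ephemris_column_description = {"RadVel": {"keywords": {"UNIT": "km/d"}}}
unit_keyword = "UNIT"

temp_xds = xr.Dataset({"SOURCE_RADIAL_VELOCITY": ("time_ephemeris_axis", np.zeros(3))})
temp_xds["SOURCE_RADIAL_VELOCITY"].attrs.update(
    {
        "type": "quantity",
        "units": [str(ephemris_column_description["RadVel"]["keywords"][unit_keyword])],
    }
)
print(temp_xds["SOURCE_RADIAL_VELOCITY"].attrs)
```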
@@ -341,38 +328,12 @@ def extract_ephemeris_info(
             }
         )
 
-    #
-
-
-
-
-
-            {
-                "type": "quantity",
-                "units": [
-                    cast_to_str(
-                        ephemris_column_description["rdot"]["keywords"][unit_keyword]
-                    )
-                ],
-            }
-        )
-
-    # Add optional data: OBSERVER_PHASE_ANGLE
-    if "phang" in ephemeris_xds.data_vars:
-        temp_xds["OBSERVER_PHASE_ANGLE"] = xr.DataArray(
-            ephemeris_xds["phang"].data, dims=["time_ephemeris_axis"]
-        )
-        temp_xds["OBSERVER_PHASE_ANGLE"].attrs.update(
-            {
-                "type": "quantity",
-                "units": [
-                    cast_to_str(
-                        ephemris_column_description["phang"]["keywords"][unit_keyword]
-                    )
-                ],
-            }
-        )
-
+    # We are using the "time_ephemeris_axis" label because it might not match the optional time axis of the source and field info. If ephemeris_interpolate=True then rename it to time.
+    coords = {
+        "ellipsoid_pos_label": ["lon", "lat", "dist"],
+        "time_ephemeris_axis": ephemeris_xds["time"].data,
+        "sky_pos_label": ["ra", "dec", "dist"],
+    }
     temp_xds = temp_xds.assign_coords(coords)
     time_coord_attrs = {
         "type": "time",
@@ -435,8 +396,74 @@ def extract_ephemeris_info(
     return xds
 
 
+def make_line_dims_and_coords(
+    source_xds: xr.Dataset, source_id: Union[int, np.ndarray], num_lines: int
+) -> tuple[list, dict]:
+    """
+    Produces the dimensions and coordinates used in data variables related
+    to line information (LINE_REST_FREQUENCY, LINE_SYSTEMIC_VELOCITY).
+
+    In the dimensions, "time" is optional. To produce the points of the
+    coordinates we need to look into the (optional) TRANSITION column or
+    alternatively other columns (DIRECTION) to produce coordinates points of
+    appropriate shape, given the "num_lines" "and source_id".
+
+    Parameters:
+    ----------
+    source_xds: xr.Dataset
+        generic source xarray dataset
+    source_id: Union[int, np.ndarray]
+        source_id of the dataset, when it is an array that indicates the
+        presence of the "time" dimension
+    num_line: int
+        number of lines in the source dataset
+
+    Returns:
+    -------
+    tuple : tuple[list, dict]
+        The dimensions and coordinates to use with line data variables. The
+        dimensions are produced as a list of dimension names, and the
+        coordinates as a dict for xarray coords.
+    """
+
+    # Transition is an optional column and occasionally not populated
+    if "TRANSITION" in source_xds.data_vars:
+        transition_var_data = source_xds["TRANSITION"].data
+    else:
+        transition_var_data = np.zeros(source_xds["DIRECTION"].shape, dtype="str")
+
+    # if TRANSITION is left empty (or otherwise incomplete), and num_lines > 1,
+    # the data_vars expect a "num_lines" size in the last dimension
+    vars_shape = transition_var_data.shape[:-1] + (np.max(num_lines),)
+    if transition_var_data.shape == vars_shape:
+        coords_lines_data = transition_var_data
+    else:
+        coords_lines_data = np.broadcast_to(
+            transition_var_data, max(transition_var_data.shape, vars_shape)
+        )
+
+    line_label_data = np.arange(coords_lines_data.shape[-1]).astype(str)
+    if len(source_id) == 1:
+        line_coords = {
+            "line_name": ("line_label", coords_lines_data),
+            "line_label": line_label_data,
+        }
+        line_dims = ["line_label"]
+    else:
+        line_coords = {
+            "line_name": (("time", "line_label"), coords_lines_data),
+            "line_label": line_label_data,
+        }
+        line_dims = ["time", "line_label"]
+
+    return line_dims, line_coords
+
+
 def extract_source_info(
-    xds: xr.Dataset,
+    xds: xr.Dataset,
+    path: str,
+    source_id: Union[int, np.ndarray],
+    spectral_window_id: int,
 ) -> tuple[xr.Dataset, int]:
     """
     Extracts source information from the given path and adds it to the xarray dataset.
@@ -447,7 +474,7 @@ def extract_source_info(
         The xarray dataset to which the source information will be added.
     path : str
         The path to the input file.
-    source_id : int
+    source_id : Union[int, np.ndarray]
         The ID of the source.
     spectral_window_id : int
         The ID of the spectral window.
@@ -554,11 +581,12 @@ def extract_source_info(
     direction_var = source_xds[direction_msv2_col]
 
     # SOURCE_LOCATION (DIRECTION / sky_dir_label)
-    xds["SOURCE_LOCATION"] = xr.DataArray(direction_var.data, dims=direction_dims)
     location_msv4_measure = column_description_casacore_to_msv4_measure(
         source_column_description[direction_msv2_col]
     )
-    xds["SOURCE_LOCATION"].
+    xds["SOURCE_LOCATION"] = xr.DataArray(
+        direction_var.data, dims=direction_dims, attrs=location_msv4_measure
+    )
 
     # Do we have line data:
     if source_xds["NUM_LINES"].data.ndim == 0:
@@ -567,53 +595,21 @@ def extract_source_info(
         num_lines = source_xds["NUM_LINES"].data
 
     if any(num_lines > 0):
+        line_dims, line_coords = make_line_dims_and_coords(
+            source_xds, source_id, num_lines
+        )
+        xds = xds.assign_coords(line_coords)
 
-
-
-
-        else:
-            transition_var_data = np.zeros(source_xds["DIRECTION"].shape, dtype="str")
-
-        # if TRANSITION is left empty (or otherwise incomplete), and num_lines > 1,
-        # the data_vars expect a "num_lines" size in the last dimension
-        vars_shape = transition_var_data.shape[:-1] + (np.max(num_lines),)
-        if transition_var_data.shape == vars_shape:
-            coords_lines_data = transition_var_data
-        else:
-            coords_lines_data = np.broadcast_to(
-                transition_var_data, max(transition_var_data.shape, vars_shape)
-            )
-
-        line_label_data = np.arange(coords_lines_data.shape[-1]).astype(str)
-        if len(source_id) == 1:
-            coords_lines = {
-                "line_name": ("line_label", coords_lines_data),
-                "line_label": line_label_data,
-            }
-            xds = xds.assign_coords(coords_lines)
-            line_dims = ["line_label"]
-        else:
-            coords_lines = {
-                "line_name": (("time", "line_label"), coords_lines_data),
-                "line_label": line_label_data,
-            }
-            xds = xds.assign_coords(coords_lines)
-            line_dims = ["time", "line_label"]
-
-        optional_data_variables = {
-            "REST_FREQUENCY": "LINE_REST_FREQUENCY",
-            "SYSVEL": "LINE_SYSTEMIC_VELOCITY",
+        to_new_data_variables = {
+            "REST_FREQUENCY": ["LINE_REST_FREQUENCY", line_dims],
+            "SYSVEL": ["LINE_SYSTEMIC_VELOCITY", line_dims],
         }
-
-
-
-
-
-
-            xds[msv4_name] = xr.DataArray(
-                source_xds[generic_name].data, dims=line_dims
-            )
-            xds[msv4_name].attrs.update(msv4_measure)
+        to_new_coords = {
+            "TIME": ["time", ["time"]],
+        }
+        convert_generic_xds_to_xradio_schema(
+            source_xds, xds, to_new_data_variables, to_new_coords
+        )
 
         # Need to add doppler info if present. Add check.
         try:
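`make_line_dims_and_coords` (added further above) now owns the branch on whether the line coordinates carry a `time` dimension; the call site above simply assigns whatever it returns. A toy, self-contained run showing the shape of the resulting coordinates for a dataset with two times and two lines (the line names are invented):

```python
import numpy as np
import xarray as xr

# Invented line names shaped (time, line_label), mimicking the line_coords
# that make_line_dims_and_coords() returns when source_id is an array.
coords_lines_data = np.array([["CO_v0_J1-0", "SiO_v1"], ["CO_v0_J1-0", "SiO_v1"]])
line_label_data = np.arange(coords_lines_data.shape[-1]).astype(str)

xds = xr.Dataset(coords={"time": [0.0, 10.0]})
xds = xds.assign_coords(
    {
        "line_name": (("time", "line_label"), coords_lines_data),
        "line_label": line_label_data,
    }
)
# LINE_REST_FREQUENCY / LINE_SYSTEMIC_VELOCITY then use dims ["time", "line_label"].
print(xds.coords["line_name"].dims, list(xds.coords["line_label"].values))
```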
@@ -634,8 +630,56 @@ def extract_source_info(
     return xds, np.sum(num_lines[unique_source_ids_indices])
 
 
+def make_field_dims_and_coords(
+    field_xds: xr.Dataset, field_id: Union[int, np.ndarray], field_times: list
+) -> tuple[list, dict]:
+    """
+    Produces the dimensions and coordinates used in the field data variables
+    extracted from the MSv2 FIELD subtable (FIELD_PHASE_CENTER/
+    FIELD_REFERENCE_CENTER).
+
+    Parameters:
+    ----------
+    field_xds: xr.Dataset
+        generic field xarray dataset
+    field_id: Union[int, np.ndarray]
+        field_id of the dataset
+    field_times:
+        Unique times for the dataset (when not partitioning by FIELD_ID)
+
+    Returns:
+    -------
+    tuple : tuple[list, dict]
+        The dimensions and coordinates to use with field data variables. The
+        dimensions are produced as a list of dimension names, and the
+        coordinates as a dict for xarray coords.
+    """
+
+    coords = {"sky_dir_label": ["ra", "dec"]}
+
+    # field_times is the same as the time axis in the main MSv4 dataset and is used if more than one field is present.
+    if field_times is not None:
+        coords["time"] = field_times
+        dims = ["time", "sky_dir_label"]
+        coords["field_name"] = (
+            "time",
+            np.char.add(field_xds["NAME"].data, np.char.add("_", field_id.astype(str))),
+        )
+        # coords["field_id"] = ("time", field_id)
+    else:
+        coords["field_name"] = field_xds["NAME"].values.item() + "_" + str(field_id)
+        # coords["field_id"] = field_id
+        dims = ["sky_dir_label"]
+
+    return dims, coords
+
+
 def extract_field_info_and_check_ephemeris(
-    field_and_source_xds
+    field_and_source_xds: xr.Dataset,
+    in_file: str,
+    field_id: Union[int, np.ndarray],
+    field_times: list,
+    is_single_dish: bool,
 ):
     """
     Create field information and check for ephemeris in the FIELD table folder.
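Similarly, the field-name coordinate construction that used to sit inline in `extract_field_info_and_check_ephemeris` is now concentrated in `make_field_dims_and_coords` above. A toy run of the multi-field (time-dependent) branch, with invented field names, IDs and times:

```python
import numpy as np
import xarray as xr

# Invented values standing in for field_xds["NAME"].data, field_id and
# field_times in the multi-field branch.
field_names = np.array(["J1331+3030", "3C286_B"])
field_id = np.array([0, 1])
field_times = [4.88e9, 4.88e9 + 10.0]

coords = {"sky_dir_label": ["ra", "dec"], "time": field_times}
coords["field_name"] = (
    "time",
    np.char.add(field_names, np.char.add("_", field_id.astype(str))),
)
dims = ["time", "sky_dir_label"]  # dims later used for FIELD_PHASE_CENTER etc.

xds = xr.Dataset(coords=coords)
print(xds.coords["field_name"].values)  # ['J1331+3030_0' '3C286_B_1']
```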
@@ -646,8 +690,12 @@ def extract_field_info_and_check_ephemeris(
         The xarray dataset to which the field and source information will be added.
     in_file : str
         The path to the input file.
-    field_id : int
+    field_id : Union[int, np.ndarray]
         The ID of the field.
+    field_times: list
+        Time of the MSv4
+    is_single_dish: bool
+        Whether to extract single dish (FIELD_REFERENCE_CENTER) info
 
     Returns:
     -------
@@ -658,7 +706,6 @@ def extract_field_info_and_check_ephemeris(
     ephemeris_table_name : str
         The name of the ephemeris table.
     """
-    coords = {}
 
     unique_field_id = unique_1d(
         field_id
@@ -718,6 +765,8 @@ def extract_field_info_and_check_ephemeris(
             f"Could not find ephemeris table for field_id {field_id}. Ephemeris information will not be included in the field_and_source_xds."
         )
 
+    dims, coords = make_field_dims_and_coords(field_xds, field_id, field_times)
+
     if is_single_dish:
         field_data_variables = {
             "REFERENCE_DIR": "FIELD_REFERENCE_CENTER",
@@ -729,27 +778,10 @@ def extract_field_info_and_check_ephemeris(
             # "REFERENCE_DIR": "FIELD_REFERENCE_CENTER",
         }
 
-    field_measures_type = "sky_coord"
-
-    coords["sky_dir_label"] = ["ra", "dec"]
     field_column_description = field_xds.attrs["other"]["msv2"]["ctds_attrs"][
         "column_descriptions"
     ]
 
-    # field_times is the same as the time axis in the main MSv4 dataset and is used if more than one field is present.
-    if field_times is not None:
-        coords["time"] = field_times
-        dims = ["time", "sky_dir_label"]
-        coords["field_name"] = (
-            "time",
-            np.char.add(field_xds["NAME"].data, np.char.add("_", field_id.astype(str))),
-        )
-        # coords["field_id"] = ("time", field_id)
-    else:
-        coords["field_name"] = field_xds["NAME"].values.item() + "_" + str(field_id)
-        # coords["field_id"] = field_id
-        dims = ["sky_dir_label"]
-
     for generic_name, msv4_name in field_data_variables.items():
 
         delay_dir_ref_col = "DelayDir_Ref"
@@ -772,6 +804,7 @@ def extract_field_info_and_check_ephemeris(
             }
         )
 
+        field_measures_type = "sky_coord"
         field_and_source_xds[msv4_name].attrs["type"] = field_measures_type
 
     field_and_source_xds = field_and_source_xds.assign_coords(coords)