xradio 0.0.39__py3-none-any.whl → 0.0.40__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
Files changed (30)
  1. xradio/__init__.py +1 -1
  2. xradio/_utils/schema.py +14 -3
  3. xradio/image/_util/_casacore/xds_from_casacore.py +1 -1
  4. xradio/image/_util/_zarr/xds_from_zarr.py +1 -1
  5. xradio/image/_util/_zarr/zarr_low_level.py +1 -1
  6. xradio/vis/_vis_utils/_ms/_tables/load_main_table.py +1 -1
  7. xradio/vis/_vis_utils/_ms/_tables/read.py +1 -1
  8. xradio/vis/_vis_utils/_ms/_tables/read_main_table.py +1 -1
  9. xradio/vis/_vis_utils/_ms/_tables/read_subtables.py +1 -1
  10. xradio/vis/_vis_utils/_ms/_tables/write.py +1 -1
  11. xradio/vis/_vis_utils/_ms/conversion.py +1 -1
  12. xradio/vis/_vis_utils/_ms/create_antenna_xds.py +1 -1
  13. xradio/vis/_vis_utils/_ms/create_field_and_source_xds.py +208 -175
  14. xradio/vis/_vis_utils/_ms/msv2_to_msv4_meta.py +1 -1
  15. xradio/vis/_vis_utils/_ms/msv4_sub_xdss.py +133 -178
  16. xradio/vis/_vis_utils/_ms/partition_queries.py +1 -1
  17. xradio/vis/_vis_utils/_ms/partitions.py +1 -1
  18. xradio/vis/_vis_utils/_ms/subtables.py +1 -1
  19. xradio/vis/_vis_utils/_utils/xds_helper.py +1 -1
  20. xradio/vis/_vis_utils/_zarr/read.py +1 -1
  21. xradio/vis/_vis_utils/_zarr/write.py +1 -1
  22. xradio/vis/_vis_utils/ms.py +1 -1
  23. xradio/vis/_vis_utils/zarr.py +1 -1
  24. xradio/vis/convert_msv2_to_processing_set.py +1 -1
  25. xradio/vis/read_processing_set.py +1 -1
  26. {xradio-0.0.39.dist-info → xradio-0.0.40.dist-info}/METADATA +2 -1
  27. {xradio-0.0.39.dist-info → xradio-0.0.40.dist-info}/RECORD +30 -30
  28. {xradio-0.0.39.dist-info → xradio-0.0.40.dist-info}/WHEEL +1 -1
  29. {xradio-0.0.39.dist-info → xradio-0.0.40.dist-info}/LICENSE.txt +0 -0
  30. {xradio-0.0.39.dist-info → xradio-0.0.40.dist-info}/top_level.txt +0 -0
xradio/__init__.py CHANGED
@@ -1,5 +1,5 @@
  import os
- from graphviper.utils.logger import setup_logger
+ from toolviper.utils.logger import setup_logger

  # _logger_name = "xradio"
  # if os.getenv("VIPER_LOGGER_NAME") != _logger_name:
xradio/_utils/schema.py CHANGED
@@ -1,12 +1,12 @@
- import graphviper.utils.logger as logger
+ import toolviper.utils.logger as logger

  import xarray as xr


  def convert_generic_xds_to_xradio_schema(
      generic_xds: xr.Dataset,
      msv4_xds: xr.Dataset,
-     to_new_data_variables: dict,
-     to_new_coords: dict,
+     to_new_data_variables: dict[str, list],
+     to_new_coords: dict[str, list],
  ) -> xr.Dataset:
      """Converts a generic xarray Dataset to the xradio schema.

@@ -55,6 +55,7 @@ def convert_generic_xds_to_xradio_schema(
          "column_descriptions"
      ]
      coords = {}
+     coord_attrs = {}

      name_keys = list(generic_xds.data_vars.keys()) + list(generic_xds.coords.keys())

@@ -80,7 +81,14 @@ def convert_generic_xds_to_xradio_schema(
                  new_coord[1],
                  generic_xds[key].data,
              )
+
+             if msv4_measure:
+                 coord_attrs[new_coord[0]] = msv4_measure
+
      msv4_xds = msv4_xds.assign_coords(coords)
+     for coord, coord_attrs in coord_attrs.items():
+         msv4_xds.coords[coord].attrs.update(coord_attrs)
+
      return msv4_xds


@@ -102,6 +110,9 @@ def column_description_casacore_to_msv4_measure(
              casacore_column_description["keywords"]["QuantumUnits"]
          )

+         # Beware: casa_ref won't be found in cases such as the custom
+         # 'NRAO_GBT_USER/NRAO_GBT_USER_DIR_REF' in POINTING
+         casa_ref = None
          # Reference frame to convert?
          if "Ref" in msv4_measure_conversion:
              # Find reference frame
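The `dict[str, list]` signature above is the convention the rest of this release builds on: each entry maps an MSv2 column name to a pair of [new MSv4 name, list of dimension names]. The sketch below is only an illustration of what such a mapping encodes; it is not the xradio implementation, and the toy dataset contents are invented.

# Illustrative sketch only, not xradio's implementation: apply a
# {MSv2 name: [MSv4 name, [dims]]} mapping to a toy xarray Dataset.
import numpy as np
import xarray as xr


def apply_mapping(generic_xds: xr.Dataset, to_new_data_variables: dict[str, list]) -> xr.Dataset:
    msv4_xds = xr.Dataset()
    for old_name, (new_name, dims) in to_new_data_variables.items():
        if old_name in generic_xds:
            # Re-wrap the raw data under the MSv4 name and dimension labels.
            msv4_xds[new_name] = xr.DataArray(generic_xds[old_name].data, dims=dims)
    return msv4_xds


# Toy stand-in for a dataset read from an MSv2 subtable.
generic = xr.Dataset({"RadVel": (("row",), np.array([10.0, 11.0]))})
print(apply_mapping(generic, {"RadVel": ["SOURCE_RADIAL_VELOCITY", ["time_ephemeris_axis"]]}))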
xradio/image/_util/_casacore/xds_from_casacore.py CHANGED
@@ -5,7 +5,7 @@ from typing import List, Tuple, Union

  import dask
  import dask.array as da
- import graphviper.utils.logger as logger
+ import toolviper.utils.logger as logger
  import numpy as np
  import xarray as xr
  from astropy import units as u
xradio/image/_util/_zarr/xds_from_zarr.py CHANGED
@@ -1,7 +1,7 @@
  import copy
  import dask.array as da

- # import graphviper.utils.logger as logger
+ # import toolviper.utils.logger as logger
  import numpy as np
  import os
  import xarray as xr
xradio/image/_util/_zarr/zarr_low_level.py CHANGED
@@ -112,7 +112,7 @@ def write_binary_blob_to_disk(arr, file_path, compressor):
      Returns:
      - None
      """
-     import graphviper.utils.logger as logger
+     import toolviper.utils.logger as logger

      # Encode the NumPy array using the codec
      logger.debug("1. Before compressor " + file_path)
xradio/vis/_vis_utils/_ms/_tables/load_main_table.py CHANGED
@@ -1,4 +1,4 @@
- import graphviper.utils.logger as logger
+ import toolviper.utils.logger as logger
  from typing import Dict, List, Tuple, Union

  import pandas as pd
xradio/vis/_vis_utils/_ms/_tables/read.py CHANGED
@@ -1,4 +1,4 @@
- import graphviper.utils.logger as logger
+ import toolviper.utils.logger as logger
  import os
  from pathlib import Path
  import re
xradio/vis/_vis_utils/_ms/_tables/read_main_table.py CHANGED
@@ -1,4 +1,4 @@
- import graphviper.utils.logger as logger
+ import toolviper.utils.logger as logger
  from typing import Any, Dict, List, Tuple, Union

  import dask, dask.array
xradio/vis/_vis_utils/_ms/_tables/read_subtables.py CHANGED
@@ -1,4 +1,4 @@
- import graphviper.utils.logger as logger
+ import toolviper.utils.logger as logger
  from pathlib import Path
  from typing import Dict, Tuple, Union

xradio/vis/_vis_utils/_ms/_tables/write.py CHANGED
@@ -1,4 +1,4 @@
- import graphviper.utils.logger as logger, os
+ import toolviper.utils.logger as logger, os
  from typing import Tuple

  import numpy as np
xradio/vis/_vis_utils/_ms/conversion.py CHANGED
@@ -2,7 +2,7 @@ import numcodecs
  import time
  from .._zarr.encoding import add_encoding
  from typing import Dict, Union
- import graphviper.utils.logger as logger
+ import toolviper.utils.logger as logger
  import os
  import pathlib

xradio/vis/_vis_utils/_ms/create_antenna_xds.py CHANGED
@@ -1,4 +1,4 @@
- import graphviper.utils.logger as logger
+ import toolviper.utils.logger as logger
  import time
  from typing import Tuple, Union

xradio/vis/_vis_utils/_ms/create_field_and_source_xds.py CHANGED
@@ -5,7 +5,7 @@ from typing import Tuple, Union
  import numpy as np
  import xarray as xr

- from xradio._utils.schema import column_description_casacore_to_msv4_measure
+ import toolviper.utils.logger as logger
  from xradio.vis._vis_utils._ms.msv4_sub_xdss import interpolate_to_time
  from xradio.vis._vis_utils._ms.subtables import subt_rename_ids
  from xradio.vis._vis_utils._ms._tables.read import (
@@ -13,13 +13,16 @@ from xradio.vis._vis_utils._ms._tables.read import (
      make_taql_where_between_min_max,
      load_generic_table,
  )
- import graphviper.utils.logger as logger
+ from xradio._utils.common import cast_to_str, convert_to_si_units, add_position_offsets
  from xradio._utils.list_and_array import (
      check_if_consistent,
      unique_1d,
      to_np_array,
  )
- from xradio._utils.common import cast_to_str, convert_to_si_units, add_position_offsets
+ from xradio._utils.schema import (
+     column_description_casacore_to_msv4_measure,
+     convert_generic_xds_to_xradio_schema,
+ )


  def create_field_and_source_xds(
@@ -176,16 +179,29 @@ def extract_ephemeris_info(
      else:
          unit_keyword = "QuantumUnits"

-     # We are using the "time_ephemeris_axis" label because it might not match the optional time axis of the source and field info. If ephemeris_interpolate=True then rename it to time.
-     coords = {
-         "ellipsoid_pos_label": ["lon", "lat", "dist"],
-         "time_ephemeris_axis": ephemeris_xds["time"].data,
-         "sky_pos_label": ["ra", "dec", "dist"],
-     }
-
      temp_xds = xr.Dataset()

-     # Add mandatory data: SOURCE_LOCATION (POSITION / sky_pos_label)
+     # Add mandatory data: OBSERVATION_POSITION
+     observation_position = [
+         ephemeris_meta["GeoLong"],
+         ephemeris_meta["GeoLat"],
+         ephemeris_meta["GeoDist"],
+     ]
+     temp_xds["OBSERVATION_POSITION"] = xr.DataArray(
+         observation_position, dims=["ellipsoid_pos_label"]
+     )
+     temp_xds["OBSERVATION_POSITION"].attrs.update(
+         {
+             "type": "location",
+             "units": ["deg", "deg", "m"],
+             "data": observation_position,
+             "ellipsoid": "WGS84",
+             "origin_object_name": "Earth",
+             "coordinate_system": ephemeris_meta["obsloc"].lower(),
+         }
+     )  # I think the units are ['deg','deg','m'] and 'WGS84'.
+
+     # Add (optional) data: SOURCE_LOCATION (POSITION / sky_pos_label)
      temp_xds["SOURCE_LOCATION"] = xr.DataArray(
          np.column_stack(
              (
@@ -206,71 +222,42 @@ def extract_ephemeris_info(
          {"type": "sky_coord", "frame": sky_coord_frame, "units": sky_coord_units}
      )

-     # Add mandatory data: SOURCE_RADIAL_VELOCITY
-     temp_xds["SOURCE_RADIAL_VELOCITY"] = xr.DataArray(
-         ephemeris_xds["RadVel"].data, dims=["time_ephemeris_axis"]
-     )
-     temp_xds["SOURCE_RADIAL_VELOCITY"].attrs.update(
-         {
-             "type": "quantity",
-             "units": [
-                 cast_to_str(
-                     ephemris_column_description["RadVel"]["keywords"][unit_keyword]
-                 )
-             ],
-         }
-     )
-
-     # Add mandatory data: OBSERVATION_POSITION
-     observation_position = [
-         ephemeris_meta["GeoLong"],
-         ephemeris_meta["GeoLat"],
-         ephemeris_meta["GeoDist"],
-     ]
-     temp_xds["OBSERVATION_POSITION"] = xr.DataArray(
-         observation_position, dims=["ellipsoid_pos_label"]
+     # Convert a few columns/variables that can be converted with standard
+     # convert_generic_xds_to_xradio_schema().
+     # Metadata has to be fixed manually. Alternatively, issues like
+     # UNIT/QuantumUnits issue could be handled in convert_generic_xds_to_xradio_schema,
+     # but for now preferring not to pollute that function.
+     to_new_data_variables = {
+         # mandatory: SOURCE_RADIAL_VELOCITY
+         "RadVel": ["SOURCE_RADIAL_VELOCITY", ["time_ephemeris_axis"]],
+         # optional: data NORTH_POLE_POSITION_ANGLE and NORTH_POLE_ANGULAR_DISTANCE
+         "NP_ang": ["NORTH_POLE_POSITION_ANGLE", ["time_ephemeris_axis"]],
+         "NP_dist": ["NORTH_POLE_ANGULAR_DISTANCE", ["time_ephemeris_axis"]],
+         # optional: HELIOCENTRIC_RADIAL_VELOCITY
+         "rdot": ["HELIOCENTRIC_RADIAL_VELOCITY", ["time_ephemeris_axis"]],
+         # optional: OBSERVER_PHASE_ANGLE
+         "phang": ["OBSERVER_PHASE_ANGLE", ["time_ephemeris_axis"]],
+     }
+     convert_generic_xds_to_xradio_schema(
+         ephemeris_xds, temp_xds, to_new_data_variables, {}
      )
-     temp_xds["OBSERVATION_POSITION"].attrs.update(
-         {
-             "type": "location",
-             "units": ["deg", "deg", "m"],
-             "data": observation_position,
-             "ellipsoid": "WGS84",
-             "origin_object_name": "Earth",
-             "coordinate_system": ephemeris_meta["obsloc"].lower(),
-         }
-     )  # I think the units are ['deg','deg','m'] and 'WGS84'.

-     # Add optional data NORTH_POLE_POSITION_ANGLE and NORTH_POLE_ANGULAR_DISTANCE
-     if "NP_ang" in ephemeris_xds.data_vars:
-         temp_xds["NORTH_POLE_POSITION_ANGLE"] = xr.DataArray(
-             ephemeris_xds["NP_ang"].data, dims=["time_ephemeris_axis"]
-         )
-         temp_xds["NORTH_POLE_POSITION_ANGLE"].attrs.update(
-             {
-                 "type": "quantity",
-                 "units": [
-                     cast_to_str(
-                         ephemris_column_description["NP_ang"]["keywords"][unit_keyword]
-                     )
-                 ],
-             }
-         )
-
-     if "NP_dist" in ephemeris_xds.data_vars:
-         temp_xds["NORTH_POLE_ANGULAR_DISTANCE"] = xr.DataArray(
-             ephemeris_xds["NP_dist"].data, dims=["time_ephemeris_axis"]
-         )
-         temp_xds["NORTH_POLE_ANGULAR_DISTANCE"].attrs.update(
-             {
-                 "type": "quantity",
-                 "units": [
-                     cast_to_str(
-                         ephemris_column_description["NP_dist"]["keywords"][unit_keyword]
-                     )
-                 ],
-             }
-         )
+     # Adjust metadata:
+     for generic_var_name, msv4_variable_def in to_new_data_variables.items():
+         msv4_var_name = msv4_variable_def[0]
+         if msv4_var_name in temp_xds:
+             temp_xds[msv4_var_name].attrs.update(
+                 {
+                     "type": "quantity",
+                     "units": [
+                         cast_to_str(
+                             ephemris_column_description[generic_var_name]["keywords"][
+                                 unit_keyword
+                             ]
+                         )
+                     ],
+                 }
+             )

      # Add optional data: SUB_OBSERVER_POSITION and SUB_SOLAR_POSITION
      if "DiskLong" in ephemris_column_description:
@@ -341,38 +328,12 @@ def extract_ephemeris_info(
              }
          )

-     # Add optional data: HELIOCENTRIC_RADIAL_VELOCITY
-     if "rdot" in ephemeris_xds.data_vars:
-         temp_xds["HELIOCENTRIC_RADIAL_VELOCITY"] = xr.DataArray(
-             ephemeris_xds["rdot"].data, dims=["time_ephemeris_axis"]
-         )
-         temp_xds["HELIOCENTRIC_RADIAL_VELOCITY"].attrs.update(
-             {
-                 "type": "quantity",
-                 "units": [
-                     cast_to_str(
-                         ephemris_column_description["rdot"]["keywords"][unit_keyword]
-                     )
-                 ],
-             }
-         )
-
-     # Add optional data: OBSERVER_PHASE_ANGLE
-     if "phang" in ephemeris_xds.data_vars:
-         temp_xds["OBSERVER_PHASE_ANGLE"] = xr.DataArray(
-             ephemeris_xds["phang"].data, dims=["time_ephemeris_axis"]
-         )
-         temp_xds["OBSERVER_PHASE_ANGLE"].attrs.update(
-             {
-                 "type": "quantity",
-                 "units": [
-                     cast_to_str(
-                         ephemris_column_description["phang"]["keywords"][unit_keyword]
-                     )
-                 ],
-             }
-         )
-
+     # We are using the "time_ephemeris_axis" label because it might not match the optional time axis of the source and field info. If ephemeris_interpolate=True then rename it to time.
+     coords = {
+         "ellipsoid_pos_label": ["lon", "lat", "dist"],
+         "time_ephemeris_axis": ephemeris_xds["time"].data,
+         "sky_pos_label": ["ra", "dec", "dist"],
+     }
      temp_xds = temp_xds.assign_coords(coords)
      time_coord_attrs = {
          "type": "time",
@@ -435,8 +396,74 @@ def extract_ephemeris_info(
      return xds


+ def make_line_dims_and_coords(
+     source_xds: xr.Dataset, source_id: Union[int, np.ndarray], num_lines: int
+ ) -> tuple[list, dict]:
+     """
+     Produces the dimensions and coordinates used in data variables related
+     to line information (LINE_REST_FREQUENCY, LINE_SYSTEMIC_VELOCITY).
+
+     In the dimensions, "time" is optional. To produce the points of the
+     coordinates we need to look into the (optional) TRANSITION column or
+     alternatively other columns (DIRECTION) to produce coordinates points of
+     appropriate shape, given the "num_lines" "and source_id".
+
+     Parameters:
+     ----------
+     source_xds: xr.Dataset
+         generic source xarray dataset
+     source_id: Union[int, np.ndarray]
+         source_id of the dataset, when it is an array that indicates the
+         presence of the "time" dimension
+     num_line: int
+         number of lines in the source dataset
+
+     Returns:
+     -------
+     tuple : tuple[list, dict]
+         The dimensions and coordinates to use with line data variables. The
+         dimensions are produced as a list of dimension names, and the
+         coordinates as a dict for xarray coords.
+     """
+
+     # Transition is an optional column and occasionally not populated
+     if "TRANSITION" in source_xds.data_vars:
+         transition_var_data = source_xds["TRANSITION"].data
+     else:
+         transition_var_data = np.zeros(source_xds["DIRECTION"].shape, dtype="str")
+
+     # if TRANSITION is left empty (or otherwise incomplete), and num_lines > 1,
+     # the data_vars expect a "num_lines" size in the last dimension
+     vars_shape = transition_var_data.shape[:-1] + (np.max(num_lines),)
+     if transition_var_data.shape == vars_shape:
+         coords_lines_data = transition_var_data
+     else:
+         coords_lines_data = np.broadcast_to(
+             transition_var_data, max(transition_var_data.shape, vars_shape)
+         )
+
+     line_label_data = np.arange(coords_lines_data.shape[-1]).astype(str)
+     if len(source_id) == 1:
+         line_coords = {
+             "line_name": ("line_label", coords_lines_data),
+             "line_label": line_label_data,
+         }
+         line_dims = ["line_label"]
+     else:
+         line_coords = {
+             "line_name": (("time", "line_label"), coords_lines_data),
+             "line_label": line_label_data,
+         }
+         line_dims = ["time", "line_label"]
+
+     return line_dims, line_coords
+
+
  def extract_source_info(
-     xds: xr.Dataset, path: str, source_id: int, spectral_window_id: int
+     xds: xr.Dataset,
+     path: str,
+     source_id: Union[int, np.ndarray],
+     spectral_window_id: int,
  ) -> tuple[xr.Dataset, int]:
      """
      Extracts source information from the given path and adds it to the xarray dataset.
@@ -447,7 +474,7 @@ def extract_source_info(
          The xarray dataset to which the source information will be added.
      path : str
          The path to the input file.
-     source_id : int
+     source_id : Union[int, np.ndarray]
          The ID of the source.
      spectral_window_id : int
          The ID of the spectral window.
@@ -554,11 +581,12 @@ def extract_source_info(
      direction_var = source_xds[direction_msv2_col]

      # SOURCE_LOCATION (DIRECTION / sky_dir_label)
-     xds["SOURCE_LOCATION"] = xr.DataArray(direction_var.data, dims=direction_dims)
      location_msv4_measure = column_description_casacore_to_msv4_measure(
          source_column_description[direction_msv2_col]
      )
-     xds["SOURCE_LOCATION"].attrs.update(location_msv4_measure)
+     xds["SOURCE_LOCATION"] = xr.DataArray(
+         direction_var.data, dims=direction_dims, attrs=location_msv4_measure
+     )

      # Do we have line data:
      if source_xds["NUM_LINES"].data.ndim == 0:
@@ -567,53 +595,21 @@ def extract_source_info(
          num_lines = source_xds["NUM_LINES"].data

      if any(num_lines > 0):
+         line_dims, line_coords = make_line_dims_and_coords(
+             source_xds, source_id, num_lines
+         )
+         xds = xds.assign_coords(line_coords)

-         # Transition is an optional column and occasionally not populated
-         if "TRANSITION" in source_xds.data_vars:
-             transition_var_data = source_xds["TRANSITION"].data
-         else:
-             transition_var_data = np.zeros(source_xds["DIRECTION"].shape, dtype="str")
-
-         # if TRANSITION is left empty (or otherwise incomplete), and num_lines > 1,
-         # the data_vars expect a "num_lines" size in the last dimension
-         vars_shape = transition_var_data.shape[:-1] + (np.max(num_lines),)
-         if transition_var_data.shape == vars_shape:
-             coords_lines_data = transition_var_data
-         else:
-             coords_lines_data = np.broadcast_to(
-                 transition_var_data, max(transition_var_data.shape, vars_shape)
-             )
-
-         line_label_data = np.arange(coords_lines_data.shape[-1]).astype(str)
-         if len(source_id) == 1:
-             coords_lines = {
-                 "line_name": ("line_label", coords_lines_data),
-                 "line_label": line_label_data,
-             }
-             xds = xds.assign_coords(coords_lines)
-             line_dims = ["line_label"]
-         else:
-             coords_lines = {
-                 "line_name": (("time", "line_label"), coords_lines_data),
-                 "line_label": line_label_data,
-             }
-             xds = xds.assign_coords(coords_lines)
-             line_dims = ["time", "line_label"]
-
-         optional_data_variables = {
-             "REST_FREQUENCY": "LINE_REST_FREQUENCY",
-             "SYSVEL": "LINE_SYSTEMIC_VELOCITY",
+         to_new_data_variables = {
+             "REST_FREQUENCY": ["LINE_REST_FREQUENCY", line_dims],
+             "SYSVEL": ["LINE_SYSTEMIC_VELOCITY", line_dims],
          }
-         for generic_name, msv4_name in optional_data_variables.items():
-             if generic_name in source_xds:
-                 msv4_measure = column_description_casacore_to_msv4_measure(
-                     source_column_description[generic_name]
-                 )
-
-                 xds[msv4_name] = xr.DataArray(
-                     source_xds[generic_name].data, dims=line_dims
-                 )
-                 xds[msv4_name].attrs.update(msv4_measure)
+         to_new_coords = {
+             "TIME": ["time", ["time"]],
+         }
+         convert_generic_xds_to_xradio_schema(
+             source_xds, xds, to_new_data_variables, to_new_coords
+         )

      # Need to add doppler info if present. Add check.
@@ -634,8 +630,56 @@ def extract_source_info(
      return xds, np.sum(num_lines[unique_source_ids_indices])


+ def make_field_dims_and_coords(
+     field_xds: xr.Dataset, field_id: Union[int, np.ndarray], field_times: list
+ ) -> tuple[list, dict]:
+     """
+     Produces the dimensions and coordinates used in the field data variables
+     extracted from the MSv2 FIELD subtable (FIELD_PHASE_CENTER/
+     FIELD_REFERENCE_CENTER).
+
+     Parameters:
+     ----------
+     field_xds: xr.Dataset
+         generic field xarray dataset
+     field_id: Union[int, np.ndarray]
+         field_id of the dataset
+     field_times:
+         Unique times for the dataset (when not partitioning by FIELD_ID)
+
+     Returns:
+     -------
+     tuple : tuple[list, dict]
+         The dimensions and coordinates to use with field data variables. The
+         dimensions are produced as a list of dimension names, and the
+         coordinates as a dict for xarray coords.
+     """
+
+     coords = {"sky_dir_label": ["ra", "dec"]}
+
+     # field_times is the same as the time axis in the main MSv4 dataset and is used if more than one field is present.
+     if field_times is not None:
+         coords["time"] = field_times
+         dims = ["time", "sky_dir_label"]
+         coords["field_name"] = (
+             "time",
+             np.char.add(field_xds["NAME"].data, np.char.add("_", field_id.astype(str))),
+         )
+         # coords["field_id"] = ("time", field_id)
+     else:
+         coords["field_name"] = field_xds["NAME"].values.item() + "_" + str(field_id)
+         # coords["field_id"] = field_id
+         dims = ["sky_dir_label"]
+
+     return dims, coords
+
+
  def extract_field_info_and_check_ephemeris(
-     field_and_source_xds, in_file, field_id, field_times, is_single_dish
+     field_and_source_xds: xr.Dataset,
+     in_file: str,
+     field_id: Union[int, np.ndarray],
+     field_times: list,
+     is_single_dish: bool,
  ):
      """
      Create field information and check for ephemeris in the FIELD table folder.
@@ -646,8 +690,12 @@ def extract_field_info_and_check_ephemeris(
          The xarray dataset to which the field and source information will be added.
      in_file : str
          The path to the input file.
-     field_id : int
+     field_id : Union[int, np.ndarray]
          The ID of the field.
+     field_times: list
+         Time of the MSv4
+     is_single_dish: bool
+         Whether to extract single dish (FIELD_REFERENCE_CENTER) info

      Returns:
      -------
@@ -658,7 +706,6 @@ def extract_field_info_and_check_ephemeris(
      ephemeris_table_name : str
          The name of the ephemeris table.
      """
-     coords = {}

      unique_field_id = unique_1d(
          field_id
@@ -718,6 +765,8 @@ def extract_field_info_and_check_ephemeris(
              f"Could not find ephemeris table for field_id {field_id}. Ephemeris information will not be included in the field_and_source_xds."
          )

+     dims, coords = make_field_dims_and_coords(field_xds, field_id, field_times)
+
      if is_single_dish:
          field_data_variables = {
              "REFERENCE_DIR": "FIELD_REFERENCE_CENTER",
@@ -729,27 +778,10 @@ def extract_field_info_and_check_ephemeris(
              # "REFERENCE_DIR": "FIELD_REFERENCE_CENTER",
          }

-     field_measures_type = "sky_coord"
-
-     coords["sky_dir_label"] = ["ra", "dec"]
      field_column_description = field_xds.attrs["other"]["msv2"]["ctds_attrs"][
          "column_descriptions"
      ]

-     # field_times is the same as the time axis in the main MSv4 dataset and is used if more than one field is present.
-     if field_times is not None:
-         coords["time"] = field_times
-         dims = ["time", "sky_dir_label"]
-         coords["field_name"] = (
-             "time",
-             np.char.add(field_xds["NAME"].data, np.char.add("_", field_id.astype(str))),
-         )
-         # coords["field_id"] = ("time", field_id)
-     else:
-         coords["field_name"] = field_xds["NAME"].values.item() + "_" + str(field_id)
-         # coords["field_id"] = field_id
-         dims = ["sky_dir_label"]
-
      for generic_name, msv4_name in field_data_variables.items():

          delay_dir_ref_col = "DelayDir_Ref"
@@ -772,6 +804,7 @@ def extract_field_info_and_check_ephemeris(
              }
          )

+         field_measures_type = "sky_coord"
          field_and_source_xds[msv4_name].attrs["type"] = field_measures_type

      field_and_source_xds = field_and_source_xds.assign_coords(coords)
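For reference, the new make_line_dims_and_coords() helper introduced above returns the dims and coords attached to LINE_REST_FREQUENCY and LINE_SYSTEMIC_VELOCITY. The snippet below is a toy illustration of the shape of that output when a time axis is present; the line names and frequencies are invented and it does not call xradio code.

# Toy illustration of the kind of coords/dims the helper returns when the
# source table has a time axis; names follow the diff, values are invented.
import numpy as np
import xarray as xr

coords_lines_data = np.array([["CO_1-0", "HCO+_1-0"], ["CO_1-0", "HCO+_1-0"]])
line_label_data = np.arange(coords_lines_data.shape[-1]).astype(str)

line_coords = {
    "line_name": (("time", "line_label"), coords_lines_data),
    "line_label": line_label_data,
}
line_dims = ["time", "line_label"]

xds = xr.Dataset(coords=line_coords)
xds["LINE_REST_FREQUENCY"] = xr.DataArray(
    np.array([[115.27e9, 89.19e9], [115.27e9, 89.19e9]]), dims=line_dims
)
print(xds)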
xradio/vis/_vis_utils/_ms/msv2_to_msv4_meta.py CHANGED
@@ -1,4 +1,4 @@
- import graphviper.utils.logger as logger
+ import toolviper.utils.logger as logger
  from xradio._utils.schema import column_description_casacore_to_msv4_measure

  col_to_data_variable_names = {
xradio/vis/_vis_utils/_ms/msv4_sub_xdss.py CHANGED
@@ -1,11 +1,14 @@
- import graphviper.utils.logger as logger
+ import toolviper.utils.logger as logger
  import time
  from typing import Tuple, Union

  import numpy as np
  import xarray as xr

- from xradio._utils.schema import column_description_casacore_to_msv4_measure
+ from xradio._utils.schema import (
+     column_description_casacore_to_msv4_measure,
+     convert_generic_xds_to_xradio_schema,
+ )
  from .subtables import subt_rename_ids
  from ._tables.read import make_taql_where_between_min_max, load_generic_table

@@ -70,37 +73,22 @@ def create_weather_xds(in_file: str):
      xr.Dataset
          Weather Xarray Dataset.
      """
-     # Dictionaries that define the conversion from MSv2 to MSv4:
-     # Dict from col/data_var names in generic_weather_xds (from MSv2) to MSV4
-     # weather_xds produced here
-     to_new_data_variable_names = {
-         "H2O": "H2O",
-         "IONOS_ELECTRON": "IONOS_ELECTRON",
-         "PRESSURE": "PRESSURE",
-         "REL_HUMIDITY": "REL_HUMIDITY",
-         "TEMPERATURE": "TEMPERATURE",
-         "DEW_POINT": "DEW_POINT",
-         "WIND_DIRECTION": "WIND_DIRECTION",
-         "WIND_SPEED": "WIND_SPEED",
-     }
-     data_variable_dims = {
-         "H2O": ["station_id", "time"],
-         "IONOS_ELECTRON": ["station_id", "time"],
-         "PRESSURE": ["station_id", "time"],
-         "REL_HUMIDITY": ["station_id", "time"],
-         "TEMPERATURE": ["station_id", "time"],
-         "DEW_POINT": ["station_id", "time"],
-         "WIND_DIRECTION": ["station_id", "time"],
-         "WIND_SPEED": ["station_id", "time"],
-     }
-     to_new_coord_names = {
-         # No MS data cols are turned into xds coords
-     }
-     coord_dims = {
-         # No MS data cols are turned into xds coords
+
+     dims_station_time = ["station_id", "time"]
+     to_new_data_variables = {
+         "H20": ["H2O", dims_station_time],
+         "IONOS_ELECTRON": ["IONOS_ELECTRON", dims_station_time],
+         "PRESSURE": ["PRESSURE", dims_station_time],
+         "REL_HUMIDITY": ["REL_HUMIDITY", dims_station_time],
+         "TEMPERATURE": ["TEMPERATURE", dims_station_time],
+         "DEW_POINT": ["DEW_POINT", dims_station_time],
+         "WIND_DIRECTION": ["WIND_DIRECTION", dims_station_time],
+         "WIND_SPEED": ["WIND_SPEED", dims_station_time],
      }
-     to_new_dim_names = {
-         "ANTENNA_ID": "STATION_ID",
+
+     to_new_coords = {
+         "ANTENNA_ID": ["station_id", ["station_id"]],
+         "TIME": ["time", ["time"]],
      }

      # Read WEATHER table into a Xarray Dataset.
@@ -113,74 +101,88 @@ def create_weather_xds(in_file: str):
      except ValueError as _exc:
          return None

-     generic_weather_xds = generic_weather_xds.rename_dims(to_new_dim_names)
-
-     weather_column_description = generic_weather_xds.attrs["other"]["msv2"][
-         "ctds_attrs"
-     ]["column_descriptions"]
-     # ['ANTENNA_ID', 'TIME', 'INTERVAL', 'H2O', 'IONOS_ELECTRON',
-     # 'PRESSURE', 'REL_HUMIDITY', 'TEMPERATURE', 'DEW_POINT',
-     # 'WIND_DIRECTION', 'WIND_SPEED']
      weather_xds = xr.Dataset(attrs={"type": "weather"})
-     time_attrs = column_description_casacore_to_msv4_measure(
-         weather_column_description["TIME"]
+     weather_xds = convert_generic_xds_to_xradio_schema(
+         generic_weather_xds, weather_xds, to_new_data_variables, to_new_coords
      )
-     coords = {
-         "station_id": generic_weather_xds["STATION_ID"].data,
-         "time": ("time", generic_weather_xds["TIME"].data, time_attrs),
-     }
-     for key in generic_weather_xds:
-         msv4_measure = column_description_casacore_to_msv4_measure(
-             weather_column_description[key.upper()]
-         )
-         if key in to_new_data_variable_names:
-             var_name = to_new_data_variable_names[key]
-             weather_xds[var_name] = xr.DataArray(
-                 generic_weather_xds[key].data, dims=data_variable_dims[key]
-             )
-
-             if msv4_measure:
-                 weather_xds[var_name].attrs.update(msv4_measure)
-
-             if key in ["INTERVAL"]:
-                 weather_xds[var_name].attrs.update({"units": ["s"], "type": "quantity"})
-             elif key in ["H2O"]:
-                 weather_xds[var_name].attrs.update(
-                     {"units": ["/m^2"], "type": "quantity"}
-                 )
-             elif key in ["IONOS_ELECTRON"]:
-                 weather_xds[var_name].attrs.update(
-                     {"units": ["/m^2"], "type": "quantity"}
-                 )
-             elif key in ["PRESSURE"]:
-                 weather_xds[var_name].attrs.update(
-                     {"units": ["Pa"], "type": "quantity"}
-                 )
-             elif key in ["REL_HUMIDITY"]:
-                 weather_xds[var_name].attrs.update({"units": ["%"], "type": "quantity"})
-             elif key in ["TEMPERATURE"]:
-                 weather_xds[var_name].attrs.update({"units": ["K"], "type": "quantity"})
-             elif key in ["DEW_POINT"]:
-                 weather_xds[var_name].attrs.update({"units": ["K"], "type": "quantity"})
-             elif key in ["WIND_DIRECTION"]:
-                 weather_xds[var_name].attrs.update(
-                     {"units": ["rad"], "type": "quantity"}
-                 )
-             elif key in ["WIND_SPEED"]:
-                 weather_xds[var_name].attrs.update(
-                     {"units": ["m/s"], "type": "quantity"}
-                 )
-
-         if key in to_new_coord_names:
-             coords[to_new_coord_names[key]] = (
-                 coord_dims[key],
-                 generic_weather_xds[key].data,
-             )
-
-     weather_xds = weather_xds.assign_coords(coords)
+
+     # correct expected types
+     weather_xds["station_id"] = weather_xds["station_id"].astype(np.int64)
+
      return weather_xds


+ def correct_generic_pointing_xds(
+     generic_pointing_xds: xr.Dataset, to_new_data_variables: dict[str, list]
+ ) -> xr.Dataset:
+     """
+     Takes a (generic) pointing_xds as read from a POINTING subtable of an MSv2
+     and tries to correct several deviations from the MSv2 specs seen in
+     common test data.
+     The problems fixed here include wrong dimensions:
+     - for example transposed dimensions with respect to the MSv2 specs (output
+     from CASA simulator),
+     - missing/additional unexpected dimensions when some of the columns are
+     empty (in the sense of "empty casacore cells").
+
+     This function modifies the data arrays of the data vars affected by such
+     issues.
+
+     Parameters
+     ----------
+     generic_pointing_xds: xr.Dataset
+         The generic pointing dataset (loaded from MSv2) to be fixed
+     to_new_data_variables: dict
+         The dict used for convert_generic_xds_to_xradio_schema, which gives all
+         the data variables relevant for the final MSv4 dataset.
+
+     Returns:
+     --------
+     xr.Dataset
+         Corrected dataset with dimensions conforming to MSv2 specs.
+     """
+
+     correct_pointing_xds = generic_pointing_xds.copy()
+
+     for key in generic_pointing_xds:
+         if key in to_new_data_variables:
+             data_var_name = to_new_data_variables[key]
+             # Corrects dim sizes of "empty cell" variables, such as empty DIRECTION, TARGET, etc.
+             if (
+                 "dim_2" in generic_pointing_xds.sizes
+                 and generic_pointing_xds.sizes["dim_2"] == 0
+             ):
+                 # When all direction variables are "empty"
+                 data_var_data = xr.DataArray(
+                     [[[[np.nan, np.nan]]]],
+                     dims=generic_pointing_xds.dims,
+                 ).isel(n_polynomial=0, drop=True)
+                 correct_pointing_xds[data_var_name].data = data_var_data
+
+             elif (
+                 "dir" in generic_pointing_xds.sizes
+                 and generic_pointing_xds.sizes["dir"] == 0
+             ):
+                 # When some direction variables are "empty" but some are populated properly
+                 if "dim_2" in generic_pointing_xds[key].sizes:
+                     data_var_data = xr.DataArray(
+                         generic_pointing_xds[key].values,
+                         dims=generic_pointing_xds[key].dims,
+                     )
+                 else:
+                     shape = tuple(
+                         generic_pointing_xds.sizes[dim]
+                         for dim in ["TIME", "ANTENNA_ID"]
+                     ) + (2,)
+                     data_var_data = xr.DataArray(
+                         np.full(shape, np.nan),
+                         dims=generic_pointing_xds[key].dims,
+                     )
+                 correct_pointing_xds[data_var_name].data = data_var_data
+
+     return correct_pointing_xds
+
+
  def create_pointing_xds(
      in_file: str,
      ant_xds_name_ids: xr.DataArray,
@@ -211,49 +213,43 @@ def create_pointing_xds(
      """
      start = time.time()

-     # Dictionaries that define the conversion from MSv2 to MSv4:
-     to_new_data_variable_names = {
-         # "name": "NAME", # removed
-         # "time_origin": "TIME_ORIGIN", # removed?
-         "DIRECTION": "BEAM_POINTING",
-         "ENCODER": "DISH_MEASURED_POINTING",
-         "TARGET": "TARGET", # => attribute?
-         "POINTING_OFFSET": "POINTING_OFFSET",
-         "SOURCE_OFFSET": "SOURCE_OFFSET",
-         # "pointing_model_id": "POINTING_MODEL_ID", # removed
-         # "tracking": "TRACKING", # => attribute
-         # "on_source": "ON_SOURCE", # removed
-         "OVER_THE_TOP": "OVER_THE_TOP",
-     }
      time_ant_dims = ["time", "antenna_name"]
      time_ant_dir_dims = time_ant_dims + ["sky_dir_label"]
-     data_variable_dims = {
-         # "name": ["time", "antenna_name"], # removed
-         # "time_origin": ["time", "antenna_name"], # removed?
-         "DIRECTION": time_ant_dir_dims,
-         "ENCODER": time_ant_dir_dims,
-         "TARGET": time_ant_dir_dims,
-         "POINTING_OFFSET": time_ant_dir_dims,
-         "SOURCE_OFFSET": time_ant_dir_dims,
-         # "pointing_model_id": ["time", "antenna_name"], # removed
-         # "tracking": ["time", "antenna_name"], # => attribute
-         # "on_source": ["time", "antenna_name"], # removed
-         "OVER_THE_TOP": time_ant_dims,
+     to_new_data_variables = {
+         "DIRECTION": ["BEAM_POINTING", time_ant_dir_dims],
+         "ENCODER": ["DISH_MEASURED_POINTING", time_ant_dir_dims],
+         # => attribute?
+         "TARGET": ["TARGET", time_ant_dir_dims],
+         "POINTING_OFFSET": ["POINTING_OFFSET", time_ant_dir_dims],
+         "SOURCE_OFFSET": ["SOURCE_OFFSET", time_ant_dir_dims],
+         "OVER_THE_TOP": ["OVER_THE_TOP", time_ant_dims],
+     }
+
+     to_new_coords = {
+         "TIME": ["time", ["time"]],
+         # "ANTENNA_ID": ["antenna_name", ["antenna_name"]],
+         "dim_2": ["sky_dir_label", ["sky_dir_label"]],
      }
-     # Unused here
-     # to_new_coord_names = {"ra/dec": "direction"}
-     # coord_dims = {}

      taql_time_range = make_taql_where_between_min_max(
          time_min_max, in_file, "POINTING", "TIME"
      )
+
+     if taql_time_range is None:
+         taql_where = f"WHERE (ANTENNA_ID IN [{','.join(map(str, ant_xds_name_ids.antenna_id.values))}])"
+     else:
+         taql_where = (
+             taql_time_range
+             + f" AND (ANTENNA_ID IN [{','.join(map(str, ant_xds_name_ids.antenna_id.values))}])"
+         )
      # Read POINTING table into a Xarray Dataset.
      generic_pointing_xds = load_generic_table(
          in_file,
          "POINTING",
          rename_ids=subt_rename_ids["POINTING"],
-         taql_where=taql_time_range,
+         taql_where=taql_where,
      )
+
      if not generic_pointing_xds.data_vars:
          # apparently empty MS/POINTING table => produce empty xds
          return xr.Dataset()
@@ -264,69 +260,23 @@ def create_pointing_xds(
      if size == 1:
          generic_pointing_xds = generic_pointing_xds.sel({"n_polynomial": 0})

-     pointing_column_descriptions = generic_pointing_xds.attrs["other"]["msv2"][
-         "ctds_attrs"
-     ]["column_descriptions"]
+     generic_pointing_xds = correct_generic_pointing_xds(
+         generic_pointing_xds, to_new_data_variables
+     )

      pointing_xds = xr.Dataset(attrs={"type": "pointing"})
-     for key in generic_pointing_xds:
-         if key in to_new_data_variable_names:
-             data_var_name = to_new_data_variable_names[key]
-             # Corrects dim sizes of "empty cell" variables, such as empty DIRECTION, TARGET, etc.
-             # TODO: this should be moved to a function when/if stable - perhaps 'correct_generic_pointing_xds'
-             if (
-                 "dim_2" in generic_pointing_xds.sizes
-                 and generic_pointing_xds.sizes["dim_2"] == 0
-             ):
-                 # When all direction variables are "empty"
-                 data_var_data = xr.DataArray(
-                     [[[[np.nan, np.nan]]]],
-                     dims=generic_pointing_xds.dims,
-                 ).isel(n_polynomial=0, drop=True)
-             elif (
-                 "dir" in generic_pointing_xds.sizes
-                 and generic_pointing_xds.sizes["dir"] == 0
-             ):
-                 # When some direction variables are "empty" but some are populated properly
-                 if "dim_2" in generic_pointing_xds[key].sizes:
-                     data_var_data = xr.DataArray(
-                         generic_pointing_xds[key].values,
-                         dims=generic_pointing_xds[key].dims,
-                     )
-                 else:
-                     shape = tuple(
-                         generic_pointing_xds.sizes[dim]
-                         for dim in ["TIME", "ANTENNA_ID"]
-                     ) + (2,)
-                     data_var_data = xr.DataArray(
-                         np.full(shape, np.nan),
-                         dims=generic_pointing_xds[key].dims,
-                     )
-             else:
-                 data_var_data = generic_pointing_xds[key].data
-
-             pointing_xds[data_var_name] = xr.DataArray(
-                 data_var_data, dims=data_variable_dims[key]
-             )
-
-             msv4_measure = column_description_casacore_to_msv4_measure(
-                 pointing_column_descriptions[key.upper()]
-             )
-             if msv4_measure:
-                 pointing_xds[data_var_name].attrs.update(msv4_measure)
-
      coords = {
-         "time": generic_pointing_xds["TIME"].values,
          "antenna_name": ant_xds_name_ids.sel(
              antenna_id=generic_pointing_xds["ANTENNA_ID"]
          ).data,
          "sky_dir_label": ["ra", "dec"],
      }
      pointing_xds = pointing_xds.assign_coords(coords)
+     pointing_xds = convert_generic_xds_to_xradio_schema(
+         generic_pointing_xds, pointing_xds, to_new_data_variables, to_new_coords
+     )

-     # missing attributes
-     pointing_xds["time"].attrs.update({"units": ["s"], "type": "quantity"})
-
+     # Add attributes specific to pointing_xds
      if "TRACKING" in generic_pointing_xds.data_vars:
          pointing_xds.attrs["tracking"] = generic_pointing_xds.data_vars[
              "TRACKING"
@@ -335,6 +285,10 @@ def create_pointing_xds(
      # Move target from data_vars to attributes?
      move_target_as_attr = False
      if move_target_as_attr:
+         pointing_column_descriptions = generic_pointing_xds.attrs["other"]["msv2"][
+             "ctds_attrs"
+         ]["column_descriptions"]
+
          target = generic_pointing_xds.data_vars["TARGET"]
          pointing_xds.attrs["target"] = {
              "dims": ["sky_dir_label"],
@@ -348,4 +302,5 @@ def create_pointing_xds(
          pointing_xds = interpolate_to_time(pointing_xds, interp_time, "pointing_xds")

      logger.debug(f"create_pointing_xds() execution time {time.time() - start:0.2f} s")
+
      return pointing_xds
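One behavioural change in create_pointing_xds() above is that the POINTING read is now restricted to the partition's antennas by extending the TaQL WHERE clause. The following is a standalone sketch of the string being built; the antenna ids and time range are made-up values, not taken from a real MS.

# Standalone sketch of the WHERE clause construction shown in the diff above;
# the ids and time range here are invented for illustration.
antenna_ids = [0, 1, 2]
taql_time_range = "WHERE TIME BETWEEN 5.03e9 AND 5.04e9"  # illustrative stand-in

antenna_clause = f"(ANTENNA_ID IN [{','.join(map(str, antenna_ids))}])"
if taql_time_range is None:
    taql_where = f"WHERE {antenna_clause}"
else:
    taql_where = f"{taql_time_range} AND {antenna_clause}"

print(taql_where)
# WHERE TIME BETWEEN 5.03e9 AND 5.04e9 AND (ANTENNA_ID IN [0,1,2])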
xradio/vis/_vis_utils/_ms/partition_queries.py CHANGED
@@ -1,5 +1,5 @@
  import itertools
- import graphviper.utils.logger as logger
+ import toolviper.utils.logger as logger
  from pathlib import Path
  from typing import Dict, List, Tuple, Union

xradio/vis/_vis_utils/_ms/partitions.py CHANGED
@@ -1,4 +1,4 @@
- import graphviper.utils.logger as logger
+ import toolviper.utils.logger as logger
  from typing import Any, Dict, List, Tuple, Union

  import numpy as np
xradio/vis/_vis_utils/_ms/subtables.py CHANGED
@@ -1,6 +1,6 @@
  import os

- import graphviper.utils.logger as logger
+ import toolviper.utils.logger as logger

  from pathlib import Path
  from typing import Dict, List
xradio/vis/_vis_utils/_utils/xds_helper.py CHANGED
@@ -1,5 +1,5 @@
  from importlib.metadata import version
- import graphviper.utils.logger as logger, multiprocessing, psutil
+ import toolviper.utils.logger as logger, multiprocessing, psutil
  from typing import Any, Dict, List, Tuple, Union

  import numpy as np
xradio/vis/_vis_utils/_zarr/read.py CHANGED
@@ -1,4 +1,4 @@
- import graphviper.utils.logger as logger
+ import toolviper.utils.logger as logger
  import os
  from pathlib import Path
  from typing import Dict, List, Tuple, Union
xradio/vis/_vis_utils/_zarr/write.py CHANGED
@@ -1,4 +1,4 @@
- import graphviper.utils.logger as logger, numcodecs, os, time, warnings
+ import toolviper.utils.logger as logger, numcodecs, os, time, warnings
  from itertools import cycle
  from pathlib import Path
  from typing import Any, Dict, Union
xradio/vis/_vis_utils/ms.py CHANGED
@@ -1,5 +1,5 @@
  import os
- import graphviper.utils.logger as logger
+ import toolviper.utils.logger as logger
  from typing import List, Tuple, Union

  from ._utils.cds import CASAVisSet
xradio/vis/_vis_utils/zarr.py CHANGED
@@ -3,7 +3,7 @@ from pathlib import Path
  from typing import Dict, Union

  import zarr
- import graphviper.utils.logger as logger
+ import toolviper.utils.logger as logger

  from ._utils.cds import CASAVisSet
  from ._zarr.read import read_part_keys, read_partitions, read_subtables
xradio/vis/convert_msv2_to_processing_set.py CHANGED
@@ -1,4 +1,4 @@
- import graphviper.utils.logger as logger
+ import toolviper.utils.logger as logger
  import numcodecs
  from typing import Dict, Union

xradio/vis/read_processing_set.py CHANGED
@@ -1,7 +1,7 @@
  import os

  from ._processing_set import processing_set
- import graphviper.utils.logger as logger
+ import toolviper.utils.logger as logger
  from xradio._utils.zarr.common import _open_dataset, _get_file_system_and_items
  import s3fs

{xradio-0.0.39.dist-info → xradio-0.0.40.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: xradio
- Version: 0.0.39
+ Version: 0.0.40
  Summary: Xarray Radio Astronomy Data IO
  Author-email: Jan-Willem Steeb <jsteeb@nrao.edu>
  License: BSD 3-Clause License
@@ -44,6 +44,7 @@ Requires-Dist: astropy
  Requires-Dist: dask
  Requires-Dist: distributed
  Requires-Dist: graphviper
+ Requires-Dist: toolviper
  Requires-Dist: matplotlib
  Requires-Dist: numba >=0.57.0
  Requires-Dist: numpy
{xradio-0.0.39.dist-info → xradio-0.0.40.dist-info}/RECORD CHANGED
@@ -1,8 +1,8 @@
- xradio/__init__.py,sha256=DtgOh81X3fT7HXf2_JDhJqemmDff7vGANECaRl03pu8,383
+ xradio/__init__.py,sha256=WHBhQWQie3YQqfIxQBL3LKiKuUcN7ZL7sPMEcdWOp5E,382
  xradio/_utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  xradio/_utils/common.py,sha256=GY_fjet9wLhnx9XUINTq6ONBoceyLWRXHXlrNla3j3o,3726
  xradio/_utils/list_and_array.py,sha256=_wznOiHra1pHrWNdtQPXhk-TAXJ8qb9fTGE0czQNpIo,2802
- xradio/_utils/schema.py,sha256=Y-qVcxbRk4aFM-JIECzaQTia62wA-dSoGhVQF45XomU,6462
+ xradio/_utils/schema.py,sha256=tdGyvQTd-rQivtkzmH9-1f7KMIB9KeSf57EN2v7Mw_o,6855
  xradio/_utils/_casacore/tables.py,sha256=aq6E_4RRAHdTBCwMKrVil1cWhFU2O980DNH9IlRKXLw,1280
  xradio/_utils/zarr/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  xradio/_utils/zarr/common.py,sha256=egj3Zma0BUK0msOBDozMa-62rHrcxrjCNE5XkkZUq70,5332
@@ -16,13 +16,13 @@ xradio/image/_util/image_factory.py,sha256=6tPzs20FTm2wEshHc1xqtTV7D0TbKxGLUKAVt
  xradio/image/_util/zarr.py,sha256=lhQqVRC1GEWClG3zRbuDr2IlQBfXeDqaLUJIN-MVMxA,1652
  xradio/image/_util/_casacore/__init__.py,sha256=OlsiRE40o1jSbBI4khgQQzgfDYbAlOMKIhO4UFlbGhg,41
  xradio/image/_util/_casacore/common.py,sha256=ky999eTCWta8w-uIs-7P7rPhZRLuh9yTuQXAxPvaPm4,1579
- xradio/image/_util/_casacore/xds_from_casacore.py,sha256=Rht4A32QLAQ7uizwKfOsZ3Z819shvlUbsZfAuTIVerU,42562
+ xradio/image/_util/_casacore/xds_from_casacore.py,sha256=4puiE2_1-7Y1-0yK7L8FYyDp5Ab8hEhDjLvoXiGMx9E,42561
  xradio/image/_util/_casacore/xds_to_casacore.py,sha256=P6c-yoOjuVQkm07ApA7FFKfje4aPwV-MsRFKaRaPq9I,15338
  xradio/image/_util/_fits/xds_from_fits.py,sha256=kpZ2gdbDQtoUD1DdQZymup5OVnB3e73D66LruLYESsw,28445
  xradio/image/_util/_zarr/common.py,sha256=apMX_bF4Hr3pFGjnDFpp36KgmhTYAPBZquNkjBHrsXk,307
- xradio/image/_util/_zarr/xds_from_zarr.py,sha256=QfDJC1_7FQw0ixFiBq-3CSgmQMv_1tw4sdv0zcOWeDs,3602
+ xradio/image/_util/_zarr/xds_from_zarr.py,sha256=4b6KHmAcnrhBbCi-Z7e3Lm6l6wziJL1zaNIohmPAYDk,3601
  xradio/image/_util/_zarr/xds_to_zarr.py,sha256=wogXbwX8n3Sl9PHoc3_Y_LBowQsQ-94HZQFZ5NcxUZA,1624
- xradio/image/_util/_zarr/zarr_low_level.py,sha256=yl8An3eW_wOpgFdMyuYEt5YiIH6Y7UH8CH5KaXCMMq4,12787
+ xradio/image/_util/_zarr/zarr_low_level.py,sha256=_skL70DhtmVjmxRJsfZaospt3rndB9l-2QoHFF1kAKE,12786
  xradio/schema/__init__.py,sha256=EzEMnOtN8G_wdjo8QBRKfq5MrYgfr_nt1pfunlI6i6Q,733
  xradio/schema/bases.py,sha256=5BiE6gAq2xmaJEyiaGbpCSoNek83ly9f0R0Rv1rx9DM,17081
  xradio/schema/check.py,sha256=Quugw5pC-c3c6C_JujSOQSfU_XVFIwymmg8wM0IZtXY,18521
@@ -31,43 +31,43 @@ xradio/schema/metamodel.py,sha256=WjtW7pAVzcjLRWifRH3sQoOiN6TV810hARpOIz1M_gw,38
  xradio/schema/typing.py,sha256=8-o6fZd99kJ4FVdgBYRTIRJ-wDqpcUNXzCTfJvl3TIw,10439
  xradio/vis/__init__.py,sha256=oOaHEnHp799cu72Zwo4WUQdrgFE2JCy-O0XVhaz3C2w,381
  xradio/vis/_processing_set.py,sha256=rvPAU5ksfODDDaJQBzBlZaHQDSDseV6UqCxN2Qa5gxs,12097
- xradio/vis/convert_msv2_to_processing_set.py,sha256=ZgnuOTXGKTlOG9fhuCN1uaTp4RPRbqUuBr0V3MBxS5A,5907
+ xradio/vis/convert_msv2_to_processing_set.py,sha256=x5P9P8BZiK243iFi5kjn3i4gMgaGbozSN1sh1bgyYNI,5906
  xradio/vis/load_processing_set.py,sha256=sQmKG04grVn9jqOrhGc8K6B2-tk65NR0GBW8I8W62cI,5755
- xradio/vis/read_processing_set.py,sha256=hDbbHk4sSUH0f6HKpV_yy7ZfcWR2d9kYQE9KItyLXeY,4171
+ xradio/vis/read_processing_set.py,sha256=8h1UDXFZqwObs6J6p5ef0FSA9ILW3A6I_6HMbQrbRFc,4170
  xradio/vis/schema.py,sha256=pQelyv3f6uIW12UKT_sB8KETyXpMX8klPeUOpq4dyd8,44858
  xradio/vis/_vis_utils/__init__.py,sha256=Scu6rKJ2SpO8aG7F-xdTZcYfyWx0viV8gFh8E8ur_gI,93
- xradio/vis/_vis_utils/ms.py,sha256=hkJHFDC6O4PNhqDALX7CJ-krEpF9vxu-N_ur-UXi0Zo,4311
- xradio/vis/_vis_utils/zarr.py,sha256=OYCOdUVUE-KbG0-OZKIbD0bFpOy1tt2YqiQMStVzHDs,3948
+ xradio/vis/_vis_utils/ms.py,sha256=m7cYGZGbdCD3uSPe1BldgRurZ4Y-K3vJr6aePUVyGjE,4310
+ xradio/vis/_vis_utils/zarr.py,sha256=d7T7P2rLzHTvoH7jh4FsiqkFq-PHsxRHUhM4mxbZI9M,3947
  xradio/vis/_vis_utils/_ms/chunks.py,sha256=JTPk3il6fk570BjWZMoOAtsbvnLmqPcBv9EPY6A2yOs,2964
- xradio/vis/_vis_utils/_ms/conversion.py,sha256=gCcOnVbbaFYyuD87shOjedfi1CTIkq3V7IX9T68A5Gg,38890
- xradio/vis/_vis_utils/_ms/create_antenna_xds.py,sha256=bNqqOwkm0hNz29ZhSNQQGg7BY9_Iq8ryqvAP4Dk7o1g,17182
- xradio/vis/_vis_utils/_ms/create_field_and_source_xds.py,sha256=3fhz0pM6KWM-YOqSCLAXaG5kVPwaW0shSQr6kLYWv5g,29776
+ xradio/vis/_vis_utils/_ms/conversion.py,sha256=KTpv5CaPYVsqu3WQI-j4yZiNL3NWn6sev905tAyzq8I,38889
+ xradio/vis/_vis_utils/_ms/create_antenna_xds.py,sha256=WMX1Av328Et0o56pTcva5tYSOFlu-mBxhJJHp33rb3k,17181
+ xradio/vis/_vis_utils/_ms/create_field_and_source_xds.py,sha256=d_WlX_xA33rpHS8Dhqct8HmczZfteA2W7CHC1EPuZEw,30897
  xradio/vis/_vis_utils/_ms/descr.py,sha256=dYK8mhXxODIh-dfqaOm-YZb7kmoN1N2golX_RFncO94,5215
  xradio/vis/_vis_utils/_ms/msv2_msv3.py,sha256=9AKs2HWly7Ivv_Cjr11dIPGmm33_rtSBoGF9wN5ZwEQ,116
- xradio/vis/_vis_utils/_ms/msv2_to_msv4_meta.py,sha256=ztPFLbzcBSYXOMRIkSQmnXOr1JIkrfFGwbmxGmdmuRQ,1615
+ xradio/vis/_vis_utils/_ms/msv2_to_msv4_meta.py,sha256=gk9gU7g2Lk7dmaiLW8qecOEt574pRtGsCHnUnHXM3D0,1614
  xradio/vis/_vis_utils/_ms/msv4_infos.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- xradio/vis/_vis_utils/_ms/msv4_sub_xdss.py,sha256=CABRBI0LyOX23DRYgTBPOugBVnKw6CPBVkbKy_ku3a8,13018
+ xradio/vis/_vis_utils/_ms/msv4_sub_xdss.py,sha256=DaLIqC05AZbkdc-XiahesYwfRFyLXNlWW4DoBv72d5I,10649
  xradio/vis/_vis_utils/_ms/optimised_functions.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- xradio/vis/_vis_utils/_ms/partition_queries.py,sha256=ttVtkrykZekATEhyg7U-2CrIdpc-bvAgWAzMN3UMvAM,14574
- xradio/vis/_vis_utils/_ms/partitions.py,sha256=_HSXNb6DJZwax3fLjD9FVCt2S_s3Q7U0SI9g2yDRKFY,12980
- xradio/vis/_vis_utils/_ms/subtables.py,sha256=ZsqVNjXjTDz5dxQnLPzE_GOjk1AassT38ivUSrz1-d4,3944
+ xradio/vis/_vis_utils/_ms/partition_queries.py,sha256=hEe3VAzGqtEPg3X4GUhvHaxyesjS6WVy5et421qSdZg,14573
+ xradio/vis/_vis_utils/_ms/partitions.py,sha256=_KhRq8bSx2QxuWp9K57fLoLxcU6kvJ35e6wvJ-THbwc,12979
+ xradio/vis/_vis_utils/_ms/subtables.py,sha256=6--dh6betXhlxGejzcJJd8bYdiNkl5ppzLoyorofZ6c,3943
  xradio/vis/_vis_utils/_ms/_tables/load.py,sha256=IR3fdKlq8rgH4bHmB1JTtB5gSGuITIvErJEVjUA8rWM,1799
- xradio/vis/_vis_utils/_ms/_tables/load_main_table.py,sha256=XnAQ-3Bi5YqClEM7Pqfy5ZSJUlXOsHjsYNWlBQjv_UM,14799
- xradio/vis/_vis_utils/_ms/_tables/read.py,sha256=gagdeV_50tF5AKqUYqNcWPYdo1kf83JuxNvmOee18eg,42556
- xradio/vis/_vis_utils/_ms/_tables/read_main_table.py,sha256=N74HnRJb0yRD-a38ee65ZYXAHaRZeTeZT0zuUK0taD8,26057
- xradio/vis/_vis_utils/_ms/_tables/read_subtables.py,sha256=b4qP7U6VIN2Oo6KMjGBZD_atCZ8D6LWzkpah9IA3jsk,12439
+ xradio/vis/_vis_utils/_ms/_tables/load_main_table.py,sha256=_Ov4cs5t06RqfmrAHZoYwpb8K27b7iqjIkCMsJ0TR6o,14798
+ xradio/vis/_vis_utils/_ms/_tables/read.py,sha256=ZrDHvUoyiAJu6P_2LIpedZgrDuWBwMi99RdMHrj6wyY,42555
+ xradio/vis/_vis_utils/_ms/_tables/read_main_table.py,sha256=8AbNt-AxrhPK3EPRa7xqJXffxzIgfVsv1BDfoVJEXLU,26056
+ xradio/vis/_vis_utils/_ms/_tables/read_subtables.py,sha256=JM6pGUQtjQR881u9VqakmbJjppEFq-EVKnEZ14JqnAw,12438
  xradio/vis/_vis_utils/_ms/_tables/table_query.py,sha256=q8EGFf_zIwHcHnvFJOn8hPh8zFZQ3f7BGbXvL3bHad4,555
- xradio/vis/_vis_utils/_ms/_tables/write.py,sha256=41zA8JdNVzsvfZPpAvwgI4YbOlwWUDTRpHUexGf-0zA,9541
+ xradio/vis/_vis_utils/_ms/_tables/write.py,sha256=43XQ-tHhbhex0eUTRknNpPEEOnNR-w1lGCox9WZ9NHE,9540
  xradio/vis/_vis_utils/_ms/_tables/write_exp_api.py,sha256=GDEll8nMwkQGc6vosu4UddFL5_ld7WurRgF9hYFTRmU,15511
  xradio/vis/_vis_utils/_utils/cds.py,sha256=OpvKowSheIthUbcPEv2AoKmxlEt3DqJZS5C1AYh5z10,1179
  xradio/vis/_vis_utils/_utils/partition_attrs.py,sha256=JaePHts_A0EbB4K-0a_uC98RZ2EmfjB9pDSEI11oAwk,3401
  xradio/vis/_vis_utils/_utils/stokes_types.py,sha256=DMa8TmmS7BQ99Xm8c7ZjcRapMtLbrKVxrt4f0qUIOvg,561
- xradio/vis/_vis_utils/_utils/xds_helper.py,sha256=DYYCuaQqOWfS2ykuyoXAPbmX72_roSEHyK3yrV9sGC4,13219
+ xradio/vis/_vis_utils/_utils/xds_helper.py,sha256=jZWR55e574j9NJ4Yf5HdHD7J7QtKdXbXStbblQ66WdE,13218
  xradio/vis/_vis_utils/_zarr/encoding.py,sha256=GENIlThV6a9CUCL6gIGlu9c6NR3OFWNos6mpxZjMwDc,536
- xradio/vis/_vis_utils/_zarr/read.py,sha256=9UKX_kKztE5jhsriBPnLhWtbptk5DRSVUEHT8V5qh8w,7597
- xradio/vis/_vis_utils/_zarr/write.py,sha256=vebYh-rAKfY3Feuv6H400s-2UuciPtfZMIlMtb23YSk,10044
- xradio-0.0.39.dist-info/LICENSE.txt,sha256=9CYIJt7riOXo9AD0eXBZviLxo_HebD-2JJI8oiWtzfg,1807
- xradio-0.0.39.dist-info/METADATA,sha256=rK20i5YCzn_dFrIdjQEXE-1z7RhLAAcpuVfW97Z_JiY,4418
- xradio-0.0.39.dist-info/WHEEL,sha256=uCRv0ZEik_232NlR4YDw4Pv3Ajt5bKvMH13NUU7hFuI,91
- xradio-0.0.39.dist-info/top_level.txt,sha256=dQu27fGBZJ2Yk-gW5XeD-dZ76Xa4Xcvk60Vz-dwXp7k,7
- xradio-0.0.39.dist-info/RECORD,,
+ xradio/vis/_vis_utils/_zarr/read.py,sha256=O9DiwD2Gn8WiatQ-Q6WGGSwjsXwFktG4f81lM-mgcSg,7596
+ xradio/vis/_vis_utils/_zarr/write.py,sha256=k5IfqtI44Dm4KBDiKFGhL5hN7kwNOulvVHmeP5Mi7N4,10043
+ xradio-0.0.40.dist-info/LICENSE.txt,sha256=9CYIJt7riOXo9AD0eXBZviLxo_HebD-2JJI8oiWtzfg,1807
+ xradio-0.0.40.dist-info/METADATA,sha256=JSY0V_0Zij1K06oFq-4U_z8rRIZCDFsbBadezmVS-jM,4443
+ xradio-0.0.40.dist-info/WHEEL,sha256=cVxcB9AmuTcXqmwrtPhNK88dr7IR_b6qagTj0UvIEbY,91
+ xradio-0.0.40.dist-info/top_level.txt,sha256=dQu27fGBZJ2Yk-gW5XeD-dZ76Xa4Xcvk60Vz-dwXp7k,7
+ xradio-0.0.40.dist-info/RECORD,,
{xradio-0.0.39.dist-info → xradio-0.0.40.dist-info}/WHEEL CHANGED
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (74.1.1)
+ Generator: setuptools (74.1.2)
  Root-Is-Purelib: true
  Tag: py3-none-any
