xradio 0.0.58.tar.gz → 0.0.59.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (71)
  1. {xradio-0.0.58/src/xradio.egg-info → xradio-0.0.59}/PKG-INFO +19 -5
  2. {xradio-0.0.58 → xradio-0.0.59}/README.md +19 -5
  3. {xradio-0.0.58 → xradio-0.0.59}/pyproject.toml +1 -1
  4. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/measurement_set/_utils/_msv2/conversion.py +51 -11
  5. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/measurement_set/_utils/_msv2/create_field_and_source_xds.py +3 -1
  6. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/measurement_set/_utils/_msv2/msv4_sub_xdss.py +4 -2
  7. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/measurement_set/measurement_set_xdt.py +6 -2
  8. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/measurement_set/processing_set_xdt.py +13 -6
  9. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/measurement_set/schema.py +27 -10
  10. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/schema/check.py +4 -4
  11. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/schema/metamodel.py +1 -1
  12. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/sphinx/schema_table.py +41 -77
  13. {xradio-0.0.58 → xradio-0.0.59/src/xradio.egg-info}/PKG-INFO +19 -5
  14. {xradio-0.0.58 → xradio-0.0.59}/LICENSE.txt +0 -0
  15. {xradio-0.0.58 → xradio-0.0.59}/MANIFEST.in +0 -0
  16. {xradio-0.0.58 → xradio-0.0.59}/setup.cfg +0 -0
  17. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/__init__.py +0 -0
  18. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/_utils/__init__.py +0 -0
  19. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/_utils/_casacore/casacore_from_casatools.py +0 -0
  20. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/_utils/_casacore/tables.py +0 -0
  21. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/_utils/coord_math.py +0 -0
  22. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/_utils/dict_helpers.py +0 -0
  23. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/_utils/list_and_array.py +0 -0
  24. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/_utils/schema.py +0 -0
  25. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/_utils/zarr/__init__.py +0 -0
  26. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/_utils/zarr/common.py +0 -0
  27. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/image/__init__.py +0 -0
  28. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/image/_util/__init__.py +0 -0
  29. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/image/_util/_casacore/__init__.py +0 -0
  30. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/image/_util/_casacore/common.py +0 -0
  31. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/image/_util/_casacore/xds_from_casacore.py +0 -0
  32. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/image/_util/_casacore/xds_to_casacore.py +0 -0
  33. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/image/_util/_fits/xds_from_fits.py +0 -0
  34. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/image/_util/_zarr/common.py +0 -0
  35. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/image/_util/_zarr/xds_from_zarr.py +0 -0
  36. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/image/_util/_zarr/xds_to_zarr.py +0 -0
  37. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/image/_util/_zarr/zarr_low_level.py +0 -0
  38. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/image/_util/casacore.py +0 -0
  39. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/image/_util/common.py +0 -0
  40. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/image/_util/image_factory.py +0 -0
  41. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/image/_util/zarr.py +0 -0
  42. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/image/image.py +0 -0
  43. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/measurement_set/__init__.py +0 -0
  44. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/measurement_set/_utils/__init__.py +0 -0
  45. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/measurement_set/_utils/_msv2/__init__.py +0 -0
  46. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/measurement_set/_utils/_msv2/_tables/read.py +0 -0
  47. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/measurement_set/_utils/_msv2/_tables/read_main_table.py +0 -0
  48. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/measurement_set/_utils/_msv2/_tables/table_query.py +0 -0
  49. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/measurement_set/_utils/_msv2/create_antenna_xds.py +0 -0
  50. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/measurement_set/_utils/_msv2/msv2_to_msv4_meta.py +0 -0
  51. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/measurement_set/_utils/_msv2/msv4_info_dicts.py +0 -0
  52. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/measurement_set/_utils/_msv2/optimised_functions.py +0 -0
  53. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/measurement_set/_utils/_msv2/partition_queries.py +0 -0
  54. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/measurement_set/_utils/_msv2/subtables.py +0 -0
  55. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/measurement_set/_utils/_utils/interpolate.py +0 -0
  56. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/measurement_set/_utils/_utils/partition_attrs.py +0 -0
  57. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/measurement_set/_utils/_utils/stokes_types.py +0 -0
  58. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/measurement_set/_utils/_zarr/encoding.py +0 -0
  59. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/measurement_set/convert_msv2_to_processing_set.py +0 -0
  60. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/measurement_set/load_processing_set.py +0 -0
  61. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/measurement_set/open_processing_set.py +0 -0
  62. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/schema/__init__.py +0 -0
  63. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/schema/bases.py +0 -0
  64. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/schema/dataclass.py +0 -0
  65. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/schema/export.py +0 -0
  66. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/schema/typing.py +0 -0
  67. {xradio-0.0.58 → xradio-0.0.59}/src/xradio/sphinx/__init__.py +0 -0
  68. {xradio-0.0.58 → xradio-0.0.59}/src/xradio.egg-info/SOURCES.txt +0 -0
  69. {xradio-0.0.58 → xradio-0.0.59}/src/xradio.egg-info/dependency_links.txt +0 -0
  70. {xradio-0.0.58 → xradio-0.0.59}/src/xradio.egg-info/requires.txt +0 -0
  71. {xradio-0.0.58 → xradio-0.0.59}/src/xradio.egg-info/top_level.txt +0 -0
{xradio-0.0.58/src/xradio.egg-info → xradio-0.0.59}/PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: xradio
- Version: 0.0.58
+ Version: 0.0.59
  Summary: Xarray Radio Astronomy Data IO
  Author-email: Jan-Willem Steeb <jsteeb@nrao.edu>, Federico Montesino Pouzols <pouzols@eso.edu>, Dave Mehringer <dmehring@nrao.edu>, Peter Wortmann <peter.wortmann@skao.int>
  License: BSD 3-Clause License
@@ -108,25 +108,32 @@ Xarray Radio Astronomy Data IO is still in development.
  [![Version Status](https://img.shields.io/pypi/v/xradio.svg)](https://pypi.python.org/pypi/xradio/)

  # Installing
- It is recommended to use the conda environment manager from [miniforge](https://github.com/conda-forge/miniforge) to create a clean, self-contained runtime where XRADIO and all its dependencies can be installed:
+ XRADIO can be installed in virtual environments via pip. It is recommended to use the conda environment manager from [miniforge](https://github.com/conda-forge/miniforge) to create a clean, self-contained runtime where XRADIO and all its dependencies can be installed, for example:
  ```sh
  conda create --name xradio python=3.12 --no-default-packages
  conda activate xradio
  ```
+ > 📝 On macOS it is required to pre-install `python-casacore` using `conda install -c conda-forge python-casacore`.
+
  XRADIO can now be installed using:
  ```sh
  pip install xradio
  ```
- This will also install the minimal dependencies for XRADIO. To install the minimal dependencies and the interactive components (JupyterLab) use:
+ This will also install the minimal dependencies for XRADIO.
+
+ Note that if only the minimal dependencies are installed, the functionality to convert MSv2 to MSv4 will not be available.
+ This requires installing `python-casacore` (also included in the `all` group, see below), or alternatively the
+ `casatools` backend, as explained in the [casatools I/O backend guide](docs/source/measurement_set/guides/backends.md).
+
+ To install the minimal dependencies and the interactive components (JupyterLab) use:
  ```sh
  pip install "xradio[interactive]"
  ```
+
  To enable conversion from MSv2 to MSv4 use (this only works for Linux):
  ```sh
  pip install "xradio[python-casacore]"
  ```
- > 📝 On macOS it is required to pre-install `python-casacore` using `conda install -c conda-forge python-casacore`.
-
  To be able to run tests:
  ```sh
  pip install "xradio[test]"
@@ -135,3 +142,10 @@ Multiple-dependencies can be installed using:
  ```sh
  pip install "xradio[interactive,python-casacore,test]"
  ```
+
+ To install a more complete set of dependencies:
+ ```sh
+ pip install "xradio[all]"
+ ```
+ This will include the dependencies required to run the interactive Jupyter notebooks, run tests, build documentation,
+ and python-casacore to enable MSv2=>MSv4 functionality.
{xradio-0.0.58 → xradio-0.0.59}/README.md

@@ -10,25 +10,32 @@ Xarray Radio Astronomy Data IO is still in development.
  [![Version Status](https://img.shields.io/pypi/v/xradio.svg)](https://pypi.python.org/pypi/xradio/)

  # Installing
- It is recommended to use the conda environment manager from [miniforge](https://github.com/conda-forge/miniforge) to create a clean, self-contained runtime where XRADIO and all its dependencies can be installed:
+ XRADIO can be installed in virtual environments via pip. It is recommended to use the conda environment manager from [miniforge](https://github.com/conda-forge/miniforge) to create a clean, self-contained runtime where XRADIO and all its dependencies can be installed, for example:
  ```sh
  conda create --name xradio python=3.12 --no-default-packages
  conda activate xradio
  ```
+ > 📝 On macOS it is required to pre-install `python-casacore` using `conda install -c conda-forge python-casacore`.
+
  XRADIO can now be installed using:
  ```sh
  pip install xradio
  ```
- This will also install the minimal dependencies for XRADIO. To install the minimal dependencies and the interactive components (JupyterLab) use:
+ This will also install the minimal dependencies for XRADIO.
+
+ Note that if only the minimal dependencies are installed, the functionality to convert MSv2 to MSv4 will not be available.
+ This requires installing `python-casacore` (also included in the `all` group, see below), or alternatively the
+ `casatools` backend, as explained in the [casatools I/O backend guide](docs/source/measurement_set/guides/backends.md).
+
+ To install the minimal dependencies and the interactive components (JupyterLab) use:
  ```sh
  pip install "xradio[interactive]"
  ```
+
  To enable conversion from MSv2 to MSv4 use (this only works for Linux):
  ```sh
  pip install "xradio[python-casacore]"
  ```
- > 📝 On macOS it is required to pre-install `python-casacore` using `conda install -c conda-forge python-casacore`.
-
  To be able to run tests:
  ```sh
  pip install "xradio[test]"
@@ -36,4 +43,11 @@ pip install "xradio[test]"
  Multiple-dependencies can be installed using:
  ```sh
  pip install "xradio[interactive,python-casacore,test]"
- ```
+ ```
+
+ To install a more complete set of dependencies:
+ ```sh
+ pip install "xradio[all]"
+ ```
+ This will include the dependencies required to run the interactive Jupyter notebooks, run tests, build documentation,
+ and python-casacore to enable MSv2=>MSv4 functionality.
{xradio-0.0.58 → xradio-0.0.59}/pyproject.toml

@@ -1,6 +1,6 @@
  [project]
  name = "xradio"
- version = "v0.0.58"
+ version = "v0.0.59"
  description = " Xarray Radio Astronomy Data IO"
  authors = [
  {name = "Jan-Willem Steeb", email="jsteeb@nrao.edu"},
{xradio-0.0.58 → xradio-0.0.59}/src/xradio/measurement_set/_utils/_msv2/conversion.py

@@ -432,8 +432,49 @@ def calc_indx_for_row_split(tb_tool, taql_where):


  def create_coordinates(
- xds, in_file, ddi, utime, interval, baseline_ant1_id, baseline_ant2_id, scan_id
- ):
+ xds: xr.Dataset,
+ in_file: str,
+ ddi: int,
+ utime: np.ndarray,
+ interval: np.ndarray,
+ baseline_ant1_id: np.ndarray,
+ baseline_ant2_id: np.ndarray,
+ scan_id: np.ndarray,
+ ) -> tuple[xr.Dataset, int]:
+ """
+ Creates coordinates of a VisibilityXds/SpectrumXds and assigns them to the input
+ correlated dataset.
+
+ Parameters
+ ----------
+ xds :
+ dataset to add the coords to
+ in_file :
+ path to input MSv2
+ ddi :
+ DDI index (row) for this MSv4
+ utime :
+ unique times, for the time coordinate
+ interval :
+ interval col values from the MSv2, for the integration_time attribute
+ of the time coord
+ baseline_ant1_id :
+ ANTENNA1 ids to be used as coord
+ baseline_ant2_id :
+ ANTENNA2 ids to be used as coord
+ scan_id :
+ SCAN_ID values from MSv2, for the scan_name coord
+
+ Returns
+ -------
+ tuple[xr.Dataset, int]
+ A tuple of:
+ - The input dataset with coordinates added and populated with all MSv4 schema
+ attributes.
+ - The MSv2 spectral_window_id of this DDI/MSv4, which is no longer added to
+ the frequency coord but is required to create other secondary xdss (antenna,
+ gain_curve, phase_calibration, system_calibration, field_and_source).
+ """
  coords = {
  "time": utime,
  "baseline_antenna1_id": ("baseline_id", baseline_ant1_id),
@@ -487,6 +528,7 @@
  spw_name = spw_name + "_" + str(spectral_window_id)

  xds.frequency.attrs["spectral_window_name"] = spw_name
+ xds.frequency.attrs["spectral_window_intent"] = "UNSPECIFIED"
  msv4_measure = column_description_casacore_to_msv4_measure(
  freq_column_description["REF_FREQUENCY"],
  ref_code=spectral_window_xds["MEAS_FREQ_REF"].data,
@@ -496,7 +538,6 @@
  msv4_measure["units"],
  msv4_measure["observer"],
  )
- xds.frequency.attrs["spectral_window_id"] = spectral_window_id

  # Add if doppler table is present
  # xds.frequency.attrs["doppler_velocity"] =
@@ -534,7 +575,7 @@
  interval, msv4_measure["units"] if msv4_measure else "s"
  )

- return xds
+ return xds, spectral_window_id


  def find_min_max_times(tb_tool: tables.table, taql_where: str) -> tuple:
@@ -1086,7 +1127,7 @@ def convert_and_write_partition(
  scan_id[tidxs, bidxs] = tb_tool.getcol("SCAN_NUMBER")
  scan_id = np.max(scan_id, axis=1)

- xds = create_coordinates(
+ xds, spectral_window_id = create_coordinates(
  xds,
  in_file,
  ddi,
@@ -1154,7 +1195,7 @@

  ant_xds = create_antenna_xds(
  in_file,
- xds.frequency.attrs["spectral_window_id"],
+ spectral_window_id,
  antenna_id,
  feed_id,
  telescope_name,
@@ -1163,9 +1204,7 @@
  logger.debug("Time antenna xds " + str(time.time() - start))

  start = time.time()
- gain_curve_xds = create_gain_curve_xds(
- in_file, xds.frequency.attrs["spectral_window_id"], ant_xds
- )
+ gain_curve_xds = create_gain_curve_xds(in_file, spectral_window_id, ant_xds)
  logger.debug("Time gain_curve xds " + str(time.time() - start))

  start = time.time()
@@ -1175,7 +1214,7 @@
  phase_cal_interp_time = None
  phase_calibration_xds = create_phase_calibration_xds(
  in_file,
- xds.frequency.attrs["spectral_window_id"],
+ spectral_window_id,
  ant_xds,
  time_min_max,
  phase_cal_interp_time,
@@ -1190,6 +1229,7 @@
  sys_cal_interp_time = None
  system_calibration_xds = create_system_calibration_xds(
  in_file,
+ spectral_window_id,
  xds.frequency,
  ant_xds,
  sys_cal_interp_time,
@@ -1276,7 +1316,7 @@
  create_field_and_source_xds(
  in_file,
  field_id,
- xds.frequency.attrs["spectral_window_id"],
+ spectral_window_id,
  field_times,
  is_single_dish,
  time_min_max,
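
The conversion.py changes above alter the contract of `create_coordinates`: it now returns the dataset together with the MSv2 `spectral_window_id`, and the id is passed explicitly to the secondary-dataset builders instead of being read back from `xds.frequency.attrs["spectral_window_id"]`. A minimal sketch on a toy dataset (the helper below is illustrative, not the package's converter):

```python
import numpy as np
import xarray as xr

def fake_create_coordinates(xds: xr.Dataset, spectral_window_id: int) -> tuple[xr.Dataset, int]:
    # Illustrative stand-in: attach frequency metadata the way the new schema expects,
    # but keep the MSv2 spectral window id out of the attrs and return it to the caller.
    xds = xds.assign_coords(frequency=("frequency", np.linspace(100e9, 101e9, 4)))
    xds.frequency.attrs["spectral_window_name"] = f"spw_{spectral_window_id}"
    xds.frequency.attrs["spectral_window_intent"] = "UNSPECIFIED"
    return xds, spectral_window_id

xds, spw_id = fake_create_coordinates(xr.Dataset(), spectral_window_id=3)
assert "spectral_window_id" not in xds.frequency.attrs
# spw_id is now handed to create_antenna_xds, create_gain_curve_xds, etc. as an argument.
print(xds.frequency.attrs["spectral_window_name"], spw_id)
```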
{xradio-0.0.58 → xradio-0.0.59}/src/xradio/measurement_set/_utils/_msv2/create_field_and_source_xds.py

@@ -573,7 +573,9 @@ def pad_missing_sources(
  for missing_id in missing_source_ids:
  missing_source_xds[concat_dim] = missing_id
  xdss_to_concat.append(missing_source_xds)
- filled_source_xds = xr.concat(xdss_to_concat, concat_dim).sortby(concat_dim)
+ filled_source_xds = xr.concat(xdss_to_concat, concat_dim, join="outer").sortby(
+ concat_dim
+ )

  return filled_source_xds

{xradio-0.0.58 → xradio-0.0.59}/src/xradio/measurement_set/_utils/_msv2/msv4_sub_xdss.py

@@ -591,6 +591,7 @@ def prepare_generic_sys_cal_xds(generic_sys_cal_xds: xr.Dataset) -> xr.Dataset:

  def create_system_calibration_xds(
  in_file: str,
+ spectral_window_id: int,
  main_xds_frequency: xr.DataArray,
  ant_xds: xr.DataArray,
  sys_cal_interp_time: Union[xr.DataArray, None] = None,
@@ -602,9 +603,11 @@
  ----------
  in_file: str
  Input MS name.
+ spectral_window_id: int
+ SPW ID from MSv2
  main_xds_frequency: xr.DataArray
  frequency array of the main xds (MSv4), containing among other things
- spectral_window_id and measures metadata
+ the measures metadata
  ant_xds : xr.Dataset
  The antenna_xds that has information such as names, stations, etc., for coordinates
  sys_cal_interp_time: Union[xr.DataArray, None] = None,
@@ -616,7 +619,6 @@
  System calibration Xarray Dataset.
  """

- spectral_window_id = main_xds_frequency.attrs["spectral_window_id"]
  try:
  generic_sys_cal_xds = load_generic_table(
  in_file,
{xradio-0.0.58 → xradio-0.0.59}/src/xradio/measurement_set/measurement_set_xdt.py

@@ -205,6 +205,9 @@ class MeasurementSetXdt:

  partition_info = {
  "spectral_window_name": self._xdt.frequency.attrs["spectral_window_name"],
+ "spectral_window_intent": self._xdt.frequency.attrs[
+ "spectral_window_intent"
+ ],
  "field_name": to_list(np.unique(field_and_source_xds.field_name.values)),
  "polarization_setup": to_list(self._xdt.polarization.values),
  "scan_name": to_list(np.unique(self._xdt.scan_name.values)),
@@ -228,7 +231,8 @@ class MeasurementSetXdt:
  description: str = None,
  data_group_dv_shared_with: str = None,
  ) -> xr.DataTree:
- """_summary_
+ """Adds a data group to the MSv4 DataTree, grouping the given data, weight, flag, etc. variables
+ and field_and_source_xds.

  Parameters
  ----------
@@ -254,7 +258,7 @@ class MeasurementSetXdt:
  Returns
  -------
  xr.DataTree
- _description_
+ MSv4 DataTree with the new group added
  """

  if data_group_dv_shared_with is None:
{xradio-0.0.58 → xradio-0.0.59}/src/xradio/measurement_set/processing_set_xdt.py

@@ -142,7 +142,7 @@ class ProcessingSetXdt:
  if "freq_axis" in self.meta:
  return self.meta["freq_axis"]
  else:
- spw_ids = []
+ spw_names = []
  freq_axis_list = []
  frame = self._xdt[next(iter(self._xdt.children))].frequency.attrs[
  "observer"
@@ -151,11 +151,13 @@
  assert (
  frame == ms_xdt.frequency.attrs["observer"]
  ), "Frequency reference frame not consistent in Processing Set."
- if ms_xdt.frequency.attrs["spectral_window_id"] not in spw_ids:
- spw_ids.append(ms_xdt.frequency.attrs["spectral_window_id"])
+ if ms_xdt.frequency.attrs["spectral_window_name"] not in spw_names:
+ spw_names.append(ms_xdt.frequency.attrs["spectral_window_name"])
  freq_axis_list.append(ms_xdt.frequency)

- freq_axis = xr.concat(freq_axis_list, dim="frequency").sortby("frequency")
+ freq_axis = xr.concat(freq_axis_list, dim="frequency", join="outer").sortby(
+ "frequency"
+ )
  self.meta["freq_axis"] = freq_axis
  return self.meta["freq_axis"]

@@ -167,6 +169,7 @@
  "polarization": [],
  "scan_name": [],
  "spw_name": [],
+ "spw_intent": [],
  "field_name": [],
  "source_name": [],
  "line_name": [],
@@ -183,6 +186,7 @@
  summary_data["name"].append(key)
  summary_data["intents"].append(partition_info["intents"])
  summary_data["spw_name"].append(partition_info["spectral_window_name"])
+ summary_data["spw_intent"].append(partition_info["spectral_window_intent"])
  summary_data["polarization"].append(value.polarization.values)
  summary_data["scan_name"].append(partition_info["scan_name"])
  data_name = value.attrs["data_groups"][data_group]["correlated_data"]
@@ -267,10 +271,10 @@
  Examples
  --------
  >>> # Select all MSs with intents 'OBSERVE_TARGET#ON_SOURCE' and polarization 'RR' or 'LL'
- >>> selected_ps = ps.query(intents='OBSERVE_TARGET#ON_SOURCE', polarization=['RR', 'LL'])
+ >>> selected_ps_xdt = ps_xdt.xr_ps.query(intents='OBSERVE_TARGET#ON_SOURCE', polarization=['RR', 'LL'])

  >>> # Select all MSs with start_frequency greater than 100 GHz and less than 200 GHz
- >>> selected_ps = ps.query(query='start_frequency > 100e9 AND end_frequency < 200e9')
+ >>> selected_ps_xdt = ps_xdt.xr_ps.query(query='start_frequency > 100e9 AND end_frequency < 200e9')
  """

  if self._xdt.attrs.get("type") not in PS_DATASET_TYPES:
@@ -377,6 +381,7 @@
  combined_field_and_source_xds = xr.concat(
  [combined_field_and_source_xds, field_and_source_xds],
  dim="field_name",
+ join="outer",
  )

  if (len(combined_field_and_source_xds.data_vars) > 0) and (
@@ -497,6 +502,7 @@
  combined_ephemeris_field_and_source_xds = xr.concat(
  [combined_ephemeris_field_and_source_xds, field_and_source_xds],
  dim="time",
+ join="outer",
  )

  if (len(combined_ephemeris_field_and_source_xds.data_vars) > 0) and (
@@ -719,6 +725,7 @@
  dim="antenna_name",
  data_vars="minimal",
  coords="minimal",
+ join="outer",
  )

  # ALMA WVR antenna_xds data has a NaN value for the antenna receptor angle.
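
Several changes in this file (and in `pad_missing_sources` above) pass `join="outer"` explicitly to `xr.concat`. A minimal toy sketch of what an outer join does when the concatenated datasets do not share all of their other coordinates; the variable and dimension names below are illustrative, loosely modeled on `field_and_source_xds`:

```python
import xarray as xr

a = xr.Dataset(
    {"LINE_REST_FREQUENCY": (("field_name", "line_label"), [[115.27e9]])},
    coords={"field_name": ["field_0"], "line_label": ["CO_v0"]},
)
b = xr.Dataset(
    {"LINE_REST_FREQUENCY": (("field_name", "line_label"), [[88.63e9]])},
    coords={"field_name": ["field_1"], "line_label": ["HCN_v0"]},
)

# join="outer" keeps the union of the non-concatenated index ("line_label"),
# padding missing combinations with NaN instead of dropping them.
combined = xr.concat([a, b], dim="field_name", join="outer")
print(combined.line_label.values)          # ['CO_v0' 'HCN_v0']
print(combined.LINE_REST_FREQUENCY.shape)  # (2, 2), NaN where a field has no entry
```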
{xradio-0.0.58 → xradio-0.0.59}/src/xradio/measurement_set/schema.py

@@ -9,7 +9,7 @@ from xradio.schema.bases import (
  from xradio.schema.typing import Attr, Coord, Coordof, Data, Dataof
  import numpy

- MSV4_SCHEMA_VERSION = "4.0.-9988"
+ MSV4_SCHEMA_VERSION = "4.0.-9987"

  # Dimensions
  Time = Literal["time"]
@@ -674,6 +674,9 @@ class FrequencyArray:
  """ Center frequencies for each channel. """
  spectral_window_name: Attr[str]
  """ Name associated with spectral window. """
+ spectral_window_intent: Attr[str]
+ """ An intent string that identifies the intention of the spectral window, for example
+ continuum, spectral line, etc. See :ref:`spw intents` for possible values. """
  frequency_group_name: Optional[Attr[str]]
  """ Name associated with frequency group - needed for multi-band VLBI fringe-fitting."""
  reference_frequency: Attr[SpectralCoordArray]
@@ -795,7 +798,7 @@ class FlagArray:
  polarization: Optional[Coordof[PolarizationArray]] = None
  long_name: Optional[Attr[str]] = "Visibility flags"

- allow_mutiple_versions: Optional[Attr[bool]] = True
+ allow_multiple_versions: Optional[Attr[bool]] = True


  @xarray_dataarray_schema
@@ -833,7 +836,7 @@ class WeightArray:
  polarization: Optional[Coordof[PolarizationArray]] = None
  long_name: Optional[Attr[str]] = "Visibility weights"

- allow_mutiple_versions: Optional[Attr[bool]] = True
+ allow_multiple_versions: Optional[Attr[bool]] = True


  # J2000=>fk5 is used most often. icrs is used less often. Both fk5 and icrs are also borrowed from the field center (to fix
@@ -896,7 +899,7 @@ class UvwArray:
  """ To be defined in astropy (see for example https://github.com/astropy/astropy/issues/7766) """
  units: Attr[UnitsMeters] = "m"

- allow_mutiple_versions: Optional[Attr[bool]] = True
+ allow_multiple_versions: Optional[Attr[bool]] = True


  @xarray_dataarray_schema
@@ -992,7 +995,7 @@ class FrequencyCentroidArray:
  @xarray_dataarray_schema
  class EffectiveChannelWidthArray:
  """
- Model of frequency related data variables of the main dataset, such as EFFECTIV_CHANNEL_WIDTH.
+ Model of frequency related data variables of the main dataset, such as EFFECTIVE_CHANNEL_WIDTH.
  """

  data: Data[
@@ -1321,7 +1324,7 @@ class VisibilityArray:
  long_name: Optional[Attr[str]] = "Visibility values"
  """ Long-form name to use for axis. Should be ``"Visibility values"``"""
  units: Attr[str] = "Jy"
- allow_mutiple_versions: Optional[Attr[bool]] = True
+ allow_multiple_versions: Optional[Attr[bool]] = True


  # Info dicts
@@ -1382,9 +1385,22 @@ class ObservationInfoDict:
  """ASDM: Logs of the observation during this execu- tion block."""
  intents: list[str]
  """ An intent string identifies one intention of the scan, such as to calibrate or observe a
- target. See :ref:`scan intents` for possible values. When converting from MSv2, the list of
- intents is derived from the OBS_MODE column of MSv2 state table (every comma separated value
- is taken as an intent). """
+ target. See :ref:`scan intents` for possible intent/subintent values. When converting from MSv2,
+ the list of intents is derived from the OBS_MODE column of MSv2 state table (every comma
+ separated value is taken as an intent).
+ A common convention used in the MSv2 OBS_MODE column is to specify multiple intents separated
+ by commas, each of them giving a main intent and a subintent separated by a '#' character. This
+ is represented in this attribute as a list of "intent#subintent" strings. These are a few
+ example lists:
+ ["CALIBRATE_DELAY#ON_SOURCE" , "CALIBRATE_PHASE#ON_SOURCE", "CALIBRATE_WVR#ON_SOURCE"],
+ ["CALIBRATE_FLUX#ON_SOURCE" , "CALIBRATE_WVR#ON_SOURCE"],
+ ["CALIBRATE_POINTING#ON_SOURCE", "CALIBRATE_WVR#ON_SOURCE", "CALIBRATE_DELAY#ON_SOURCE"],
+ ["CALIBRATE_ATMOSPHERE#AMBIENT", "CALIBRATE_WVR#AMBIENT"],
+ ["CALIBRATE_FOCUS#ON_SOURCE" , "CALIBRATE_WVR#ON_SOURCE"],
+ ["OBSERVE_TARGET#ON_SOURCE"], or ["OBSERVE_TARGE#UNSPECIFIED"].
+ The list of possible intent and subintent names (see :ref:`scan intents`) is derived from the
+ respective ASDM enumerations.
+ """


  @dict_schema
@@ -1419,7 +1435,8 @@ class DataGroupDict:

  @dict_schema
  class DataGroupsDict:
- """Dictionary of data group dictionaries."""
+ """Dictionary of data group dictionaries. A 'base' data group is mandatory.
+ Additional data groups can be added with different names."""
  base: DataGroupDict

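
The expanded `intents` docstring above describes the MSv2 OBS_MODE convention. A small sketch of how such a value maps onto the `"intent#subintent"` strings; the splitting below is illustrative, not the converter's code:

```python
# OBS_MODE value as it might appear in an MSv2 STATE table row (example styled after the docstring).
obs_mode = "CALIBRATE_POINTING#ON_SOURCE,CALIBRATE_WVR#ON_SOURCE,CALIBRATE_DELAY#ON_SOURCE"

# Every comma-separated value is taken as one intent; each holds "intent#subintent".
intents = obs_mode.split(",")
print(intents)

intent, subintent = intents[0].split("#")
print(intent, subintent)  # CALIBRATE_POINTING ON_SOURCE
```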
{xradio-0.0.58 → xradio-0.0.59}/src/xradio/schema/check.py

@@ -382,14 +382,14 @@ def check_data_vars(

  issues = SchemaIssues()
  for data_var_schema in data_vars_schema:
- allow_mutiple_versions = False
+ allow_multiple_versions = False
  for attr in data_var_schema.attributes:
  if hasattr(attr, "name"):
- if attr.name == "allow_mutiple_versions":
- allow_mutiple_versions = attr.default
+ if attr.name == "allow_multiple_versions":
+ allow_multiple_versions = attr.default

  data_vars_names = []
- if allow_mutiple_versions:
+ if allow_multiple_versions:
  for data_var_name in data_vars:
  if data_var_schema.name in data_var_name:
  data_vars_names.append(data_var_name)
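
The renamed flag drives the matching loop in `check_data_vars`: when a schema array allows multiple versions, every dataset variable whose name contains the schema name is checked against that schema. A minimal illustration of the name matching (variable names below are illustrative):

```python
schema_name = "VISIBILITY"
allow_multiple_versions = True
data_vars = ["VISIBILITY", "VISIBILITY_CORRECTED", "WEIGHT", "FLAG"]

if allow_multiple_versions:
    # Same substring rule as `data_var_schema.name in data_var_name` above.
    matching = [name for name in data_vars if schema_name in name]
else:
    matching = [name for name in data_vars if name == schema_name]

print(matching)  # ['VISIBILITY', 'VISIBILITY_CORRECTED']
```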
{xradio-0.0.58 → xradio-0.0.59}/src/xradio/schema/metamodel.py

@@ -35,7 +35,7 @@ class ValueSchema:
  """
  dict_schema: typing.Optional[DictSchema] = None
  """
- Dictionary schema, if it is an xarray DataArray
+ Dictionary schema, if it is a dict
  """
  array_schema: typing.Optional[ArraySchema] = None
  """
{xradio-0.0.58 → xradio-0.0.59}/src/xradio/sphinx/schema_table.py

@@ -154,40 +154,46 @@ class SchemaTableDirective(ObjectDescription):
  self.state.nested_parse(vl, 0, entry)


- def format_literals(typ):
+ def format_literals(literal) -> nodes.line:

- # a | b | c: Recurse and merge
- if typing.get_origin(typ) == typing.Union:
- type_args = typing.get_args(typ)
- options = []
- for arg in type_args:
- options += format_literals(arg)
- return options
+ if isinstance(literal, list) and all([isinstance(item, str) for item in literal]):
+ formatted_literal = [nodes.literal(text=f"'{val}'") for val in literal]
+ else:
+ raise ValueError(f"Must be a list of literal string values: {literal}")

- # Literal['a', 'b', ...]: Wrap into individual "literal" nodes
- if typing.get_origin(typ) == typing.Literal:
- return list(map(lambda t: nodes.literal(text=repr(t)), typing.get_args(typ)))
+ # Join the literals with ... , .. , .. , or ...
+ line = nodes.line()
+ for i, lit in enumerate(formatted_literal):
+ if i > 0:
+ if i + 1 >= len(formatted_literal):
+ line += nodes.Text(" or\xa0")
+ else:
+ line += nodes.Text(", ")
+ line += lit

- # list[Literal['a'], Literal['b'], ...]: Format as one literal (compound) value
- if typing.get_origin(typ) == list:
- type_args = typing.get_args(typ)
- if any([typing.get_origin(arg) != typing.Literal for arg in type_args]):
- raise ValueError(f"List must contain only literals: {typ}")
- values = [repr(typing.get_args(val)[0]) for val in typing.get_args(typ)]
- return [nodes.literal(text=f"[{', '.join(values)}]")]
+ return line
+
+
+ def format_class_types(state, attr_type) -> nodes.line:
+ line = nodes.line()
+ vl = StringList()
+ vl.append(f":py:class:`~{attr_type}`", "")
+ with switch_source_input(state, vl):
+ state.nested_parse(vl, 0, line)

- raise ValueError(f"Must be either a type or a literal: {typ}")
+ return line


- def format_attr_model_text(state, attr) -> StringList:
+ def format_attr_model_text(state, attr) -> nodes.line:
  """
- Formats the text for the 'model' column in schema tables (arrays and datasets).
+ For an attribute, formats the text for the 'model' column in schema tables
+ (arrays and datasets).
  Doesn't aim at supporting any literal types or combinations of types in general,
  but the following three ones specifically:

- - Literals (with multiple options (implicit Union of literals))
- - List of literals (e.g. ["rad","rad"]
- - Union of list of literals (e.g. ["m","m","m"]/["rad","rad","m"]
+ - String literals (units, frames, measure types, etc.)
+ - Other classes (for example usual built-in types such str, bool or ints,
+ or schema classes: schema dicts and schema arrays)

  This is meant to produce readable text listing literals as quoted text and
  their combinations, in schema attributes (particularly quantities and measures).
@@ -196,58 +202,16 @@ def format_attr_model_text(state, attr) -> StringList:
  type name.
  """

- type_args = typing.get_args(attr.typ)
- is_list_of_literals = typing.get_origin(attr.typ) is list and all(
- [typing.get_origin(arg) is typing.Literal for arg in type_args]
- )
-
- line = nodes.line()
-
- if not is_list_of_literals:
- # A type?
- if isinstance(attr.typ, type):
- vl = StringList()
- vl.append(f":py:class:`~{attr.typ.__module__}.{attr.typ.__name__}`", "")
- with switch_source_input(state, vl):
- state.nested_parse(vl, 0, line)
- return line
-
- if typing.get_origin(attr.typ) == typing.Union:
- vl = StringList()
- type_args = typing.get_args(attr.typ)
- options = []
- for i, arg in enumerate(type_args):
- vl.append(f":py:class:`~{arg.__module__}.{arg.__name__}`", "")
- if i + 1 < len(type_args):
- vl.append(" or ", "")
- with switch_source_input(state, vl):
- state.nested_parse(vl, 0, line)
- return line
-
- # Derived type, e.g. list of types?
- if typing.get_origin(attr.typ) == list and all(
- [isinstance(arg, type) for arg in type_args]
- ):
- vl = StringList()
- vl.append("[", "")
- for i, arg in enumerate(typing.get_args(attr.typ)):
- if i > 0:
- vl.append(", ", "")
- vl.append(f":py:class:`~{arg.__module__}.{arg.__name__}`", "")
- vl.append("]", "")
- with switch_source_input(state, vl):
- state.nested_parse(vl, 0, line)
- return line
-
- # Assume it's a literal of some kind - collect options
- literals = format_literals(attr.typ)
- for i, lit in enumerate(literals):
- if i > 0:
- if i + 1 >= len(literals):
- line += nodes.Text(" or\xa0")
- else:
- line += nodes.Text(", ")
- line += lit
+ if getattr(attr, "literal"):
+ line = format_literals(attr.literal)
+ else:
+ if getattr(attr, "dict_schema"):
+ attr_type = attr.dict_schema.schema_name
+ elif getattr(attr, "array_schema"):
+ attr_type = attr.array_schema.schema_name
+ else:
+ attr_type = attr.type
+ line = format_class_types(state, attr_type)

  return line


@@ -356,7 +320,7 @@ class DictSchemaTableDirective(SchemaTableDirective):
  for attr in schema.attributes:
  self._add_row(
  attr.name,
- types=[f"{attr.typ.__name__}"],
+ types=[attr.type],
  optional=attr.optional,
  descr=attr.docstring,
  default=attr.default,
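
The rewritten `format_literals` now takes a plain list of strings and emits docutils nodes directly. A self-contained sketch of the joining behaviour (mirrors the code above; requires `docutils`):

```python
from docutils import nodes

def format_literals_sketch(literal: list[str]) -> nodes.line:
    # Quote each value, then join with ", " and a final " or " (non-breaking space),
    # as the directive code above does.
    formatted = [nodes.literal(text=f"'{val}'") for val in literal]
    line = nodes.line()
    for i, lit in enumerate(formatted):
        if i > 0:
            line += nodes.Text(" or\xa0" if i + 1 >= len(formatted) else ", ")
        line += lit
    return line

print(format_literals_sketch(["m", "m", "rad"]).astext())  # 'm', 'm' or 'rad'
```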
{xradio-0.0.58 → xradio-0.0.59/src/xradio.egg-info}/PKG-INFO

(Same changes as in the PKG-INFO diff shown first above.)