xradio 0.0.28__py3-none-any.whl → 0.0.30__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. xradio/__init__.py +5 -4
  2. xradio/_utils/array.py +90 -0
  3. xradio/_utils/zarr/common.py +48 -3
  4. xradio/image/_util/zarr.py +4 -1
  5. xradio/schema/__init__.py +24 -6
  6. xradio/schema/bases.py +440 -2
  7. xradio/schema/check.py +96 -55
  8. xradio/schema/dataclass.py +123 -27
  9. xradio/schema/metamodel.py +21 -4
  10. xradio/schema/typing.py +33 -18
  11. xradio/vis/__init__.py +5 -2
  12. xradio/vis/_processing_set.py +71 -32
  13. xradio/vis/_vis_utils/_ms/_tables/create_field_and_source_xds.py +710 -0
  14. xradio/vis/_vis_utils/_ms/_tables/load.py +23 -10
  15. xradio/vis/_vis_utils/_ms/_tables/load_main_table.py +145 -64
  16. xradio/vis/_vis_utils/_ms/_tables/read.py +747 -172
  17. xradio/vis/_vis_utils/_ms/_tables/read_main_table.py +173 -44
  18. xradio/vis/_vis_utils/_ms/_tables/read_subtables.py +79 -28
  19. xradio/vis/_vis_utils/_ms/_tables/write.py +102 -45
  20. xradio/vis/_vis_utils/_ms/_tables/write_exp_api.py +127 -65
  21. xradio/vis/_vis_utils/_ms/chunks.py +58 -21
  22. xradio/vis/_vis_utils/_ms/conversion.py +582 -102
  23. xradio/vis/_vis_utils/_ms/descr.py +52 -20
  24. xradio/vis/_vis_utils/_ms/msv2_to_msv4_meta.py +72 -35
  25. xradio/vis/_vis_utils/_ms/msv4_infos.py +0 -59
  26. xradio/vis/_vis_utils/_ms/msv4_sub_xdss.py +76 -9
  27. xradio/vis/_vis_utils/_ms/optimised_functions.py +0 -46
  28. xradio/vis/_vis_utils/_ms/partition_queries.py +308 -119
  29. xradio/vis/_vis_utils/_ms/partitions.py +82 -25
  30. xradio/vis/_vis_utils/_ms/subtables.py +32 -14
  31. xradio/vis/_vis_utils/_utils/partition_attrs.py +30 -11
  32. xradio/vis/_vis_utils/_utils/xds_helper.py +136 -45
  33. xradio/vis/_vis_utils/_zarr/read.py +60 -22
  34. xradio/vis/_vis_utils/_zarr/write.py +83 -9
  35. xradio/vis/_vis_utils/ms.py +48 -29
  36. xradio/vis/_vis_utils/zarr.py +44 -20
  37. xradio/vis/convert_msv2_to_processing_set.py +43 -32
  38. xradio/vis/load_processing_set.py +38 -61
  39. xradio/vis/read_processing_set.py +64 -96
  40. xradio/vis/schema.py +687 -0
  41. xradio/vis/vis_io.py +75 -43
  42. {xradio-0.0.28.dist-info → xradio-0.0.30.dist-info}/LICENSE.txt +6 -1
  43. {xradio-0.0.28.dist-info → xradio-0.0.30.dist-info}/METADATA +10 -5
  44. xradio-0.0.30.dist-info/RECORD +73 -0
  45. {xradio-0.0.28.dist-info → xradio-0.0.30.dist-info}/WHEEL +1 -1
  46. xradio/vis/model.py +0 -497
  47. xradio-0.0.28.dist-info/RECORD +0 -71
  48. {xradio-0.0.28.dist-info → xradio-0.0.30.dist-info}/top_level.txt +0 -0
xradio/vis/_vis_utils/_ms/_tables/write.py
@@ -6,13 +6,23 @@ import xarray as xr
 
  from casacore import tables
 
+ from ..msv2_msv3 import ignore_msv2_cols
+
 
  def revert_time(datetimes: np.ndarray) -> np.ndarray:
- """Convert time back from pandas datetime ref to casacore ref
+ """
+ Convert time back from pandas datetime ref to casacore ref
  (reverse of read.convert_casacore_time).
 
- :param rawtimes: times in pandas reference
- :return: times converted to casacore reference
+ Parameters
+ ----------
+ datetimes : np.ndarray
+ times in pandas reference
+
+ Returns
+ -------
+ np.ndarray
+ times converted to casacore reference
 
  """
  return (datetimes.astype(float) / 10**9) + 3506716800.0
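
Note on the hunk above: revert_time rescales numpy datetime64[ns] values to seconds and adds the 3,506,716,800 s offset between the 1970-01-01 Unix epoch and the 1858-11-17 casacore/MJD reference. A minimal round-trip sketch; the inverse helper here is an assumption standing in for read.convert_casacore_time, whose exact implementation is not shown in this diff:

    import numpy as np

    CASACORE_EPOCH_OFFSET_S = 3506716800.0  # seconds between 1858-11-17 and 1970-01-01

    def revert_time(datetimes: np.ndarray) -> np.ndarray:
        # datetime64[ns] -> seconds since the Unix epoch -> casacore reference seconds
        return (datetimes.astype(float) / 10**9) + CASACORE_EPOCH_OFFSET_S

    def convert_casacore_time(rawtimes: np.ndarray) -> np.ndarray:
        # assumed inverse of revert_time, mirroring read.convert_casacore_time
        return ((rawtimes - CASACORE_EPOCH_OFFSET_S) * 10**9).astype("int64").astype("datetime64[ns]")

    t = np.array(["2020-01-01T00:00:00"], dtype="datetime64[ns]")
    assert (convert_casacore_time(revert_time(t)) == t).all()
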
@@ -54,22 +64,45 @@ def create_table(
  os.system("rm -fr %s" % outfile)
 
  # create column descriptions for table description
+ ctds_attrs = {}
+ try:
+ ctds_attrs = xds.attrs["other"]["msv2"]["ctds_attrs"]
+ except KeyError as exc:
+ pass
+
  if cols is None:
- cols = list(
- set(list(xds.data_vars) + list(xds.attrs["column_descriptions"].keys()))
- if "column_descriptions" in xds.attrs
- else list(xds.data_vars)
- )
+ if ctds_attrs and "column_descriptions" in ctds_attrs:
+ cols = {col: col.lower() for col in ctds_attrs["column_descriptions"]}
+ else:
+ cols = {var.upper(): var for var in xds.data_vars}
+ # Would add all xds data vars regardless of description availability
+ # +
+ # list(xds.data_vars) +
+
  tabledesc = {}
- for col in cols:
- if ("column_descriptions" in xds.attrs) and (
- col in xds.attrs["column_descriptions"]
+ for col, var_name in cols.items():
+ if ("column_descriptions" in ctds_attrs) and (
+ col in ctds_attrs["column_descriptions"]
  ):
- coldesc = xds.attrs["column_descriptions"][col]
+ coldesc = ctds_attrs["column_descriptions"][col]
+ # col not in ignore_msv2_cols
+ if (
+ not generic
+ and "DATA" in col
+ and "shape" not in coldesc
+ and var_name in xds.data_vars
+ ):
+ coldesc["shape"] = tuple(np.clip(xds[var_name].shape[1:], 1, None))
+
+ if col == "UVW" or (
+ (not "shape" in coldesc or type(coldesc["shape"]) == str)
+ and var_name in xds.data_vars
+ ):
+ coldesc["shape"] = tuple(np.clip(xds[var_name].shape[1:], 1, None))
  else:
  coldesc = {"valueType": type_converter(xds[col].dtype)}
  if generic or (
- col == "UVW"
+ col == "UVW" or col == "DATA"
  ): # will be statically shaped even if not originally
  coldesc = {"shape": tuple(np.clip(xds[col].shape[1:], 1, None))}
  elif xds[col].ndim > 1: # make variably shaped
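
Note on the shape logic above: the per-row cell shape of a DATA-like or UVW column is derived by dropping the leading row axis of the data variable and clamping each remaining axis to at least 1. A self-contained illustration of that expression; the toy dataset below is made up:

    import numpy as np
    import xarray as xr

    # toy visibility-like variable with dimensions (row, freq, pol)
    xds = xr.Dataset(
        {"DATA": (("row", "freq", "pol"), np.zeros((10, 4, 2), dtype=np.complex64))}
    )

    # drop the row axis, clamp degenerate axes to 1 -> shape of one table cell
    cell_shape = tuple(np.clip(xds["DATA"].shape[1:], 1, None))
    print(cell_shape)  # (4, 2)
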
@@ -79,9 +112,10 @@ def create_table(
  tabledesc[col] = coldesc
 
  # fix the fun set of edge cases from casatestdata that cause errors
- if (tabledesc[col]["dataManagerType"] == "TiledShapeStMan") and (
- tabledesc[col]["ndim"] == 1
- ):
+ if (
+ "dataManagerType" in tabledesc[col]
+ and tabledesc[col]["dataManagerType"] == "TiledShapeStMan"
+ ) and (tabledesc[col]["ndim"] == 1):
  tabledesc[col]["dataManagerType"] = ""
 
  if generic:
@@ -101,16 +135,16 @@ def create_table(
 
  # write xds attributes to table keywords, skipping certain reserved attributes
  existing_keywords = tb_tool.getkeywords()
- for attr in xds.attrs:
+ for attr in ctds_attrs:
  if attr in [
  "other",
  "history",
  "info",
  ] + list(existing_keywords.keys()):
  continue
- tb_tool.putkeyword(attr, xds.attrs[attr])
- if "info" in xds.attrs:
- tb_tool.putinfo(xds.attrs["info"])
+ tb_tool.putkeyword(attr, ctds_attrs[attr])
+ if "info" in ctds_attrs:
+ tb_tool.putinfo(ctds_attrs["info"])
 
  # copy subtables and add to main table
  if infile:
@@ -151,34 +185,38 @@ def write_generic_table(xds: xr.Dataset, outfile: str, subtable="", cols=None):
  Parameters
  ----------
  xds : xr.Dataset
- Source xarray dataset data
+
  outfile : str
- Destination filename (or parent main table if writing subtable)
- subtable : str
- Name of the subtable being written, triggers special logic to add subtable to
- parent table. Default '' for normal generic writes
- cols : str or list
- List of cols to write. Default None writes all columns
+
+ subtable : str (Default value = "")
+
+ cols : List[str] (Default value = None)
+
+ Returns
+ -------
+
  """
  outfile = os.path.expanduser(outfile)
  logger.debug("writing {os.path.join(outfile, subtable)}")
- if cols is None:
- cols = list(
- set(
- list(xds.data_vars)
- + [cc for cc in xds.coords if cc not in xds.sizes]
- + (
- list(
- xds.attrs["column_descriptions"].keys()
- if "column_descriptions" in xds.attrs
- else []
- )
- )
- )
- )
- cols = list(np.atleast_1d(cols))
 
- max_rows = xds.row.shape[0] if "row" in xds.sizes else 0
+ try:
+ ctds_attrs = {}
+ ctds_attrs = xds.attrs["other"]["msv2"]["ctds_attrs"]
+ except KeyError as exc:
+ pass
+
+ if cols is None:
+ cols = {var.upper(): var for var in xds.data_vars}
+ cols.update({coo.upper(): coo for coo in xds.coords if coo not in xds.dims})
+ # Would add cols with a description regardless of presence in xds
+ # + (
+ # list(
+ # ctds_attrs["column_descriptions"].keys()
+ # if "column_descriptions" in ctds_attrs
+ # else []
+ # )
+ # )
+ max_rows = xds.row.shape[0] if "row" in xds.dims else 0
  create_table(
  os.path.join(outfile, subtable),
  xds,
@@ -195,7 +233,7 @@ def write_generic_table(xds: xr.Dataset, outfile: str, subtable="", cols=None):
  ack=False,
  )
  try:
- for dv in cols:
+ for dv, col in cols.items():
  if (dv not in xds) or (np.prod(xds[dv].shape) == 0):
  continue
  values = (
@@ -203,7 +241,7 @@ def write_generic_table(xds: xr.Dataset, outfile: str, subtable="", cols=None):
  if xds[dv].dtype != "datetime64[ns]"
  else revert_time(xds[dv].values)
  )
- tb_tool.putcol(dv, values, 0, values.shape[0], 1)
+ tb_tool.putcol(col, values, 0, values.shape[0], 1)
  except Exception:
  print(
  "ERROR: exception in write generic table - %s, %s, %s, %s"
@@ -232,6 +270,25 @@ def write_main_table_slice(
  ):
  """
  Write an xds row chunk to the corresponding main table slice
+
+ Parameters
+ ----------
+ xda : xr.DataArray
+
+ outfile : str
+
+ ddi : int
+
+ col : str
+
+ full_shape : Tuple
+
+ starts : Tuple
+
+
+ Returns
+ -------
+
  """
  # trigger the DAG for this chunk and return values while the table is unlocked
  values = xda.compute().values
xradio/vis/_vis_utils/_ms/_tables/write_exp_api.py
@@ -1,9 +1,9 @@
  import os, time
- from typing import Optional
+ from typing import List, Optional, Union
 
  import dask
  import numpy as np
-
+ import xarray as xr
 
  from ..._utils.xds_helper import flatten_xds, calc_optimal_ms_chunk_shape
  from .write import write_generic_table, write_main_table_slice
@@ -12,52 +12,88 @@ from .write import create_table, revert_time
  from casacore import tables
 
 
+ # TODO: this should be consolidated with the equivalent in read_main_table,
+ # if we keep this mapping
+ rename_to_msv2_cols = {
+ "antenna1_id": "antenna1",
+ "antenna2_id": "antenna2",
+ "feed1_id": "feed1",
+ "feed2_id": "feed2",
+ # optional cols:
+ # "weight": "weight_spectrum",
+ "vis_corrected": "corrected_data",
+ "vis": "data",
+ "vis_model": "model_data",
+ "autocorr": "float_data",
+ }
+ # cols added in xds not in MSv2
+ cols_not_in_msv2 = ["baseline_ant1_id", "baseline_ant2_id"]
+
+
+ def cols_from_xds_to_ms(cols: List[str]) -> List[str]:
+ """
+ Translates between lowercase/uppercase convention
+ Rename some MS_colum_names <-> xds_data_var_names
+ Excludes the pointing_ vars that are in the xds but should not be written to MS
+ """
+ return {
+ rename_to_msv2_cols.get(col, col).upper(): col
+ for col in cols
+ if (col and col not in cols_not_in_msv2 and not col.startswith("pointing_"))
+ }
+
+
  def write_ms(
- mxds,
- outfile,
- infile=None,
- subtables=False,
- modcols=None,
- verbose=False,
- execute=True,
+ mxds: xr.Dataset,
+ outfile: str,
+ infile: str = None,
+ subtables: bool = False,
+ modcols: Union[List[str], None] = None,
+ verbose: bool = False,
+ execute: bool = True,
  ) -> Optional[list]:
  """
  Write ms format xds contents back to casacore MS (CTDS - casacore Table Data System) format on disk
 
  Parameters
  ----------
- mxds : xarray.Dataset
+ mxds : xr.Dataset,
  Source multi-xarray dataset (originally created by read_ms)
  outfile : str
  Destination filename
- infile : str
+ infile : Union[str, None] (Default value = None)
  Source filename to copy subtables from. Generally faster than reading/writing through mxds via the subtables parameter. Default None
  does not copy subtables to output.
- subtables : bool
+ subtables : bool (Default value = False)
  Also write subtables from mxds. Default of False only writes mxds attributes that begin with xdsN to the MS main table.
  Setting to True will write all other mxds attributes to subtables of the main table. This is probably going to be SLOW!
  Use infile instead whenever possible.
- modcols : list
+ modcols : Union[List[str], None] (Default value = None)
  List of strings indicating what column(s) were modified (aka xds data_vars). Different logic can be applied to speed up processing when
  a data_var has not been modified from the input. Default None assumes everything has been modified (SLOW)
- verbose : bool
+ verbose : bool (Default value = False)
  Whether or not to print output progress. Since writes will typically execute the DAG, if something is
  going to go wrong, it will be here. Default False
- execute : bool
+ execute : bool (Default value = True)
  Whether or not to actually execute the DAG, or just return it with write steps appended. Default True will execute it
+
+ Returns
+ -------
+ Optional[list]
+ delayed write functions
  """
  outfile = os.path.expanduser(outfile)
  if verbose:
  print("initializing output...")
  start = time.time()
 
- xds_list = [
- flatten_xds(mxds.attrs[kk]) for kk in mxds.attrs if kk.startswith("xds")
- ]
- cols = list(set([dv for dx in xds_list for dv in dx.data_vars]))
+ xds_list = [flatten_xds(xds) for _key, xds in mxds.partitions.items()]
+
+ cols = cols_from_xds_to_ms(
+ list(set([dv for dx in xds_list for dv in dx.data_vars]))
+ )
  if modcols is None:
  modcols = cols
- modcols = list(np.atleast_1d(modcols))
 
  # create an empty main table with enough space for all desired xds partitions
  # the first selected xds partition will be passed to create_table to provide a definition of columns and table keywords
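
Note on the mapping above: cols_from_xds_to_ms translates xds data_var names (lowercase, with a few renames such as vis -> DATA) into MSv2 column names and drops variables that have no MSv2 counterpart. A quick, self-contained illustration; the dictionaries and helper are copied from the hunk above, with the return annotation adjusted to the dict it actually builds, and the sample variable names are arbitrary:

    from typing import Dict, List

    rename_to_msv2_cols = {
        "antenna1_id": "antenna1",
        "antenna2_id": "antenna2",
        "feed1_id": "feed1",
        "feed2_id": "feed2",
        "vis_corrected": "corrected_data",
        "vis": "data",
        "vis_model": "model_data",
        "autocorr": "float_data",
    }
    cols_not_in_msv2 = ["baseline_ant1_id", "baseline_ant2_id"]

    def cols_from_xds_to_ms(cols: List[str]) -> Dict[str, str]:
        # MS column name (uppercase) -> xds data_var name; skip xds-only and pointing_ vars
        return {
            rename_to_msv2_cols.get(col, col).upper(): col
            for col in cols
            if col and col not in cols_not_in_msv2 and not col.startswith("pointing_")
        }

    print(cols_from_xds_to_ms(["vis", "uvw", "baseline_ant1_id", "pointing_direction"]))
    # -> {'DATA': 'vis', 'UVW': 'uvw'}
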
@@ -71,37 +107,42 @@ def write_ms(
  # the SPECTRAL_WINDOW, POLARIZATION, and DATA_DESCRIPTION tables must always be present and will always be written
  delayed_writes = [
  dask.delayed(write_generic_table)(
- mxds.SPECTRAL_WINDOW, outfile, "SPECTRAL_WINDOW", cols=None
- )
- ]
- delayed_writes += [
- dask.delayed(write_generic_table)(
- mxds.POLARIZATION, outfile, "POLARIZATION", cols=None
+ mxds.metainfo["spectral_window"], outfile, "SPECTRAL_WINDOW", cols=None
  )
  ]
  delayed_writes += [
  dask.delayed(write_generic_table)(
- mxds.DATA_DESCRIPTION, outfile, "DATA_DESCRIPTION", cols=None
+ mxds.metainfo["polarization"], outfile, "POLARIZATION", cols=None
  )
  ]
+ # should data_description be kept somewhere (in attrs?) or rebuilt?
+ # delayed_writes += [
+ # dask.delayed(write_generic_table)(
+ # mxds.metainfo["data_description"], outfile, "DATA_DESCRIPTION", cols=None
+ # )
+ # ]
  if subtables: # also write the rest of the subtables
  for subtable in list(mxds.attrs.keys()):
- if subtable.startswith("xds") or (
- subtable in ["SPECTRAL_WINDOW", "POLARIZATION", "DATA_DESCRIPTION"]
+ if (
+ subtable.startswith("xds")
+ or (subtable in ["spectral_window", "polarization", "data_description"])
+ or not isinstance(subtable, xr.Dataset)
  ):
  continue
+
  if verbose:
  print("writing subtable %s..." % subtable)
  delayed_writes += [
  dask.delayed(write_generic_table)(
- mxds.attrs[subtable], outfile, subtable, cols=None, verbose=verbose
+ mxds.attrs[subtable], outfile, subtable, cols=None
  )
  ]
 
  ddi_row_start = 0 # output rows will be ordered by DDI
  for xds in xds_list:
  txds = xds.copy().unify_chunks()
- ddi = txds.data_desc_id[:1].values[0]
+ # TODO: carry over or rebuild?
+ ddi = 0 # txds.data_desc_id[:1].values[0]
 
  # serial write entire DDI column first so subsequent delayed writes can find their spot
  if verbose:
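
Note on the hunk above: subtable and slice writes are queued as dask delayed calls and only run when the graph is computed (or are returned to the caller when execute=False). A generic sketch of that pattern, using a stand-in function rather than the actual write_generic_table:

    import dask

    def write_one(name: str) -> str:
        # stand-in for a write_generic_table(...) call
        return f"wrote {name}"

    delayed_writes = [
        dask.delayed(write_one)(name) for name in ["SPECTRAL_WINDOW", "POLARIZATION"]
    ]
    # nothing has been written yet; executing the graph triggers the writes
    results = dask.compute(*delayed_writes)
    print(results)  # ('wrote SPECTRAL_WINDOW', 'wrote POLARIZATION')
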
@@ -111,6 +152,10 @@
  for col in modcols:
  if col not in txds:
  continue # this can happen with bad_cols, should still be created in create_table()
+
+ if col in cols_not_in_msv2:
+ continue
+
  chunks = txds[col].chunks
  dims = txds[col].dims
  for d0 in range(len(chunks[0])):
@@ -161,9 +206,13 @@
  max_chunk_size = np.prod(
  [txds.chunks[kk][0] for kk in txds.chunks if kk in ["row", "freq", "pol"]]
  )
- for col in list(np.setdiff1d(cols, modcols)):
+ for col in list(np.setdiff1d(list(cols), modcols)):
  if col not in txds:
  continue # this can happen with bad_cols, should still be created in create_table()
+
+ if col in cols_not_in_msv2:
+ continue
+
  col_chunk_size = np.prod([kk[0] for kk in txds[col].chunks])
  col_rows = (
  int(np.ceil(max_chunk_size / col_chunk_size)) * txds[col].chunks[0][0]
@@ -175,7 +224,7 @@
  txda,
  outfile,
  ddi=ddi,
- col=col,
+ col=rename_to_msv2_cols.get(col, col).upper(),
  full_shape=txda.shape[1:],
  starts=(rr + ddi_row_start,) + (0,) * (len(txda.shape) - 1),
  )
@@ -199,38 +248,41 @@
 
 
  def write_ms_serial(
- mxds,
- outfile,
- infile=None,
- subtables=False,
- verbose=False,
- execute=True,
- memory_available_in_bytes=500000000000,
+ mxds: xr.Dataset,
+ outfile: str,
+ infile: str = None,
+ subtables: bool = False,
+ verbose: bool = False,
+ execute: bool = True,
+ memory_available_in_bytes: int = 500000000000,
  ):
  """
  Write ms format xds contents back to casacore table format on disk
 
  Parameters
  ----------
- mxds : xarray.Dataset
+ mxds : xr.Dataset
  Source multi-xarray dataset (originally created by read_ms)
  outfile : str
  Destination filename
- infile : str
+ infile : str (Default value = None)
  Source filename to copy subtables from. Generally faster than reading/writing through mxds via the subtables parameter. Default None
  does not copy subtables to output.
- subtables : bool
+ subtables : bool (Default value = False)
  Also write subtables from mxds. Default of False only writes mxds attributes that begin with xdsN to the MS main table.
  Setting to True will write all other mxds attributes to subtables of the main table. This is probably going to be SLOW!
  Use infile instead whenever possible.
- modcols : list
- List of strings indicating what column(s) were modified (aka xds data_vars). Different logic can be applied to speed up processing when
- a data_var has not been modified from the input. Default None assumes everything has been modified (SLOW)
- verbose : bool
+ verbose : bool (Default value = False)
  Whether or not to print output progress. Since writes will typically execute the DAG, if something is
  going to go wrong, it will be here. Default False
- execute : bool
+
+ execute : bool (Default value = True)
  Whether or not to actually execute the DAG, or just return it with write steps appended. Default True will execute it
+ memory_available_in_bytes : (Default value = 500000000000)
+
+ Returns
+ -------
+
  """
 
  print("*********************")
@@ -239,11 +291,9 @@ def write_ms_serial(
  print("initializing output...")
  # start = time.time()
 
- xds_list = [
- flatten_xds(mxds.attrs[kk]) for kk in mxds.attrs if kk.startswith("xds")
- ]
+ xds_list = [flatten_xds(xds) for _key, xds in mxds.partitions.items()]
  cols = list(set([dv for dx in xds_list for dv in dx.data_vars]))
- cols = list(np.atleast_1d(cols))
+ cols = cols_from_xds_to_ms(list(np.atleast_1d(cols)))
 
  # create an empty main table with enough space for all desired xds partitions
  # the first selected xds partition will be passed to create_table to provide a definition of columns and table keywords
@@ -255,9 +305,14 @@ def write_ms_serial(
 
  # start a list of dask delayed writes to disk (to be executed later)
  # the SPECTRAL_WINDOW, POLARIZATION, and DATA_DESCRIPTION tables must always be present and will always be written
- write_generic_table(mxds.SPECTRAL_WINDOW, outfile, "SPECTRAL_WINDOW", cols=None)
- write_generic_table(mxds.POLARIZATION, outfile, "POLARIZATION", cols=None)
- write_generic_table(mxds.DATA_DESCRIPTION, outfile, "DATA_DESCRIPTION", cols=None)
+ write_generic_table(
+ mxds.metainfo["spectral_window"], outfile, "SPECTRAL_WINDOW", cols=None
+ )
+ write_generic_table(
+ mxds.metainfo["polarization"], outfile, "POLARIZATION", cols=None
+ )
+ # should data_description be kept somewhere (in attrs?) or rebuilt?
+ # write_generic_table(mxds.metainfo.data_description, outfile, "DATA_DESCRIPTION", cols=None)
 
  if subtables: # also write the rest of the subtables
  # for subtable in list(mxds.attrs.keys()):
@@ -266,20 +321,24 @@ def write_ms_serial(
  # ['FEED','FIELD','ANTENNA','HISTORY']
  # ,'FIELD','ANTENNA'
  # for subtable in ['OBSERVATION']:
- for subtable in list(mxds.attrs.keys()):
+ for subtable in list(mxds.metainfo.keys()):
  if subtable.startswith("xds") or (
- subtable in ["SPECTRAL_WINDOW", "POLARIZATION", "DATA_DESCRIPTION"]
+ subtable in ["spectral_window", "polarization", "data_description"]
  ):
  continue
  if verbose:
  print("writing subtable %s..." % subtable)
  # print(subtable)
  # print(mxds.attrs[subtable])
- write_generic_table(
- mxds.attrs[subtable], outfile, subtable, cols=None, verbose=verbose
- )
+ try:
+ write_generic_table(
+ mxds.metainfo[subtable], outfile, subtable.upper(), cols=None
+ )
+ except (RuntimeError, KeyError) as exc:
+ print(f"Exception writing subtable {subtable}: {exc}")
 
- vis_data_shape = mxds.xds0.data.shape
+ part_key0 = next(iter(mxds.partitions))
+ vis_data_shape = mxds.partitions[part_key0].vis.shape
  rows_chunk_size = calc_optimal_ms_chunk_shape(
  memory_available_in_bytes, vis_data_shape, 16, "DATA"
  )
@@ -292,8 +351,8 @@ def write_ms_serial(
  )
 
  start_main = time.time()
- for col in cols:
- xda = mxds.xds0[col]
+ for col, var_name in cols.items():
+ xda = mxds.partitions[part_key0][var_name]
  # print(col,xda.dtype)
 
  for start_row in np.arange(0, vis_data_shape[0], rows_chunk_size):
@@ -308,8 +367,11 @@ def write_ms_serial(
  # print('1. Time', time.time()-start, values.shape)
 
  # start = time.time()
- tbs.putcol(col, values, start_row, len(values))
- # print('2. Time', time.time()-start)
+ try:
+ tbs.putcol(col, values, start_row, len(values))
+ # print('2. Time', time.time()-start)
+ except RuntimeError as exc:
+ print(f"Exception writing main table column {col}: {exc}")
 
  print("3. Time", time.time() - start_main)