xradio 0.0.50__py3-none-any.whl → 0.0.52__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -12,9 +12,9 @@ from xradio._utils.zarr.common import _get_file_system_and_items
12
12
 
13
13
 
14
14
  def _read_zarr(
15
- zarr_store: str, output: dict, selection: dict = {}
15
+ zarr_store: str, id_dict: dict, selection: dict = {}
16
16
  ) -> (xr.Dataset, bool):
17
- # supported key/values in output are:
17
+ # supported key/values in id_dict are:
18
18
  # "dv"
19
19
  # what data variables should be returned as.
20
20
  # "numpy": numpy arrays
@@ -29,27 +29,27 @@ def _read_zarr(
29
29
  do_dask = False
30
30
  do_numpy = False
31
31
  do_np_coords = False
32
- if "dv" in output:
33
- dv = output["dv"]
32
+ if "dv" in id_dict:
33
+ dv = id_dict["dv"]
34
34
  if dv in ["dask", "numpy"]:
35
35
  do_dask = dv == "dask"
36
36
  do_numpy = not do_dask
37
37
  else:
38
38
  raise ValueError(
39
- f"Unsupported value {output[dv]} for output[dv]. "
39
+ f"Unsupported value {id_dict[dv]} for id_dict[dv]. "
40
40
  "Supported values are 'dask' and 'numpy'"
41
41
  )
42
- if "coords" in output:
43
- c = output["coords"]
42
+ if "coords" in id_dict:
43
+ c = id_dict["coords"]
44
44
  if c == "numpy":
45
45
  do_np_coords = True
46
46
  else:
47
47
  raise ValueError(
48
- f"Unexpected value {c} for output[coords]. "
48
+ f"Unexpected value {c} for id_dict[coords]. "
49
49
  "The supported value is 'numpy'"
50
50
  )
51
51
  # do not pass selection, because that is only for the top level data vars
52
- xds = _decode(xds, zarr_store, output)
52
+ xds = _decode(xds, zarr_store, id_dict)
53
53
  if do_np_coords:
54
54
  xds = _coords_to_numpy(xds)
55
55
  if do_dask:
@@ -59,11 +59,9 @@ def _read_zarr(
59
59
  return xds
60
60
 
61
61
 
62
- def _decode(xds: xr.Dataset, zarr_store: str, output: dict) -> (xr.Dataset, bool):
62
+ def _decode(xds: xr.Dataset, zarr_store: str, id_dict: dict) -> xr.Dataset:
63
63
  xds.attrs = _decode_dict(xds.attrs, "")
64
- sub_xdses = _decode_sub_xdses(zarr_store, output)
65
- for k, v in sub_xdses.items():
66
- xds.attrs[k] = v
64
+ _decode_sub_xdses(xds, zarr_store, id_dict)
67
65
  return xds
68
66
 
69
67
 
@@ -85,26 +83,45 @@ def _decode_dict(my_dict: dict, top_key: str) -> dict:
85
83
  return my_dict
86
84
 
87
85
 
88
- def _decode_sub_xdses(zarr_store: str, output: dict) -> dict:
89
- sub_xdses = {}
86
+ def _decode_sub_xdses(xarrayObj, top_dir: str, id_dict: dict) -> None:
87
+ # FIXME this also needs to support S3
88
+ # determine immediate subdirs of zarr_store
89
+ entries = os.scandir(top_dir)
90
+ for d in entries:
91
+ path = os.path.join(top_dir, d.name)
92
+ if os.path.isdir(path):
93
+ if d.name.startswith(_top_level_sub_xds):
94
+ ky = d.name[len(_top_level_sub_xds) :]
95
+ xarrayObj.attrs[ky] = _read_zarr(path, id_dict)
96
+ # TODO if attrs that are xdses have attrs that are xdses ...
97
+ else:
98
+ # descend into the directory
99
+ _decode_sub_xdses(xarrayObj[d.name], path, id_dict)
90
100
 
91
- fs, store_contents = _get_file_system_and_items(zarr_store)
92
101
 
102
+ """
103
+ def _decode_sub_xdses(zarr_store: str, id_dict: dict) -> dict:
104
+ sub_xdses = {}
105
+ fs, store_contents = _get_file_system_and_items(zarr_store)
93
106
  if isinstance(fs, s3fs.core.S3FileSystem):
94
107
  # could we just use the items as returned from the helper function..?
95
108
  store_tree = fs.walk(zarr_store, topdown=True)
109
+ # Q: what is prepend_s3 used for? In this version it is defined but not used.
96
110
  prepend_s3 = "s3://"
97
111
  else:
98
112
  store_tree = os.walk(zarr_store, topdown=True)
99
113
  prepend_s3 = ""
100
-
101
114
  for root, dirs, files in store_tree:
115
+ relpath = os.path.relpath(root, zarr_store)
116
+ print("rpath", relpath)
102
117
  for d in dirs:
103
118
  if d.startswith(_top_level_sub_xds):
104
- xds = _read_zarr(os.sep.join([root, d]), output)
119
+ xds = _read_zarr(os.sep.join([root, d]), id_dict)
105
120
  # for k, v in xds.data_vars.items():
106
121
  # xds = xds.drop_vars([k]).assign({k: v.compute()})
107
- ky = d[len(_top_level_sub_xds) + 1 :]
122
+ ky = d[len(_top_level_sub_xds) :]
108
123
  sub_xdses[ky] = xds
109
-
124
+ print(f"Sub xdses: {sub_xdses.keys()}")
125
+ print("return")
110
126
  return sub_xdses
127
+ """
@@ -1,3 +1,4 @@
1
+ import logging
1
2
  import numpy as np
2
3
  import xarray as xr
3
4
  import os
@@ -6,44 +7,42 @@ from .common import _np_types, _top_level_sub_xds
6
7
 
7
8
  def _write_zarr(xds: xr.Dataset, zarr_store: str):
8
9
  xds_copy = xds.copy(deep=True)
9
- xds_copy, xds_dict = _encode(xds_copy)
10
+ sub_xds_dict = _encode(xds_copy, zarr_store)
10
11
  z_obj = xds_copy.to_zarr(store=zarr_store, compute=True)
11
- _write_sub_xdses(zarr_store, xds_dict, _top_level_sub_xds)
12
+ if sub_xds_dict:
13
+ _write_sub_xdses(sub_xds_dict)
12
14
 
13
15
 
14
- def _encode(xds: xr.Dataset):
16
+ def _encode(xds: xr.Dataset, top_path: str) -> dict:
15
17
  # encode attrs
16
- xds.attrs, xds_dict = _encode_dict(xds.attrs)
17
- return xds, xds_dict
18
+ sub_xds_dict = {}
19
+ _encode_dict(xds.attrs, top_path, sub_xds_dict)
20
+ for dv in xds.data_vars:
21
+ _encode_dict(xds[dv].attrs, os.sep.join([top_path, dv]), sub_xds_dict)
22
+ logging.debug(f"Encoded sub_xds_dict: {sub_xds_dict}")
23
+ return sub_xds_dict
18
24
 
19
25
 
20
- def _encode_dict(my_dict: dict, top_key="") -> tuple:
21
- xds_dict = {}
26
+ def _encode_dict(my_dict: dict, top_path: str, sub_xds_dict) -> tuple:
22
27
  del_keys = []
23
28
  for k, v in my_dict.items():
24
29
  if isinstance(v, dict):
25
- z = os.sep.join([top_key, k]) if top_key else k
26
- my_dict[k], ret_xds_dict = _encode_dict(v, z)
27
- if ret_xds_dict:
28
- xds_dict[k] = ret_xds_dict
30
+ z = os.sep.join([top_path, k])
31
+ _encode_dict(v, z, sub_xds_dict)
29
32
  elif isinstance(v, np.ndarray):
30
33
  my_dict[k] = {}
31
34
  my_dict[k]["_type"] = "numpy.ndarray"
32
35
  my_dict[k]["_value"] = v.tolist()
33
36
  my_dict[k]["_dtype"] = str(v.dtype)
34
37
  elif isinstance(v, xr.Dataset):
35
- xds_dict[k] = v.copy(deep=True)
38
+ sub_xds_dict[os.sep.join([top_path, f"{_top_level_sub_xds}{k}"])] = v.copy(
39
+ deep=True
40
+ )
36
41
  del_keys.append(k)
37
42
  for k in del_keys:
38
43
  del my_dict[k]
39
- return my_dict, xds_dict
40
44
 
41
45
 
42
- def _write_sub_xdses(zarr_store: str, xds_dict: dict, path: str):
43
- for k, v in xds_dict.items():
44
- my_path = f"{path}_{k}" if path else f"{k}"
45
- if isinstance(v, dict):
46
- _write_sub_xdses(zarr_store, xds_dict[k], my_path)
47
- elif isinstance(v, xr.Dataset):
48
- zs = os.sep.join([zarr_store, my_path])
49
- z_obj = v.to_zarr(store=zs, compute=True)
46
+ def _write_sub_xdses(sub_xds: dict):
47
+ for k, v in sub_xds.items():
48
+ z_obj = v.to_zarr(store=k, compute=True)
@@ -48,7 +48,9 @@ def _load_casa_image_block(infile: str, block_des: dict, do_sky_coords) -> xr.Da
48
48
  block = _get_persistent_block(
49
49
  image_full_path, shapes, starts, dimorder, transpose_list, new_axes
50
50
  )
51
- xds = _add_sky_or_aperture(xds, block, dimorder, image_full_path, ret["sphr_dims"])
51
+ xds = _add_sky_or_aperture(
52
+ xds, block, dimorder, image_full_path, ret["sphr_dims"], True
53
+ )
52
54
  mymasks = _get_mask_names(image_full_path)
53
55
  for m in mymasks:
54
56
  full_path = os.sep.join([image_full_path, m])
@@ -57,7 +59,7 @@ def _load_casa_image_block(infile: str, block_des: dict, do_sky_coords) -> xr.Da
57
59
  )
58
60
  # data vars are all caps by convention
59
61
  xds = _add_mask(xds, m.upper(), block, dimorder)
60
- xds.attrs = _casa_image_to_xds_attrs(image_full_path, True)
62
+ xds.attrs = _casa_image_to_xds_attrs(image_full_path)
61
63
  mb = _multibeam_array(xds, image_full_path, False)
62
64
  if mb is not None:
63
65
  selectors = {}
@@ -86,6 +88,7 @@ def _read_casa_image(
86
88
  dimorder,
87
89
  img_full_path,
88
90
  ret["sphr_dims"],
91
+ history,
89
92
  )
90
93
  if masks:
91
94
  mymasks = _get_mask_names(img_full_path)
@@ -93,7 +96,7 @@ def _read_casa_image(
93
96
  ary = _read_image_array(img_full_path, chunks, mask=m, verbose=verbose)
94
97
  # data var names are all caps by convention
95
98
  xds = _add_mask(xds, m.upper(), ary, dimorder)
96
- xds.attrs = _casa_image_to_xds_attrs(img_full_path, history)
99
+ xds.attrs = _casa_image_to_xds_attrs(img_full_path)
97
100
  mb = _multibeam_array(xds, img_full_path, True)
98
101
  if mb is not None:
99
102
  xds["BEAM"] = mb
@@ -1214,13 +1214,13 @@ def convert_and_write_partition(
1214
1214
  )
1215
1215
  # but before, keep the name-id arrays, we need them for the pointing and weather xds
1216
1216
  ant_xds_name_ids = ant_xds["antenna_name"].set_xindex("antenna_id")
1217
- ant_xds_station_name_ids = ant_xds["station"].set_xindex("antenna_id")
1217
+ ant_position_xds_with_ids = ant_xds["ANTENNA_POSITION"].set_xindex("antenna_id")
1218
1218
  # No longer needed after converting to name.
1219
1219
  ant_xds = ant_xds.drop_vars("antenna_id")
1220
1220
 
1221
1221
  # Create weather_xds
1222
1222
  start = time.time()
1223
- weather_xds = create_weather_xds(in_file, ant_xds_station_name_ids)
1223
+ weather_xds = create_weather_xds(in_file, ant_position_xds_with_ids)
1224
1224
  logger.debug("Time weather " + str(time.time() - start))
1225
1225
 
1226
1226
  # Create pointing_xds
@@ -1425,7 +1425,7 @@ def antenna_ids_to_names(
1425
1425
  "antenna_id",
1426
1426
  "antenna_name",
1427
1427
  "mount",
1428
- "station",
1428
+ "station_name",
1429
1429
  ]
1430
1430
  for unwanted_coord in unwanted_coords_from_ant_xds:
1431
1431
  xds = xds.drop_vars(unwanted_coord)
@@ -109,7 +109,7 @@ def extract_antenna_info(
109
109
 
110
110
  to_new_coords = {
111
111
  "NAME": ["antenna_name", ["antenna_name"]],
112
- "STATION": ["station", ["antenna_name"]],
112
+ "STATION": ["station_name", ["antenna_name"]],
113
113
  "MOUNT": ["mount", ["antenna_name"]],
114
114
  # "PHASED_ARRAY_ID": ["phased_array_id", ["antenna_name"]],
115
115
  "antenna_id": ["antenna_id", ["antenna_name"]],
@@ -158,9 +158,9 @@ def extract_antenna_info(
158
158
 
159
159
  # None of the native numpy functions work on the github test runner.
160
160
  antenna_name = ant_xds["antenna_name"].values
161
- station = ant_xds["station"].values
161
+ station_name = ant_xds["station_name"].values
162
162
  antenna_name = np.array(
163
- list(map(lambda x, y: x + "_" + y, antenna_name, station))
163
+ list(map(lambda x, y: x + "_" + y, antenna_name, station_name))
164
164
  )
165
165
 
166
166
  ant_xds["antenna_name"] = xr.DataArray(antenna_name, dims=["antenna_name"])
@@ -376,7 +376,7 @@ def create_gain_curve_xds(
376
376
 
377
377
  ant_borrowed_coords = {
378
378
  "antenna_name": ant_xds.coords["antenna_name"],
379
- "station": ant_xds.coords["station"],
379
+ "station_name": ant_xds.coords["station_name"],
380
380
  "mount": ant_xds.coords["mount"],
381
381
  "telescope_name": ant_xds.coords["telescope_name"],
382
382
  "receptor_label": ant_xds.coords["receptor_label"],
@@ -486,7 +486,7 @@ def create_phase_calibration_xds(
486
486
 
487
487
  ant_borrowed_coords = {
488
488
  "antenna_name": ant_xds.coords["antenna_name"],
489
- "station": ant_xds.coords["station"],
489
+ "station_name": ant_xds.coords["station_name"],
490
490
  "mount": ant_xds.coords["mount"],
491
491
  "telescope_name": ant_xds.coords["telescope_name"],
492
492
  "receptor_label": ant_xds.coords["receptor_label"],
@@ -17,7 +17,8 @@ from xradio.measurement_set._utils._msv2._tables.read import (
17
17
  make_taql_where_between_min_max,
18
18
  load_generic_table,
19
19
  )
20
- from xradio._utils.list_and_array import cast_to_str
20
+ from xradio._utils.list_and_array import cast_to_str, get_pad_value
21
+
21
22
  from xradio._utils.coord_math import (
22
23
  convert_to_si_units,
23
24
  add_position_offsets,
@@ -226,9 +227,10 @@ def extract_ephemeris_info(
226
227
  "type": "location",
227
228
  "units": ["deg", "deg", "m"],
228
229
  "data": observer_position,
229
- "frame": "WGS84",
230
+ "frame": "ITRS",
230
231
  "origin_object_name": "Earth",
231
232
  "coordinate_system": ephemeris_meta["obsloc"].lower(),
233
+ "ellipsoid": "WGS84",
232
234
  }
233
235
  ) # I think the units are ['deg','deg','m'] and 'WGS84'.
234
236
 
@@ -546,10 +548,16 @@ def pad_missing_sources(
546
548
  for source_id in unique_source_ids
547
549
  if source_id not in source_xds.coords["SOURCE_ID"]
548
550
  ]
551
+ if len(missing_source_ids) < 1:
552
+ return source_xds
549
553
 
550
554
  # would like to use the new-ish xr.pad, but it creates issues with indices/coords and is
551
555
  # also not free of overheads, as it for example changes all numeric types to float64
552
- missing_source_xds = xr.full_like(source_xds.isel(SOURCE_ID=0), fill_value=np.nan)
556
+ fill_value = {
557
+ var_name: get_pad_value(var.dtype)
558
+ for var_name, var in source_xds.data_vars.items()
559
+ }
560
+ missing_source_xds = xr.full_like(source_xds.isel(SOURCE_ID=0), fill_value)
553
561
  pad_str = "Unknown"
554
562
  pad_str_type = "<U9"
555
563
  for var in missing_source_xds.data_vars:
@@ -172,7 +172,130 @@ def make_taql_where_weather(
172
172
  return taql_where
173
173
 
174
174
 
175
- def create_weather_xds(in_file: str, ant_xds_station_name_ids: xr.DataArray):
175
+ def prepare_generic_weather_xds_and_station_name(
176
+ generic_weather_xds: xr.Dataset,
177
+ in_file: str,
178
+ ant_position_with_ids: xr.DataArray,
179
+ has_asdm_station_position: bool,
180
+ ) -> tuple[xr.Dataset, np.ndarray]:
181
+ """
182
+ A generic_weather_xds loaded with load_generic_table() might still need to be reloaded
183
+ with an additional WHERE condition to constrain the indices of antennas. But this depends on whether
184
+ ASDM/importasdm extension columns are present or not.
185
+
186
+ This also prepares the station_name values:
187
+ - if has_asdm_station_position:
188
+ - tries to find from ASDM_STATION the station names,
189
+ - otherwise, takes ids (antenna_ids in generic_weather were actually the ASDM_STATION_IDs)
190
+ - else: get the values from antenna_xds (the stations present)
191
+
192
+
193
+ Parameters
194
+ ----------
195
+ generic_weather_xds : xr.Dataset
196
+ generic dataset read from an MSv2 WEATHER subtable
197
+ in_file : str
198
+ Input MS name.
199
+ ant_position_with_ids : xr.DataArray
200
+ antenna_position data var from the antenna_xds (expected to still include the initial ANTENNA_ID
201
+ coordinate as well as other coordinates from the antenna_xds)
202
+ has_asdm_station_position : bool
203
+ Whether this generic weather_xds should be treated as including the nonstandard extensions
204
+ NS_WX_STATION_ID and NS_WX_STATION_POSITION as created by CASA/importasdm (ALMA and VLA).
205
+
206
+ Returns
207
+ -------
208
+ (generic_weather_xds, station_name): tuple[xarray.Dataset, numpy.ndarray]
209
+ Weather Xarray Dataset prepared for generic conversion to MSv4, values for the station_name coordinate
210
+ """
211
+
212
+ if has_asdm_station_position:
213
+ asdm_station_path = os.path.join(in_file, "ASDM_STATION")
214
+ if table_exists(asdm_station_path):
215
+ asdm_station_xds = load_generic_table(in_file, "ASDM_STATION")
216
+ station_name = asdm_station_xds.name.values[
217
+ generic_weather_xds["ANTENNA_ID"].values
218
+ ]
219
+ else:
220
+ # if no info from ASDM_STATION, use the indices from antenna_id which was actually the NS_WX_STATION_ID
221
+ len_antenna_id = generic_weather_xds.sizes["ANTENNA_ID"]
222
+ station_name = list(
223
+ map(
224
+ lambda x, y: x + "_" + y,
225
+ ["Station"] * len_antenna_id,
226
+ generic_weather_xds["ANTENNA_ID"].values.astype(str),
227
+ )
228
+ )
229
+
230
+ else:
231
+ taql_where = make_taql_where_weather(in_file, ant_position_with_ids)
232
+ generic_weather_xds = load_generic_table(
233
+ in_file,
234
+ "WEATHER",
235
+ rename_ids=subt_rename_ids["WEATHER"],
236
+ taql_where=taql_where,
237
+ )
238
+
239
+ if not generic_weather_xds.data_vars:
240
+ # for example when the weather subtable only has info for antennas/stations
241
+ # not present in the MSv4 (no overlap between antennas loaded in ant_xds and weather)
242
+ return None, None
243
+
244
+ stations_present = ant_position_with_ids.sel(
245
+ antenna_id=generic_weather_xds["ANTENNA_ID"]
246
+ ).station_name
247
+ station_name = stations_present.values
248
+
249
+ return generic_weather_xds, station_name
250
+
251
+
252
+ def finalize_station_position(
253
+ weather_xds: xr.Dataset, ant_position_with_ids, has_asdm_station_position: bool
254
+ ) -> xr.Dataset:
255
+ """
256
+ For a STATION_POSITION data var being added to a weather_xds, make sure coordinates and dimensions
257
+ are conforming to the schema.
258
+
259
+ Parameters
260
+ ----------
261
+ weather_xds : xr.Dataset
262
+ weather_xds where we still need to ensure the right coordinates and attributes
263
+ ant_position_with_ids : xr.DataArray
264
+ antenna_position data var from the antenna_xds (expected to still include the initial ANTENNA_ID
265
+ coordinate as well as other coordinates from the antenna_xds)
266
+ has_asdm_station_position : bool
267
+ Whether this generic weather_xds should be treated as including the nonstandard extensions
268
+ NS_WX_STATION_ID and NS_WX_STATION_POSITION as created by CASA/importasdm (ALMA and VLA).
269
+
270
+ Returns
271
+ -------
272
+ weather_xds: xarray.Dataset
273
+ Weather Xarray Dataset with all coordinates and attributes in STATION_POSITION
274
+ """
275
+ if has_asdm_station_position:
276
+ # STATION_POSITION has been created but needs proper dimensions and attrs
277
+ # Drop the time dim
278
+ weather_xds["STATION_POSITION"] = weather_xds["STATION_POSITION"].sel(
279
+ time_weather=0, drop=True, method="nearest"
280
+ )
281
+ # borrow location frame attributes from antenna position
282
+ weather_xds["STATION_POSITION"].attrs = ant_position_with_ids.attrs
283
+ else:
284
+ # borrow from ant_position_with_ids but without carrying over other coords
285
+ weather_xds = weather_xds.assign(
286
+ {
287
+ "STATION_POSITION": (
288
+ ["station_name", "cartesian_pos_label"],
289
+ ant_position_with_ids.values,
290
+ ant_position_with_ids.attrs,
291
+ )
292
+ }
293
+ )
294
+
295
+ return weather_xds
296
+
297
+
298
+ def create_weather_xds(in_file: str, ant_position_with_ids: xr.DataArray):
176
299
  """
177
300
  Creates a Weather Xarray Dataset from a MS v2 WEATHER table.
178
301
 
@@ -180,8 +303,9 @@ def create_weather_xds(in_file: str, ant_xds_station_name_ids: xr.DataArray):
180
303
  ----------
181
304
  in_file : str
182
305
  Input MS name.
183
- ant_xds_station_name_ids : xr.DataArray
184
- station name data array from antenna_xds, with name/id information
306
+ ant_position_with_ids : xr.DataArray
307
+ antenna_position data var from the antenna_xds (expected to still including the initial ANTENNA_ID coordinate
308
+ as wellas other coordinates from the antenna_xds)
185
309
 
186
310
  Returns
187
311
  -------
@@ -190,32 +314,32 @@ def create_weather_xds(in_file: str, ant_xds_station_name_ids: xr.DataArray):
190
314
  """
191
315
 
192
316
  try:
193
- taql_where = make_taql_where_weather(in_file, ant_xds_station_name_ids)
194
317
  generic_weather_xds = load_generic_table(
195
318
  in_file,
196
319
  "WEATHER",
197
320
  rename_ids=subt_rename_ids["WEATHER"],
198
- taql_where=taql_where,
199
321
  )
200
322
  except ValueError as _exc:
201
323
  return None
202
324
 
203
- if not generic_weather_xds.data_vars:
204
- # for example when the weather subtable only has info for antennas/stations
205
- # not present in the MSv4 (no overlap between antennas loaded in ant_xds and weather)
325
+ has_asdm_station_position = (
326
+ "NS_WX_STATION_POSITION" in generic_weather_xds.data_vars
327
+ )
328
+ generic_weather_xds, station_name = prepare_generic_weather_xds_and_station_name(
329
+ generic_weather_xds, in_file, ant_position_with_ids, has_asdm_station_position
330
+ )
331
+ if not generic_weather_xds:
206
332
  return None
207
333
 
208
334
  weather_xds = xr.Dataset(attrs={"type": "weather"})
209
- stations_present = ant_xds_station_name_ids.sel(
210
- antenna_id=generic_weather_xds["ANTENNA_ID"]
211
- )
212
335
  coords = {
213
- "station_name": stations_present.data,
214
- "antenna_name": stations_present.coords["antenna_name"].data,
336
+ "station_name": station_name,
337
+ "cartesian_pos_label": ["x", "y", "z"],
215
338
  }
216
339
  weather_xds = weather_xds.assign_coords(coords)
217
340
 
218
341
  dims_station_time = ["station_name", "time_weather"]
342
+ dims_station_time_position = dims_station_time + ["cartesian_pos_label"]
219
343
  to_new_data_variables = {
220
344
  "H20": ["H2O", dims_station_time],
221
345
  "IONOS_ELECTRON": ["IONOS_ELECTRON", dims_station_time],
@@ -226,6 +350,15 @@ def create_weather_xds(in_file: str, ant_xds_station_name_ids: xr.DataArray):
226
350
  "WIND_DIRECTION": ["WIND_DIRECTION", dims_station_time],
227
351
  "WIND_SPEED": ["WIND_SPEED", dims_station_time],
228
352
  }
353
+ if has_asdm_station_position:
354
+ to_new_data_variables.update(
355
+ {
356
+ "NS_WX_STATION_POSITION": [
357
+ "STATION_POSITION",
358
+ dims_station_time_position,
359
+ ],
360
+ }
361
+ )
229
362
 
230
363
  to_new_coords = {
231
364
  "TIME": ["time_weather", ["time_weather"]],
@@ -234,6 +367,9 @@ def create_weather_xds(in_file: str, ant_xds_station_name_ids: xr.DataArray):
234
367
  weather_xds = convert_generic_xds_to_xradio_schema(
235
368
  generic_weather_xds, weather_xds, to_new_data_variables, to_new_coords
236
369
  )
370
+ weather_xds = finalize_station_position(
371
+ weather_xds, ant_position_with_ids, has_asdm_station_position
372
+ )
237
373
 
238
374
  # TODO: option to interpolate to main time
239
375
 
@@ -256,6 +392,7 @@ def correct_generic_pointing_xds(
256
392
  and tries to correct several deviations from the MSv2 specs seen in
257
393
  common test data.
258
394
  The problems fixed here include wrong dimensions:
395
+
259
396
  - for example transposed dimensions with respect to the MSv2 specs (output
260
397
  from CASA simulator),
261
398
  - missing/additional unexpected dimensions when some of the columns are
@@ -423,6 +560,7 @@ def prepare_generic_sys_cal_xds(generic_sys_cal_xds: xr.Dataset) -> xr.Dataset:
423
560
  sys_cal_xds dataset, as their structure differs in dimensions and order
424
561
  of dimensions.
425
562
  This function performs various preparation steps, such as:
563
+
426
564
  - filter out dimensions not needed for an individual MSv4 (SPW, FEED),
427
565
  - drop variables loaded from columns with all items set to empty array,
428
566
  - transpose the dimensions frequency,receptor