xradio 0.0.46__py3-none-any.whl → 0.0.48__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -7,7 +7,9 @@ import xarray as xr
 
 import toolviper.utils.logger as logger
 from xradio.measurement_set._utils._msv2.msv4_sub_xdss import (
+    interpolate_to_time,
     rename_and_interpolate_to_time,
+    standard_time_coord_attrs,
 )
 from xradio.measurement_set._utils._msv2.subtables import subt_rename_ids
 from xradio.measurement_set._utils._msv2._tables.read import (
@@ -41,7 +43,7 @@ def create_field_and_source_xds(
     field_times: list,
     is_single_dish: bool,
     time_min_max: Tuple[np.float64, np.float64],
-    ephemeris_interp_time: Union[xr.DataArray, None] = None,
+    ephemeris_interpolate: bool = True,
 ) -> tuple[xr.Dataset, int]:
     """
     Create a field and source xarray dataset (xds) from the given input file, field ID, and spectral window ID.
@@ -57,13 +59,13 @@ def create_field_and_source_xds(
     spectral_window_id : int
         The ID of the spectral window.
     field_times: list
-        Time data for field. It is the same as the time axis in the main MSv4 dataset and is used if more than one field is present.
+        Time data for field. It is the same as the time axis in the main MSv4 dataset.
     is_single_dish: bool
        whether the main xds has single-dish (SPECTRUM) data
     time_min_max : Tuple[np.float64, np.float46]
        Min / max times to constrain loading (usually to the time range relevant to an MSv4)
-    ephemeris_interp_time : Union[xr.DataArray, None]
-        Time axis to interpolate the ephemeris data vars to (usually main MSv4 time)
+    ephemeris_interpolate : bool
+        If true ephemeris data is interpolated to the main MSv4 time axis given in field_times.
 
    Returns:
    -------
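For context, the new boolean replaces the explicit time axis of 0.0.46: instead of passing ephemeris_interp_time, callers now pass field_times plus ephemeris_interpolate, and "interpolating ephemeris data to the main MSv4 time axis" is conceptually an xarray interpolation over the ephemeris time coordinate. A minimal sketch of that idea (illustrative names and values only, not xradio's implementation):

import numpy as np
import xarray as xr

# Illustrative only: interpolating a coarsely sampled ephemeris quantity onto
# a denser main-dataset time axis. Sampling values and names are made up.
ephem_time = np.array([0.0, 60.0, 120.0])  # coarse ephemeris sampling [s]
ra = xr.DataArray(
    [1.00, 1.01, 1.02],
    dims="time_ephemeris",
    coords={"time_ephemeris": ephem_time},
)

main_time = np.arange(0.0, 121.0, 10.0)  # main MSv4 time axis [s]
ra_on_main_time = ra.interp(time_ephemeris=main_time).rename(
    {"time_ephemeris": "time"}
)
print(ra_on_main_time.sizes)  # {'time': 13}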
@@ -77,41 +79,47 @@ def create_field_and_source_xds(
 
     field_and_source_xds = xr.Dataset(attrs={"type": "field_and_source"})
 
-    field_and_source_xds, ephemeris_path, ephemeris_table_name, source_id = (
-        extract_field_info_and_check_ephemeris(
-            field_and_source_xds, in_file, field_id, field_times, is_single_dish
-        )
+    (
+        field_and_source_xds,
+        ephemeris_path,
+        ephemeris_table_name,
+        source_id,
+        field_names,
+    ) = extract_field_info_and_check_ephemeris(
+        field_and_source_xds, in_file, field_id, field_times, is_single_dish
+    )
+
+    field_and_source_xds, num_lines = extract_source_info(
+        field_and_source_xds, in_file, source_id, spectral_window_id
     )
 
-    if field_and_source_xds.attrs["is_ephemeris"]:
+    if field_and_source_xds.attrs["type"] == "field_and_source_ephemeris":
        field_and_source_xds = extract_ephemeris_info(
            field_and_source_xds,
            ephemeris_path,
            ephemeris_table_name,
            is_single_dish,
            time_min_max,
-            ephemeris_interp_time,
+            field_times,
+            field_names,
+            ephemeris_interpolate,
        )
 
-    field_and_source_xds, num_lines = extract_source_info(
-        field_and_source_xds, in_file, source_id, spectral_window_id
-    )
-
    logger.debug(
        f"create_field_and_source_xds() execution time {time.time() - start_time:0.2f} s"
    )
 
-    # Check if we can drop time axis. The phase centers are repeated.
-    if field_times is not None:
-        if is_single_dish:
-            center_dv = "FIELD_REFERENCE_CENTER"
-        else:
-            center_dv = "FIELD_PHASE_CENTER"
+    # # Check if we can drop time axis. The phase centers are repeated.
+    # if field_times is not None:
+    #     if is_single_dish:
+    #         center_dv = "FIELD_REFERENCE_CENTER"
+    #     else:
+    #         center_dv = "FIELD_PHASE_CENTER"
 
-        if np.unique(field_and_source_xds[center_dv], axis=0).shape[0] == 1:
-            field_and_source_xds = field_and_source_xds.isel(time=0).drop_vars("time")
+    # if np.unique(field_and_source_xds[center_dv], axis=0).shape[0] == 1:
+    #     field_and_source_xds = field_and_source_xds.isel(time=0).drop_vars("time")
 
-    return field_and_source_xds, source_id, num_lines
+    return field_and_source_xds, source_id, num_lines, field_names
 
 
 def extract_ephemeris_info(
@@ -121,6 +129,8 @@ def extract_ephemeris_info(
     is_single_dish,
     time_min_max: Tuple[np.float64, np.float64],
     interp_time: Union[xr.DataArray, None],
+    field_names: list,
+    ephemeris_interpolate: bool = True,
 ):
     """
     Extracts ephemeris information from the given path and table name and adds it to the xarray dataset.
@@ -165,7 +175,7 @@ def extract_ephemeris_info(
     ephemeris_xds = ephemeris_xds.isel(
         ephemeris_id=0
     )  # Collapse the ephemeris_id dimension.
-    # Data varaibles ['time', 'RA', 'DEC', 'Rho', 'RadVel', 'NP_ang', 'NP_dist', 'DiskLong', 'DiskLat', 'Sl_lon', 'Sl_lat', 'r', 'rdot', 'phang']
+    # Data variables ['time', 'RA', 'DEC', 'Rho', 'RadVel', 'NP_ang', 'NP_dist', 'DiskLong', 'DiskLat', 'Sl_lon', 'Sl_lat', 'r', 'rdot', 'phang']
 
     # Get meta data.
     ephemeris_meta = ephemeris_xds.attrs["other"]["msv2"]["ctds_attrs"]
@@ -357,21 +367,24 @@ def extract_ephemeris_info(
         "sky_pos_label": ["ra", "dec", "dist"],
     }
     temp_xds = temp_xds.assign_coords(coords)
-    time_coord_attrs = {
-        "type": "time",
-        "units": ["s"],
-        "scale": "utc",
-        "format": "unix",
-    }
-    temp_xds["time_ephemeris"].attrs.update(time_coord_attrs)
+    temp_xds["time_ephemeris"].attrs.update(standard_time_coord_attrs)
 
     # Convert to si units
     temp_xds = convert_to_si_units(temp_xds)
 
     # interpolate if ephemeris_interpolate/interp_time=True, and rename time_ephemeris=>time
-    temp_xds = rename_and_interpolate_to_time(
-        temp_xds, "time_ephemeris", interp_time, "field_and_source_xds"
-    )
+    if ephemeris_interpolate:
+        temp_xds = rename_and_interpolate_to_time(
+            temp_xds, "time_ephemeris", interp_time, "field_and_source_xds"
+        )
+        source_location_interp = temp_xds["SOURCE_LOCATION"]
+    else:
+        source_location_interp = interpolate_to_time(
+            temp_xds["SOURCE_LOCATION"],
+            interp_time,
+            "field_and_source_xds",
+            "time_ephemeris",
+        )
 
     xds = xr.merge([xds, temp_xds])
@@ -383,35 +396,58 @@ def extract_ephemeris_info(
     else:
         center_dv = "FIELD_PHASE_CENTER"
 
-    if "time" in xds[center_dv].coords:
-        assert (
-            interp_time is not None
-        ), 'ephemeris_interpolate must be True if there is ephemeris data and multiple fields (this will occur if "FIELD_ID" is not in partition_scheme).'
+    xds = xds.sel(field_name=field_names)  # Expand for all times in ms
+    xds = xds.assign_coords({"time": ("field_name", interp_time)})
+    xds["time"].attrs.update(standard_time_coord_attrs)
+    xds = xds.swap_dims({"field_name": "time"})
 
-        field_phase_center = wrap_to_pi(
-            xds[center_dv].values + xds["SOURCE_LOCATION"][:, 0:2].values
-        )
-        field_phase_center = np.column_stack(
-            (field_phase_center, np.zeros(xds[center_dv].values.shape[0]))
-        )
-        field_phase_center[:, -1] = (
-            field_phase_center[:, -1] + xds["SOURCE_LOCATION"][:, -1].values
-        )
+    source_location_interp
+    field_phase_center = wrap_to_pi(
+        xds[center_dv].values + source_location_interp[:, 0:2].values
+    )
 
-        xds[center_dv] = xr.DataArray(
-            field_phase_center,
-            dims=[xds["SOURCE_LOCATION"].dims[0], "sky_pos_label"],
-        )
-    else:
-        field_phase_center = (
-            np.append(xds[center_dv].values, 0) + xds["SOURCE_LOCATION"].values
-        )
-        field_phase_center[:, 0:2] = wrap_to_pi(field_phase_center[:, 0:2])
+    field_phase_center = np.column_stack(
+        (field_phase_center, np.zeros(xds[center_dv].values.shape[0]))
+    )
 
-        xds[center_dv] = xr.DataArray(
-            field_phase_center,
-            dims=[xds["SOURCE_LOCATION"].dims[0], "sky_pos_label"],
-        )
+    field_phase_center[:, -1] = (
+        field_phase_center[:, -1] + source_location_interp[:, -1].values
+    )
+
+    xds[center_dv] = xr.DataArray(
+        field_phase_center,
+        dims=["time", "sky_pos_label"],
+    )
+
+    # if "time" in xds[center_dv].coords:
+    #     assert (
+    #         interp_time is not None
+    #     ), 'ephemeris_interpolate must be True if there is ephemeris data and multiple fields (this will occur if "FIELD_ID" is not in partition_scheme).'
+
+    #     field_phase_center = wrap_to_pi(
+    #         xds[center_dv].values + xds["SOURCE_LOCATION"][:, 0:2].values
+    #     )
+    #     field_phase_center = np.column_stack(
+    #         (field_phase_center, np.zeros(xds[center_dv].values.shape[0]))
+    #     )
+    #     field_phase_center[:, -1] = (
+    #         field_phase_center[:, -1] + xds["SOURCE_LOCATION"][:, -1].values
+    #     )
+
+    #     xds[center_dv] = xr.DataArray(
+    #         field_phase_center,
+    #         dims=[xds["SOURCE_LOCATION"].dims[0], "sky_pos_label"],
+    #     )
+    # else:
+    #     field_phase_center = (
+    #         np.append(xds[center_dv].values, 0) + xds["SOURCE_LOCATION"].values
+    #     )
+    #     field_phase_center[:, 0:2] = wrap_to_pi(field_phase_center[:, 0:2])
+
+    #     xds[center_dv] = xr.DataArray(
+    #         field_phase_center,
+    #         dims=[xds["SOURCE_LOCATION"].dims[0], "sky_pos_label"],
+    #     )
 
     xds[center_dv].attrs.update(xds["SOURCE_LOCATION"].attrs)
 
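The new code in this hunk expands the per-field dataset onto the main time axis by label-selecting with the per-integration field names, attaching the matching times, and swapping dimensions. A self-contained sketch of that xarray pattern with made-up data (not xradio structures):

import numpy as np
import xarray as xr

# Made-up per-field dataset: two fields, three sky-position components each.
xds = xr.Dataset(
    {"FIELD_PHASE_CENTER": (("field_name", "sky_pos_label"), np.zeros((2, 3)))},
    coords={
        "field_name": ["J1331_0", "3C286_1"],
        "sky_pos_label": ["ra", "dec", "dist"],
    },
)

field_names = ["J1331_0", "J1331_0", "3C286_1"]  # one entry per main-xds time
interp_time = np.array([10.0, 20.0, 30.0])

xds = xds.sel(field_name=field_names)  # expand: one row per integration
xds = xds.assign_coords({"time": ("field_name", interp_time)})
xds = xds.swap_dims({"field_name": "time"})  # time becomes the dimension
print(xds["FIELD_PHASE_CENTER"].dims)  # ('time', 'sky_pos_label')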
@@ -465,18 +501,12 @@ def make_line_dims_and_coords(
     )
 
     line_label_data = np.arange(coords_lines_data.shape[-1]).astype(str)
-    if len(source_id) == 1:
-        line_coords = {
-            "line_name": ("line_label", coords_lines_data),
-            "line_label": line_label_data,
-        }
-        line_dims = ["line_label"]
-    else:
-        line_coords = {
-            "line_name": (("time", "line_label"), coords_lines_data),
-            "line_label": line_label_data,
-        }
-        line_dims = ["time", "line_label"]
+
+    line_coords = {
+        "line_name": (("field_name", "line_label"), coords_lines_data),
+        "line_label": line_label_data,
+    }
+    line_dims = ["field_name", "line_label"]
 
     return line_dims, line_coords
 
@@ -567,17 +597,16 @@ def extract_source_info(
     num_lines : int
         Sum of num_lines for all unique sources extracted.
     """
+    unknown = to_np_array(["Unknown"] * len(source_id))
+
     coords = {}
-    is_ephemeris = xds.attrs[
-        "is_ephemeris"
-    ]  # If ephemeris data is present we ignore the SOURCE_DIRECTION in the source table.
 
     if all(source_id == -1):
         logger.warning(
             f"Source_id is -1. No source information will be included in the field_and_source_xds."
         )
         xds = xds.assign_coords(
-            {"source_name": "Unknown"}
+            {"source_name": ("field_name", unknown)}
         )  # Need to add this for ps.summary() to work.
         return xds, 0
 
@@ -585,7 +614,7 @@ def extract_source_info(
         logger.warning(
             f"Could not find SOURCE table for source_id {source_id}. Source information will not be included in the field_and_source_xds."
         )
-        xds = xds.assign_coords({"source_name": "Unknown"})
+        xds = xds.assign_coords({"source_name": ("field_name", unknown)})
         return xds, 0
 
     unique_source_id = unique_1d(source_id)
@@ -603,7 +632,7 @@ def extract_source_info(
             f"SOURCE table empty for (unique) source_id {unique_source_id} and spectral_window_id {spectral_window_id}."
         )
         xds = xds.assign_coords(
-            {"source_name": "Unknown"}
+            {"source_name": ("field_name", unknown)}
         )  # Need to add this for ps.summary() to work.
         return xds, 0
 
@@ -631,25 +660,25 @@ def extract_source_info(
 
     # Get source name (the time axis is optional and will probably be required if the partition scheme does not include 'FIELD_ID' or 'SOURCE_ID'.).
     # Note again that this optional time axis has nothing to do with the original time axis in the source table that we drop.
-    if len(source_id) == 1:
-        source_xds = source_xds.sel(SOURCE_ID=source_id[0])
-        coords["source_name"] = (
-            source_xds["NAME"].values.item() + "_" + str(source_id[0])
-        )
-        direction_dims = ["sky_dir_label"]
-        # coords["source_id"] = source_id[0]
-    else:
-        source_xds = source_xds.sel(SOURCE_ID=source_id)
-        coords["source_name"] = (
-            "time",
-            np.char.add(
-                source_xds["NAME"].data, np.char.add("_", source_id.astype(str))
-            ),
-        )
-        direction_dims = ["time", "sky_dir_label"]
-        # coords["source_id"] = ("time", source_id)
+    # if len(source_id) == 1:
+    #     source_xds = source_xds.sel(SOURCE_ID=source_id[0])
+    #     coords["source_name"] = (
+    #         source_xds["NAME"].values.item() + "_" + str(source_id[0])
+    #     )
+    #     direction_dims = ["sky_dir_label"]
+    #     # coords["source_id"] = source_id[0]
+    # else:
+
+    source_xds = source_xds.sel(SOURCE_ID=source_id)
+    coords["source_name"] = (
+        "field_name",
+        np.char.add(source_xds["NAME"].data, np.char.add("_", source_id.astype(str))),
+    )
+    direction_dims = ["field_name", "sky_dir_label"]
+    # coords["source_id"] = ("time", source_id)
 
-    # If ephemeris data is present we ignore the SOURCE_DIRECTION.
+    is_ephemeris = xds.attrs["type"] == "field_and_source_ephemeris"
+    # If ephemeris data is present we ignore the SOURCE_DIRECTION in the source table.
     if not is_ephemeris:
         direction_msv2_col = "DIRECTION"
         msv4_measure = column_description_casacore_to_msv4_measure(
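Source (and, later, field) labels are made unique by appending the numeric id to the name with np.char.add. A quick illustration of that string operation with toy data:

import numpy as np

# Appending "_<id>" to each name so repeated names become distinct labels.
names = np.array(["3C286", "3C286", "J1331"])
ids = np.array([0, 1, 2])
labels = np.char.add(names, np.char.add("_", ids.astype(str)))
print(labels)  # ['3C286_0' '3C286_1' 'J1331_2']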
@@ -686,6 +715,7 @@ def extract_source_info(
     line_dims, line_coords = make_line_dims_and_coords(
         source_xds, source_id, num_lines
     )
+
     xds = xds.assign_coords(line_coords)
 
     to_new_data_variables = {
@@ -693,7 +723,7 @@ def extract_source_info(
         "SYSVEL": ["LINE_SYSTEMIC_VELOCITY", line_dims],
     }
     to_new_coords = {
-        "TIME": ["time", ["time"]],
+        "TIME": ["field_name", ["field_name"]],
     }
     convert_generic_xds_to_xradio_schema(
         source_xds, xds, to_new_data_variables, to_new_coords
@@ -757,7 +787,7 @@ def make_field_dims_and_coords(
     else:
         coords["field_name"] = field_xds["NAME"].values.item() + "_" + str(field_id)
         # coords["field_id"] = field_id
-        dims = ["sky_dir_label"]
+        dims = ["field_name", "sky_dir_label"]
 
     return dims, coords
 
@@ -809,20 +839,18 @@ def extract_field_info_and_check_ephemeris(
     assert (
         len(field_xds.poly_id) == 1
     ), "Polynomial field positions not supported. Please open an issue on https://github.com/casangi/xradio/issues so that we can add support for this."
+
     field_xds = field_xds.isel(poly_id=0, drop=True)
+
     # field_xds = field_xds.assign_coords({'field_id':field_xds['field_id'].data})
     field_xds = field_xds.assign_coords({"field_id": unique_field_id})
-    field_xds = field_xds.sel(
-        field_id=field_id, drop=False
-    )  # Make sure field_id match up with time axis (duplicate fields are allowed).
+    # field_xds = field_xds.sel(
+    #     field_id=to_np_array(field_id), drop=False
+    # )  # Make sure field_id match up with time axis (duplicate fields are allowed).
     source_id = to_np_array(field_xds.SOURCE_ID.values)
 
     ephemeris_table_name = None
     ephemeris_path = None
-    is_ephemeris = False
-    field_and_source_xds.attrs["is_ephemeris"] = (
-        False  # If we find a path to the ephemeris table we will set this to True.
-    )
 
     # Need to check if ephemeris_id is present and if ephemeris table is present.
     if "EPHEMERIS_ID" in field_xds:
@@ -843,66 +871,121 @@ def extract_field_info_and_check_ephemeris(
         )
 
         if len(ephemeris_name_table_index) > 0:  # Are there any ephemeris tables.
-            is_ephemeris = True
             e_index = ephemeris_name_table_index[0]
             ephemeris_path = os.path.join(in_file, "FIELD")
             ephemeris_table_name = files[e_index]
-            field_and_source_xds.attrs["is_ephemeris"] = True
+            field_and_source_xds.attrs["type"] = "field_and_source_ephemeris"
         else:
             logger.warning(
                 f"Could not find ephemeris table for field_id {field_id}. Ephemeris information will not be included in the field_and_source_xds."
             )
-
-    dims, coords = make_field_dims_and_coords(field_xds, field_id, field_times)
+    from xradio._utils.schema import convert_generic_xds_to_xradio_schema
 
     if is_single_dish:
-        field_data_variables = {
-            "REFERENCE_DIR": "FIELD_REFERENCE_CENTER",
+        to_new_data_variables = {
+            "REFERENCE_DIR": [
+                "FIELD_REFERENCE_CENTER",
+                ["field_name", "sky_dir_label"],
+            ],
+            "FIELD_ID": ["FIELD_ID", ["field_name"]],
        }
    else:
-        field_data_variables = {
-            # "DELAY_DIR": "FIELD_DELAY_CENTER",
-            "PHASE_DIR": "FIELD_PHASE_CENTER",
-            # "REFERENCE_DIR": "FIELD_REFERENCE_CENTER",
+        to_new_data_variables = {
+            "PHASE_DIR": ["FIELD_PHASE_CENTER", ["field_name", "sky_dir_label"]],
+            # "DELAY_DIR": ["FIELD_DELAY_CENTER",["field_name", "sky_dir_label"]],
+            # "REFERENCE_DIR": ["FIELD_REFERENCE_CENTER",["field_name", "sky_dir_label"]],
        }
 
-    field_column_description = field_xds.attrs["other"]["msv2"]["ctds_attrs"][
-        "column_descriptions"
-    ]
-
-    for generic_name, msv4_name in field_data_variables.items():
-
-        delay_dir_ref_col = "DelayDir_Ref"
-        if field_xds.get(delay_dir_ref_col) is None:
-            delaydir_ref = None
-        else:
-            delaydir_ref = check_if_consistent(
-                field_xds.get(delay_dir_ref_col), delay_dir_ref_col
-            )
-
-        msv4_measure = column_description_casacore_to_msv4_measure(
-            field_column_description[generic_name], ref_code=delaydir_ref
-        )
+    to_new_coords = {
+        "NAME": ["field_name", ["field_name"]],
+        "field_id": ["field_id", ["field_name"]],
+    }
 
-        field_and_source_xds[msv4_name] = xr.DataArray.from_dict(
-            {
-                "dims": dims,
-                "data": list(field_xds[generic_name].data),
-                "attrs": msv4_measure,
-            }
+    delay_dir_ref_col = "DelayDir_Ref"
+    if field_xds.get(delay_dir_ref_col) is None:
+        ref_code = None
+    else:
+        ref_code = check_if_consistent(
+            field_xds.get(delay_dir_ref_col), delay_dir_ref_col
        )
 
-        field_measures_type = "sky_coord"
-        field_and_source_xds[msv4_name].attrs["type"] = field_measures_type
+    field_and_source_xds = convert_generic_xds_to_xradio_schema(
+        field_xds, field_and_source_xds, to_new_data_variables, to_new_coords, ref_code
+    )
 
-    field_and_source_xds = field_and_source_xds.assign_coords(coords)
-    if "time" in field_and_source_xds:
-        time_column_description = field_xds.attrs["other"]["msv2"]["ctds_attrs"][
-            "column_descriptions"
-        ]["TIME"]
-        time_msv4_measure = column_description_casacore_to_msv4_measure(
-            time_column_description
-        )
-        field_and_source_xds.coords["time"].attrs.update(time_msv4_measure)
+    # Some field names are not unique. We need to add the field_id to the field_name to make it unique.
+    field_and_source_xds = field_and_source_xds.assign_coords(
+        {
+            "field_name": np.char.add(
+                field_and_source_xds["field_name"].data,
+                np.char.add("_", field_and_source_xds["field_id"].astype(str)),
+            ),
+            "sky_dir_label": ["ra", "dec"],
+        }
+    )
 
-    return field_and_source_xds, ephemeris_path, ephemeris_table_name, source_id
+    temp = field_and_source_xds.set_xindex("field_id")
+    field_names = temp.sel(field_id=field_id).field_name.data
+    # field_id shouldn ot be in final xds, and no longer needed past this point
+    field_and_source_xds = field_and_source_xds.drop_vars("field_id")
+
+    # dims, coords = make_field_dims_and_coords(field_xds, field_id, field_times)
+
+    # if is_single_dish:
+    #     field_data_variables = {
+    #         "REFERENCE_DIR": "FIELD_REFERENCE_CENTER",
+    #     }
+    # else:
+    #     field_data_variables = {
+    #         # "DELAY_DIR": "FIELD_DELAY_CENTER",
+    #         "PHASE_DIR": "FIELD_PHASE_CENTER",
+    #         # "REFERENCE_DIR": "FIELD_REFERENCE_CENTER",
+    #     }
+
+    # field_column_description = field_xds.attrs["other"]["msv2"]["ctds_attrs"][
+    #     "column_descriptions"
+    # ]
+
+    # delay_dir_ref_col = "DelayDir_Ref"
+    # if field_xds.get(delay_dir_ref_col) is None:
+    #     delaydir_ref = None
+    # else:
+    #     delaydir_ref = check_if_consistent(
+    #         field_xds.get(delay_dir_ref_col), delay_dir_ref_col
+    #     )
+
+    # for generic_name, msv4_name in field_data_variables.items():
+    #     msv4_measure = column_description_casacore_to_msv4_measure(
+    #         field_column_description[generic_name], ref_code=delaydir_ref
+    #     )
+
+    #     print(msv4_name,generic_name,field_xds[generic_name].data.shape,field_xds[generic_name].data)
+
+    #     field_and_source_xds[msv4_name] = xr.DataArray.from_dict(
+    #         {
+    #             "dims": dims,
+    #             "data": list(field_xds[generic_name].data),
+    #             "attrs": msv4_measure,
+    #         }
+    #     )
+
+    #     field_measures_type = "sky_coord"
+    #     field_and_source_xds[msv4_name].attrs["type"] = field_measures_type
+
+    # field_and_source_xds = field_and_source_xds.assign_coords(coords)
+    # if "time" in field_and_source_xds:
+    #     time_column_description = field_xds.attrs["other"]["msv2"]["ctds_attrs"][
+    #         "column_descriptions"
+    #     ]["TIME"]
+    #     time_msv4_measure = column_description_casacore_to_msv4_measure(
+    #         time_column_description
+    #     )
+    #     field_and_source_xds.coords["time"].attrs.update(time_msv4_measure)
+
+    return (
+        field_and_source_xds,
+        ephemeris_path,
+        ephemeris_table_name,
+        source_id,
+        field_names,
+    )
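The new return value field_names is produced by indexing the dataset on the auxiliary field_id coordinate (set_xindex) and then selecting the per-row field ids. A sketch of that lookup with toy data (requires an xarray version that provides Dataset.set_xindex):

import numpy as np
import xarray as xr

# Toy dataset: uniquified field names plus the matching numeric field ids.
ds = xr.Dataset(
    coords={
        "field_name": ("field_name", np.array(["J1331_0", "3C286_1"])),
        "field_id": ("field_name", np.array([0, 1])),
    }
)

temp = ds.set_xindex("field_id")  # also index by field_id
field_names = temp.sel(field_id=[0, 0, 1]).field_name.data
print(field_names)  # ['J1331_0' 'J1331_0' '3C286_1']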
@@ -160,6 +160,6 @@ def populate_ms_descr(
     elif mode == "flat":
         summary += [(ddi, (sdf["rows"], sdf["chans"], sdf["pols"]))]
     else:
-        summary += [((ddi, sdf["times"], sdf["baselines"], sdf["chans"], sdf["pols"]))]
+        summary += [(ddi, sdf["times"], sdf["baselines"], sdf["chans"], sdf["pols"])]
 
     return sdf
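The change above is purely cosmetic: the outer parentheses were redundant, so both spellings append the same 5-tuple, as this small check illustrates.

# ((a, b)) is just a parenthesized expression, not a nested tuple,
# so both forms build the identical tuple object.
assert ((1, "t", "b", "c", "p")) == (1, "t", "b", "c", "p")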
@@ -55,13 +55,17 @@ def create_info_dicts(
         # "field_id": to_list(unique_1d(field_id)),
         "field_name": to_list(np.unique(field_and_source_xds.field_name.values)),
         "polarization_setup": to_list(xds.polarization.values),
-        "scan_number": to_list(np.unique(partition_info_misc_fields["scan_id"])),
+        "scan_name": to_list(np.unique(partition_info_misc_fields["scan_name"])),
         "source_name": to_list(np.unique(field_and_source_xds.source_name.values)),
         # "source_id": to_list(unique_1d(source_id)),
         "intents": partition_info_misc_fields["intents"].split(","),
         "taql": partition_info_misc_fields["taql_where"],
         "line_name": line_name,
     }
+    if "antenna_name" in partition_info_misc_fields:
+        info_dicts["partition_info"]["antenna_name"] = partition_info_misc_fields[
+            "antenna_name"
+        ]
 
     observation_id = check_if_consistent(
         tb_tool.getcol("OBSERVATION_ID"), "OBSERVATION_ID"