xradio 0.0.27__py3-none-any.whl → 0.0.29__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. xradio/__init__.py +5 -4
  2. xradio/_utils/array.py +90 -0
  3. xradio/_utils/zarr/common.py +48 -3
  4. xradio/image/_util/_fits/xds_from_fits.py +10 -5
  5. xradio/image/_util/_zarr/zarr_low_level.py +27 -24
  6. xradio/image/_util/common.py +4 -1
  7. xradio/image/_util/zarr.py +4 -1
  8. xradio/schema/__init__.py +24 -6
  9. xradio/schema/bases.py +440 -2
  10. xradio/schema/check.py +96 -55
  11. xradio/schema/dataclass.py +123 -27
  12. xradio/schema/metamodel.py +21 -4
  13. xradio/schema/typing.py +33 -18
  14. xradio/vis/__init__.py +5 -2
  15. xradio/vis/_processing_set.py +30 -9
  16. xradio/vis/_vis_utils/_ms/_tables/create_field_and_source_xds.py +710 -0
  17. xradio/vis/_vis_utils/_ms/_tables/load.py +23 -10
  18. xradio/vis/_vis_utils/_ms/_tables/load_main_table.py +145 -64
  19. xradio/vis/_vis_utils/_ms/_tables/read.py +782 -156
  20. xradio/vis/_vis_utils/_ms/_tables/read_main_table.py +176 -45
  21. xradio/vis/_vis_utils/_ms/_tables/read_subtables.py +79 -28
  22. xradio/vis/_vis_utils/_ms/_tables/write.py +102 -45
  23. xradio/vis/_vis_utils/_ms/_tables/write_exp_api.py +127 -65
  24. xradio/vis/_vis_utils/_ms/chunks.py +58 -21
  25. xradio/vis/_vis_utils/_ms/conversion.py +536 -67
  26. xradio/vis/_vis_utils/_ms/descr.py +52 -20
  27. xradio/vis/_vis_utils/_ms/msv2_to_msv4_meta.py +70 -35
  28. xradio/vis/_vis_utils/_ms/msv4_infos.py +0 -59
  29. xradio/vis/_vis_utils/_ms/msv4_sub_xdss.py +76 -9
  30. xradio/vis/_vis_utils/_ms/optimised_functions.py +0 -46
  31. xradio/vis/_vis_utils/_ms/partition_queries.py +308 -119
  32. xradio/vis/_vis_utils/_ms/partitions.py +82 -25
  33. xradio/vis/_vis_utils/_ms/subtables.py +32 -14
  34. xradio/vis/_vis_utils/_utils/partition_attrs.py +30 -11
  35. xradio/vis/_vis_utils/_utils/xds_helper.py +136 -45
  36. xradio/vis/_vis_utils/_zarr/read.py +60 -22
  37. xradio/vis/_vis_utils/_zarr/write.py +83 -9
  38. xradio/vis/_vis_utils/ms.py +48 -29
  39. xradio/vis/_vis_utils/zarr.py +44 -20
  40. xradio/vis/convert_msv2_to_processing_set.py +106 -32
  41. xradio/vis/load_processing_set.py +38 -61
  42. xradio/vis/read_processing_set.py +62 -96
  43. xradio/vis/schema.py +687 -0
  44. xradio/vis/vis_io.py +75 -43
  45. {xradio-0.0.27.dist-info → xradio-0.0.29.dist-info}/LICENSE.txt +6 -1
  46. {xradio-0.0.27.dist-info → xradio-0.0.29.dist-info}/METADATA +10 -5
  47. xradio-0.0.29.dist-info/RECORD +73 -0
  48. {xradio-0.0.27.dist-info → xradio-0.0.29.dist-info}/WHEEL +1 -1
  49. xradio/vis/model.py +0 -497
  50. xradio-0.0.27.dist-info/RECORD +0 -71
  51. {xradio-0.0.27.dist-info → xradio-0.0.29.dist-info}/top_level.txt +0 -0
xradio/vis/_vis_utils/_ms/_tables/create_field_and_source_xds.py (new file)
@@ -0,0 +1,710 @@
+ import os
+ import time
+ from typing import Tuple, Union
+
+ import numpy as np
+ import xarray as xr
+
+ from xradio.vis._vis_utils._ms.msv2_to_msv4_meta import (
+     column_description_casacore_to_msv4_measure,
+ )
+ from xradio.vis._vis_utils._ms.msv4_sub_xdss import interpolate_to_time
+ from xradio.vis._vis_utils._ms.subtables import subt_rename_ids
+ from xradio.vis._vis_utils._ms._tables.read import (
+     convert_casacore_time_to_mjd,
+     make_taql_where_between_min_max,
+     read_generic_table,
+ )
+ from xradio.vis._vis_utils._ms._tables.table_query import open_table_ro
+ import graphviper.utils.logger as logger
+
+
+ def cast_to_str(x):
+     if isinstance(x, list):
+         return x[0]
+     else:
+         return x
+
+
+ def create_field_and_source_xds(
+     in_file,
+     field_id,
+     spectral_window_id,
+     field_times,
+     is_single_dish,
+     time_min_max: Tuple[np.float64, np.float64],
+     ephemeris_interp_time: Union[xr.DataArray, None] = None,
+ ):
+     """
+     Create a field and source xarray dataset (xds) from the given input file, field ID, and spectral window ID.
+
+     Parameters:
+     ----------
+     in_file : str
+         The path to the input file.
+     field_id : int
+         The ID of the field.
+     spectral_window_id : int
+         The ID of the spectral window.
+     field_times : array-like or None
+         Time values used as the time coordinate when the field varies with time (None for a single field entry).
+     is_single_dish : bool
+         Whether the data is single dish (FIELD_REFERENCE_CENTER is used instead of FIELD_PHASE_CENTER).
+     time_min_max : Tuple[np.float64, np.float64]
+         Min / max times to constrain loading (usually to the time range relevant to an MSv4).
+     ephemeris_interp_time : Union[xr.DataArray, None]
+         Time axis to interpolate the ephemeris data vars to (usually main MSv4 time).
+
+     Returns:
+     -------
+     field_and_source_xds : xr.Dataset
+         The xarray dataset containing the field and source information.
+     """
+
+     start_time = time.time()
+
+     field_and_source_xds = xr.Dataset()
+
+     field_and_source_xds, ephemeris_path, ephemeris_table_name = (
+         create_field_info_and_check_ephemeris(
+             field_and_source_xds, in_file, field_id, field_times, is_single_dish
+         )
+     )
+     source_id = field_and_source_xds.attrs["source_id"]
+
+     if ephemeris_path is not None:
+         field_and_source_xds = extract_ephemeris_info(
+             field_and_source_xds,
+             ephemeris_path,
+             ephemeris_table_name,
+             is_single_dish,
+             time_min_max,
+             ephemeris_interp_time,
+         )
+         field_and_source_xds.attrs["is_ephemeris"] = True
+         field_and_source_xds = extract_source_info(
+             field_and_source_xds,
+             in_file,
+             True,
+             source_id,
+             spectral_window_id,
+         )
+
+     else:
+         field_and_source_xds = extract_source_info(
+             field_and_source_xds, in_file, False, source_id, spectral_window_id
+         )
+         field_and_source_xds.attrs["is_ephemeris"] = False
+
+     logger.debug(
+         f"create_field_and_source_xds() execution time {time.time() - start_time:0.2f} s"
+     )
+
+     return field_and_source_xds
+
+
+ def extract_ephemeris_info(
+     xds,
+     path,
+     table_name,
+     is_single_dish,
+     time_min_max: Tuple[np.float64, np.float64],
+     interp_time: Union[xr.DataArray, None],
+ ):
+     """
+     Extracts ephemeris information from the given path and table name and adds it to the xarray dataset.
+
+     Parameters:
+     ----------
+     xds : xr.Dataset
+         The xarray dataset to which the ephemeris information will be added.
+     path : str
+         The path to the input file.
+     table_name : str
+         The name of the ephemeris table.
+     time_min_max : Tuple[np.float64, np.float64]
+         Min / max times to constrain loading (usually to the time range relevant to an MSv4).
+     interp_time : Union[xr.DataArray, None]
+         Time axis to interpolate the data vars to (usually main MSv4 time).
+
+     Returns:
+     -------
+     xds : xr.Dataset
+         The xarray dataset with the added ephemeris information.
+     """
+     # The JPL-Horizons ephemeris table implementation in CASA does not follow the standard way of defining measures.
+     # Consequently a lot of hardcoding is needed to extract the information.
+     # https://casadocs.readthedocs.io/en/latest/notebooks/external-data.html
+
+     min_max_mjd = (
+         convert_casacore_time_to_mjd(time_min_max[0]),
+         convert_casacore_time_to_mjd(time_min_max[1]),
+     )
+     taql_time_range = make_taql_where_between_min_max(
+         min_max_mjd, path, table_name, "MJD"
+     )
+     ephemeris_xds = read_generic_table(
+         path, table_name, timecols=["MJD"], taql_where=taql_time_range
+     )
+
+     # print(ephemeris_xds)
+
+     assert len(ephemeris_xds.ephemeris_id) == 1, "Non standard ephemeris table."
+     ephemeris_xds = ephemeris_xds.isel(ephemeris_id=0)
+
+     ephemeris_meta = ephemeris_xds.attrs["other"]["msv2"]["ctds_attrs"]
+     ephemris_column_description = ephemeris_xds.attrs["other"]["msv2"]["ctds_attrs"][
+         "column_descriptions"
+     ]
+
+     assert (
+         ephemeris_meta["obsloc"] == "GEOCENTRIC"
+     ), "Only geocentric observer ephemeris are supported."
+
+     if "posrefsys" in ephemeris_meta:
+         sky_coord_frame = ephemeris_meta["posrefsys"].replace("ICRF/", "")
+     else:
+         sky_coord_frame = "ICRS"  # We will have to just assume this.
+
+     # Find out which keyword is used for units (UNIT/QuantumUnits).
+     if "UNIT" in ephemris_column_description["RA"]["keywords"]:
+         unit_keyword = "UNIT"
+     else:
+         unit_keyword = "QuantumUnits"
+
+     coords = {
+         "ellipsoid_pos_label": ["lon", "lat", "dist"],
+         "time": ephemeris_xds["time"].data,
+         "sky_pos_label": ["ra", "dec", "dist"],
+     }
+
+     xds["SOURCE_POSITION"] = xr.DataArray(
+         np.column_stack(
+             (
+                 ephemeris_xds["ra"].data,
+                 ephemeris_xds["dec"].data,
+                 ephemeris_xds["rho"].data,
+             )
+         ),
+         dims=["time", "sky_pos_label"],
+     )
+     # Have to use cast_to_str because the ephemeris table units are not consistently in a list or a string.
+     sky_coord_units = [
+         cast_to_str(ephemris_column_description["RA"]["keywords"][unit_keyword]),
+         cast_to_str(ephemris_column_description["DEC"]["keywords"][unit_keyword]),
+         cast_to_str(ephemris_column_description["Rho"]["keywords"][unit_keyword]),
+     ]
+     xds["SOURCE_POSITION"].attrs.update(
+         {"type": "sky_coord", "frame": sky_coord_frame, "units": sky_coord_units}
+     )
+
+     xds["SOURCE_RADIAL_VELOCITY"] = xr.DataArray(
+         ephemeris_xds["radvel"].data, dims=["time"]
+     )
+     xds["SOURCE_RADIAL_VELOCITY"].attrs.update(
+         {
+             "type": "quantity",
+             "units": [
+                 cast_to_str(
+                     ephemris_column_description["RadVel"]["keywords"][unit_keyword]
+                 )
+             ],
+         }
+     )
+
+     observation_position = [
+         ephemeris_meta["GeoLong"],
+         ephemeris_meta["GeoLat"],
+         ephemeris_meta["GeoDist"],
+     ]
+     xds["OBSERVATION_POSITION"] = xr.DataArray(
+         observation_position, dims=["ellipsoid_pos_label"]
+     )
+     xds["OBSERVATION_POSITION"].attrs.update(
+         {
+             "type": "location",
+             "units": ["deg", "deg", "m"],
+             "data": observation_position,
+             "ellipsoid": "WGS84",
+             "origin_object_name": "Earth",
+             "coordinate_system": ephemeris_meta["obsloc"].lower(),
+         }
+     )  # I think the units are ['deg','deg','m'] and 'WGS84'.
+
+     # Add optional data
+     # NORTH_POLE_POSITION_ANGLE
+
+     if "np_ang" in ephemeris_xds.data_vars:
+         xds["NORTH_POLE_POSITION_ANGLE"] = xr.DataArray(
+             ephemeris_xds["np_ang"].data, dims=["time"]
+         )
+         xds["NORTH_POLE_POSITION_ANGLE"].attrs.update(
+             {
+                 "type": "quantity",
+                 "units": [
+                     cast_to_str(
+                         ephemris_column_description["NP_ang"]["keywords"][unit_keyword]
+                     )
+                 ],
+             }
+         )
+
+     if "np_dist" in ephemeris_xds.data_vars:
+         xds["NORTH_POLE_ANGULAR_DISTANCE"] = xr.DataArray(
+             ephemeris_xds["np_dist"].data, dims=["time"]
+         )
+         xds["NORTH_POLE_ANGULAR_DISTANCE"].attrs.update(
+             {
+                 "type": "quantity",
+                 "units": [
+                     cast_to_str(
+                         ephemris_column_description["NP_dist"]["keywords"][unit_keyword]
+                     )
+                 ],
+             }
+         )
+
+     if "disklong" in ephemeris_xds.data_vars:
+         xds["SUB_OBSERVER_POSITION"] = xr.DataArray(
+             np.column_stack(
+                 (
+                     ephemeris_xds["disklong"].data,
+                     ephemeris_xds["disklat"].data,
+                     np.zeros(ephemeris_xds["disklong"].shape),
+                 )
+             ),
+             dims=["time", "ellipsoid_pos_label"],
+         )
+
+         if "DiskLong" in ephemris_column_description:
+             units_key_lon = "DiskLong"
+             units_key_lat = "DiskLat"
+         else:
+             units_key_lon = "diskLong"
+             units_key_lat = "diskLat"
+
+         xds["SUB_OBSERVER_POSITION"].attrs.update(
+             {
+                 "type": "location",
+                 "ellipsoid": "NA",
+                 "origin_object_name": ephemeris_meta["NAME"],
+                 "coordinate_system": "planetodetic",
+                 "units": [
+                     cast_to_str(
+                         ephemris_column_description[units_key_lon]["keywords"][
+                             unit_keyword
+                         ]
+                     ),
+                     cast_to_str(
+                         ephemris_column_description[units_key_lat]["keywords"][
+                             unit_keyword
+                         ]
+                     ),
+                     "m",
+                 ],
+             }
+         )
+
+     if "si_lon" in ephemeris_xds.data_vars:
+         xds["SUB_SOLAR_POSITION"] = xr.DataArray(
+             np.column_stack(
+                 (
+                     ephemeris_xds["si_lon"].data,
+                     ephemeris_xds["si_lat"].data,
+                     ephemeris_xds["r"].data,
+                 )
+             ),
+             dims=["time", "ellipsoid_pos_label"],
+         )
+         xds["SUB_SOLAR_POSITION"].attrs.update(
+             {
+                 "type": "location",
+                 "ellipsoid": "NA",
+                 "origin_object_name": "Sun",
+                 "coordinate_system": "planetodetic",
+                 "units": [
+                     cast_to_str(
+                         ephemris_column_description["SI_lon"]["keywords"][unit_keyword]
+                     ),
+                     cast_to_str(
+                         ephemris_column_description["SI_lat"]["keywords"][unit_keyword]
+                     ),
+                     cast_to_str(
+                         ephemris_column_description["r"]["keywords"][unit_keyword]
+                     ),
+                 ],
+             }
+         )
+
+     if "rdot" in ephemeris_xds.data_vars:
+         xds["HELIOCENTRIC_RADIAL_VELOCITY"] = xr.DataArray(
+             ephemeris_xds["rdot"].data, dims=["time"]
+         )
+         xds["HELIOCENTRIC_RADIAL_VELOCITY"].attrs.update(
+             {
+                 "type": "quantity",
+                 "units": [
+                     cast_to_str(
+                         ephemris_column_description["rdot"]["keywords"][unit_keyword]
+                     )
+                 ],
+             }
+         )
+
+     if "phang" in ephemeris_xds.data_vars:
+         xds["OBSERVER_PHASE_ANGLE"] = xr.DataArray(
+             ephemeris_xds["phang"].data, dims=["time"]
+         )
+         xds["OBSERVER_PHASE_ANGLE"].attrs.update(
+             {
+                 "type": "quantity",
+                 "units": [
+                     cast_to_str(
+                         ephemris_column_description["phang"]["keywords"][unit_keyword]
+                     )
+                 ],
+             }
+         )
+
+     xds = xds.assign_coords(coords)
+     xds["time"].attrs.update(
+         {"type": "time", "units": ["s"], "scale": "UTC", "format": "UNIX"}
+     )
+
+     xds = convert_to_si_units(xds)
+     xds = interpolate_to_time(xds, interp_time, "field_and_source_xds")
+
+     if is_single_dish:
+         xds["FIELD_REFERENCE_CENTER"] = xr.DataArray(
+             add_position_offsets(
+                 np.append(xds["FIELD_REFERENCE_CENTER"].data, 0),
+                 xds["SOURCE_POSITION"].data,
+             ),
+             dims=["time", "sky_pos_label"],
+         )
+         xds["FIELD_REFERENCE_CENTER"].attrs.update(xds["SOURCE_POSITION"].attrs)
+     else:
+         xds["FIELD_PHASE_CENTER"] = xr.DataArray(
+             add_position_offsets(
+                 np.append(xds["FIELD_PHASE_CENTER"].data, 0),
+                 xds["SOURCE_POSITION"].data,
+             ),
+             dims=["time", "sky_pos_label"],
+         )
+         xds["FIELD_PHASE_CENTER"].attrs.update(xds["SOURCE_POSITION"].attrs)
+
+     return xds
+
+
+ def add_position_offsets(dv_1, dv_2):
+     """
+     Add two position arrays element-wise and wrap the result: column 0
+     (longitude/RA) into [-pi, pi] and column 1 (latitude/Dec) into [-pi/2, pi/2].
+     """
+     new_pos = dv_1 + dv_2
+
+     while np.any(new_pos[:, 0] > np.pi) or np.any(new_pos[:, 0] < -np.pi):
+         new_pos[:, 0] = np.where(
+             new_pos[:, 0] > np.pi, new_pos[:, 0] - 2 * np.pi, new_pos[:, 0]
+         )
+         new_pos[:, 0] = np.where(
+             new_pos[:, 0] < -np.pi, new_pos[:, 0] + 2 * np.pi, new_pos[:, 0]
+         )
+
+     while np.any(new_pos[:, 1] > np.pi / 2) or np.any(new_pos[:, 1] < -np.pi / 2):
+         new_pos[:, 1] = np.where(
+             new_pos[:, 1] > np.pi / 2, new_pos[:, 1] - np.pi, new_pos[:, 1]
+         )
+         new_pos[:, 1] = np.where(
+             new_pos[:, 1] < -np.pi / 2, new_pos[:, 1] + np.pi, new_pos[:, 1]
+         )
+
+     return new_pos
+
+
+ def convert_to_si_units(xds):
+     """
+     Convert data variables with known non-SI units (km, km/s, deg, AU, AU/d,
+     arcsec) to SI units (m, m/s, rad), updating the "units" attribute accordingly.
+     """
+     for data_var in xds.data_vars:
+         if "units" in xds[data_var].attrs:
+             for u_i, u in enumerate(xds[data_var].attrs["units"]):
+                 if u == "km":
+                     xds[data_var][..., u_i] = xds[data_var][..., u_i] * 1e3
+                     xds[data_var].attrs["units"][u_i] = "m"
+                 if u == "km/s":
+                     xds[data_var][..., u_i] = xds[data_var][..., u_i] * 1e3
+                     xds[data_var].attrs["units"][u_i] = "m/s"
+                 if u == "deg":
+                     xds[data_var][..., u_i] = xds[data_var][..., u_i] * np.pi / 180
+                     xds[data_var].attrs["units"][u_i] = "rad"
+                 if u == "Au" or u == "AU":
+                     xds[data_var][..., u_i] = xds[data_var][..., u_i] * 149597870700
+                     xds[data_var].attrs["units"][u_i] = "m"
+                 if u == "Au/d" or u == "AU/d":
+                     xds[data_var][..., u_i] = (
+                         xds[data_var][..., u_i] * 149597870700 / 86400
+                     )
+                     xds[data_var].attrs["units"][u_i] = "m/s"
+                 if u == "arcsec":
+                     xds[data_var][..., u_i] = xds[data_var][..., u_i] * np.pi / 648000
+                     xds[data_var].attrs["units"][u_i] = "rad"
+     return xds
+
+
+ def extract_source_info(xds, path, is_ephemeris, source_id, spectral_window_id):
+     """
+     Extracts source information from the given path and adds it to the xarray dataset.
+
+     Parameters:
+     ----------
+     xds : xr.Dataset
+         The xarray dataset to which the source information will be added.
+     path : str
+         The path to the input file.
+     is_ephemeris : bool
+         Flag indicating if the source is an ephemeris.
+     source_id : int
+         The ID of the source.
+     spectral_window_id : int
+         The ID of the spectral window.
+
+     Returns:
+     -------
+     xds : xr.Dataset
+         The xarray dataset with the added source information.
+     """
+
+     if source_id == -1:
+         logger.warning(
+             f"Source_id is -1. No source information will be included in the field_and_source_xds."
+         )
+         xds.attrs["source_name"] = "None"
+         return xds
+
+     source_xds = read_generic_table(
+         path,
+         "SOURCE",
+         ignore=["SOURCE_MODEL"],  # Trying to read SOURCE_MODEL causes an error.
+         taql_where=f"where SOURCE_ID = {source_id} AND SPECTRAL_WINDOW_ID = {spectral_window_id}",
+     )
+
+     if len(source_xds.data_vars) == 0:  # The source xds is empty.
+         logger.warning(
+             f"SOURCE table empty for source_id {source_id} and spectral_window_id {spectral_window_id}."
+         )
+         xds.attrs["source_name"] = "None"
+         return xds
+
+     assert (
+         len(source_xds.source_id) == 1
+     ), "Can only process source table with a single source_id and spectral_window_id for a given MSv4 partition."
+     assert (
+         len(source_xds.spectral_window_id) == 1
+     ), "Can only process source table with a single source_id and spectral_window_id for a given MSv4 partition."
+     assert (
+         len(source_xds.time) == 1
+     ), "Can only process source table with a single time entry for a source_id and spectral_window_id."
+     source_xds = source_xds.isel(time=0, source_id=0, spectral_window_id=0)
+
+     xds.attrs["source_name"] = str(source_xds["name"].data)
+     xds.attrs["code"] = str(source_xds["code"].data)
+     source_column_description = source_xds.attrs["other"]["msv2"]["ctds_attrs"][
+         "column_descriptions"
+     ]
+
+     if not is_ephemeris:
+         msv4_measure = column_description_casacore_to_msv4_measure(
+             source_column_description["DIRECTION"]
+         )
+         xds["SOURCE_DIRECTION"] = xr.DataArray(
+             source_xds["direction"].data, dims=["sky_dir_label"]
+         )
+         xds["SOURCE_DIRECTION"].attrs.update(msv4_measure)
+
+         # msv4_measure = column_description_casacore_to_msv4_measure(
+         #     source_column_description["PROPER_MOTION"]
+         # )
+         # xds["SOURCE_PROPER_MOTION"] = xr.DataArray(
+         #     source_xds["proper_motion"].data, dims=["sky_dir_label"]
+         # )
+         # xds["SOURCE_PROPER_MOTION"].attrs.update(msv4_measure)
+
+     # ['DIRECTION', 'PROPER_MOTION', 'CALIBRATION_GROUP', 'CODE', 'INTERVAL', 'NAME', 'NUM_LINES', 'SOURCE_ID', 'SPECTRAL_WINDOW_ID', 'TIME', 'POSITION', 'TRANSITION', 'REST_FREQUENCY', 'SYSVEL']
+     if source_xds["num_lines"] > 0:
+         coords = {"line_name": source_xds["transition"].data}
+         xds = xds.assign_coords(coords)
+
+         optional_data_variables = {
+             "rest_frequency": "LINE_REST_FREQUENCY",
+             "sysvel": "LINE_SYSTEMIC_VELOCITY",
+         }
+         for generic_name, msv4_name in optional_data_variables.items():
+             if generic_name in source_xds:
+                 msv4_measure = column_description_casacore_to_msv4_measure(
+                     source_column_description[generic_name.upper()]
+                 )
+                 xds[msv4_name] = xr.DataArray(
+                     source_xds[generic_name].data, dims=["line_name"]
+                 )
+                 xds[msv4_name].attrs.update(msv4_measure)
+
+     # Need to add doppler info if present. Add check.
+     try:
+         doppler_xds = read_generic_table(
+             path,
+             "DOPPLER",
+         )
+         assert (
+             False
+         ), "Doppler table present. Please open an issue on https://github.com/casangi/xradio/issues so that we can add support for this."
+     except AssertionError:
+         raise
+     except Exception:
+         pass
+
+     return xds
+
+
+ def create_field_info_and_check_ephemeris(
+     field_and_source_xds, in_file, field_id, field_times, is_single_dish
+ ):
+     """
+     Create field information and check for ephemeris in the FIELD table folder.
+
+     Parameters:
+     ----------
+     field_and_source_xds : xr.Dataset
+         The xarray dataset to which the field and source information will be added.
+     in_file : str
+         The path to the input file.
+     field_id : int
+         The ID of the field.
+     field_times : array-like or None
+         Time values used as the time coordinate when the field varies with time.
+     is_single_dish : bool
+         Whether the data is single dish (only FIELD_REFERENCE_CENTER is created).
+
+     Returns:
+     -------
+     field_and_source_xds : xr.Dataset
+         The xarray dataset with the added field and source information.
+     ephemeris_path : str
+         The path to the ephemeris table.
+     ephemeris_table_name : str
+         The name of the ephemeris table.
+     """
+     field_xds = read_generic_table(
+         in_file,
+         "FIELD",
+         rename_ids=subt_rename_ids["FIELD"],
+     )  # .sel(field_id=field_id)
+     # print('1****',field_xds)
+     # field_xds['field_id'] = np.arange(len(field_xds.field_id))
+
+     assert len(field_xds.poly_id) == 1, "Polynomial field positions not supported."
+     field_xds = field_xds.isel(poly_id=0)
+     field_xds = field_xds.sel(field_id=field_id)
+
+     from xradio._utils.array import check_if_consistent
+
+     source_id = check_if_consistent(field_xds.source_id, "source_id")
+
+     # print('source_id', source_id)
+     # print(field_xds)
+     # print('***')
+     # print(field_xds.field_id)
+     # print('***')
+     # print(field_id)
+
+     field_and_source_xds.attrs.update(
+         {
+             "field_name": str(field_xds["name"].data),
+             "field_code": str(field_xds["code"].data),
+             # "field_id": field_id,
+             "source_id": source_id,
+         }
+     )
+
+     ephemeris_table_name = None
+     ephemeris_path = None
+     is_ephemeris = False
+
+     # Need to check if ephemeris_id is present and if the ephemeris table is present.
+     if "ephemeris_id" in field_xds:
+         ephemeris_id = check_if_consistent(
+             field_xds.ephemeris_id, "ephemeris_id"
+         )  # int(field_xds["ephemeris_id"].data)
+         if ephemeris_id > -1:
+             files = os.listdir(os.path.join(in_file, "FIELD"))
+             ephemeris_table_name_start = "EPHEM" + str(ephemeris_id)
+
+             ephemeris_name_table_index = [
+                 i for i in range(len(files)) if ephemeris_table_name_start in files[i]
+             ]
+             assert len(ephemeris_name_table_index) <= 1, (
+                 "More than one ephemeris table which starts with "
+                 + ephemeris_table_name_start
+             )
+
+             if len(ephemeris_name_table_index) > 0:  # Are there any ephemeris tables.
+                 is_ephemeris = True
+                 e_index = ephemeris_name_table_index[0]
+                 ephemeris_path = os.path.join(in_file, "FIELD")
+                 ephemeris_table_name = files[e_index]
+             else:
+                 logger.warning(
+                     f"Could not find ephemeris table for field_id {field_id}. Ephemeris information will not be included in the field_and_source_xds."
+                 )
+
+     # if is_ephemeris:
+     #     field_data_variables = {
+     #         "delay_dir": "FIELD_DELAY_CENTER_OFFSET",
+     #         "phase_dir": "FIELD_PHASE_CENTER_OFFSET",
+     #         "reference_dir": "FIELD_REFERENCE_CENTER_OFFSET",
+     #     }
+     #     field_measures_type = "sky_coord_offset"
+     #     field_and_source_xds.attrs["field_and_source_xds_type"] = "ephemeris"
+     # else:
+     #     field_data_variables = {
+     #         "delay_dir": "FIELD_DELAY_CENTER",
+     #         "phase_dir": "FIELD_PHASE_CENTER",
+     #         "reference_dir": "FIELD_REFERENCE_CENTER",
+     #     }
+     #     field_measures_type = "sky_coord"
+     #     field_and_source_xds.attrs["field_and_source_xds_type"] = "standard"
+
+     if is_single_dish:
+         field_data_variables = {
+             "reference_dir": "FIELD_REFERENCE_CENTER",
+         }
+     else:
+         field_data_variables = {
+             # "delay_dir": "FIELD_DELAY_CENTER",
+             "phase_dir": "FIELD_PHASE_CENTER",
+             # "reference_dir": "FIELD_REFERENCE_CENTER",
+         }
+
+     field_measures_type = "sky_coord"
+
+     field_column_description = field_xds.attrs["other"]["msv2"]["ctds_attrs"][
+         "column_descriptions"
+     ]  # Keys are ['DELAY_DIR', 'PHASE_DIR', 'REFERENCE_DIR', 'CODE', 'FLAG_ROW', 'NAME', 'NUM_POLY', 'SOURCE_ID', 'TIME']
+
+     coords = {}
+     coords["sky_dir_label"] = ["ra", "dec"]
+     if field_times is not None:
+         coords["time"] = field_times
+         dims = ["time", "sky_dir_label"]
+     else:
+         dims = ["sky_dir_label"]
+
+     for generic_name, msv4_name in field_data_variables.items():
+
+         if field_xds.get("delaydir_ref") is None:
+             delaydir_ref = None
+         else:
+             delaydir_ref = check_if_consistent(
+                 field_xds.get("delaydir_ref"), "delaydir_ref"
+             )
+         msv4_measure = column_description_casacore_to_msv4_measure(
+             field_column_description[generic_name.upper()], ref_code=delaydir_ref
+         )
+
+         field_and_source_xds[msv4_name] = xr.DataArray.from_dict(
+             {
+                 "dims": dims,
+                 "data": list(field_xds[generic_name].data),
+                 "attrs": msv4_measure,
+             }
+         )
+
+         field_and_source_xds[msv4_name].attrs["type"] = field_measures_type
+
+     field_and_source_xds = field_and_source_xds.assign_coords(coords)
+     return field_and_source_xds, ephemeris_path, ephemeris_table_name
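
Note: the new create_field_and_source_xds() helper shown above builds the per-partition field/source dataset and appears to be invoked internally during MSv2-to-MSv4 conversion (see items 16 and 25 in the file list). The following is a minimal sketch of a direct call, not part of the package; the MeasurementSet path, IDs, and time values are hypothetical placeholders, and in practice the conversion code supplies them.

import numpy as np
from xradio.vis._vis_utils._ms._tables.create_field_and_source_xds import (
    create_field_and_source_xds,
)

# Hypothetical inputs; a real MSv2 on disk is required for this to run.
field_and_source_xds = create_field_and_source_xds(
    in_file="my_data.ms",           # path to the MSv2 MeasurementSet (placeholder)
    field_id=0,                     # FIELD row selected for this partition
    spectral_window_id=0,           # SPECTRAL_WINDOW row for this partition
    field_times=None,               # or the partition's time values if the field varies with time
    is_single_dish=False,           # True uses FIELD_REFERENCE_CENTER instead of FIELD_PHASE_CENTER
    time_min_max=(np.float64(4.9e9), np.float64(4.9e9 + 3600.0)),  # casacore epoch seconds (placeholder)
    ephemeris_interp_time=None,     # or the main MSv4 time axis to interpolate ephemeris data vars
)

# Attributes set by the helper, e.g. the source name and whether ephemeris data was attached.
print(field_and_source_xds.attrs["source_name"], field_and_source_xds.attrs["is_ephemeris"])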