jolly-roger 0.1.0__py3-none-any.whl → 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of jolly-roger might be problematic. Click here for more details.

jolly_roger/_version.py CHANGED
@@ -17,5 +17,5 @@ __version__: str
17
17
  __version_tuple__: VERSION_TUPLE
18
18
  version_tuple: VERSION_TUPLE
19
19
 
20
- __version__ = version = '0.1.0'
21
- __version_tuple__ = version_tuple = (0, 1, 0)
20
+ __version__ = version = '0.2.0'
21
+ __version_tuple__ = version_tuple = (0, 2, 0)
jolly_roger/delays.py ADDED
@@ -0,0 +1,107 @@
1
+ """Utilities and structures around the delay calculations"""
2
+
3
+ from __future__ import annotations
4
+
5
+ from dataclasses import dataclass
6
+ from typing import TYPE_CHECKING
7
+
8
+ import astropy.units as u
9
+ import numpy as np
10
+ from numpy.typing import NDArray
11
+
12
+ from jolly_roger.logging import logger
13
+
14
+ if TYPE_CHECKING:
15
+ # avoid circular imports
16
+ from jolly_roger.tractor import BaselineData, DataChunk
17
+
18
+
19
@dataclass
class DelayTime:
    """Container for delay time and associated metadata.

    Produced by ``data_to_delay_time`` via an FFT along the frequency axis.
    """

    delay_time: NDArray[np.complexfloating]
    """The delay vs time data. shape=(time, delay, pol)"""
    delay: u.Quantity
    """The delay values corresponding to axis 1 of ``delay_time``."""
27
+
28
+
29
def data_to_delay_time(data: BaselineData | DataChunk) -> DelayTime:
    """Transform freq-time visibilities into delay-time space.

    Flagged samples are zero-filled before the FFT along the frequency
    axis (axis 1), and both the transform and its frequency axis are
    fftshift-ed so that zero delay sits at the centre.

    Args:
        data (BaselineData | DataChunk): Visibility data with ``masked_data``
            of shape (time, chan, pol) and a matching ``freq_chan`` axis.

    Returns:
        DelayTime: The delay-time data together with its delay axis.
    """
    logger.debug("Converting freq-time to delay-time")

    filled_vis = data.masked_data.filled(0 + 0j)
    delay_time_data = np.fft.fftshift(np.fft.fft(filled_vis, axis=1), axes=1)

    # The delay axis follows from the (assumed regular) channel spacing;
    # .decompose() reduces 1/Hz to seconds
    channel_width = np.diff(data.freq_chan).mean()
    delay_axis = np.fft.fftshift(
        np.fft.fftfreq(n=len(data.freq_chan), d=channel_width).decompose()
    )

    return DelayTime(
        delay_time=delay_time_data,
        delay=delay_axis,
    )
44
+
45
+
46
def delay_time_to_data(
    delay_time: DelayTime,
    original_data: DataChunk,
) -> DataChunk:
    """Invert a delay-time transform back to freq-time visibilities.

    The inverse FFT of ``delay_time`` is re-masked with the flags of
    ``original_data``. Note that ``original_data`` is updated in place
    (its ``masked_data`` attribute is replaced) and then returned.

    Args:
        delay_time (DelayTime): The delay-time data to invert.
        original_data (DataChunk): The chunk the delay data was derived
            from; supplies the mask and receives the new visibilities.

    Returns:
        DataChunk: ``original_data`` carrying the inverted visibilities.
    """
    logger.debug("Converting delay-time to freq-time")

    freq_time = np.fft.ifft(
        np.fft.ifftshift(delay_time.delay_time, axes=1),
        axis=1,
    )
    original_data.masked_data = np.ma.masked_array(
        freq_time,
        mask=original_data.masked_data.mask,
    )
    return original_data
63
+
64
+
65
@dataclass
class DelayRate:
    """Container for delay rate and associated metadata.

    Produced by ``data_to_delay_rate`` via a 2D FFT over time and frequency.
    """

    delay_rate: np.ndarray
    """The delay rate vs time data. shape=(rate, delay, pol)"""
    delay: u.Quantity
    """The delay values corresponding to the delay rate data."""
    rate: u.Quantity
    """The delay rate values corresponding to the delay rate data."""
75
+
76
+
77
def data_to_delay_rate(
    baseline_data: BaselineData,
) -> DelayRate:
    """Convert baseline data to a delay-rate transform.

    A 2D FFT is applied over the (time, freq) axes of the zero-filled
    visibilities. The delay axis is derived from the channel spacing and
    the rate axis from the integration spacing.

    Args:
        baseline_data (BaselineData): Data of a single baseline with
            ``masked_data`` of shape (time, chan, pol).

    Raises:
        TypeError: If something other than a ``BaselineData`` is supplied.

    Returns:
        DelayRate: The delay-rate data with its delay and rate axes.
    """
    # This only makes sense when running on time-ordered data of a single
    # baseline, hence requiring BaselineData. Import locally to avoid the
    # circular import with jolly_roger.tractor — the TYPE_CHECKING import
    # at module level is not available at runtime.
    from jolly_roger.tractor import BaselineData

    if not isinstance(baseline_data, BaselineData):
        msg = f"baseline_data is type={type(baseline_data)}, but needs to be BaselineData"
        raise TypeError(msg)

    logger.info("Converting freq-time to delay-rate")
    # Transform over (time, chan) only — the default fft2 axes would be the
    # last two (chan, pol), which does not match the documented
    # (rate, delay, pol) output shape of DelayRate
    delay_rate = np.fft.fftshift(
        np.fft.fft2(baseline_data.masked_data.filled(0 + 0j), axes=(0, 1)),
        axes=(0, 1),
    )
    delay = np.fft.fftshift(
        np.fft.fftfreq(
            n=len(baseline_data.freq_chan),
            d=np.diff(baseline_data.freq_chan).mean(),
        ).decompose()
    )
    rate = np.fft.fftshift(
        np.fft.fftfreq(
            n=len(baseline_data.time),
            d=np.diff(baseline_data.time.mjd * u.day).mean(),
        ).decompose()
    )

    return DelayRate(
        delay_rate=delay_rate,
        delay=delay,
        rate=rate,
    )
@@ -4,7 +4,6 @@ from __future__ import annotations
4
4
 
5
5
  from dataclasses import dataclass
6
6
  from pathlib import Path
7
- from typing import Literal
8
7
 
9
8
  import astropy.units as u
10
9
  import numpy as np
@@ -26,13 +25,13 @@ class PositionHourAngles:
26
25
 
27
26
  hour_angle: u.rad
28
27
  """The hour angle across sampled time intervales of a source for a Earth location"""
29
- time_mjds: np.ndarray
28
+ time_mjds: u.Quantity
30
29
  """The MJD time in seconds from which other quantities are evalauted against. Should be drawn from a measurement set."""
31
30
  location: EarthLocation
32
31
  """The location these quantities have been derived from."""
33
32
  position: SkyCoord
34
33
  """The sky-position that is being used to calculate quantities towards"""
35
- elevation: np.ndarray
34
+ elevation: u.Quantity
36
35
  """The elevation of the ``position` direction across time"""
37
36
  time: Time
38
37
  """Representation of the `time_mjds` attribute"""
@@ -43,7 +42,7 @@ class PositionHourAngles:
43
42
 
44
43
 
45
44
  def _process_position(
46
- position: SkyCoord | Literal["sun"] | None = None,
45
+ position: SkyCoord | str | None = None,
47
46
  ms_path: Path | None = None,
48
47
  times: Time | None = None,
49
48
  ) -> SkyCoord:
@@ -54,7 +53,7 @@ def _process_position(
54
53
  set
55
54
 
56
55
  Args:
57
- position (SkyCoord | Literal["sun"] | None, optional): The position to be considered. Defaults to None.
56
+ position (SkyCoord | str | None, optional): The position to be considered. Defaults to None.
58
57
  ms_path (Path | None, optional): The path with the PHASE_DIR to use should `position` be None. Defaults to None.
59
58
  times (Time | None, optional): Times to used if they are required in the lookup. Defaults to None.
60
59
 
@@ -71,8 +70,8 @@ def _process_position(
71
70
  if times is None:
72
71
  msg = f"{times=}, but needs to be set when position is a name"
73
72
  raise ValueError(msg)
74
- if position == "sun":
75
- logger.info("Getting sky-position of the sun")
73
+ if position.lower() == "sun":
74
+ logger.info("Getting sky-position of the Sun")
76
75
  position = get_sun(times)
77
76
  else:
78
77
  logger.info(f"Getting sky-position of {position=}")
@@ -142,7 +141,7 @@ def make_hour_angles_for_ms(
142
141
  lst = times.sidereal_time("apparent", longitude=location.lon)
143
142
  hour_angle = (lst - sky_position.ra).wrap_at(12 * u.hourangle)
144
143
 
145
- logger.info("Creatring elevation curve")
144
+ logger.info("Creating elevation curve")
146
145
  altaz = sky_position.transform_to(AltAz(obstime=times, location=location))
147
146
 
148
147
  return PositionHourAngles(
jolly_roger/plots.py ADDED
@@ -0,0 +1,171 @@
1
+ """Routines around plotting"""
2
+
3
+ from __future__ import annotations
4
+
5
+ from pathlib import Path
6
+ from typing import TYPE_CHECKING
7
+
8
+ import matplotlib.pyplot as plt
9
+ import numpy as np
10
+
11
+ from jolly_roger.uvws import WDelays
12
+
13
+ if TYPE_CHECKING:
14
+ from jolly_roger.delays import DelayTime
15
+ from jolly_roger.tractor import BaselineData
16
+
17
+
18
def plot_baseline_data(
    baseline_data: BaselineData,
    output_dir: Path,
    suffix: str = "",
) -> Path:
    """Plot the Stokes I amplitude waterfall for a single baseline.

    Args:
        baseline_data (BaselineData): The baseline data to plot.
        output_dir (Path): Directory the figure will be written to.
        suffix (str, optional): Suffix appended to the output file name. Defaults to "".

    Returns:
        Path: Path of the saved figure.
    """
    from astropy.visualization import quantity_support, time_support

    with quantity_support(), time_support():
        data_masked = baseline_data.masked_data
        # Stokes I approximated as the mean of the first and last
        # polarisation products (XX and YY)
        data_xx = data_masked[..., 0]
        data_yy = data_masked[..., -1]
        data_stokesi = (data_xx + data_yy) / 2
        amp_stokesi = np.abs(data_stokesi)

        fig, ax = plt.subplots()
        im = ax.pcolormesh(
            baseline_data.time,
            baseline_data.freq_chan,
            amp_stokesi.T,
        )
        fig.colorbar(im, ax=ax, label="Stokes I Amplitude / Jy")
        ax.set(
            ylabel=f"Frequency / {baseline_data.freq_chan.unit:latex_inline}",
            title=f"Ant {baseline_data.ant_1} - Ant {baseline_data.ant_2}",
        )
        output_path = (
            output_dir
            / f"baseline_data_{baseline_data.ant_1}_{baseline_data.ant_2}{suffix}.png"
        )
        fig.savefig(output_path)
        # Release the figure so repeated calls do not accumulate open figures
        plt.close(fig)

    return output_path
48
+
49
+
50
def plot_baseline_comparison_data(
    before_baseline_data: BaselineData,
    after_baseline_data: BaselineData,
    before_delays: DelayTime,
    after_delays: DelayTime,
    output_dir: Path,
    suffix: str = "",
    w_delays: WDelays | None = None,
) -> Path:
    """Plot before/after waterfalls and delay spectra for one baseline.

    A 2x2 figure is produced: the top row shows the Stokes I amplitude
    waterfalls before and after processing, the bottom row the
    corresponding delay-time amplitudes. When ``w_delays`` is given, the
    delay track of the target object is overlaid on the delay panels.

    Args:
        before_baseline_data (BaselineData): Baseline data before processing.
        after_baseline_data (BaselineData): Baseline data after processing.
        before_delays (DelayTime): Delay-time transform of the before data.
        after_delays (DelayTime): Delay-time transform of the after data.
        output_dir (Path): Directory the figure will be written to.
        suffix (str, optional): Suffix appended to the output file name. Defaults to "".
        w_delays (WDelays | None, optional): Object delays to overlay. Defaults to None.

    Returns:
        Path: Path of the saved figure.
    """
    from astropy.visualization import (
        ImageNormalize,
        LogStretch,
        MinMaxInterval,
        SqrtStretch,
        ZScaleInterval,
        quantity_support,
        time_support,
    )

    with quantity_support(), time_support():
        # Stokes I approximated as the mean of the first and last
        # polarisation products
        before_amp_stokesi = np.abs(
            (
                before_baseline_data.masked_data[..., 0]
                + before_baseline_data.masked_data[..., -1]
            )
            / 2
        )
        after_amp_stokesi = np.abs(
            (
                after_baseline_data.masked_data[..., 0]
                + after_baseline_data.masked_data[..., -1]
            )
            / 2
        )

        # Shared normalisation (from the after data) so the two waterfalls
        # are directly comparable
        norm = ImageNormalize(
            after_amp_stokesi, interval=ZScaleInterval(), stretch=SqrtStretch()
        )

        fig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(
            2, 2, figsize=(10, 10), sharex=True, sharey="row"
        )
        im = ax1.pcolormesh(
            before_baseline_data.time,
            before_baseline_data.freq_chan,
            before_amp_stokesi.T,
            norm=norm,
        )
        ax1.set(
            ylabel=f"Frequency / {before_baseline_data.freq_chan.unit:latex_inline}",
            title="Before",
        )
        ax2.pcolormesh(
            after_baseline_data.time,
            after_baseline_data.freq_chan,
            after_amp_stokesi.T,
            norm=norm,
        )
        ax2.set(
            ylabel=f"Frequency / {after_baseline_data.freq_chan.unit:latex_inline}",
            title="After",
        )
        fig.colorbar(im, ax=ax2, label="Stokes I Amplitude / Jy")

        # TODO: Move these delay calculations outside of the plotting function
        # And here we calculate the delay information

        before_delays_i = np.abs(
            (before_delays.delay_time[:, :, 0] + before_delays.delay_time[:, :, -1]) / 2
        )
        after_delays_i = np.abs(
            (after_delays.delay_time[:, :, 0] + after_delays.delay_time[:, :, -1]) / 2
        )

        delay_norm = ImageNormalize(
            before_delays_i, interval=MinMaxInterval(), stretch=LogStretch()
        )

        im = ax3.pcolormesh(
            before_baseline_data.time,
            before_delays.delay,
            before_delays_i.T,
            norm=delay_norm,
        )
        ax3.set(ylabel="Delay / s", title="Before")
        ax4.pcolormesh(
            after_baseline_data.time,
            after_delays.delay,
            after_delays_i.T,
            norm=delay_norm,
        )
        ax4.set(ylabel="Delay / s", title="After")
        fig.colorbar(im, ax=ax4, label="Stokes I Amplitude / Jy")

        if w_delays is not None:
            # Overlay the expected delay track of the target object
            for ax, baseline_data in zip(  # type:ignore[call-overload]
                (ax3, ax4),
                (before_baseline_data, after_baseline_data),
                strict=True,
            ):
                ant_1, ant_2 = baseline_data.ant_1, baseline_data.ant_2
                b_idx = w_delays.b_map[ant_1, ant_2]
                ax.plot(
                    baseline_data.time,
                    w_delays.w_delays[b_idx],
                    color="k",
                    linestyle="--",
                    label=f"Delay for {w_delays.object_name}",
                )
                ax.legend()

        output_path = (
            output_dir
            / f"baseline_data_{before_baseline_data.ant_1}_{before_baseline_data.ant_2}{suffix}.png"
        )
        fig.suptitle(
            f"Ant {after_baseline_data.ant_1} - Ant {after_baseline_data.ant_2}"
        )
        fig.tight_layout()
        fig.savefig(output_path)
        # This function is called once per baseline in a loop; close the
        # figure so open figures do not accumulate
        plt.close(fig)

    return output_path
jolly_roger/tractor.py ADDED
@@ -0,0 +1,808 @@
1
+ from __future__ import annotations
2
+
3
+ from argparse import ArgumentParser
4
+ from collections.abc import Generator
5
+ from dataclasses import dataclass
6
+ from pathlib import Path
7
+ from typing import Any
8
+
9
+ import astropy.units as u
10
+ import numpy as np
11
+ from astropy.coordinates import (
12
+ SkyCoord,
13
+ )
14
+ from astropy.time import Time
15
+ from casacore.tables import makecoldesc, table, taql
16
+ from numpy.typing import NDArray
17
+ from tqdm.auto import tqdm
18
+
19
+ from jolly_roger.delays import data_to_delay_time, delay_time_to_data
20
+ from jolly_roger.logging import logger
21
+ from jolly_roger.plots import plot_baseline_comparison_data
22
+ from jolly_roger.uvws import WDelays, get_object_delay_for_ms
23
+
24
+
25
@dataclass(frozen=True)
class OpenMSTables:
    """Open MS table references"""

    # Frozen so the set of opened tables can be passed around without rebinding
    main_table: table
    """The main MS table"""
    spw_table: table
    """The spectral window table"""
    field_table: table
    """The field table"""
    ms_path: Path
    """The path to the MS used to open tables"""
38
+
39
def get_open_ms_tables(ms_path: Path, read_only: bool = True) -> OpenMSTables:
    """Open up the set of MS table and sub-tables necessary for tractoring.

    Args:
        ms_path (Path): The path to the measurement set
        read_only (bool, optional): Whether to open in a read-only mode. Defaults to True.

    Returns:
        OpenMSTables: Set of open table references
    """
    # TODO: Get the data without auto-correlations e.g.
    # no_auto_main_table = taql(
    #     "select from $main_table where ANTENNA1 != ANTENNA2",
    # )

    open_kwargs = {"ack": False, "readonly": read_only}
    return OpenMSTables(
        main_table=table(str(ms_path), **open_kwargs),
        spw_table=table(str(ms_path / "SPECTRAL_WINDOW"), **open_kwargs),
        field_table=table(str(ms_path / "FIELD"), **open_kwargs),
        ms_path=ms_path,
    )
64
+
65
+
66
+ def tukey_taper(
67
+ x: np.typing.NDArray[np.floating],
68
+ outer_width: float = np.pi / 4,
69
+ tukey_width: float = np.pi / 8,
70
+ tukey_x_offset: NDArray[np.floating] | None = None,
71
+ ) -> np.ndarray:
72
+ x_freq = np.linspace(-np.pi, np.pi, len(x))
73
+
74
+ if tukey_x_offset is not None:
75
+ x_freq = x_freq[:, None] - tukey_x_offset[None, :]
76
+
77
+ taper = np.ones_like(x_freq)
78
+ logger.debug(f"{x_freq.shape=} {type(x_freq)=}")
79
+ # Fully zero region
80
+ taper[np.abs(x_freq) > outer_width] = 0
81
+
82
+ # Transition regions
83
+ left_idx = (-outer_width < x_freq) & (x_freq < -outer_width + tukey_width)
84
+ right_idx = (outer_width - tukey_width < x_freq) & (x_freq < outer_width)
85
+
86
+ taper[left_idx] = (
87
+ 1 - np.cos(np.pi * (x_freq[left_idx] + outer_width) / tukey_width)
88
+ ) / 2
89
+
90
+ taper[right_idx] = (
91
+ 1 - np.cos(np.pi * (outer_width - x_freq[right_idx]) / tukey_width)
92
+ ) / 2
93
+
94
+ return taper
95
+
96
+
97
@dataclass
class BaselineData:
    """Container for baseline data and associated metadata."""

    masked_data: np.ma.MaskedArray
    """The baseline data, masked where flags are set. shape=(time, chan, pol)"""
    freq_chan: u.Quantity
    """The frequency channels corresponding to the data."""
    phase_center: SkyCoord
    """The target sky coordinate for the baseline."""
    uvws_phase_center: u.Quantity
    """The UVW coordinates of the phase center of the baseline."""
    time: Time
    """The time of the observations."""
    ant_1: int
    """The first antenna in the baseline."""
    ant_2: int
    """The second antenna in the baseline."""
115
+
116
+
117
@dataclass
class BaselineArrays:
    """Raw column arrays for a single baseline as read from the main table."""

    data: NDArray[np.complexfloating]  # visibilities from the requested data column
    flags: NDArray[np.bool_]  # FLAG column matching ``data``
    uvws: NDArray[np.floating]  # UVW column (metres once units are attached)
    time_centroid: NDArray[np.floating]  # TIME_CENTROID column
123
+
124
+
125
@dataclass
class DataChunkArray:
    """Container for a chunk of data.

    Arrays are exactly as read from the measurement set — no units attached
    and no masking applied.
    """

    data: NDArray[np.complexfloating]
    """The data from the nominated data column loaded"""
    flags: NDArray[np.bool_]
    """Flags that correspond to the loaded data"""
    uvws: NDArray[np.floating]
    """The uvw coordinates for each loaded data record"""
    time_centroid: NDArray[np.floating]
    """The time of each data record"""
    ant_1: NDArray[np.int64]
    """Antenna 1 that formed the baseline"""
    ant_2: NDArray[np.int64]
    """Antenna 2 that formed the baseline"""
    row_start: int
    """The starting row of the portion of data loaded"""
    chunk_size: int
    """The nominal chunk size requested (the final chunk of a table may hold fewer rows)"""
145
+
146
+
147
@dataclass
class DataChunk:
    """Container for a collection of data and associated metadata.
    Here data are drawn from a series of rows, with units attached
    and flags folded into the masked data.
    """

    masked_data: np.ma.MaskedArray
    """The baseline data, masked where flags are set. shape=(time, chan, pol)"""
    freq_chan: u.Quantity
    """The frequency channels corresponding to the data."""
    phase_center: SkyCoord
    """The target sky coordinate for the baseline."""
    uvws_phase_center: u.Quantity
    """The UVW coordinates of the phase center of the baseline."""
    time: Time
    """The time of the observations."""
    time_mjds: NDArray[np.floating]
    """The raw time extracted from the measurement set in MJDs"""
    ant_1: NDArray[np.int64]
    """The first antenna in the baseline."""
    ant_2: NDArray[np.int64]
    """The second antenna in the baseline."""
    row_start: int
    """Starting row index of the data"""
    chunk_size: int
    """Size of the chunked portion of the data"""
173
+
174
+
175
+ def _list_to_array(
176
+ list_of_rows: list[dict[str, Any]], key: str
177
+ ) -> np.typing.NDArray[Any]:
178
+ """Helper to make a simple numpy object from list of items"""
179
+ return np.array([row[key] for row in list_of_rows])
180
+
181
+
182
def _get_data_chunk_from_main_table(
    ms_table: table,
    chunk_size: int,
    data_column: str,
) -> Generator[DataChunkArray, None, None]:
    """Return an appropriately sized data chunk from the main
    table of a measurement set. These data are as they are
    in the measurement set without any additional scaling
    or unit adjustments.

    Args:
        ms_table (table): The opened main table of a measurement set
        chunk_size (int): The size of the data to chunk and return
        data_column (str): The data column to be returned

    Yields:
        Generator[DataChunkArray, None, None]: A segment of rows and columns
    """

    table_length = len(ms_table)
    logger.debug(f"Length of open table: {table_length} rows")

    lower_row = 0
    upper_row = chunk_size

    while lower_row < table_length:
        # Slicing the open table yields the row records for the range; the
        # final slice may cover fewer than chunk_size rows
        rows: list[dict[str, Any]] = ms_table[lower_row:upper_row]

        data = _list_to_array(list_of_rows=rows, key=data_column)
        flags = _list_to_array(list_of_rows=rows, key="FLAG")
        uvws = _list_to_array(list_of_rows=rows, key="UVW")
        time_centroid = _list_to_array(list_of_rows=rows, key="TIME_CENTROID")
        ant_1 = _list_to_array(list_of_rows=rows, key="ANTENNA1")
        ant_2 = _list_to_array(list_of_rows=rows, key="ANTENNA2")

        yield DataChunkArray(
            data=data,
            flags=flags,
            uvws=uvws,
            time_centroid=time_centroid,
            ant_1=ant_1,
            ant_2=ant_2,
            row_start=lower_row,
            chunk_size=chunk_size,
        )

        lower_row += chunk_size
        upper_row += chunk_size
230
+
231
+
232
def get_data_chunks(
    open_ms_tables: OpenMSTables,
    chunk_size: int,
    data_column: str,
) -> Generator[DataChunk, None, None]:
    """Yield a collection of rows with appropriate units
    attached to the quantities. These quantities are not
    the same data encoded in the measurement set, e.g.
    masked array has been formed, astropy units have
    been attached.

    Args:
        open_ms_tables (OpenMSTables): References to open tables from the measurement set
        chunk_size (int): The number of rows to return at a time
        data_column (str): The data column that would be modified

    Yields:
        Generator[DataChunk, None, None]: Representation of the current chunk of rows
    """
    freq_chan = open_ms_tables.spw_table.getcol("CHAN_FREQ")
    phase_dir = open_ms_tables.field_table.getcol("PHASE_DIR")

    # Channel frequencies are stored in Hz and PHASE_DIR in radians
    freq_chan = freq_chan.squeeze() * u.Hz
    target = SkyCoord(*(phase_dir * u.rad).squeeze())

    for data_chunk_array in _get_data_chunk_from_main_table(
        ms_table=open_ms_tables.main_table,
        chunk_size=chunk_size,
        data_column=data_column,
    ):
        # Transform the native arrays but attach astropy quantities
        uvws_phase_center = data_chunk_array.uvws * u.m
        # TIME_CENTROID is MJD seconds on the UTC scale
        time = Time(
            data_chunk_array.time_centroid.squeeze() * u.s,
            format="mjd",
            scale="utc",
        )
        masked_data = np.ma.masked_array(
            data_chunk_array.data, mask=data_chunk_array.flags
        )

        yield DataChunk(
            masked_data=masked_data,
            freq_chan=freq_chan,
            phase_center=target,
            uvws_phase_center=uvws_phase_center,
            time=time,
            time_mjds=data_chunk_array.time_centroid,
            ant_1=data_chunk_array.ant_1,
            ant_2=data_chunk_array.ant_2,
            row_start=data_chunk_array.row_start,
            chunk_size=data_chunk_array.chunk_size,
        )
285
+
286
+
287
def _get_baseline_data(
    ms_tab: table,
    ant_1: int,
    ant_2: int,
    data_column: str = "DATA",
) -> BaselineArrays:
    """Read the raw columns of a single baseline via a taql selection."""
    # taql substitutes $-prefixed names from the calling scope; this no-op
    # assignment marks the names as used so linters do not flag them
    _ = ms_tab, ant_1, ant_2
    with taql(
        "select from $ms_tab where ANTENNA1 == $ant_1 and ANTENNA2 == $ant_2",
    ) as subtab:
        logger.info(f"Opening subtable for baseline {ant_1} {ant_2}")
        data = subtab.getcol(data_column)
        flags = subtab.getcol("FLAG")
        uvws = subtab.getcol("UVW")
        time_centroid = subtab.getcol("TIME_CENTROID")

    return BaselineArrays(
        data=data,
        flags=flags,
        uvws=uvws,
        time_centroid=time_centroid,
    )
309
+
310
+
311
def get_baseline_data(
    open_ms_tables: OpenMSTables,
    ant_1: int,
    ant_2: int,
    data_column: str = "DATA",
) -> BaselineData:
    """Get data of a baseline from a measurement set

    Args:
        open_ms_tables (OpenMSTables): The measurement set to draw data from
        ant_1 (int): The first antenna of the baseline
        ant_2 (int): The second antenna of the baseline
        data_column (str, optional): The data column to extract. Defaults to "DATA".

    Returns:
        BaselineData: Extracted baseline data
    """
    logger.info(f"Getting baseline {ant_1} {ant_2}")

    freq_chan = open_ms_tables.spw_table.getcol("CHAN_FREQ")
    phase_dir = open_ms_tables.field_table.getcol("PHASE_DIR")

    logger.debug(f"Processing {ant_1=} {ant_2=}")

    baseline_data = _get_baseline_data(
        ms_tab=open_ms_tables.main_table,
        ant_1=ant_1,
        ant_2=ant_2,
        data_column=data_column,
    )

    # Attach units: channel frequencies are Hz, PHASE_DIR is radians
    freq_chan = freq_chan.squeeze() * u.Hz
    target = SkyCoord(*(phase_dir * u.rad).squeeze())
    # Move the uvw axis to the front — assumes the UVW column is (row, 3);
    # confirm against the measurement set layout
    uvws_phase_center = np.swapaxes(baseline_data.uvws * u.m, 0, 1)
    # TIME_CENTROID is MJD seconds on the UTC scale
    time = Time(
        baseline_data.time_centroid.squeeze() * u.s,
        format="mjd",
        scale="utc",
    )
    masked_data = np.ma.masked_array(baseline_data.data, mask=baseline_data.flags)

    logger.info(f"Got data for baseline {ant_1} {ant_2} with shape {masked_data.shape}")
    return BaselineData(
        masked_data=masked_data,
        freq_chan=freq_chan,
        phase_center=target,
        uvws_phase_center=uvws_phase_center,
        time=time,
        ant_1=ant_1,
        ant_2=ant_2,
    )
362
+
363
+
364
def add_output_column(
    tab: table,
    data_column: str = "DATA",
    output_column: str = "CORRECTED_DATA",
    overwrite: bool = False,
    copy_column_data: bool = False,
) -> None:
    """Add in the output data column where the modified data
    will be recorded

    Args:
        tab (table): Open reference to the table to modify
        data_column (str, optional): The base data column the new will be based from. Defaults to "DATA".
        output_column (str, optional): The new data column to be created. Defaults to "CORRECTED_DATA".
        overwrite (bool, optional): Whether to overwrite the new output column. Defaults to False.
        copy_column_data (bool, optional): Copy the original data over to the output column. Defaults to False.

    Raises:
        ValueError: Raised if the output column already exists and overwrite is False
    """
    column_exists = output_column in tab.colnames()

    # Guard: refuse to clobber an existing column unless explicitly allowed
    if column_exists and not overwrite:
        msg = f"Output column {output_column} already exists in the measurement set. Not overwriting."
        raise ValueError(msg)

    if column_exists:
        logger.warning(
            f"Output column {output_column} already exists in the measurement set. Will be overwritten!"
        )
    else:
        # Clone the description of the source column under the new name
        logger.info(f"Adding {output_column=}")
        desc = makecoldesc(data_column, tab.getcoldesc(data_column))
        desc["name"] = output_column
        tab.addcols(desc)
        tab.flush()

    if copy_column_data:
        logger.info(f"Copying {data_column=} to {output_column=}")
        taql(f"UPDATE $tab SET {output_column}={data_column}")
403
+
404
+
405
def write_output_column(
    ms_path: Path,
    output_column: str,
    baseline_data: BaselineData,
    update_flags: bool = False,
) -> None:
    """Write the output column to the measurement set.

    Args:
        ms_path (Path): The measurement set to write into.
        output_column (str): Existing column that receives the data.
        baseline_data (BaselineData): The baseline whose visibilities (and optionally mask) are written.
        update_flags (bool, optional): Also write the data mask back into the FLAG column. Defaults to False.

    Raises:
        ValueError: If ``output_column`` does not exist in the measurement set.
    """
    ant_1 = baseline_data.ant_1
    ant_2 = baseline_data.ant_2
    # taql substitutes $-prefixed names from the calling scope; this no-op
    # assignment marks the names as used so linters do not flag them
    _ = ant_1, ant_2
    logger.info(f"Writing {output_column=} for baseline {ant_1} {ant_2}")
    with table(str(ms_path), readonly=False) as tab:
        colnames = tab.colnames()
        if output_column not in colnames:
            msg = f"Output column {output_column} does not exist in the measurement set. Cannot write data."
            raise ValueError(msg)

        with taql(
            "select from $tab where ANTENNA1 == $ant_1 and ANTENNA2 == $ant_2",
        ) as subtab:
            logger.info(f"Writing {output_column=}")
            subtab.putcol(output_column, baseline_data.masked_data.filled(0 + 0j))
            if update_flags:
                # Persist the current mask of the data back into the FLAG
                # column so flag changes made alongside the data are kept
                subtab.putcol("FLAG", baseline_data.masked_data.mask)
            subtab.flush()
432
+
433
+
434
def make_plot_results(
    open_ms_tables: OpenMSTables,
    data_column: str,
    output_column: str,
    w_delays: WDelays | None = None,
    num_baselines: int = 10,
) -> list[Path]:
    """Create before/after comparison plots for baselines to antenna 0.

    Figures are written to a ``plots`` directory alongside the
    measurement set.

    Args:
        open_ms_tables (OpenMSTables): References to the open measurement set tables
        data_column (str): Column holding the original data
        output_column (str): Column holding the modified data
        w_delays (WDelays | None, optional): Delays to overlay on the delay panels. Defaults to None.
        num_baselines (int, optional): Plot baselines from antenna 0 to antennas
            1..num_baselines. Defaults to 10 (the previously hard-coded value).

    Returns:
        list[Path]: Paths of the produced figures
    """
    output_paths: list[Path] = []
    output_dir = open_ms_tables.ms_path.parent / "plots"
    output_dir.mkdir(exist_ok=True, parents=True)
    # TODO: Guard against arrays with fewer than num_baselines + 1 antennas
    for i in range(num_baselines):
        logger.info(f"Plotting baseline={i + 1}")
        before_baseline_data = get_baseline_data(
            open_ms_tables=open_ms_tables,
            ant_1=0,
            ant_2=i + 1,
            data_column=data_column,
        )
        after_baseline_data = get_baseline_data(
            open_ms_tables=open_ms_tables,
            ant_1=0,
            ant_2=i + 1,
            data_column=output_column,
        )
        before_delays = data_to_delay_time(data=before_baseline_data)
        after_delays = data_to_delay_time(data=after_baseline_data)

        # TODO: the baseline data and delay times could be put into a single
        # structure to pass around easier.
        plot_path = plot_baseline_comparison_data(
            before_baseline_data=before_baseline_data,
            after_baseline_data=after_baseline_data,
            before_delays=before_delays,
            after_delays=after_delays,
            output_dir=output_dir,
            suffix="_comparison",
            w_delays=w_delays,
        )
        output_paths.append(plot_path)

    return output_paths
474
+
475
+
476
def _get_baseline_time_indicies(
    w_delays: WDelays, data_chunk: DataChunk
) -> tuple[NDArray[np.int_], NDArray[np.int_]]:
    """Extract the mappings into the data array"""

    # When computing uvws we have ignored auto-correlations!
    # TODO: Either extend the uvw calculations to include auto-correlations
    # or ignore them during iterations. Certainly the former is the better
    # approach.

    # Again, note the auto-correlations are ignored!!! Here be pirates mate
    # Auto-correlations have no entry in b_map, so map them to index 0 as a
    # placeholder
    baseline_idx = np.array(
        [
            w_delays.b_map[(int(ant_1), int(ant_2))] if ant_1 != ant_2 else 0
            for ant_1, ant_2 in zip(  # type: ignore[call-overload]
                data_chunk.ant_1, data_chunk.ant_2, strict=False
            )
        ]
    )

    # NOTE(review): time_map appears to be keyed by the MJD-second Quantity of
    # each record — confirm against how WDelays is constructed
    time_idx = np.array(
        [w_delays.time_map[time * u.s] for time in data_chunk.time_mjds]
    )

    return baseline_idx, time_idx
501
+
502
+
503
def _tukey_tractor(
    data_chunk: DataChunk,
    tukey_tractor_options: TukeyTractorOptions,
    w_delays: WDelays | None = None,
) -> DataChunk:
    """Compute a tukey taper for a dataset and then apply it
    to the dataset. Here the data corresponds to a (time, chan, pol)
    array. Data is not necessarily a single baseline.

    If a `w_delays` is provided it represents the delay (in seconds)
    between the phase direction of the measurement set and the Sun.
    This quantity may be derived in a number of ways, but in `jolly_roger`
    it is based on the difference of the w-coordinate towards these
    two directions. It should have a shape of [baselines, time]

    Args:
        data_chunk (DataChunk): The representation of the data with attached units
        tukey_tractor_options (TukeyTractorOptions): Options for the tukey taper
        w_delays (WDelays | None, optional): The w-derived delays to apply. If None taper is applied to large delays. Defaults to None.

    Returns:
        DataChunk: ``data_chunk`` updated in place with the tapered visibilities
    """

    delay_time = data_to_delay_time(data=data_chunk)

    # Look up the delay offset if requested. This must stay None (not a
    # zeros array) when no w_delays are given: a zeros array would make
    # tukey_taper broadcast to a (delay, delay) grid, breaking the 1D
    # reshape in the else branch below.
    tukey_x_offset: NDArray[np.floating] | None = None

    if w_delays is not None:
        baseline_idx, time_idx = _get_baseline_time_indicies(
            w_delays=w_delays, data_chunk=data_chunk
        )
        tukey_x_offset = w_delays.w_delays[baseline_idx, time_idx]

        # need to scale the x offset to the -pi to pi grid of the taper.
        # The delay should be symmetric
        tukey_x_offset = (
            tukey_x_offset / (np.max(delay_time.delay) / np.pi).decompose()
        ).value

    taper = tukey_taper(
        x=delay_time.delay,
        outer_width=tukey_tractor_options.outer_width,
        tukey_width=tukey_tractor_options.tukey_width,
        tukey_x_offset=tukey_x_offset,
    )
    if w_delays is not None:
        # The use of the `tukey_x_offset` changes the
        # shape of the output array. The internals of that
        # function returns a different shape via the broadcasting
        taper = np.swapaxes(taper[:, :, None], 0, 1)

        # Since we want to dampen the target object we invert the taper
        taper = 1.0 - taper

        # Disable the taper entirely when the target object is below the
        # horizon (elevation under -3 deg)
        elevation_mask = w_delays.elevation < (-3 * u.deg)
        taper[elevation_mask[time_idx], :, :] = 1.0

        # TODO: Handle case of aliased delays

        # TODO: Create heuristic to determine where baseline is long enough to
        # ignore the tapering. Aliasing may give us this though...

        # TODO: Create flags where delay is 'close' to 0

    else:
        # Single centred taper: reshape the 1D (delay,) window so it
        # broadcasts against the (time, delay, pol) delay-time array
        taper = taper[None, :, None]

    # Delay-time is a 3D array: (time, delay, pol); the taper broadcasts
    # against it. Scale real and imaginary parts separately so masked-array
    # semantics are preserved.
    tapered_delay_time_data_real = delay_time.delay_time.real * taper
    tapered_delay_time_data_imag = delay_time.delay_time.imag * taper
    tapered_delay_time_data = (
        tapered_delay_time_data_real + 1j * tapered_delay_time_data_imag
    )
    # NOTE: delay_time is mutated here; it is not used again afterwards
    tapered_delay_time = delay_time
    tapered_delay_time.delay_time = tapered_delay_time_data

    tapered_data = delay_time_to_data(
        delay_time=tapered_delay_time,
        original_data=data_chunk,
    )
    logger.debug(f"{tapered_data.masked_data.shape=} {tapered_data.masked_data.dtype}")

    return tapered_data
592
+
593
+
594
+ @dataclass
595
+ class TukeyTractorOptions:
596
+ """Options to describe the tukey taper to apply"""
597
+
598
+ ms_path: Path
599
+ """Measurement set to be modified"""
600
+ outer_width: float = np.pi / 4
601
+ """The start of the tapering in frequency space"""
602
+ tukey_width: float = np.pi / 8
603
+ """The width of the tapered region in frequency space"""
604
+ data_column: str = "DATA"
605
+ """The visibility column to modify"""
606
+ output_column: str = "CORRECTED_DATA"
607
+ """The output column to be created with the modified data"""
608
+ copy_column_data: bool = False
609
+ """Copy the data from the data column to the output column before applying the taper"""
610
+ dry_run: bool = False
611
+ """Indicates whether the data will be written back to the measurement set"""
612
+ make_plots: bool = False
613
+ """Create a small set of diagnostic plots"""
614
+ overwrite: bool = False
615
+ """If the output column exists it will be overwritten"""
616
+ chunk_size: int = 1000
617
+ """Size of the row-wise chunking iterator"""
618
+ apply_towards_object: bool = False
619
+ """apply the taper using the delay towards the target object."""
620
+ target_object: str = "Sun"
621
+ """The target object to apply the delay towards."""
622
+
623
+
624
+ def tukey_tractor(
625
+ tukey_tractor_options: TukeyTractorOptions,
626
+ ) -> None:
627
+ """Iterate row-wise over a specified measurement set and
628
+ apply a tukey taper operation to the delay data. Iteration
629
+ is performed based on a chunk size, indicating the number
630
+ of rows to read in at a time.
631
+
632
+ A full description of the options is outlined in `TukeyTractorOptions`.
633
+
634
+ Args:
635
+ tukey_tractor_options (TukeyTractorOptions): The settings to use during the taper, and measurement set to apply them to.
636
+ """
637
+ logger.info("jolly-roger")
638
+ logger.info(f"Options: {tukey_tractor_options}")
639
+
640
+ # acquire all the tables necessary to get unit information and data from
641
+ open_ms_tables = get_open_ms_tables(
642
+ ms_path=tukey_tractor_options.ms_path, read_only=False
643
+ )
644
+
645
+ if not tukey_tractor_options.dry_run:
646
+ add_output_column(
647
+ tab=open_ms_tables.main_table,
648
+ output_column=tukey_tractor_options.output_column,
649
+ data_column=tukey_tractor_options.data_column,
650
+ overwrite=tukey_tractor_options.overwrite,
651
+ copy_column_data=tukey_tractor_options.copy_column_data,
652
+ )
653
+
654
+ # Generate the delay for all baselines and time steps
655
+ w_delays: WDelays | None = None
656
+ if tukey_tractor_options.apply_towards_object:
657
+ logger.info(
658
+ f"Pre-calculating delays towards the target: {tukey_tractor_options.target_object}"
659
+ )
660
+ w_delays = get_object_delay_for_ms(
661
+ ms_path=tukey_tractor_options.ms_path,
662
+ object_name=tukey_tractor_options.target_object,
663
+ )
664
+ assert len(w_delays.w_delays.shape) == 2
665
+
666
+ if not tukey_tractor_options.dry_run:
667
+ with tqdm(total=len(open_ms_tables.main_table)) as pbar:
668
+ for data_chunk in get_data_chunks(
669
+ open_ms_tables=open_ms_tables,
670
+ chunk_size=tukey_tractor_options.chunk_size,
671
+ data_column=tukey_tractor_options.data_column,
672
+ ):
673
+ taper_data_chunk = _tukey_tractor(
674
+ data_chunk=data_chunk,
675
+ tukey_tractor_options=tukey_tractor_options,
676
+ w_delays=w_delays,
677
+ )
678
+
679
+ pbar.update(len(taper_data_chunk.masked_data))
680
+
681
+ # only put if not a dry run
682
+ open_ms_tables.main_table.putcol(
683
+ columnname=tukey_tractor_options.output_column,
684
+ value=taper_data_chunk.masked_data,
685
+ startrow=taper_data_chunk.row_start,
686
+ nrow=taper_data_chunk.chunk_size,
687
+ )
688
+
689
+ if tukey_tractor_options.make_plots:
690
+ plot_paths = make_plot_results(
691
+ open_ms_tables=open_ms_tables,
692
+ data_column=tukey_tractor_options.data_column,
693
+ output_column=tukey_tractor_options.output_column,
694
+ w_delays=w_delays,
695
+ )
696
+
697
+ logger.info(f"Made {len(plot_paths)} output plots")
698
+
699
+
700
+ def get_parser() -> ArgumentParser:
701
+ """Create the CLI argument parser
702
+
703
+ Returns:
704
+ ArgumentParser: Constructed argument parser
705
+ """
706
+ parser = ArgumentParser(description="Run the Jolly Roger Tractor")
707
+ subparsers = parser.add_subparsers(dest="mode")
708
+
709
+ tukey_parser = subparsers.add_parser(
710
+ name="tukey", help="Perform a dumb Tukey taper across delay-time data"
711
+ )
712
+ tukey_parser.add_argument(
713
+ "ms_path",
714
+ type=Path,
715
+ help="The measurement set to process with the Tukey tractor",
716
+ )
717
+ tukey_parser.add_argument(
718
+ "--outer-width",
719
+ type=float,
720
+ default=np.pi / 4,
721
+ help="The outer width of the Tukey taper in radians",
722
+ )
723
+ tukey_parser.add_argument(
724
+ "--tukey-width",
725
+ type=float,
726
+ default=np.pi / 8,
727
+ help="The Tukey width of the Tukey taper in radians",
728
+ )
729
+ tukey_parser.add_argument(
730
+ "--data-column",
731
+ type=str,
732
+ default="DATA",
733
+ help="The data column to use for the Tukey tractor",
734
+ )
735
+ tukey_parser.add_argument(
736
+ "--output-column",
737
+ type=str,
738
+ default="CORRECTED_DATA",
739
+ help="The output column to write the Tukey tractor results to",
740
+ )
741
+ tukey_parser.add_argument(
742
+ "--copy-column-data",
743
+ action="store_true",
744
+ help="If set, the Tukey tractor will copy the data from the data column to the output column before applying the taper",
745
+ )
746
+ tukey_parser.add_argument(
747
+ "--dry-run",
748
+ action="store_true",
749
+ help="If set, the Tukey tractor will not write any output, but will log what it would do",
750
+ )
751
+ tukey_parser.add_argument(
752
+ "--make-plots",
753
+ action="store_true",
754
+ help="If set, the Tukey tractor will make plots of the results",
755
+ )
756
+ tukey_parser.add_argument(
757
+ "--overwrite",
758
+ action="store_true",
759
+ help="If set, the Tukey tractor will overwrite the output column if it already exists",
760
+ )
761
+ tukey_parser.add_argument(
762
+ "--chunk-size",
763
+ type=int,
764
+ default=10000,
765
+ help="The number of rows to process in one chunk. Larger numbers require more memory but fewer interactions with I/O.",
766
+ )
767
+ tukey_parser.add_argument(
768
+ "--target-object",
769
+ type=str,
770
+ default="Sun",
771
+ help="The target object to apply the delay towards. Defaults to 'Sun'.",
772
+ )
773
+ tukey_parser.add_argument(
774
+ "--apply-towards-object",
775
+ action="store_true",
776
+ help="Whether the tukey taper is applied towards the target object (e.g. the Sun). If not set, the taper is applied towards large delays.",
777
+ )
778
+
779
+ return parser
780
+
781
+
782
+ def cli() -> None:
783
+ """Command line interface for the Jolly Roger Tractor."""
784
+ parser = get_parser()
785
+ args = parser.parse_args()
786
+
787
+ if args.mode == "tukey":
788
+ tukey_tractor_options = TukeyTractorOptions(
789
+ ms_path=args.ms_path,
790
+ outer_width=args.outer_width,
791
+ tukey_width=args.tukey_width,
792
+ data_column=args.data_column,
793
+ output_column=args.output_column,
794
+ copy_column_data=args.copy_column_data,
795
+ dry_run=args.dry_run,
796
+ make_plots=args.make_plots,
797
+ overwrite=args.overwrite,
798
+ chunk_size=args.chunk_size,
799
+ target_object=args.target_object,
800
+ apply_towards_object=args.apply_towards_object,
801
+ )
802
+ tukey_tractor(tukey_tractor_options=tukey_tractor_options)
803
+ else:
804
+ parser.print_help()
805
+
806
+
807
+ if __name__ == "__main__":
808
+ cli()
jolly_roger/uvws.py CHANGED
@@ -11,17 +11,67 @@ from astropy.constants import c as speed_of_light
11
11
  from casacore.tables import table, taql
12
12
  from tqdm import tqdm
13
13
 
14
- from jolly_roger.baselines import Baselines
15
- from jolly_roger.hour_angles import PositionHourAngles
14
+ from jolly_roger.baselines import Baselines, get_baselines_from_ms
15
+ from jolly_roger.hour_angles import PositionHourAngles, make_hour_angles_for_ms
16
16
  from jolly_roger.logging import logger
17
17
 
18
18
 
19
+ @dataclass(frozen=True)
20
+ class WDelays:
21
+ """Representation and mappings for the w-coordinate derived delays"""
22
+
23
+ object_name: str
24
+ """The name of the object that the delays are derived towards"""
25
+ w_delays: u.Quantity
26
+ """The w-derived delay. Shape is [baseline, time]"""
27
+ b_map: dict[tuple[int, int], int]
28
+ """The mapping between (ANTENNA1,ANTENNA2) to baseline index"""
29
+ time_map: dict[u.Quantity, int]
30
+ """The mapping between time (MJDs from measurement set) to index"""
31
+ elevation: u.Quantity
32
+ """The elevation of the target object in time order of steps in the MS"""
33
+
34
+
35
+ def get_object_delay_for_ms(
36
+ ms_path: Path,
37
+ object_name: str = "sun",
38
+ ) -> WDelays:
39
+ # Generate the two sets of uvw coordinate objects
40
+ baselines: Baselines = get_baselines_from_ms(ms_path=ms_path)
41
+ hour_angles_phase = make_hour_angles_for_ms(
42
+ ms_path=ms_path,
43
+ position=None, # gets the position from phase direction
44
+ )
45
+ uvws_phase: UVWs = xyz_to_uvw(baselines=baselines, hour_angles=hour_angles_phase)
46
+
47
+ hour_angles_object = make_hour_angles_for_ms(
48
+ ms_path=ms_path,
49
+ position=object_name, # gets the position of the named target object
50
+ )
51
+ uvws_object: UVWs = xyz_to_uvw(baselines=baselines, hour_angles=hour_angles_object)
52
+
53
+ # Subtract the w-coordinates out. Since these uvws have
54
+ # been computed towards different directions the difference
55
+ # in w-coordinate is the delay distance
56
+ w_diffs = uvws_object.uvws[2] - uvws_phase.uvws[2]
57
+
58
+ delay_object = (w_diffs / speed_of_light).decompose()
59
+
60
+ return WDelays(
61
+ object_name=object_name,
62
+ w_delays=delay_object,
63
+ b_map=baselines.b_map,
64
+ time_map=hour_angles_phase.time_map,
65
+ elevation=hour_angles_object.elevation,
66
+ )
67
+
68
+
19
69
  @dataclass
20
70
  class UVWs:
21
71
  """A small container to represent uvws"""
22
72
 
23
73
  uvws: np.ndarray
24
- """The (U,V,W) coordinates"""
74
+ """The (U,V,W) coordinates, shape [coord, baseline, time]"""
25
75
  hour_angles: PositionHourAngles
26
76
  """The hour angle information used to construct the UVWs"""
27
77
  baselines: Baselines
@@ -51,9 +101,9 @@ def xyz_to_uvw(
51
101
  declination = hour_angles.position.dec
52
102
 
53
103
  # This is necessary for broadcasting in the matrix to work.
54
- # Should the position be a solar object like the sub its position
104
+ # Should the position be a solar object like the sun its position
55
105
  # will change throughout the observation. but it will have
56
- ## been created consistently with the hour angles. If it is fixed
106
+ # been created consistently with the hour angles. If it is fixed
57
107
  # then the use of the numpy ones like will ensure the same shape.
58
108
  declination = (np.ones(len(ha)) * declination).decompose()
59
109
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: jolly-roger
3
- Version: 0.1.0
3
+ Version: 0.2.0
4
4
  Summary: The pirate flagger
5
5
  Project-URL: Homepage, https://github.com/flint-crew/jolly-roger
6
6
  Project-URL: Bug Tracker, https://github.com/flint-crew/jolly-roger/issues
@@ -23,6 +23,8 @@ Classifier: Topic :: Scientific/Engineering
23
23
  Classifier: Typing :: Typed
24
24
  Requires-Python: >=3.11
25
25
  Requires-Dist: astropy
26
+ Requires-Dist: dask-ms
27
+ Requires-Dist: matplotlib
26
28
  Requires-Dist: numpy>=2.0.0
27
29
  Requires-Dist: python-casacore>=3.6.0
28
30
  Requires-Dist: tqdm
@@ -0,0 +1,17 @@
1
+ jolly_roger/__init__.py,sha256=7xiZLdeY-7sgrYGQ1gNdCjgCfqnoPXK7AeaHncY_DGU,204
2
+ jolly_roger/_version.py,sha256=iB5DfB5V6YB5Wo4JmvS-txT42QtmGaWcWp3udRT7zCI,511
3
+ jolly_roger/_version.pyi,sha256=j5kbzfm6lOn8BzASXWjGIA1yT0OlHTWqlbyZ8Si_o0E,118
4
+ jolly_roger/baselines.py,sha256=C_vC3v_ciU2T_si31oS0hUmsMNTQA0USxrm4118vYvY,4615
5
+ jolly_roger/delays.py,sha256=cvLMhChkkB6PkS11v6JU8Wn23Zqv5bQY1HTMzeIGTNw,3015
6
+ jolly_roger/flagger.py,sha256=tlC-M_MpLpqOvkF544zw2EvOUpbSpasO2zlMlXMcxSs,3034
7
+ jolly_roger/hour_angles.py,sha256=ld3jiEDQXlYLHrChUxYD_UBSxKH0qarstakBPLQ0M8s,6044
8
+ jolly_roger/logging.py,sha256=04YVHnF_8tKDkXNtXQ-iMyJ2BLV-qowbPAqqMFDxYE4,1338
9
+ jolly_roger/plots.py,sha256=LsueygCHpGvBXZe2y4q1fmJEMyjoMl65JzFMzbduawI,5280
10
+ jolly_roger/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
11
+ jolly_roger/tractor.py,sha256=ORkQb7T7jxMFVcDihH1McYYGq07OgsvbfT44D82ghL4,27723
12
+ jolly_roger/uvws.py,sha256=ujZdIIxNY2k4HY9p65kUyH-VqN6thNpOrBb-wpL9mYM,12424
13
+ jolly_roger-0.2.0.dist-info/METADATA,sha256=vFDa_-0nKwhoFVmm9x_qqQgZURZV8jPBprgRue9h7XY,4221
14
+ jolly_roger-0.2.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
15
+ jolly_roger-0.2.0.dist-info/entry_points.txt,sha256=q8RYosASYsPShzsIo58NxOhIMuB4F-gQ2uG6zS2p224,98
16
+ jolly_roger-0.2.0.dist-info/licenses/LICENSE,sha256=7G-TthaPSOehr-pdj4TJydXj3eIUmerMbCUSatMr8hc,1522
17
+ jolly_roger-0.2.0.dist-info/RECORD,,
@@ -1,2 +1,3 @@
1
1
  [console_scripts]
2
2
  jolly_flagger = jolly_roger.flagger:cli
3
+ jolly_tractor = jolly_roger.tractor:cli
@@ -1,14 +0,0 @@
1
- jolly_roger/__init__.py,sha256=7xiZLdeY-7sgrYGQ1gNdCjgCfqnoPXK7AeaHncY_DGU,204
2
- jolly_roger/_version.py,sha256=-LyU5F1uZDjn6Q8_Z6-_FJt_8RE4Kq9zcKdg1abSSps,511
3
- jolly_roger/_version.pyi,sha256=j5kbzfm6lOn8BzASXWjGIA1yT0OlHTWqlbyZ8Si_o0E,118
4
- jolly_roger/baselines.py,sha256=C_vC3v_ciU2T_si31oS0hUmsMNTQA0USxrm4118vYvY,4615
5
- jolly_roger/flagger.py,sha256=tlC-M_MpLpqOvkF544zw2EvOUpbSpasO2zlMlXMcxSs,3034
6
- jolly_roger/hour_angles.py,sha256=SUUN_DcT3xzYp9JZ1U9ZcJpMjsfYETJ4D0YghUjee7Y,6096
7
- jolly_roger/logging.py,sha256=04YVHnF_8tKDkXNtXQ-iMyJ2BLV-qowbPAqqMFDxYE4,1338
8
- jolly_roger/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
9
- jolly_roger/uvws.py,sha256=42FngtA425Fk8QSlEIbl-9tEBd_-In0FtV2LYcnk6-0,10555
10
- jolly_roger-0.1.0.dist-info/METADATA,sha256=d40uefWM1e6HJtaqxNoak1PFBNFFZaBaS_18wno_Au8,4172
11
- jolly_roger-0.1.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
12
- jolly_roger-0.1.0.dist-info/entry_points.txt,sha256=ZwEZAe4DBn5nznVI0tP0a1wUinYouwXxxcZP6p7Pkvk,58
13
- jolly_roger-0.1.0.dist-info/licenses/LICENSE,sha256=7G-TthaPSOehr-pdj4TJydXj3eIUmerMbCUSatMr8hc,1522
14
- jolly_roger-0.1.0.dist-info/RECORD,,