jolly-roger 0.0.2__py3-none-any.whl → 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of jolly-roger might be problematic.

jolly_roger/_version.py CHANGED
@@ -17,5 +17,5 @@ __version__: str
  __version_tuple__: VERSION_TUPLE
  version_tuple: VERSION_TUPLE
 
- __version__ = version = '0.0.2'
- __version_tuple__ = version_tuple = (0, 0, 2)
+ __version__ = version = '0.2.0'
+ __version_tuple__ = version_tuple = (0, 2, 0)
jolly_roger/delays.py ADDED
@@ -0,0 +1,107 @@
+ """Utilities and structures around the delay calculations"""
+
+ from __future__ import annotations
+
+ from dataclasses import dataclass
+ from typing import TYPE_CHECKING
+
+ import astropy.units as u
+ import numpy as np
+ from numpy.typing import NDArray
+
+ from jolly_roger.logging import logger
+
+ if TYPE_CHECKING:
+     # avoid circular imports
+     from jolly_roger.tractor import BaselineData, DataChunk
+
+
+ @dataclass
+ class DelayTime:
+     """Container for delay time and associated metadata."""
+
+     delay_time: NDArray[np.complexfloating]
+     """The delay vs time data. shape=(time, delay, pol)"""
+     delay: u.Quantity
+     """The delay values corresponding to the delay time data."""
+
+
+ def data_to_delay_time(data: BaselineData | DataChunk) -> DelayTime:
+     logger.debug("Converting freq-time to delay-time")
+     delay_time = np.fft.fftshift(
+         np.fft.fft(data.masked_data.filled(0 + 0j), axis=1), axes=1
+     )
+     delay = np.fft.fftshift(
+         np.fft.fftfreq(
+             n=len(data.freq_chan),
+             d=np.diff(data.freq_chan).mean(),
+         ).decompose()
+     )
+     return DelayTime(
+         delay_time=delay_time,
+         delay=delay,
+     )
+
+
+ def delay_time_to_data(
+     delay_time: DelayTime,
+     original_data: DataChunk,
+ ) -> DataChunk:
+     """Convert delay time data back to the original data format."""
+     logger.debug("Converting delay-time to freq-time")
+     new_data = np.fft.ifft(
+         np.fft.ifftshift(delay_time.delay_time, axes=1),
+         axis=1,
+     )
+     new_data_masked = np.ma.masked_array(
+         new_data,
+         mask=original_data.masked_data.mask,
+     )
+     new_data = original_data
+     new_data.masked_data = new_data_masked
+     return new_data
+
+
+ @dataclass
+ class DelayRate:
+     """Container for delay rate and associated metadata."""
+
+     delay_rate: np.ndarray
+     """The delay rate vs time data. shape=(rate, delay, pol)"""
+     delay: u.Quantity
+     """The delay values corresponding to the delay rate data."""
+     rate: u.Quantity
+     """The delay rate values corresponding to the delay rate data."""
+
+
+ def data_to_delay_rate(
+     baseline_data: BaselineData,
+ ) -> DelayRate:
+     """Convert baseline data to delay rate."""
+     # This only makes sense when running on time data. Hence
+     # asserting the type of BaelineData
+
+     assert isinstance(baseline_data, BaselineData), (
+         f"baseline_data is type={type(baseline_data)}, but needs to be BaselineData"
+     )
+
+     logger.info("Converting freq-time to delay-rate")
+     delay_rate = np.fft.fftshift(np.fft.fft2(baseline_data.masked_data.filled(0 + 0j)))
+     delay = np.fft.fftshift(
+         np.fft.fftfreq(
+             n=len(baseline_data.freq_chan),
+             d=np.diff(baseline_data.freq_chan).mean(),
+         ).decompose()
+     )
+     rate = np.fft.fftshift(
+         np.fft.fftfreq(
+             n=len(baseline_data.time),
+             d=np.diff(baseline_data.time.mjd * u.day).mean(),
+         ).decompose()
+     )
+
+     return DelayRate(
+         delay_rate=delay_rate,
+         delay=delay,
+         rate=rate,
+     )
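
For orientation, the new delays.py module maps frequency-time visibilities into delay-time space with an FFT along the frequency axis and labels the delay axis with the FFT frequencies of the mean channel spacing; the inverse FFT undoes the transform. Below is a minimal standalone sketch of that round trip on synthetic data (the array shapes, channel grid, and seed are illustrative assumptions, not values from the package):

    import astropy.units as u
    import numpy as np

    # Synthetic freq-time visibilities with shape (time, channel, polarisation)
    n_time, n_chan, n_pol = 32, 256, 2
    rng = np.random.default_rng(1)
    data = rng.normal(size=(n_time, n_chan, n_pol)) + 0j

    freq_chan = np.linspace(1.1, 1.4, n_chan) * u.GHz

    # Forward: FFT over the frequency axis, shifted so zero delay sits in the centre
    delay_time = np.fft.fftshift(np.fft.fft(data, axis=1), axes=1)
    delay = np.fft.fftshift(
        np.fft.fftfreq(n=n_chan, d=np.diff(freq_chan).mean()).decompose()
    )  # delay axis in seconds

    # Inverse: undo the shift and the FFT to recover the freq-time data
    recovered = np.fft.ifft(np.fft.ifftshift(delay_time, axes=1), axis=1)
    assert np.allclose(recovered, data)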
@@ -4,19 +4,17 @@ from __future__ import annotations
 
  from dataclasses import dataclass
  from pathlib import Path
- from typing import Literal
 
  import astropy.units as u
  import numpy as np
- from astropy.coordinates import EarthLocation, SkyCoord, get_sun
+ from astropy.coordinates import AltAz, EarthLocation, SkyCoord, get_sun
  from astropy.time import Time
  from casacore.tables import table
 
  from jolly_roger.logging import logger
 
  # Default location with XYZ based on mean of antenna positions
- ASKAP_XYZ_m = np.array([-2556146.66356375, 5097426.58592797, -2848333.08164107]) * u.m
- ASKAP = EarthLocation(*ASKAP_XYZ_m)
+ ASKAP = EarthLocation.of_site("ASKAP")
 
 
  @dataclass
@@ -27,13 +25,13 @@ class PositionHourAngles:
 
      hour_angle: u.rad
      """The hour angle across sampled time intervales of a source for a Earth location"""
-     time_mjds: np.ndarray
+     time_mjds: u.Quantity
      """The MJD time in seconds from which other quantities are evalauted against. Should be drawn from a measurement set."""
      location: EarthLocation
      """The location these quantities have been derived from."""
      position: SkyCoord
      """The sky-position that is being used to calculate quantities towards"""
-     elevation: np.ndarray
+     elevation: u.Quantity
      """The elevation of the ``position` direction across time"""
      time: Time
      """Representation of the `time_mjds` attribute"""
@@ -44,7 +42,7 @@ class PositionHourAngles:
 
 
  def _process_position(
-     position: SkyCoord | Literal["sun"] | None = None,
+     position: SkyCoord | str | None = None,
      ms_path: Path | None = None,
      times: Time | None = None,
  ) -> SkyCoord:
@@ -55,7 +53,7 @@ def _process_position(
      set
 
      Args:
-         position (SkyCoord | Literal["sun"] | None, optional): The position to be considered. Defaults to None.
+         position (SkyCoord | str | None, optional): The position to be considered. Defaults to None.
          ms_path (Path | None, optional): The path with the PHASE_DIR to use should `position` be None. Defaults to None.
          times (Time | None, optional): Times to used if they are required in the lookup. Defaults to None.
 
@@ -72,9 +70,12 @@ def _process_position(
          if times is None:
              msg = f"{times=}, but needs to be set when position is a name"
              raise ValueError(msg)
-         if position == "sun":
-             logger.info("Getting sky-position of the sun")
+         if position.lower() == "sun":
+             logger.info("Getting sky-position of the Sun")
              position = get_sun(times)
+         else:
+             logger.info(f"Getting sky-position of {position=}")
+             position = SkyCoord.from_name(position)
 
      if position is None:
          if ms_path is None:
@@ -83,8 +84,8 @@ def _process_position(
 
          with table(str(ms_path / "FIELD")) as tab:
              logger.info(f"Getting the sky-position from PHASE_DIR of {ms_path=}")
-             field_positions = tab.getcol("PHASE_DIR")
-             position = SkyCoord(field_positions[0] * u.rad)
+             field_positions = tab.getcol("PHASE_DIR")[:]
+             position = SkyCoord(*(field_positions * u.rad).squeeze())
 
      if isinstance(position, SkyCoord):
          return position
@@ -116,7 +117,7 @@ def make_hour_angles_for_ms(
      logger.info(f"Computing hour angles for {ms_path=}")
      with table(str(ms_path), ack=False) as tab:
          logger.info("Extracting timesteps and constructing time mapping")
-         times_mjds = tab.getcol("TIME_CENTROID")
+         times_mjds = tab.getcol("TIME_CENTROID")[:] * u.s
 
      # get unique time steps and make sure they are in their first appeared order
      times_mjds, indices = np.unique(times_mjds, return_index=True)
@@ -127,33 +128,28 @@ def make_hour_angles_for_ms(
      if whole_day:
          logger.info(f"Assuming a full day from {times_mjds} MJD (seconds)")
          time_step = times_mjds[1] - times_mjds[0]
-         times_mjds = times_mjds[0] + time_step * np.arange(
-             int(60 * 60 * 24 / time_step)
+         times_mjds = np.arange(
+             start=times_mjds[0],
+             stop=times_mjds[0] + 24 * u.hour,
+             step=time_step,
          )
 
-     times = Time(times_mjds / 60 / 60 / 24, format="mjd")
+     times = Time(times_mjds, format="mjd", scale="utc")
 
-     sky_position: SkyCoord = _process_position(
-         position=position, times=times, ms_path=ms_path
-     )
+     sky_position = _process_position(position=position, times=times, ms_path=ms_path)
 
      lst = times.sidereal_time("apparent", longitude=location.lon)
-     hour_angle = lst - sky_position.ra
-     mask = hour_angle > 12 * u.hourangle
-     hour_angle[mask] -= 24 * u.hourangle
+     hour_angle = (lst - sky_position.ra).wrap_at(12 * u.hourangle)
 
-     logger.info("Creatring elevation curve")
-     sin_alt = np.arcsin(
-         np.sin(location.lat) * np.sin(sky_position[0].dec.rad)
-         + np.cos(location.lat) * np.cos(sky_position.dec.rad) * np.cos(hour_angle)
-     ).to(u.rad)
+     logger.info("Creating elevation curve")
+     altaz = sky_position.transform_to(AltAz(obstime=times, location=location))
 
      return PositionHourAngles(
          hour_angle=hour_angle,
          time_mjds=times_mjds,
          location=location,
          position=sky_position,
-         elevation=sin_alt,
+         elevation=altaz.alt.to(u.rad),
          time=times,
          time_map=time_map,
      )
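
The hour-angle changes replace the manual 24-hour wrap and the hand-written arcsin elevation with astropy's Angle.wrap_at and an AltAz transform, and the ASKAP location is now looked up with EarthLocation.of_site. A small self-contained sketch of that pattern (the start time and sampling below are illustrative assumptions, not values from the package):

    import astropy.units as u
    import numpy as np
    from astropy.coordinates import AltAz, EarthLocation, get_sun
    from astropy.time import Time

    location = EarthLocation.of_site("ASKAP")
    times = Time("2024-01-01T00:00:00", scale="utc") + np.arange(0, 24, 0.5) * u.hour
    position = get_sun(times)  # a named source would use SkyCoord.from_name(...)

    # Hour angle: local sidereal time minus right ascension, wrapped into +/- 12 h
    lst = times.sidereal_time("apparent", longitude=location.lon)
    hour_angle = (lst - position.ra).wrap_at(12 * u.hourangle)

    # Elevation curve via the AltAz frame instead of explicit spherical trigonometry
    altaz = position.transform_to(AltAz(obstime=times, location=location))
    elevation = altaz.alt.to(u.rad)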
jolly_roger/plots.py ADDED
@@ -0,0 +1,171 @@
+ """Routines around plotting"""
+
+ from __future__ import annotations
+
+ from pathlib import Path
+ from typing import TYPE_CHECKING
+
+ import matplotlib.pyplot as plt
+ import numpy as np
+
+ from jolly_roger.uvws import WDelays
+
+ if TYPE_CHECKING:
+     from jolly_roger.delays import DelayTime
+     from jolly_roger.tractor import BaselineData
+
+
+ def plot_baseline_data(
+     baseline_data: BaselineData,
+     output_dir: Path,
+     suffix: str = "",
+ ) -> None:
+     from astropy.visualization import quantity_support, time_support
+
+     with quantity_support(), time_support():
+         data_masked = baseline_data.masked_data
+         data_xx = data_masked[..., 0]
+         data_yy = data_masked[..., -1]
+         data_stokesi = (data_xx + data_yy) / 2
+         amp_stokesi = np.abs(data_stokesi)
+
+         fig, ax = plt.subplots()
+         im = ax.pcolormesh(
+             baseline_data.time,
+             baseline_data.freq_chan,
+             amp_stokesi.T,
+         )
+         fig.colorbar(im, ax=ax, label="Stokes I Amplitude / Jy")
+         ax.set(
+             ylabel=f"Frequency / {baseline_data.freq_chan.unit:latex_inline}",
+             title=f"Ant {baseline_data.ant_1} - Ant {baseline_data.ant_2}",
+         )
+         output_path = (
+             output_dir
+             / f"baseline_data_{baseline_data.ant_1}_{baseline_data.ant_2}{suffix}.png"
+         )
+         fig.savefig(output_path)
+
+
+ def plot_baseline_comparison_data(
+     before_baseline_data: BaselineData,
+     after_baseline_data: BaselineData,
+     before_delays: DelayTime,
+     after_delays: DelayTime,
+     output_dir: Path,
+     suffix: str = "",
+     w_delays: WDelays | None = None,
+ ) -> Path:
+     from astropy.visualization import (
+         ImageNormalize,
+         LogStretch,
+         MinMaxInterval,
+         SqrtStretch,
+         ZScaleInterval,
+         quantity_support,
+         time_support,
+     )
+
+     with quantity_support(), time_support():
+         before_amp_stokesi = np.abs(
+             (
+                 before_baseline_data.masked_data[..., 0]
+                 + before_baseline_data.masked_data[..., -1]
+             )
+             / 2
+         )
+         after_amp_stokesi = np.abs(
+             (
+                 after_baseline_data.masked_data[..., 0]
+                 + after_baseline_data.masked_data[..., -1]
+             )
+             / 2
+         )
+
+         norm = ImageNormalize(
+             after_amp_stokesi, interval=ZScaleInterval(), stretch=SqrtStretch()
+         )
+
+         fig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(
+             2, 2, figsize=(10, 10), sharex=True, sharey="row"
+         )
+         im = ax1.pcolormesh(
+             before_baseline_data.time,
+             before_baseline_data.freq_chan,
+             before_amp_stokesi.T,
+             norm=norm,
+         )
+         ax1.set(
+             ylabel=f"Frequency / {before_baseline_data.freq_chan.unit:latex_inline}",
+             title="Before",
+         )
+         ax2.pcolormesh(
+             after_baseline_data.time,
+             after_baseline_data.freq_chan,
+             after_amp_stokesi.T,
+             norm=norm,
+         )
+         ax2.set(
+             ylabel=f"Frequency / {after_baseline_data.freq_chan.unit:latex_inline}",
+             title="After",
+         )
+         fig.colorbar(im, ax=ax2, label="Stokes I Amplitude / Jy")
+
+         # TODO: Move these delay calculations outside of the plotting function
+         # And here we calculate the delay information
+
+         before_delays_i = np.abs(
+             (before_delays.delay_time[:, :, 0] + before_delays.delay_time[:, :, -1]) / 2
+         )
+         after_delays_i = np.abs(
+             (after_delays.delay_time[:, :, 0] + after_delays.delay_time[:, :, -1]) / 2
+         )
+
+         delay_norm = ImageNormalize(
+             before_delays_i, interval=MinMaxInterval(), stretch=LogStretch()
+         )
+
+         im = ax3.pcolormesh(
+             before_baseline_data.time,
+             before_delays.delay,
+             before_delays_i.T,
+             norm=delay_norm,
+         )
+         ax3.set(ylabel="Delay / s", title="Before")
+         ax4.pcolormesh(
+             after_baseline_data.time,
+             after_delays.delay,
+             after_delays_i.T,
+             norm=delay_norm,
+         )
+         ax4.set(ylabel="Delay / s", title="After")
+         fig.colorbar(im, ax=ax4, label="Stokes I Amplitude / Jy")
+
+         if w_delays is not None:
+             for ax, baseline_data in zip(  # type:ignore[call-overload]
+                 (ax3, ax4),
+                 (before_baseline_data, after_baseline_data),
+                 strict=True,
+             ):
+                 ant_1, ant_2 = baseline_data.ant_1, baseline_data.ant_2
+                 b_idx = w_delays.b_map[ant_1, ant_2]
+                 ax.plot(
+                     baseline_data.time,
+                     w_delays.w_delays[b_idx],
+                     color="k",
+                     linestyle="--",
+                     label=f"Delay for {w_delays.object_name}",
+                 )
+                 ax.legend()
+
+         output_path = (
+             output_dir
+             / f"baseline_data_{before_baseline_data.ant_1}_{before_baseline_data.ant_2}{suffix}.png"
+         )
+         fig.suptitle(
+             f"Ant {after_baseline_data.ant_1} - Ant {after_baseline_data.ant_2}"
+         )
+         fig.tight_layout()
+         fig.savefig(output_path)
+
+         return output_path
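
The plotting module forms a pseudo Stokes I amplitude from the first and last polarisation products and scales the waterfall panels with astropy's visualization normalisers. A minimal sketch of that normalisation pattern on synthetic data (shapes, values, and the output filename are illustrative assumptions, not from the package):

    import matplotlib.pyplot as plt
    import numpy as np
    from astropy.visualization import ImageNormalize, SqrtStretch, ZScaleInterval

    # Synthetic visibilities with shape (time, channel, polarisation)
    rng = np.random.default_rng(1)
    data = rng.normal(size=(32, 128, 4)) + 1j * rng.normal(size=(32, 128, 4))

    # Pseudo Stokes I amplitude from the XX and YY products
    amp_stokesi = np.abs((data[..., 0] + data[..., -1]) / 2)

    # ZScale interval with a square-root stretch, as used for the before/after panels
    norm = ImageNormalize(amp_stokesi, interval=ZScaleInterval(), stretch=SqrtStretch())

    fig, ax = plt.subplots()
    im = ax.pcolormesh(amp_stokesi.T, norm=norm)
    fig.colorbar(im, ax=ax, label="Stokes I Amplitude")
    fig.savefig("baseline_waterfall_example.png")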