jolly-roger 0.1.0__py3-none-any.whl → 0.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of jolly-roger might be problematic. Click here for more details.
- jolly_roger/_version.py +2 -2
- jolly_roger/delays.py +107 -0
- jolly_roger/hour_angles.py +7 -8
- jolly_roger/plots.py +178 -0
- jolly_roger/tractor.py +809 -0
- jolly_roger/uvws.py +55 -5
- {jolly_roger-0.1.0.dist-info → jolly_roger-0.3.0.dist-info}/METADATA +3 -1
- jolly_roger-0.3.0.dist-info/RECORD +17 -0
- {jolly_roger-0.1.0.dist-info → jolly_roger-0.3.0.dist-info}/entry_points.txt +1 -0
- jolly_roger-0.1.0.dist-info/RECORD +0 -14
- {jolly_roger-0.1.0.dist-info → jolly_roger-0.3.0.dist-info}/WHEEL +0 -0
- {jolly_roger-0.1.0.dist-info → jolly_roger-0.3.0.dist-info}/licenses/LICENSE +0 -0
jolly_roger/_version.py
CHANGED
jolly_roger/delays.py
ADDED
|
@@ -0,0 +1,107 @@
|
|
|
1
|
+
"""Utilities and structures around the delay calculations"""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from dataclasses import dataclass
|
|
6
|
+
from typing import TYPE_CHECKING
|
|
7
|
+
|
|
8
|
+
import astropy.units as u
|
|
9
|
+
import numpy as np
|
|
10
|
+
from numpy.typing import NDArray
|
|
11
|
+
|
|
12
|
+
from jolly_roger.logging import logger
|
|
13
|
+
|
|
14
|
+
if TYPE_CHECKING:
|
|
15
|
+
# avoid circular imports
|
|
16
|
+
from jolly_roger.tractor import BaselineData, DataChunk
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
@dataclass
class DelayTime:
    """Container for delay time and associated metadata.

    Produced by :func:`data_to_delay_time`, which FFTs visibilities along
    the frequency axis and shifts the zero-delay component to the centre.
    """

    delay_time: NDArray[np.complexfloating]
    """ The delay vs time data. shape=(time, delay, pol)"""
    delay: u.Quantity
    """The delay values corresponding to the delay time data."""
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def data_to_delay_time(data: BaselineData | DataChunk) -> DelayTime:
    """Fourier transform freq-time visibilities into delay-time space.

    Flagged samples are filled with complex zero before the FFT, which is
    taken along the frequency axis (axis 1). The matching delay axis is
    built from the mean channel spacing, and both the transformed data and
    the delay axis are fftshift-ed so zero delay sits at the centre.

    Args:
        data (BaselineData | DataChunk): Visibilities with ``masked_data``
            of shape (time, chan, pol) and a ``freq_chan`` axis.

    Returns:
        DelayTime: The delay-vs-time data and its delay axis.
    """
    logger.debug("Converting freq-time to delay-time")

    filled = data.masked_data.filled(0 + 0j)
    transformed = np.fft.fftshift(np.fft.fft(filled, axis=1), axes=1)

    channel_spacing = np.diff(data.freq_chan).mean()
    delay_axis = np.fft.fftfreq(n=len(data.freq_chan), d=channel_spacing).decompose()
    delay_axis = np.fft.fftshift(delay_axis)

    return DelayTime(delay_time=transformed, delay=delay_axis)
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def delay_time_to_data(
    delay_time: DelayTime,
    original_data: DataChunk,
) -> DataChunk:
    """Convert delay time data back to the original data format.

    Inverts :func:`data_to_delay_time` by undoing the fftshift and the FFT
    along the delay axis (axis 1), then re-applies the original flag mask.

    NOTE: ``original_data`` is modified in place — its ``masked_data`` is
    replaced with the inverse-transformed values — and the same object is
    returned.

    Args:
        delay_time (DelayTime): The delay-vs-time data to invert.
        original_data (DataChunk): The chunk whose mask (and metadata) are
            reused; mutated in place.

    Returns:
        DataChunk: ``original_data`` with its ``masked_data`` replaced.
    """
    logger.debug("Converting delay-time to freq-time")
    new_data = np.fft.ifft(
        np.fft.ifftshift(delay_time.delay_time, axes=1),
        axis=1,
    )
    # Restore the original flagging over the reconstructed visibilities
    new_data_masked = np.ma.masked_array(
        new_data,
        mask=original_data.masked_data.mask,
    )
    new_data = original_data
    new_data.masked_data = new_data_masked
    return new_data
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
@dataclass
class DelayRate:
    """Container for delay rate and associated metadata.

    Produced by :func:`data_to_delay_rate` via a 2D FFT over the time and
    frequency axes of a single baseline's visibilities.
    """

    delay_rate: np.ndarray
    """The delay rate vs time data. shape=(rate, delay, pol)"""
    delay: u.Quantity
    """The delay values corresponding to the delay rate data."""
    rate: u.Quantity
    """The delay rate values corresponding to the delay rate data."""
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
def data_to_delay_rate(
    baseline_data: BaselineData,
) -> DelayRate:
    """Convert baseline data to delay rate.

    Performs a 2D FFT over the (time, chan) axes of the visibilities,
    producing a (rate, delay, pol) cube. The ``rate`` axis is derived
    from the time sampling and the ``delay`` axis from the channel
    spacing, matching the FFT axes.

    Args:
        baseline_data (BaselineData): A single baseline's visibilities
            with ``masked_data`` of shape (time, chan, pol).

    Returns:
        DelayRate: The delay-rate cube with its delay and rate axes.
    """
    # This only makes sense when running on time data. Hence
    # asserting the type of BaselineData

    assert isinstance(baseline_data, BaselineData), (
        f"baseline_data is type={type(baseline_data)}, but needs to be BaselineData"
    )

    logger.info("Converting freq-time to delay-rate")
    # FFT over (time, chan) only — the default fft2 axes of (-2, -1) would
    # transform the (chan, pol) axes, which does not match the rate/delay
    # axes computed below. Restrict fftshift to the same axes so the pol
    # axis is left untouched.
    delay_rate = np.fft.fftshift(
        np.fft.fft2(baseline_data.masked_data.filled(0 + 0j), axes=(0, 1)),
        axes=(0, 1),
    )
    delay = np.fft.fftshift(
        np.fft.fftfreq(
            n=len(baseline_data.freq_chan),
            d=np.diff(baseline_data.freq_chan).mean(),
        ).decompose()
    )
    rate = np.fft.fftshift(
        np.fft.fftfreq(
            n=len(baseline_data.time),
            d=np.diff(baseline_data.time.mjd * u.day).mean(),
        ).decompose()
    )

    return DelayRate(
        delay_rate=delay_rate,
        delay=delay,
        rate=rate,
    )
|
jolly_roger/hour_angles.py
CHANGED
|
@@ -4,7 +4,6 @@ from __future__ import annotations
|
|
|
4
4
|
|
|
5
5
|
from dataclasses import dataclass
|
|
6
6
|
from pathlib import Path
|
|
7
|
-
from typing import Literal
|
|
8
7
|
|
|
9
8
|
import astropy.units as u
|
|
10
9
|
import numpy as np
|
|
@@ -26,13 +25,13 @@ class PositionHourAngles:
|
|
|
26
25
|
|
|
27
26
|
hour_angle: u.rad
|
|
28
27
|
"""The hour angle across sampled time intervales of a source for a Earth location"""
|
|
29
|
-
time_mjds:
|
|
28
|
+
time_mjds: u.Quantity
|
|
30
29
|
"""The MJD time in seconds from which other quantities are evalauted against. Should be drawn from a measurement set."""
|
|
31
30
|
location: EarthLocation
|
|
32
31
|
"""The location these quantities have been derived from."""
|
|
33
32
|
position: SkyCoord
|
|
34
33
|
"""The sky-position that is being used to calculate quantities towards"""
|
|
35
|
-
elevation:
|
|
34
|
+
elevation: u.Quantity
|
|
36
35
|
"""The elevation of the ``position` direction across time"""
|
|
37
36
|
time: Time
|
|
38
37
|
"""Representation of the `time_mjds` attribute"""
|
|
@@ -43,7 +42,7 @@ class PositionHourAngles:
|
|
|
43
42
|
|
|
44
43
|
|
|
45
44
|
def _process_position(
|
|
46
|
-
position: SkyCoord |
|
|
45
|
+
position: SkyCoord | str | None = None,
|
|
47
46
|
ms_path: Path | None = None,
|
|
48
47
|
times: Time | None = None,
|
|
49
48
|
) -> SkyCoord:
|
|
@@ -54,7 +53,7 @@ def _process_position(
|
|
|
54
53
|
set
|
|
55
54
|
|
|
56
55
|
Args:
|
|
57
|
-
position (SkyCoord |
|
|
56
|
+
position (SkyCoord | str | None, optional): The position to be considered. Defaults to None.
|
|
58
57
|
ms_path (Path | None, optional): The path with the PHASE_DIR to use should `position` be None. Defaults to None.
|
|
59
58
|
times (Time | None, optional): Times to used if they are required in the lookup. Defaults to None.
|
|
60
59
|
|
|
@@ -71,8 +70,8 @@ def _process_position(
|
|
|
71
70
|
if times is None:
|
|
72
71
|
msg = f"{times=}, but needs to be set when position is a name"
|
|
73
72
|
raise ValueError(msg)
|
|
74
|
-
if position == "sun":
|
|
75
|
-
logger.info("Getting sky-position of the
|
|
73
|
+
if position.lower() == "sun":
|
|
74
|
+
logger.info("Getting sky-position of the Sun")
|
|
76
75
|
position = get_sun(times)
|
|
77
76
|
else:
|
|
78
77
|
logger.info(f"Getting sky-position of {position=}")
|
|
@@ -142,7 +141,7 @@ def make_hour_angles_for_ms(
|
|
|
142
141
|
lst = times.sidereal_time("apparent", longitude=location.lon)
|
|
143
142
|
hour_angle = (lst - sky_position.ra).wrap_at(12 * u.hourangle)
|
|
144
143
|
|
|
145
|
-
logger.info("
|
|
144
|
+
logger.info("Creating elevation curve")
|
|
146
145
|
altaz = sky_position.transform_to(AltAz(obstime=times, location=location))
|
|
147
146
|
|
|
148
147
|
return PositionHourAngles(
|
jolly_roger/plots.py
ADDED
|
@@ -0,0 +1,178 @@
|
|
|
1
|
+
"""Routines around plotting"""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import TYPE_CHECKING
|
|
7
|
+
|
|
8
|
+
import matplotlib.pyplot as plt
|
|
9
|
+
import numpy as np
|
|
10
|
+
|
|
11
|
+
from jolly_roger.uvws import WDelays
|
|
12
|
+
|
|
13
|
+
if TYPE_CHECKING:
|
|
14
|
+
from jolly_roger.delays import DelayTime
|
|
15
|
+
from jolly_roger.tractor import BaselineData
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def plot_baseline_data(
    baseline_data: BaselineData,
    output_dir: Path,
    suffix: str = "",
) -> None:
    """Plot the Stokes I amplitude waterfall (time vs frequency) of a baseline.

    Stokes I is approximated as the mean of the first and last polarisation
    products. The figure is written to
    ``output_dir / baseline_data_{ant_1}_{ant_2}{suffix}.png``.

    Args:
        baseline_data (BaselineData): The baseline's visibilities and metadata.
        output_dir (Path): Directory the plot is written to.
        suffix (str, optional): Suffix appended to the output filename. Defaults to "".
    """
    from astropy.visualization import quantity_support, time_support

    with quantity_support(), time_support():
        data_masked = baseline_data.masked_data
        data_xx = data_masked[..., 0]
        data_yy = data_masked[..., -1]
        data_stokesi = (data_xx + data_yy) / 2
        amp_stokesi = np.abs(data_stokesi)

        fig, ax = plt.subplots()
        im = ax.pcolormesh(
            baseline_data.time,
            baseline_data.freq_chan,
            amp_stokesi.T,
        )
        fig.colorbar(im, ax=ax, label="Stokes I Amplitude / Jy")
        ax.set(
            ylabel=f"Frequency / {baseline_data.freq_chan.unit:latex_inline}",
            title=f"Ant {baseline_data.ant_1} - Ant {baseline_data.ant_2}",
        )
        output_path = (
            output_dir
            / f"baseline_data_{baseline_data.ant_1}_{baseline_data.ant_2}{suffix}.png"
        )
        fig.savefig(output_path)

    # Close the figure so repeated calls do not accumulate open figures
    plt.close(fig)
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
def plot_baseline_comparison_data(
    before_baseline_data: BaselineData,
    after_baseline_data: BaselineData,
    before_delays: DelayTime,
    after_delays: DelayTime,
    output_dir: Path,
    suffix: str = "",
    w_delays: WDelays | None = None,
) -> Path:
    """Plot a 2x2 before/after comparison for a single baseline.

    The top row shows the Stokes I amplitude waterfalls (time vs frequency),
    the bottom row the delay-vs-time spectra. Both "before" panels and both
    "after" panels share the normalisation of their row. If ``w_delays`` is
    supplied, the expected delay track towards its object is overplotted on
    the delay panels.

    Args:
        before_baseline_data (BaselineData): Visibilities before modification.
        after_baseline_data (BaselineData): Visibilities after modification.
        before_delays (DelayTime): Delay spectra of the before data.
        after_delays (DelayTime): Delay spectra of the after data.
        output_dir (Path): Directory the plot is written to.
        suffix (str, optional): Suffix appended to the output filename. Defaults to "".
        w_delays (WDelays | None, optional): Per-baseline delay tracks to overlay. Defaults to None.

    Returns:
        Path: The path of the saved figure.
    """
    from astropy.visualization import (
        ImageNormalize,
        LogStretch,
        MinMaxInterval,
        SqrtStretch,
        ZScaleInterval,
        quantity_support,
        time_support,
    )

    with quantity_support(), time_support():
        # Stokes I approximated as the mean of first and last pol products
        before_amp_stokesi = np.abs(
            (
                before_baseline_data.masked_data[..., 0]
                + before_baseline_data.masked_data[..., -1]
            )
            / 2
        )
        after_amp_stokesi = np.abs(
            (
                after_baseline_data.masked_data[..., 0]
                + after_baseline_data.masked_data[..., -1]
            )
            / 2
        )

        # Normalise both panels against the "after" data for a fair comparison
        norm = ImageNormalize(
            after_amp_stokesi, interval=ZScaleInterval(), stretch=SqrtStretch()
        )
        cmap = plt.cm.viridis

        fig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(
            2, 2, figsize=(12, 10), sharex=True, sharey="row"
        )
        im = ax1.pcolormesh(
            before_baseline_data.time,
            before_baseline_data.freq_chan,
            before_amp_stokesi.T,
            norm=norm,
            cmap=cmap,
        )
        ax1.set(
            ylabel=f"Frequency / {before_baseline_data.freq_chan.unit:latex_inline}",
            title="Before",
        )
        ax2.pcolormesh(
            after_baseline_data.time,
            after_baseline_data.freq_chan,
            after_amp_stokesi.T,
            norm=norm,
            cmap=cmap,
        )
        ax2.set(
            ylabel=f"Frequency / {after_baseline_data.freq_chan.unit:latex_inline}",
            title="After",
        )
        for ax in (ax1, ax2):
            fig.colorbar(im, ax=ax, label="Stokes I Amplitude / Jy")

        # TODO: Move these delay calculations outside of the plotting function
        # And here we calculate the delay information

        before_delays_i = np.abs(
            (before_delays.delay_time[:, :, 0] + before_delays.delay_time[:, :, -1]) / 2
        )
        after_delays_i = np.abs(
            (after_delays.delay_time[:, :, 0] + after_delays.delay_time[:, :, -1]) / 2
        )

        delay_norm = ImageNormalize(
            before_delays_i, interval=MinMaxInterval(), stretch=LogStretch()
        )

        im = ax3.pcolormesh(
            before_baseline_data.time,
            before_delays.delay,
            before_delays_i.T,
            norm=delay_norm,
            cmap=cmap,
        )
        ax3.set(ylabel="Delay / s", title="Before")
        ax4.pcolormesh(
            after_baseline_data.time,
            after_delays.delay,
            after_delays_i.T,
            norm=delay_norm,
            cmap=cmap,
        )
        ax4.set(ylabel="Delay / s", title="After")
        for ax in (ax3, ax4):
            fig.colorbar(im, ax=ax, label="Stokes I Amplitude / Jy")

        if w_delays is not None:
            for ax, baseline_data in zip(  # type:ignore[call-overload]
                (ax3, ax4),
                (before_baseline_data, after_baseline_data),
                strict=True,
            ):
                ant_1, ant_2 = baseline_data.ant_1, baseline_data.ant_2
                b_idx = w_delays.b_map[ant_1, ant_2]
                ax.plot(
                    baseline_data.time,
                    w_delays.w_delays[b_idx],
                    color="tab:red",
                    linestyle="-",
                    label=f"Delay for {w_delays.object_name}",
                )
                ax.legend()

        output_path = (
            output_dir
            / f"baseline_data_{before_baseline_data.ant_1}_{before_baseline_data.ant_2}{suffix}.png"
        )
        fig.suptitle(
            f"Ant {after_baseline_data.ant_1} - Ant {after_baseline_data.ant_2}"
        )
        fig.tight_layout()
        fig.savefig(output_path)

    # Close the figure so looping over many baselines does not leak figures
    plt.close(fig)

    return output_path
|
jolly_roger/tractor.py
ADDED
|
@@ -0,0 +1,809 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from argparse import ArgumentParser
|
|
4
|
+
from collections.abc import Generator
|
|
5
|
+
from dataclasses import dataclass
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
from typing import Any
|
|
8
|
+
|
|
9
|
+
import astropy.units as u
|
|
10
|
+
import numpy as np
|
|
11
|
+
from astropy.coordinates import (
|
|
12
|
+
SkyCoord,
|
|
13
|
+
)
|
|
14
|
+
from astropy.time import Time
|
|
15
|
+
from casacore.tables import makecoldesc, table, taql
|
|
16
|
+
from numpy.typing import NDArray
|
|
17
|
+
from tqdm.auto import tqdm
|
|
18
|
+
|
|
19
|
+
from jolly_roger.delays import data_to_delay_time, delay_time_to_data
|
|
20
|
+
from jolly_roger.logging import logger
|
|
21
|
+
from jolly_roger.plots import plot_baseline_comparison_data
|
|
22
|
+
from jolly_roger.uvws import WDelays, get_object_delay_for_ms
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
@dataclass(frozen=True)
class OpenMSTables:
    """Open MS table references.

    Holds live casacore table handles opened by :func:`get_open_ms_tables`;
    the handles remain open for the lifetime of this object.
    """

    main_table: table
    """The main MS table"""
    spw_table: table
    """The spectral window table"""
    field_table: table
    """The field table"""
    ms_path: Path
    """The path to the MS used to open tables"""
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def get_open_ms_tables(ms_path: Path, read_only: bool = True) -> OpenMSTables:
    """Open up the set of MS table and sub-tables necessary for tractoring.

    Args:
        ms_path (Path): The path to the measurement set
        read_only (bool, optional): Whether to open in a read-only mode. Defaults to True.

    Returns:
        OpenMSTables: Set of open table references
    """

    def _open(path: Path) -> table:
        # All tables are opened quietly with the same access mode
        return table(str(path), ack=False, readonly=read_only)

    # TODO: Get the data without auto-correlations e.g.
    # no_auto_main_table = taql(
    #     "select from $main_table where ANTENNA1 != ANTENNA2",
    # )

    return OpenMSTables(
        main_table=_open(ms_path),
        spw_table=_open(ms_path / "SPECTRAL_WINDOW"),
        field_table=_open(ms_path / "FIELD"),
        ms_path=ms_path,
    )
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
def tukey_taper(
    x: np.typing.NDArray[np.floating],
    outer_width: float = np.pi / 4,
    tukey_width: float = np.pi / 8,
    tukey_x_offset: NDArray[np.floating] | None = None,
) -> np.ndarray:
    """Construct a Tukey (tapered cosine) window over a [-pi, pi] axis.

    The window is 1 inside ``|x| < outer_width - tukey_width``, falls to 0
    through a raised-cosine transition of width ``tukey_width``, and is 0
    outside ``|x| >= outer_width``.

    Args:
        x (NDArray[np.floating]): Array whose length sets the number of samples.
        outer_width (float, optional): Half-width where the taper reaches zero. Defaults to pi/4.
        tukey_width (float, optional): Width of the cosine transition. Defaults to pi/8.
        tukey_x_offset (NDArray[np.floating] | None, optional): Per-column centre
            offsets; when given the result has shape (len(x), len(tukey_x_offset)).
            Defaults to None.

    Returns:
        np.ndarray: The taper values.
    """
    x_freq = np.linspace(-np.pi, np.pi, len(x))

    if tukey_x_offset is not None:
        # Broadcast to one shifted taper per requested offset
        x_freq = x_freq[:, None] - tukey_x_offset[None, :]

    taper = np.ones_like(x_freq)
    logger.debug(f"{x_freq.shape=} {type(x_freq)=}")
    # Fully zero region. Use >= so a sample landing exactly on the boundary
    # is zeroed — the cosine transition below limits to 0 there, and with a
    # strict > such a sample would keep the value 1 (a discontinuity).
    taper[np.abs(x_freq) >= outer_width] = 0

    # Transition regions
    left_idx = (-outer_width < x_freq) & (x_freq < -outer_width + tukey_width)
    right_idx = (outer_width - tukey_width < x_freq) & (x_freq < outer_width)

    taper[left_idx] = (
        1 - np.cos(np.pi * (x_freq[left_idx] + outer_width) / tukey_width)
    ) / 2

    taper[right_idx] = (
        1 - np.cos(np.pi * (outer_width - x_freq[right_idx]) / tukey_width)
    ) / 2

    return taper
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
@dataclass
class BaselineData:
    """Container for baseline data and associated metadata.

    Holds the visibilities of a single (ant_1, ant_2) baseline extracted
    from a measurement set, with astropy units/objects attached.
    """

    masked_data: np.ma.MaskedArray
    """The baseline data, masked where flags are set. shape=(time, chan, pol)"""
    freq_chan: u.Quantity
    """The frequency channels corresponding to the data."""
    phase_center: SkyCoord
    """The target sky coordinate for the baseline."""
    uvws_phase_center: u.Quantity
    """The UVW coordinates of the phase center of the baseline."""
    time: Time
    """The time of the observations."""
    ant_1: int
    """The first antenna in the baseline."""
    ant_2: int
    """The second antenna in the baseline."""
|
|
115
|
+
|
|
116
|
+
|
|
117
|
+
@dataclass
class BaselineArrays:
    """Raw per-baseline column arrays as read from a measurement set.

    Unlike BaselineData, these carry no astropy units or masking — they
    are the bare numpy arrays returned by ``getcol``.
    """

    data: NDArray[np.complexfloating]  # visibilities from the selected data column
    flags: NDArray[np.bool_]  # FLAG column matching the data shape
    uvws: NDArray[np.floating]  # UVW column values
    time_centroid: NDArray[np.floating]  # TIME_CENTROID column values
|
|
123
|
+
|
|
124
|
+
|
|
125
|
+
@dataclass
class DataChunkArray:
    """Container for a chunk of data.

    Raw arrays for a contiguous run of measurement set rows, as read
    by :func:`_get_data_chunk_from_main_table` — no units attached.
    """

    data: NDArray[np.complexfloating]
    """The data from the nominated data column loaded"""
    flags: NDArray[np.bool_]
    """Flags that correspond to the loaded data"""
    uvws: NDArray[np.floating]
    """The uvw coordinates for each loaded data record"""
    time_centroid: NDArray[np.floating]
    """The time of each data record"""
    ant_1: NDArray[np.int64]
    """Antenna 1 that formed the baseline"""
    ant_2: NDArray[np.int64]
    """Antenna 2 that formed the baseline"""
    row_start: int
    """The starting row of the portion of data loaded"""
    chunk_size: int
    """The nominated chunk size requested (NOTE(review): the final chunk likely holds fewer rows than this — confirm getcol behaviour at table end)"""
|
|
145
|
+
|
|
146
|
+
|
|
147
|
+
@dataclass
class DataChunk:
    """Container for a collection of data and associated metadata.

    Here data are drawn from a series of rows, with astropy units and
    masking applied (see :func:`get_data_chunks`).
    """

    masked_data: np.ma.MaskedArray
    """The baseline data, masked where flags are set. shape=(time, chan, pol)"""
    freq_chan: u.Quantity
    """The frequency channels corresponding to the data."""
    phase_center: SkyCoord
    """The target sky coordinate for the baseline."""
    uvws_phase_center: u.Quantity
    """The UVW coordinates of the phase center of the baseline."""
    time: Time
    """The time of the observations."""
    time_mjds: NDArray[np.floating]
    """The raw time extracted from the measurement set in MJDs"""
    ant_1: NDArray[np.int64]
    """The first antenna in the baseline."""
    ant_2: NDArray[np.int64]
    """The second antenna in the baseline."""
    row_start: int
    """Starting row index of the data"""
    chunk_size: int
    """Size of the chunked portion of the data"""
|
|
173
|
+
|
|
174
|
+
|
|
175
|
+
def _list_to_array(
|
|
176
|
+
list_of_rows: list[dict[str, Any]], key: str
|
|
177
|
+
) -> np.typing.NDArray[Any]:
|
|
178
|
+
"""Helper to make a simple numpy object from list of items"""
|
|
179
|
+
return np.array([row[key] for row in list_of_rows])
|
|
180
|
+
|
|
181
|
+
|
|
182
|
+
def _get_data_chunk_from_main_table(
    ms_table: table,
    chunk_size: int,
    data_column: str,
) -> Generator[DataChunkArray, None, None]:
    """Return an appropriately sized data chunk from the main
    table of a measurement set. These data are as they are
    in the measurement set without any additional scaling
    or unit adjustments.

    Args:
        ms_table (table): The opened main table of a measurement set
        chunk_size (int): The size of the data to chunk and return
        data_column (str): The data column to be returned

    Yields:
        Generator[DataChunkArray, None, None]: A segment of rows and columns
    """

    table_length = len(ms_table)
    logger.debug(f"Length of open table: {table_length} rows")

    lower_row = 0

    # Walk the table in strides of chunk_size. The final stride may extend
    # past the end of the table; NOTE(review): assumes getcol clips the read
    # to the remaining rows — confirm casacore behaviour.
    while lower_row < table_length:
        data = ms_table.getcol(data_column, startrow=lower_row, nrow=chunk_size)
        flags = ms_table.getcol("FLAG", startrow=lower_row, nrow=chunk_size)
        uvws = ms_table.getcol("UVW", startrow=lower_row, nrow=chunk_size)
        time_centroid = ms_table.getcol(
            "TIME_CENTROID", startrow=lower_row, nrow=chunk_size
        )
        ant_1 = ms_table.getcol("ANTENNA1", startrow=lower_row, nrow=chunk_size)
        ant_2 = ms_table.getcol("ANTENNA2", startrow=lower_row, nrow=chunk_size)

        yield DataChunkArray(
            data=data,
            flags=flags,
            uvws=uvws,
            time_centroid=time_centroid,
            ant_1=ant_1,
            ant_2=ant_2,
            row_start=lower_row,
            chunk_size=chunk_size,
        )

        lower_row += chunk_size
|
|
228
|
+
|
|
229
|
+
|
|
230
|
+
def get_data_chunks(
    open_ms_tables: OpenMSTables,
    chunk_size: int,
    data_column: str,
) -> Generator[DataChunk, None, None]:
    """Yield a collection of rows with appropriate units
    attached to the quantities. These quantities are not
    the same data encoded in the measurement set, e.g.
    masked array has been formed, astropy units have
    been attached.

    Args:
        open_ms_tables (OpenMSTables): References to open tables from the measurement set
        chunk_size (int): The number of rows to return at a time
        data_column (str): The data column that would be modified

    Yields:
        Generator[DataChunk, None, None]: Representation of the current chunk of rows
    """
    # Frequency axis and phase direction are constant across chunks, so
    # read them once up front
    freq_chan = open_ms_tables.spw_table.getcol("CHAN_FREQ")
    phase_dir = open_ms_tables.field_table.getcol("PHASE_DIR")

    freq_chan = freq_chan.squeeze() * u.Hz
    target = SkyCoord(*(phase_dir * u.rad).squeeze())

    for data_chunk_array in _get_data_chunk_from_main_table(
        ms_table=open_ms_tables.main_table,
        chunk_size=chunk_size,
        data_column=data_column,
    ):
        # Transform the native arrays but attach astropy quantities
        uvws_phase_center = data_chunk_array.uvws * u.m
        time = Time(
            data_chunk_array.time_centroid.squeeze() * u.s,
            format="mjd",
            scale="utc",
        )
        masked_data = np.ma.masked_array(
            data_chunk_array.data, mask=data_chunk_array.flags
        )

        yield DataChunk(
            masked_data=masked_data,
            freq_chan=freq_chan,
            phase_center=target,
            uvws_phase_center=uvws_phase_center,
            time=time,
            time_mjds=data_chunk_array.time_centroid,
            ant_1=data_chunk_array.ant_1,
            ant_2=data_chunk_array.ant_2,
            row_start=data_chunk_array.row_start,
            chunk_size=data_chunk_array.chunk_size,
        )
|
|
283
|
+
|
|
284
|
+
|
|
285
|
+
def _get_baseline_data(
    ms_tab: table,
    ant_1: int,
    ant_2: int,
    data_column: str = "DATA",
) -> BaselineArrays:
    """Read the raw column arrays for a single baseline via a taql selection.

    The taql query below references the local names ms_tab, ant_1 and
    ant_2 through `$`-substitution; the throwaway assignment keeps the
    names "used" for linters.
    """
    _ = ms_tab, ant_1, ant_2
    with taql(
        "select from $ms_tab where ANTENNA1 == $ant_1 and ANTENNA2 == $ant_2",
    ) as subtab:
        logger.info(f"Opening subtable for baseline {ant_1} {ant_2}")
        data = subtab.getcol(data_column)
        flags = subtab.getcol("FLAG")
        uvws = subtab.getcol("UVW")
        time_centroid = subtab.getcol("TIME_CENTROID")

    return BaselineArrays(
        data=data,
        flags=flags,
        uvws=uvws,
        time_centroid=time_centroid,
    )
|
|
307
|
+
|
|
308
|
+
|
|
309
|
+
def get_baseline_data(
    open_ms_tables: OpenMSTables,
    ant_1: int,
    ant_2: int,
    data_column: str = "DATA",
) -> BaselineData:
    """Get data of a baseline from a measurement set

    Args:
        open_ms_tables (OpenMSTables): The measurement set to draw data from
        ant_1 (int): The first antenna of the baseline
        ant_2 (int): The second antenna of the baseline
        data_column (str, optional): The data column to extract. Defaults to "DATA".

    Returns:
        BaselineData: Extracted baseline data
    """
    logger.info(f"Getting baseline {ant_1} {ant_2}")

    freq_chan = open_ms_tables.spw_table.getcol("CHAN_FREQ")
    phase_dir = open_ms_tables.field_table.getcol("PHASE_DIR")

    logger.debug(f"Processing {ant_1=} {ant_2=}")

    baseline_data = _get_baseline_data(
        ms_tab=open_ms_tables.main_table,
        ant_1=ant_1,
        ant_2=ant_2,
        data_column=data_column,
    )

    # Attach astropy units / objects to the raw arrays
    freq_chan = freq_chan.squeeze() * u.Hz
    target = SkyCoord(*(phase_dir * u.rad).squeeze())
    uvws_phase_center = np.swapaxes(baseline_data.uvws * u.m, 0, 1)
    time = Time(
        baseline_data.time_centroid.squeeze() * u.s,
        format="mjd",
        scale="utc",
    )
    masked_data = np.ma.masked_array(baseline_data.data, mask=baseline_data.flags)

    logger.info(f"Got data for baseline {ant_1} {ant_2} with shape {masked_data.shape}")
    return BaselineData(
        masked_data=masked_data,
        freq_chan=freq_chan,
        phase_center=target,
        uvws_phase_center=uvws_phase_center,
        time=time,
        ant_1=ant_1,
        ant_2=ant_2,
    )
|
|
360
|
+
|
|
361
|
+
|
|
362
|
+
def add_output_column(
    tab: table,
    data_column: str = "DATA",
    output_column: str = "CORRECTED_DATA",
    overwrite: bool = False,
    copy_column_data: bool = False,
) -> None:
    """Add in the output data column where the modified data
    will be recorded

    Args:
        tab (table): Open reference to the table to modify
        data_column (str, optional): The base data column the new will be based from. Defaults to "DATA".
        output_column (str, optional): The new data column to be created. Defaults to "CORRECTED_DATA".
        overwrite (bool, optional): Whether to overwrite the new output column. Defaults to False.
        copy_column_data (bool, optional): Copy the original data over to the output column. Defaults to False.

    Raises:
        ValueError: Raised if the output column already exists and overwrite is False
    """
    colnames = tab.colnames()
    if output_column in colnames:
        if not overwrite:
            msg = f"Output column {output_column} already exists in the measurement set. Not overwriting."
            raise ValueError(msg)

        logger.warning(
            f"Output column {output_column} already exists in the measurement set. Will be overwritten!"
        )
    else:
        logger.info(f"Adding {output_column=}")
        # Clone the column description of the source column so the new
        # column has the same shape/type/storage properties
        desc = makecoldesc(data_column, tab.getcoldesc(data_column))
        desc["name"] = output_column
        tab.addcols(desc)
        tab.flush()

    if copy_column_data:
        logger.info(f"Copying {data_column=} to {output_column=}")
        # NOTE: column names are interpolated into the taql string; they are
        # caller-supplied identifiers, not untrusted external input
        taql(f"UPDATE $tab SET {output_column}={data_column}")
|
|
401
|
+
|
|
402
|
+
|
|
403
|
+
def write_output_column(
    ms_path: Path,
    output_column: str,
    baseline_data: BaselineData,
    update_flags: bool = False,
) -> None:
    """Write the output column to the measurement set.

    The rows of the (ant_1, ant_2) baseline are selected and overwritten
    with the (zero-filled) data carried by ``baseline_data``. When
    ``update_flags`` is set the FLAG column is also rewritten from the
    masked data's current mask.

    Raises:
        ValueError: If ``output_column`` does not exist in the measurement set.
    """
    ant_1 = baseline_data.ant_1
    ant_2 = baseline_data.ant_2
    # ant_1/ant_2 are referenced via $-substitution in the taql query below
    _ = ant_1, ant_2
    logger.info(f"Writing {output_column=} for baseline {ant_1} {ant_2}")
    with table(str(ms_path), readonly=False) as tab:
        colnames = tab.colnames()
        if output_column not in colnames:
            msg = f"Output column {output_column} does not exist in the measurement set. Cannot write data."
            raise ValueError(msg)

        with taql(
            "select from $tab where ANTENNA1 == $ant_1 and ANTENNA2 == $ant_2",
        ) as subtab:
            logger.info(f"Writing {output_column=}")
            subtab.putcol(output_column, baseline_data.masked_data.filled(0 + 0j))
            if update_flags:
                # Sync the FLAG column with the mask carried on the data
                # (the mask may have been modified by the processing)
                subtab.putcol("FLAG", baseline_data.masked_data.mask)
            subtab.flush()
|
|
430
|
+
|
|
431
|
+
|
|
432
|
+
def make_plot_results(
    open_ms_tables: OpenMSTables,
    data_column: str,
    output_column: str,
    w_delays: WDelays | None = None,
    num_baselines: int = 10,
    reference_antenna: int = 0,
) -> list[Path]:
    """Create before/after comparison plots for a set of baselines.

    Baselines are formed between ``reference_antenna`` and the next
    ``num_baselines`` antennas. Plots are written to a ``plots`` directory
    alongside the measurement set.

    Args:
        open_ms_tables (OpenMSTables): Open references into the measurement set.
        data_column (str): The column holding the original ("before") data.
        output_column (str): The column holding the modified ("after") data.
        w_delays (WDelays | None, optional): Delay tracks to overlay. Defaults to None.
        num_baselines (int, optional): How many baselines to plot. Defaults to 10.
        reference_antenna (int, optional): The fixed first antenna of each
            plotted baseline. Defaults to 0.

    Returns:
        list[Path]: Paths of the saved comparison plots.
    """
    output_paths = []
    output_dir = open_ms_tables.ms_path.parent / "plots"
    output_dir.mkdir(exist_ok=True, parents=True)
    for i in range(num_baselines):
        ant_2 = reference_antenna + i + 1
        logger.info(f"Plotting baseline={ant_2}")
        before_baseline_data = get_baseline_data(
            open_ms_tables=open_ms_tables,
            ant_1=reference_antenna,
            ant_2=ant_2,
            data_column=data_column,
        )
        after_baseline_data = get_baseline_data(
            open_ms_tables=open_ms_tables,
            ant_1=reference_antenna,
            ant_2=ant_2,
            data_column=output_column,
        )
        before_delays = data_to_delay_time(data=before_baseline_data)
        after_delays = data_to_delay_time(data=after_baseline_data)

        # TODO: the baseline data and delay times could be put into a single
        # structure to pass around easier.
        plot_path = plot_baseline_comparison_data(
            before_baseline_data=before_baseline_data,
            after_baseline_data=after_baseline_data,
            before_delays=before_delays,
            after_delays=after_delays,
            output_dir=output_dir,
            suffix="_comparison",
            w_delays=w_delays,
        )
        output_paths.append(plot_path)

    return output_paths
|
|
472
|
+
|
|
473
|
+
|
|
474
|
+
def _get_baseline_time_indicies(
    w_delays: WDelays, data_chunk: DataChunk
) -> tuple[NDArray[np.int_], NDArray[np.int_]]:
    """Extract the mappings into the data array"""

    # When computing uvws we have ignored auto-correlations!
    # TODO: Either extend the uvw calculations to include auto-correlations
    # or ignore them during iterations. Certainly the former is the better
    # approach.

    # Auto-correlations have no entry in the baseline map, so they are
    # arbitrarily mapped to index 0. Here be pirates mate.
    b_map = w_delays.b_map
    baseline_indices = []
    for ant_1, ant_2 in zip(  # type: ignore[call-overload]
        data_chunk.ant_1, data_chunk.ant_2, strict=False
    ):
        if ant_1 == ant_2:
            baseline_indices.append(0)
        else:
            baseline_indices.append(b_map[(int(ant_1), int(ant_2))])

    time_map = w_delays.time_map
    time_indices = [time_map[time * u.s] for time in data_chunk.time_mjds]

    return np.array(baseline_indices), np.array(time_indices)
|
|
499
|
+
|
|
500
|
+
|
|
501
|
+
def _tukey_tractor(
    data_chunk: DataChunk,
    tukey_tractor_options: TukeyTractorOptions,
    w_delays: WDelays | None = None,
) -> DataChunk:
    """Compute a tukey taper for a dataset and then apply it
    to the dataset. Here the data corresponds to a (chan, time, pol)
    array. Data is not necessarily a single baseline.

    If a `w_delays` is provided it represents the delay (in seconds)
    between the phase direction of the measurement set and the Sun.
    This quantity may be derived in a number of ways, but in `jolly_roger`
    it is based on the difference of the w-coordinates towards these
    two directions. It should have a shape of [baselines, time]

    Args:
        data_chunk (DataChunk): The representation of the data with attached units
        tukey_tractor_options (TukeyTractorOptions): Options for the tukey taper
        w_delays (WDelays | None, optional): The w-derived delays to apply. If None taper is applied to large delays. Defaults to None.

    Returns:
        DataChunk: A data chunk whose ``masked_data`` holds the tapered complex visibilities
    """

    # Transform visibilities into delay-time space
    delay_time = data_to_delay_time(data=data_chunk)

    # Look up the delay offset if requested. A zero offset leaves the
    # taper centred on zero delay.
    tukey_x_offset: u.Quantity = np.zeros_like(delay_time.delay)

    if w_delays is not None:
        baseline_idx, time_idx = _get_baseline_time_indicies(
            w_delays=w_delays, data_chunk=data_chunk
        )
        # Per-row delay of the target object for each (baseline, time)
        tukey_x_offset = w_delays.w_delays[baseline_idx, time_idx]

        # need to scale the x offset to the -pi to pi range.
        # The delay should be symmetric
        tukey_x_offset = (
            tukey_x_offset / (np.max(delay_time.delay) / np.pi).decompose()
        ).value

    taper = tukey_taper(
        x=delay_time.delay,
        outer_width=tukey_tractor_options.outer_width,
        tukey_width=tukey_tractor_options.tukey_width,
        tukey_x_offset=tukey_x_offset,
    )
    if w_delays is not None:
        # The use of the `tukey_x_offset` changes the
        # shape of the output array. The internals of that
        # function returns a different shape via the broadcasting
        taper = np.swapaxes(taper[:, :, None], 0, 1)

        # Since we want to dampen the target object we invert the taper
        taper = 1.0 - taper

        # Disable the taper (set to unity) when the target object is
        # below the elevation cut.
        # TODO: Allow elevation to be a user parameter
        elevation_mask = w_delays.elevation < tukey_tractor_options.elevation_cut
        taper[elevation_mask[time_idx], :, :] = 1.0

        # TODO: Handle case of aliased delays

        # TODO: Create heuristic to determine where baseline is long enough to
        # ignore the tapering. Aliasing may give us this though...

        # TODO: Create flags where delay is 'close' to 0

    else:
        # Broadcast the 1D taper over the (time, delay, pol) data
        taper = taper[None, :, None]

    # Delay-time is a 3D array: (time, delay, pol)
    # Taper is 1D: (delay,)
    tapered_delay_time_data_real = delay_time.delay_time.real * taper
    tapered_delay_time_data_imag = delay_time.delay_time.imag * taper
    tapered_delay_time_data = (
        tapered_delay_time_data_real + 1j * tapered_delay_time_data_imag
    )
    # NOTE(review): this aliases `delay_time` rather than copying it, so the
    # original delay-time container is mutated in place — confirm intended.
    tapered_delay_time = delay_time
    tapered_delay_time.delay_time = tapered_delay_time_data

    # Transform back from delay-time space into frequency-time visibilities
    tapered_data = delay_time_to_data(
        delay_time=tapered_delay_time,
        original_data=data_chunk,
    )
    logger.debug(f"{tapered_data.masked_data.shape=} {tapered_data.masked_data.dtype}")

    return tapered_data
|
|
591
|
+
|
|
592
|
+
|
|
593
|
+
@dataclass
class TukeyTractorOptions:
    """Options to describe the tukey taper to apply"""

    ms_path: Path
    """Measurement set to be modified"""
    outer_width: float = np.pi / 4
    """The start of the tapering in frequency space"""
    tukey_width: float = np.pi / 8
    """The width of the tapered region in frequency space"""
    data_column: str = "DATA"
    """The visibility column to modify"""
    output_column: str = "CORRECTED_DATA"
    """The output column to be created with the modified data"""
    copy_column_data: bool = False
    """Copy the data from the data column to the output column before applying the taper"""
    dry_run: bool = False
    """Indicates whether the data will be written back to the measurement set"""
    make_plots: bool = False
    """Create a small set of diagnostic plots"""
    overwrite: bool = False
    """If the output column exists it will be overwritten"""
    # NOTE(review): this default (1000) differs from the CLI default of
    # 10000 set in `get_parser` — confirm which is intended.
    chunk_size: int = 1000
    """Size of the row-wise chunking iterator"""
    apply_towards_object: bool = False
    """apply the taper using the delay towards the target object."""
    target_object: str = "Sun"
    """The target object to apply the delay towards."""
    elevation_cut: u.Quantity = -1 * u.deg
    """The elevation cut-off for the target object. Defaults to -1 degrees."""
|
|
623
|
+
|
|
624
|
+
|
|
625
|
+
def tukey_tractor(
    tukey_tractor_options: TukeyTractorOptions,
) -> None:
    """Iterate row-wise over a specified measurement set and
    apply a tukey taper operation to the delay data. Iteration
    is performed based on a chunk size, indicating the number
    of rows to read in at a time.

    Full description of options are outlined in `TukeyTractorOptions`.

    Args:
        tukey_tractor_options (TukeyTractorOptions): The settings to use during the taper, and measurement set to apply them to.
    """
    logger.info("jolly-roger")
    logger.info(f"Options: {tukey_tractor_options}")

    # acquire all the tables necessary to get unit information and data from
    open_ms_tables = get_open_ms_tables(
        ms_path=tukey_tractor_options.ms_path, read_only=False
    )

    # Only create/modify the output column when actually writing
    if not tukey_tractor_options.dry_run:
        add_output_column(
            tab=open_ms_tables.main_table,
            output_column=tukey_tractor_options.output_column,
            data_column=tukey_tractor_options.data_column,
            overwrite=tukey_tractor_options.overwrite,
            copy_column_data=tukey_tractor_options.copy_column_data,
        )

    # Generate the delay for all baselines and time steps
    w_delays: WDelays | None = None
    if tukey_tractor_options.apply_towards_object:
        logger.info(
            f"Pre-calculating delays towards the target: {tukey_tractor_options.target_object}"
        )
        w_delays = get_object_delay_for_ms(
            ms_path=tukey_tractor_options.ms_path,
            object_name=tukey_tractor_options.target_object,
        )
        # Expect shape [baseline, time]
        assert len(w_delays.w_delays.shape) == 2

    if not tukey_tractor_options.dry_run:
        with tqdm(total=len(open_ms_tables.main_table)) as pbar:
            for data_chunk in get_data_chunks(
                open_ms_tables=open_ms_tables,
                chunk_size=tukey_tractor_options.chunk_size,
                data_column=tukey_tractor_options.data_column,
            ):
                taper_data_chunk = _tukey_tractor(
                    data_chunk=data_chunk,
                    tukey_tractor_options=tukey_tractor_options,
                    w_delays=w_delays,
                )

                pbar.update(len(taper_data_chunk.masked_data))

                # only put if not a dry run
                open_ms_tables.main_table.putcol(
                    columnname=tukey_tractor_options.output_column,
                    value=taper_data_chunk.masked_data,
                    startrow=taper_data_chunk.row_start,
                    nrow=taper_data_chunk.chunk_size,
                )

    # NOTE(review): with dry_run=True and make_plots=True the output column
    # was never created/written — confirm `make_plot_results` handles a
    # missing/stale output column in that combination.
    if tukey_tractor_options.make_plots:
        plot_paths = make_plot_results(
            open_ms_tables=open_ms_tables,
            data_column=tukey_tractor_options.data_column,
            output_column=tukey_tractor_options.output_column,
            w_delays=w_delays,
        )

        logger.info(f"Made {len(plot_paths)} output plots")
|
|
699
|
+
|
|
700
|
+
|
|
701
|
+
def get_parser() -> ArgumentParser:
    """Build and return the CLI argument parser for the tractor tools.

    Returns:
        ArgumentParser: Constructed argument parser
    """
    parser = ArgumentParser(description="Run the Jolly Roger Tractor")
    subparsers = parser.add_subparsers(dest="mode")

    tukey_parser = subparsers.add_parser(
        name="tukey", help="Perform a dumb Tukey taper across delay-time data"
    )

    # Bind the method once; every option below is attached to the tukey mode
    add_arg = tukey_parser.add_argument
    add_arg(
        "ms_path",
        type=Path,
        help="The measurement set to process with the Tukey tractor",
    )
    add_arg(
        "--outer-width",
        type=float,
        default=np.pi / 4,
        help="The outer width of the Tukey taper in radians",
    )
    add_arg(
        "--tukey-width",
        type=float,
        default=np.pi / 8,
        help="The Tukey width of the Tukey taper in radians",
    )
    add_arg(
        "--data-column",
        type=str,
        default="DATA",
        help="The data column to use for the Tukey tractor",
    )
    add_arg(
        "--output-column",
        type=str,
        default="CORRECTED_DATA",
        help="The output column to write the Tukey tractor results to",
    )
    add_arg(
        "--copy-column-data",
        action="store_true",
        help="If set, the Tukey tractor will copy the data from the data column to the output column before applying the taper",
    )
    add_arg(
        "--dry-run",
        action="store_true",
        help="If set, the Tukey tractor will not write any output, but will log what it would do",
    )
    add_arg(
        "--make-plots",
        action="store_true",
        help="If set, the Tukey tractor will make plots of the results",
    )
    add_arg(
        "--overwrite",
        action="store_true",
        help="If set, the Tukey tractor will overwrite the output column if it already exists",
    )
    add_arg(
        "--chunk-size",
        type=int,
        default=10000,
        help="The number of rows to process in one chunk. Larger numbers require more memory but fewer interactions with I/O.",
    )
    add_arg(
        "--target-object",
        type=str,
        default="Sun",
        help="The target object to apply the delay towards. Defaults to 'Sun'.",
    )
    add_arg(
        "--apply-towards-object",
        action="store_true",
        help="Whether the tukey taper is applied towards the target object (e.g. the Sun). If not set, the taper is applied towards large delays.",
    )

    return parser
|
|
781
|
+
|
|
782
|
+
|
|
783
|
+
def cli() -> None:
    """Command line interface for the Jolly Roger Tractor."""
    parser = get_parser()
    args = parser.parse_args()

    # Guard clause: anything other than the tukey mode just prints usage
    if args.mode != "tukey":
        parser.print_help()
        return

    options = TukeyTractorOptions(
        ms_path=args.ms_path,
        outer_width=args.outer_width,
        tukey_width=args.tukey_width,
        data_column=args.data_column,
        output_column=args.output_column,
        copy_column_data=args.copy_column_data,
        dry_run=args.dry_run,
        make_plots=args.make_plots,
        overwrite=args.overwrite,
        chunk_size=args.chunk_size,
        target_object=args.target_object,
        apply_towards_object=args.apply_towards_object,
    )
    tukey_tractor(tukey_tractor_options=options)
|
|
806
|
+
|
|
807
|
+
|
|
808
|
+
# Allow the module to be executed directly as a script.
if __name__ == "__main__":
    cli()
|
jolly_roger/uvws.py
CHANGED
|
@@ -11,17 +11,67 @@ from astropy.constants import c as speed_of_light
|
|
|
11
11
|
from casacore.tables import table, taql
|
|
12
12
|
from tqdm import tqdm
|
|
13
13
|
|
|
14
|
-
from jolly_roger.baselines import Baselines
|
|
15
|
-
from jolly_roger.hour_angles import PositionHourAngles
|
|
14
|
+
from jolly_roger.baselines import Baselines, get_baselines_from_ms
|
|
15
|
+
from jolly_roger.hour_angles import PositionHourAngles, make_hour_angles_for_ms
|
|
16
16
|
from jolly_roger.logging import logger
|
|
17
17
|
|
|
18
18
|
|
|
19
|
+
@dataclass(frozen=True)
class WDelays:
    """Representation and mappings for the w-coordinate derived delays"""

    object_name: str
    """The name of the object that the delays are derived towards"""
    w_delays: u.Quantity
    """The w-derived delay. Shape is [baseline, time]"""
    b_map: dict[tuple[int, int], int]
    """The mapping between (ANTENNA1,ANTENNA2) to baseline index"""
    # NOTE(review): keys are `astropy` Quantity objects built from MS times;
    # lookups rely on exact value/unit equality — confirm hashing behaviour.
    time_map: dict[u.Quantity, int]
    """The mapping between time (MJDs from measurement set) to index"""
    elevation: u.Quantity
    """The elevation of the target object in time order of steps in the MS"""
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def get_object_delay_for_ms(
    ms_path: Path,
    object_name: str = "sun",
) -> WDelays:
    """Compute the w-coordinate derived delay between the phase direction
    of a measurement set and a named object (e.g. the Sun).

    UVWs are computed twice — once towards the phase direction and once
    towards the named object — and the difference of the w-coordinates
    gives the delay distance, which is converted to a delay time.

    Args:
        ms_path (Path): The measurement set to derive the delays for
        object_name (str, optional): The object to compute delays towards.
            Defaults to "sun".
            NOTE(review): the default case differs from the "Sun" default
            used by the tractor CLI — confirm lookups are case-insensitive.

    Returns:
        WDelays: The w-derived delays with baseline/time index mappings
    """
    # Generate the two sets of uvw coordinate objects
    baselines: Baselines = get_baselines_from_ms(ms_path=ms_path)
    hour_angles_phase = make_hour_angles_for_ms(
        ms_path=ms_path,
        position=None,  # gets the position from phase direction
    )
    uvws_phase: UVWs = xyz_to_uvw(baselines=baselines, hour_angles=hour_angles_phase)

    hour_angles_object = make_hour_angles_for_ms(
        ms_path=ms_path,
        position=object_name,  # track the named object rather than the phase direction
    )
    uvws_object: UVWs = xyz_to_uvw(baselines=baselines, hour_angles=hour_angles_object)

    # Subtract the w-coordinates out. Since these uvws have
    # been computed towards different directions the difference
    # in w-coordinate is the delay distance
    w_diffs = uvws_object.uvws[2] - uvws_phase.uvws[2]

    # Convert the delay distance to a delay time
    delay_object = (w_diffs / speed_of_light).decompose()

    return WDelays(
        object_name=object_name,
        w_delays=delay_object,
        b_map=baselines.b_map,
        time_map=hour_angles_phase.time_map,
        elevation=hour_angles_object.elevation,
    )
|
|
67
|
+
|
|
68
|
+
|
|
19
69
|
@dataclass
|
|
20
70
|
class UVWs:
|
|
21
71
|
"""A small container to represent uvws"""
|
|
22
72
|
|
|
23
73
|
uvws: np.ndarray
|
|
24
|
-
"""The (U,V,W)
|
|
74
|
+
"""The (U,V,W) coordinatesm shape [coord, baseline, time]"""
|
|
25
75
|
hour_angles: PositionHourAngles
|
|
26
76
|
"""The hour angle information used to construct the UVWs"""
|
|
27
77
|
baselines: Baselines
|
|
@@ -51,9 +101,9 @@ def xyz_to_uvw(
|
|
|
51
101
|
declination = hour_angles.position.dec
|
|
52
102
|
|
|
53
103
|
# This is necessary for broadcastung in the matrix to work.
|
|
54
|
-
# Should the position be a solar object like the
|
|
104
|
+
# Should the position be a solar object like the sun its position
|
|
55
105
|
# will change throughout the observation. but it will have
|
|
56
|
-
|
|
106
|
+
# been created consistently with the hour angles. If it is fixed
|
|
57
107
|
# then the use of the numpy ones like will ensure the same shape.
|
|
58
108
|
declination = (np.ones(len(ha)) * declination).decompose()
|
|
59
109
|
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: jolly-roger
|
|
3
|
-
Version: 0.
|
|
3
|
+
Version: 0.3.0
|
|
4
4
|
Summary: The pirate flagger
|
|
5
5
|
Project-URL: Homepage, https://github.com/flint-crew/jolly-roger
|
|
6
6
|
Project-URL: Bug Tracker, https://github.com/flint-crew/jolly-roger/issues
|
|
@@ -23,6 +23,8 @@ Classifier: Topic :: Scientific/Engineering
|
|
|
23
23
|
Classifier: Typing :: Typed
|
|
24
24
|
Requires-Python: >=3.11
|
|
25
25
|
Requires-Dist: astropy
|
|
26
|
+
Requires-Dist: dask-ms
|
|
27
|
+
Requires-Dist: matplotlib
|
|
26
28
|
Requires-Dist: numpy>=2.0.0
|
|
27
29
|
Requires-Dist: python-casacore>=3.6.0
|
|
28
30
|
Requires-Dist: tqdm
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
jolly_roger/__init__.py,sha256=7xiZLdeY-7sgrYGQ1gNdCjgCfqnoPXK7AeaHncY_DGU,204
|
|
2
|
+
jolly_roger/_version.py,sha256=AGmG_Lx0-9ztFw_7d9mYbaYuC-2abxE1oXOUNAY29YY,511
|
|
3
|
+
jolly_roger/_version.pyi,sha256=j5kbzfm6lOn8BzASXWjGIA1yT0OlHTWqlbyZ8Si_o0E,118
|
|
4
|
+
jolly_roger/baselines.py,sha256=C_vC3v_ciU2T_si31oS0hUmsMNTQA0USxrm4118vYvY,4615
|
|
5
|
+
jolly_roger/delays.py,sha256=cvLMhChkkB6PkS11v6JU8Wn23Zqv5bQY1HTMzeIGTNw,3015
|
|
6
|
+
jolly_roger/flagger.py,sha256=tlC-M_MpLpqOvkF544zw2EvOUpbSpasO2zlMlXMcxSs,3034
|
|
7
|
+
jolly_roger/hour_angles.py,sha256=ld3jiEDQXlYLHrChUxYD_UBSxKH0qarstakBPLQ0M8s,6044
|
|
8
|
+
jolly_roger/logging.py,sha256=04YVHnF_8tKDkXNtXQ-iMyJ2BLV-qowbPAqqMFDxYE4,1338
|
|
9
|
+
jolly_roger/plots.py,sha256=3aRIy4LcFY5mrlPim_geiz1B22wOYp0EfMvGh90GCUA,5473
|
|
10
|
+
jolly_roger/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
11
|
+
jolly_roger/tractor.py,sha256=Z6LyrhNZZuTKVyFMUhyOGhLgYAjrkfR9YP93LdsCHOY,27907
|
|
12
|
+
jolly_roger/uvws.py,sha256=ujZdIIxNY2k4HY9p65kUyH-VqN6thNpOrBb-wpL9mYM,12424
|
|
13
|
+
jolly_roger-0.3.0.dist-info/METADATA,sha256=Jzt-pUJETG8TYNSXKCe1HUVuS5Fmc7K07RwhR3SFfiQ,4221
|
|
14
|
+
jolly_roger-0.3.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
|
|
15
|
+
jolly_roger-0.3.0.dist-info/entry_points.txt,sha256=q8RYosASYsPShzsIo58NxOhIMuB4F-gQ2uG6zS2p224,98
|
|
16
|
+
jolly_roger-0.3.0.dist-info/licenses/LICENSE,sha256=7G-TthaPSOehr-pdj4TJydXj3eIUmerMbCUSatMr8hc,1522
|
|
17
|
+
jolly_roger-0.3.0.dist-info/RECORD,,
|
|
@@ -1,14 +0,0 @@
|
|
|
1
|
-
jolly_roger/__init__.py,sha256=7xiZLdeY-7sgrYGQ1gNdCjgCfqnoPXK7AeaHncY_DGU,204
|
|
2
|
-
jolly_roger/_version.py,sha256=-LyU5F1uZDjn6Q8_Z6-_FJt_8RE4Kq9zcKdg1abSSps,511
|
|
3
|
-
jolly_roger/_version.pyi,sha256=j5kbzfm6lOn8BzASXWjGIA1yT0OlHTWqlbyZ8Si_o0E,118
|
|
4
|
-
jolly_roger/baselines.py,sha256=C_vC3v_ciU2T_si31oS0hUmsMNTQA0USxrm4118vYvY,4615
|
|
5
|
-
jolly_roger/flagger.py,sha256=tlC-M_MpLpqOvkF544zw2EvOUpbSpasO2zlMlXMcxSs,3034
|
|
6
|
-
jolly_roger/hour_angles.py,sha256=SUUN_DcT3xzYp9JZ1U9ZcJpMjsfYETJ4D0YghUjee7Y,6096
|
|
7
|
-
jolly_roger/logging.py,sha256=04YVHnF_8tKDkXNtXQ-iMyJ2BLV-qowbPAqqMFDxYE4,1338
|
|
8
|
-
jolly_roger/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
9
|
-
jolly_roger/uvws.py,sha256=42FngtA425Fk8QSlEIbl-9tEBd_-In0FtV2LYcnk6-0,10555
|
|
10
|
-
jolly_roger-0.1.0.dist-info/METADATA,sha256=d40uefWM1e6HJtaqxNoak1PFBNFFZaBaS_18wno_Au8,4172
|
|
11
|
-
jolly_roger-0.1.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
|
|
12
|
-
jolly_roger-0.1.0.dist-info/entry_points.txt,sha256=ZwEZAe4DBn5nznVI0tP0a1wUinYouwXxxcZP6p7Pkvk,58
|
|
13
|
-
jolly_roger-0.1.0.dist-info/licenses/LICENSE,sha256=7G-TthaPSOehr-pdj4TJydXj3eIUmerMbCUSatMr8hc,1522
|
|
14
|
-
jolly_roger-0.1.0.dist-info/RECORD,,
|
|
File without changes
|
|
File without changes
|