PyStormTracker 0.2.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pystormtracker/__init__.py +4 -0
- pystormtracker/models/__init__.py +5 -0
- pystormtracker/models/center.py +58 -0
- pystormtracker/models/grid.py +40 -0
- pystormtracker/models/tracks.py +29 -0
- pystormtracker/simple/__init__.py +4 -0
- pystormtracker/simple/detector.py +315 -0
- pystormtracker/simple/linker.py +104 -0
- pystormtracker/stormtracker.py +244 -0
- pystormtracker-0.2.1.dist-info/METADATA +146 -0
- pystormtracker-0.2.1.dist-info/RECORD +14 -0
- pystormtracker-0.2.1.dist-info/WHEEL +4 -0
- pystormtracker-0.2.1.dist-info/entry_points.txt +2 -0
- pystormtracker-0.2.1.dist-info/licenses/LICENSE +40 -0
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
import math
|
|
2
|
+
from typing import Any
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
class Center:
    """A detected storm center (a single point of a track) at one time step."""

    # Mean Earth radius in kilometres, used by all distance helpers.
    R: float = 6367.0
    # Conversion factor from degrees to radians.
    DEGTORAD: float = math.pi / 180.0

    def __init__(self, time: Any, lat: float, lon: float, var: Any) -> None:
        """Record the timestamp, coordinates and field value of the center."""
        self.time = time
        self.lat = lat
        self.lon = lon
        self.var = var

    def __repr__(self) -> str:
        # Historical behavior: only the field value is shown.
        return str(self.var)

    def __str__(self) -> str:
        return f"[time={self.time}, lat={self.lat}, lon={self.lon}, var={self.var}]"

    def abs_dist(self, center: "Center") -> float:
        """Haversine formula for calculating the great circle distance"""

        if not isinstance(center, Center):
            raise TypeError("must be compared with a Center object")

        # Work in radians throughout.
        dlat_rad = (center.lat - self.lat) * self.DEGTORAD
        dlon_rad = (center.lon - self.lon) * self.DEGTORAD

        haversine = (
            math.sin(dlat_rad / 2) ** 2
            + math.cos(self.lat * self.DEGTORAD)
            * math.cos(center.lat * self.DEGTORAD)
            * math.sin(dlon_rad / 2) ** 2
        )
        return 2 * self.R * math.asin(math.sqrt(haversine))

    def lat_dist(self, center: "Center") -> float:
        """Meridional (north-south) distance in km from self to *center*."""
        if not isinstance(center, Center):
            raise TypeError("must be compared with a Center object")

        return self.R * (center.lat - self.lat) * self.DEGTORAD

    def lon_dist(self, center: "Center") -> float:
        """Zonal (east-west) distance in km, scaled by the mean latitude."""
        if not isinstance(center, Center):
            raise TypeError("must be compared with a Center object")

        mid_lat = (self.lat + center.lat) / 2
        dlon_rad = (center.lon - self.lon) * self.DEGTORAD
        return self.R * dlon_rad * math.cos(mid_lat * self.DEGTORAD)
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
from abc import ABCMeta, abstractmethod
|
|
2
|
+
from typing import Any, Literal
|
|
3
|
+
|
|
4
|
+
from .center import Center
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class Grid(metaclass=ABCMeta):
    """Abstract interface for a gridded dataset used by the storm tracker.

    Concrete implementations (e.g. SimpleDetector) wrap a specific file
    format and expose its variable, time and coordinate arrays, plus the
    detection entry point.
    """

    @abstractmethod
    def get_var(self, chart: int | tuple[int, int] | None = None) -> Any:
        """Return the tracked variable: one time slice (int), a slab
        (start/stop tuple), or the full array (None)."""
        raise NotImplementedError

    @abstractmethod
    def get_time(self) -> Any:
        """Return the time coordinate values covered by this grid."""
        raise NotImplementedError

    @abstractmethod
    def get_time_obj(self) -> Any:
        """Return the underlying time variable object (with metadata
        such as units/calendar, where the format provides them)."""
        raise NotImplementedError

    @abstractmethod
    def get_lat(self) -> Any:
        """Return the latitude coordinate values."""
        raise NotImplementedError

    @abstractmethod
    def get_lon(self) -> Any:
        """Return the longitude coordinate values."""
        raise NotImplementedError

    @abstractmethod
    def split(self, num: int) -> list["Grid"]:
        """Split this grid into *num* sub-grids along the time axis,
        for parallel detection."""
        raise NotImplementedError

    @abstractmethod
    def detect(
        self,
        size: int = 5,
        threshold: float = 0.0,
        chart_buffer: int = 400,
        minmaxmode: Literal["min", "max"] = "min",
    ) -> list[list[Center]]:
        """Detect storm centers; return one list of Center's per time step."""
        raise NotImplementedError
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
from collections.abc import Iterator
|
|
2
|
+
from typing import Any
|
|
3
|
+
|
|
4
|
+
from .center import Center
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class Tracks:
    """Sequence-like container of storm tracks (each track is a list of Center's).

    Bookkeeping attributes maintained by the linker:
      head   -- indices of tracks created at the first linked time step
      tail   -- indices of tracks still open at the last linked time step
      tstart -- time of the first linked step (None until the first link)
      tend   -- time of the most recently linked step
      dt     -- spacing between consecutive steps (None until known)
    """

    def __init__(self) -> None:
        # Internal storage; exposed through the sequence dunders below.
        self._tracks: list[list[Center]] = []
        self.head: list[int] = []
        self.tail: list[int] = []
        self.tstart: Any | None = None
        self.tend: Any | None = None
        self.dt: Any | None = None

    def __getitem__(self, index: int) -> list[Center]:
        return self._tracks[index]

    def __setitem__(self, index: int, value: list[Center]) -> None:
        self._tracks[index] = value

    def __iter__(self) -> Iterator[list[Center]]:
        yield from self._tracks

    def __len__(self) -> int:
        return len(self._tracks)

    def append(self, obj: list[Center]) -> None:
        """Add a new track to the collection."""
        self._tracks.append(obj)
|
|
@@ -0,0 +1,315 @@
|
|
|
1
|
+
from typing import Any, Literal
|
|
2
|
+
|
|
3
|
+
import netCDF4
|
|
4
|
+
import numpy as np
|
|
5
|
+
from scipy.ndimage import generic_filter, laplace
|
|
6
|
+
|
|
7
|
+
from ..models.center import Center
|
|
8
|
+
from ..models.grid import Grid
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class SimpleDetector(Grid):
    """Grid backed by a NetCDF file; detects local extrema as storm centers.

    The file is opened lazily on first data access so that instances can be
    pickled/scattered to workers (see split()) before any handle exists.
    """

    def __init__(
        self, pathname: str, varname: str, trange: tuple[int, int] | None = None
    ) -> None:
        """Remember file location and optional time window; do no I/O yet.

        Args:
            pathname: Path to the NetCDF file.
            varname: Name of the (time, lat, lon) variable to track.
            trange: Optional half-open [start, stop) window of time indices.
        """
        self.pathname = pathname
        self.varname = varname
        self.trange = trange

        # Lazy-open state; populated by _init().
        self._open_file: bool = False
        self._var: Any = None
        self._time: Any = None
        self._lat: Any = None
        self._lon: Any = None

        # Public handle and cached coordinate arrays.
        self.f: Any = None
        self.time: Any = None
        self.lat: Any = None
        self.lon: Any = None

    def _init(self) -> None:
        """Open the NetCDF file once and bind variable/coordinate handles.

        Raises:
            KeyError: if no latitude or longitude variable can be found.
        """
        if self._open_file is False:
            self._open_file = True
            self.f = netCDF4.Dataset(self.pathname, "r")

            # Dimension of var is time, lat, lon
            self._var = self.f.variables[self.varname]
            # Disable auto mask and scale as it may mask valid SLP values in some files
            self._var.set_auto_maskandscale(False)

            self._time = self.f.variables["time"]

            # Accept either CF-style long names or short names.
            if "latitude" in self.f.variables:
                self._lat = self.f.variables["latitude"]
            elif "lat" in self.f.variables:
                self._lat = self.f.variables["lat"]
            else:
                raise KeyError(
                    "Neither 'latitude' nor 'lat' found in NetCDF variables."
                )

            if "longitude" in self.f.variables:
                self._lon = self.f.variables["longitude"]
            elif "lon" in self.f.variables:
                self._lon = self.f.variables["lon"]
            else:
                raise KeyError(
                    "Neither 'longitude' nor 'lon' found in NetCDF variables."
                )

            # Invalidate any cached arrays; they are re-read on demand.
            self.time = None
            self.lat = None
            self.lon = None

    def get_var(self, chart: int | tuple[int, int] | None = None) -> Any:
        """Read the tracked variable, offset into self.trange when set.

        Args:
            chart: A single time index (int), a half-open (start, stop)
                tuple of indices relative to trange, or None for everything.

        Returns:
            The requested array slice, or None for an empty selection.

        Raises:
            TypeError: if chart is neither an int nor a 2-tuple of ints.
            IndexError: if chart falls outside the trange window.
        """
        # An empty trange selects nothing.
        if self.trange is not None:
            if self.trange[0] >= self.trange[1]:
                return None

        if chart is not None:
            if isinstance(chart, tuple):
                if (
                    len(chart) != 2
                    or not isinstance(chart[0], int)
                    or not isinstance(chart[1], int)
                ):
                    raise TypeError("chart must be a tuple of two integers")
            elif not isinstance(chart, int):
                raise TypeError("chart must be an integer or tuple")

            # Bounds are only checked against trange when one is set.
            if self.trange is not None:
                if isinstance(chart, int):
                    if chart < 0 or chart >= self.trange[1] - self.trange[0]:
                        raise IndexError("chart is out of bound of trange")
                if isinstance(chart, tuple):
                    if chart[0] == chart[1]:
                        return None
                    if chart[0] > chart[1]:
                        raise IndexError("chart[1] must be larger than chart[0]")
                    if chart[0] < 0 or chart[0] > self.trange[1] - self.trange[0]:
                        raise IndexError("chart[0] is out of bound of trange")
                    if chart[1] < 0 or chart[1] > self.trange[1] - self.trange[0]:
                        raise IndexError("chart[1] is out of bound of trange")

        self._init()

        # chart indices are relative to trange[0] when a window is set.
        if isinstance(chart, int):
            if self.trange is None:
                return self._var[chart, :, :]
            else:
                return self._var[self.trange[0] + chart, :, :]
        elif isinstance(chart, tuple):
            if self.trange is None:
                return self._var[chart[0] : chart[1], :, :]
            else:
                return self._var[
                    self.trange[0] + chart[0] : self.trange[0] + chart[1], :, :
                ]
        else:
            return self._var[:]

    def get_time(self) -> Any:
        """Return (and cache) the time values inside trange, or None if empty."""
        if self.trange is not None:
            if self.trange[0] >= self.trange[1]:
                return None

        self._init()
        if self.time is None:
            if self.trange is None:
                self.time = self._time[:]
            else:
                self.time = self._time[self.trange[0] : self.trange[1]]
        return self.time

    def get_time_obj(self) -> Any:
        """Return the raw NetCDF time variable (carries units/calendar)."""
        self._init()
        return self._time

    def get_lat(self) -> Any:
        """Return (and cache) the latitude values."""
        self._init()
        if self.lat is None:
            self.lat = self._lat[:]
        return self.lat

    def get_lon(self) -> Any:
        """Return (and cache) the longitude values."""
        self._init()
        if self.lon is None:
            self.lon = self._lon[:]
        return self.lon

    def split(self, num: int) -> list["Grid"]:
        """Split the time axis into *num* contiguous, near-equal chunks.

        Returns fresh, un-opened SimpleDetector instances so they can be
        shipped to parallel workers.

        Raises:
            TypeError: if num is not an integer.
            RuntimeError: if this instance already opened its file.
        """
        if not isinstance(num, int):
            raise TypeError("number to split must be an integer")

        if self._open_file is False:
            if self.trange is not None:
                time_len = self.trange[1] - self.trange[0]
                tstart = self.trange[0]
            else:
                # Peek at the time dimension without keeping the file open.
                f = netCDF4.Dataset(self.pathname, "r")
                time_len = f.dimensions["time"].size
                f.close()
                tstart = 0

            chunk_size = time_len // num
            remainder = time_len % num

            # Distribute the remainder one step at a time across chunks.
            tranges = [
                (
                    tstart + i * chunk_size + remainder * i // num,
                    tstart + (i + 1) * chunk_size + remainder * (i + 1) // num,
                )
                for i in range(num)
            ]

            return [
                SimpleDetector(self.pathname, self.varname, trange=it) for it in tranges
            ]

        else:
            raise RuntimeError(
                "SimpleDetector must not be initialized before running split()"
            )

    def _local_extrema_func(
        self,
        buffer: np.ndarray,
        size: int,
        threshold: float,
        minmaxmode: Literal["min", "max"],
    ) -> bool:
        """Per-pixel callback for generic_filter: is the window center an extremum?

        *buffer* is the flattened size x size neighborhood around the pixel.
        """
        half_size = size // 2

        search_window = buffer.reshape((size, size))
        origin = (half_size, half_size)

        center_val = search_window[origin]

        # If the center value is masked, it cannot be an extrema
        if np.ma.is_masked(center_val):
            return False

        if threshold == 0.0:
            if minmaxmode == "min":
                return bool(center_val == search_window.min())
            elif minmaxmode == "max":
                return bool(center_val == search_window.max())
        elif center_val == search_window.min():
            if minmaxmode == "min":
                # At least 8 of values in buffer should be larger than threshold
                return bool(sorted(buffer)[8] - center_val > threshold)
        elif center_val == search_window.max():
            if minmaxmode == "max":
                return bool(sorted(buffer)[0] - center_val < -1 * threshold)
        return False

    def _local_extrema_filter(
        self,
        input_arr: np.ndarray,
        size: int,
        threshold: float = 0.0,
        minmaxmode: Literal["min", "max"] = "min",
    ) -> np.ndarray:
        """Mark local extrema of *input_arr* with a moving size x size window.

        Longitude wraps around (mode="wrap"); rows within half a window of
        either pole are zeroed since the wrap is wrong there.

        Raises:
            ValueError: if size is even.
        """
        if size % 2 != 1:
            raise ValueError("size must be an odd number")

        half_size = size // 2

        output = generic_filter(
            input_arr,
            self._local_extrema_func,
            size=size,
            mode="wrap",
            extra_keywords={
                "size": size,
                "threshold": threshold,
                "minmaxmode": minmaxmode,
            },
        )

        # Mask the extreme latitudes
        output[:half_size, :] = 0.0
        output[-half_size:, :] = 0.0

        return np.asarray(output)

    def _local_max_laplace(self, buffer: np.ndarray, size: int) -> bool:
        """Keep a flagged pixel only if its Laplacian dominates its window."""
        origin = (size * size) // 2
        return bool(buffer[origin] and buffer[origin] == buffer.max())

    def _remove_dup_laplace(
        self, data: np.ndarray, mask: np.ndarray, size: int = 5
    ) -> np.ndarray:
        """Thin clustered detections: among nearby flagged pixels, keep the
        one with the strongest Laplacian response of *data*."""
        laplacian = np.multiply(laplace(data, mode="wrap"), mask)

        return np.asarray(
            generic_filter(
                laplacian,
                self._local_max_laplace,
                size=size,
                mode="wrap",
                extra_keywords={"size": size},
            )
        )

    def detect(
        self,
        size: int = 5,
        threshold: float = 0.0,
        chart_buffer: int = 400,
        minmaxmode: Literal["min", "max"] = "min",
    ) -> list[list[Center]]:
        """Returns a list of list of Center's

        Scans every time step in trange, reading the variable in chunks of
        *chart_buffer* steps, and collects local minima (or maxima) as
        Center objects.
        """
        # Empty window: nothing to detect.
        if self.trange is not None and self.trange[0] >= self.trange[1]:
            return []

        time = self.get_time()
        lat = self.get_lat()
        lon = self.get_lon()

        centers = []

        var: Any = None

        num_steps = len(time)
        for it, t in enumerate(time):
            # Position within the current buffered chunk; refill on wrap.
            ibuffer = it % chart_buffer
            if ibuffer == 0:
                var = self.get_var(chart=(it, min(it + chart_buffer, num_steps)))

            # NOTE(review): if get_var() ever returned None here, this step
            # would be skipped and `centers` would fall out of sync with
            # `time` — the chunk bounds above appear to prevent that.
            if var is not None:
                chart = var[ibuffer, :, :]

                # Fill masked values so they aren't detected as extrema
                if minmaxmode == "min":
                    filled_chart = np.ma.filled(chart, fill_value=np.inf)
                else:
                    filled_chart = np.ma.filled(chart, fill_value=-np.inf)

                extrema = self._local_extrema_filter(
                    filled_chart, size, threshold=threshold, minmaxmode=minmaxmode
                )

                # Ensure we don't detect centers on originally masked pixels
                if np.ma.is_masked(chart):
                    extrema[chart.mask] = 0

                extrema = self._remove_dup_laplace(filled_chart, extrema, size=5)

                center_list = [
                    Center(t, float(lat[i]), float(lon[j]), chart[i, j])
                    for i, j in np.transpose(extrema.nonzero())
                ]
                print(f"Step {it + 1}/{num_steps}: Found {len(center_list)} centers")
                centers.append(center_list)

        return centers
|
|
@@ -0,0 +1,104 @@
|
|
|
1
|
+
from ..models.center import Center
|
|
2
|
+
from ..models.tracks import Tracks
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
class SimpleLinker:
    """Links per-timestep storm centers into tracks by nearest distance."""

    def __init__(self, threshold: float = 500.0) -> None:
        # Maximum great-circle distance (same units as Center.abs_dist,
        # i.e. km) for two centers to be considered the same storm.
        self.threshold = threshold

    def match_center(self, tracks: Tracks, centers: list[Center]) -> list[int | None]:
        """Match new *centers* to the open track ends via mutual nearest neighbor.

        Builds the bipartite distance table between current track tails and
        the new centers (only pairs closer than the threshold), then
        repeatedly commits pairs that are each other's closest choice,
        removing both from the table, until no more mutual matches exist.

        Returns:
            For each center, the index of the track it extends, or None.
        """
        ends = [tracks[i][-1] for i in tracks.tail]

        # dforward[end][center] / dbackward[center][end] hold the same
        # distances, indexed from each side for fast nearest lookup.
        dforward: list[dict[int, float]] = [{} for _ in range(len(ends))]
        dbackward: list[dict[int, float]] = [{} for _ in range(len(centers))]

        for ic1, c1 in enumerate(ends):
            for ic2, c2 in enumerate(centers):
                dist = c1.abs_dist(c2)
                if dist < self.threshold:
                    dforward[ic1][ic2] = dist
                    dbackward[ic2][ic1] = dist

        matched: list[int | None] = [None for _ in range(len(centers))]

        while True:
            has_match = False

            for i, db in enumerate(dbackward):
                if matched[i] is None and len(db) > 0:
                    # Nearest open track end for this center...
                    iforward = min(db, key=db.get)  # type: ignore
                    di = dforward[iforward]

                    # ...committed only if that end's nearest center is us.
                    if min(di, key=di.get) == i:  # type: ignore
                        matched[i] = iforward

                        # Remove both parties from all remaining candidates.
                        db.clear()
                        for j in dbackward:
                            if iforward in j:
                                del j[iforward]
                        di.clear()

                        for j in dforward:
                            if i in j:
                                del j[i]

                        has_match = True

            if has_match is False:
                break

        # Translate tail-relative indices back to track indices.
        return [tracks.tail[i] if i is not None else None for i in matched]

    def match_track(self, tracks1: Tracks, tracks2: Tracks) -> list[int | None]:
        """Match the heads of *tracks2* against the open tails of *tracks1*."""
        centers = [tracks2[i][0] for i in tracks2.head]
        return self.match_center(tracks1, centers)

    def append_center(self, tracks: Tracks, centers: list[Center]) -> None:
        """Extend *tracks* with one time step worth of *centers*.

        Matched centers are appended to their track; unmatched ones start
        new tracks. Also maintains head/tail/tstart/tend/dt bookkeeping.
        """
        new_tail: list[int] = []

        matched_index = self.match_center(tracks, centers)

        for i, d in enumerate(matched_index):
            if tracks.tstart is None:
                # Very first step: every center starts a track and is a head.
                tracks.append([centers[i]])
                tracks.head.append(len(tracks) - 1)
                new_tail.append(len(tracks) - 1)
            elif d is None or (
                # A gap larger than one step also breaks the track.
                tracks.tend is not None
                and tracks.dt is not None
                and centers[0].time - tracks.dt > tracks.tend
            ):
                tracks.append([centers[i]])
                new_tail.append(len(tracks) - 1)
            else:
                tracks[d].append(centers[i])
                new_tail.append(d)

        tracks.tail = new_tail

        tracks.tend = centers[0].time
        if tracks.tstart is None:
            tracks.tstart = centers[0].time
        elif tracks.dt is None:
            # Second step fixes the time spacing.
            tracks.dt = tracks.tend - tracks.tstart

    def extend_track(self, tracks1: Tracks, tracks2: Tracks) -> None:
        """Splice *tracks2* (a later time segment) onto *tracks1* in place.

        Tracks whose head matches an open tail of tracks1 are concatenated;
        the rest are appended as new tracks. tail/tend are updated to
        reflect tracks2's end of the combined period.
        """
        new_tail: list[int] = []

        matched_index = self.match_track(tracks1, tracks2)
        # Map tracks2 head index -> matched tracks1 index (or None).
        matched_dict = {d: matched_index[i] for i, d in enumerate(tracks2.head)}
        # Set of tracks2 indices that are open at its end.
        tail_dict = dict.fromkeys(tracks2.tail)

        for i, d in enumerate(tracks2):
            match_idx = matched_dict.get(i)
            if match_idx is not None:
                tracks1[match_idx].extend(d)
                if i in tail_dict:
                    new_tail.append(match_idx)
            else:
                tracks1.append(d)
                if i in tail_dict:
                    new_tail.append(len(tracks1) - 1)

        tracks1.tail = new_tail
        tracks1.tend = tracks2.tend
|
|
@@ -0,0 +1,244 @@
|
|
|
1
|
+
import csv
|
|
2
|
+
import os
|
|
3
|
+
import sys
|
|
4
|
+
import timeit
|
|
5
|
+
from argparse import ArgumentParser, Namespace
|
|
6
|
+
from typing import Any, Literal
|
|
7
|
+
|
|
8
|
+
import netCDF4
|
|
9
|
+
import numpy as np
|
|
10
|
+
|
|
11
|
+
from .models import Center, Grid, Tracks
|
|
12
|
+
from .simple import SimpleDetector, SimpleLinker
|
|
13
|
+
|
|
14
|
+
Backend = Literal["serial", "mpi", "dask"]
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
def export_to_csv(
    tracks: Tracks, outfile: str, grid: Grid, decimal_places: int = 4
) -> None:
    """Exports detected tracks to a user-friendly CSV file.

    Args:
        tracks: Linked storm tracks to write out.
        outfile: Destination path; ".csv" is appended when missing.
        grid: Source grid, used only for the time units/calendar needed
            to render numeric timestamps as dates.
        decimal_places: Decimals used for the variable value column.
    """
    time_obj = grid.get_time_obj()
    # Time metadata may be absent on some files; fall back to defaults.
    units = getattr(time_obj, "units", "")
    calendar = getattr(time_obj, "calendar", "standard")

    if not outfile.endswith(".csv"):
        outfile += ".csv"

    with open(outfile, "w", newline="") as f:
        writer = csv.writer(f)
        writer.writerow(["track_id", "time", "lat", "lon", "var"])
        for i, track in enumerate(tracks):
            for center in track:
                try:
                    dt = netCDF4.num2date(center.time, units=units, calendar=calendar)
                    # dt can be a cftime.datetime or a standard datetime
                    time_val = dt.strftime("%Y-%m-%d %H:%M:%S")  # type: ignore
                except Exception:
                    # Best effort: keep the raw numeric timestamp.
                    time_val = str(center.time)

                # Masked/absent values are written as a placeholder.
                if center.var is None or np.ma.is_masked(center.var):
                    var_val = "--"
                else:
                    var_val = f"{float(center.var):.{decimal_places}f}"
                writer.writerow([i, time_val, center.lat, center.lon, var_val])
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
def _detect_serial(
    infile: str, varname: str, trange: tuple[int, int] | None, mode: str
) -> tuple[list[list[Center]], Grid]:
    """Run detection over the whole grid in the current process."""
    detector = SimpleDetector(pathname=infile, varname=varname, trange=trange)
    centers = detector.detect(minmaxmode=mode)  # type: ignore
    return centers, detector
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
def _detect_mpi(
    infile: str, varname: str, trange: tuple[int, int] | None, mode: str
) -> tuple[list[list[Center]], Grid, Any]:
    """Run detection across MPI ranks: root splits the grid, all detect.

    Returns this rank's centers, its sub-grid, and the communicator so
    the caller can barrier/combine afterwards.
    """
    # Imported lazily so mpi4py is only required for the MPI backend.
    from mpi4py import MPI

    comm = MPI.COMM_WORLD
    rank = comm.Get_rank()
    size = comm.Get_size()
    root = 0

    # Only the root builds and splits the full grid; scatter hands one
    # sub-grid to every rank (including the root itself).
    if rank == root:
        grid_obj = SimpleDetector(pathname=infile, varname=varname, trange=trange)
        grids: list[Grid] | None = grid_obj.split(size)
    else:
        grids = None

    grid = comm.scatter(grids, root=root)
    return grid.detect(minmaxmode=mode), grid, comm
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
def _detect_dask(
    infile: str,
    varname: str,
    trange: tuple[int, int] | None,
    mode: str,
    n_workers: int | None,
) -> tuple[list[list[Center]], Grid]:
    """Run detection in parallel on a local Dask cluster.

    The grid is split into one time chunk per worker; each chunk's
    detect() runs as a delayed task and the per-chunk results are
    concatenated back in time order.
    """
    # Imported lazily so dask is only required for this backend.
    import dask
    from distributed import Client, LocalCluster

    # Default/invalid worker counts fall back to the CPU count.
    if n_workers is None or n_workers <= 0:
        n_workers = os.cpu_count() or 4

    grid_obj = SimpleDetector(pathname=infile, varname=varname, trange=trange)
    grids = grid_obj.split(n_workers)

    # The context managers tear the cluster down even if a task fails.
    with LocalCluster(
        n_workers=n_workers, threads_per_worker=1
    ) as cluster, Client(cluster):  # type: ignore
        delayed_results = [
            dask.delayed(g.detect)(minmaxmode=mode) for g in grids  # type: ignore
        ]
        results = dask.compute(*delayed_results)  # type: ignore

    # Flatten results from chunks
    flattened_results: list[list[Center]] = []
    for chunk in results:
        flattened_results.extend(chunk)

    return flattened_results, grid_obj
|
|
104
|
+
|
|
105
|
+
|
|
106
|
+
def _link_centers(centers: list[list[Center]]) -> Tracks:
    """Chain the per-timestep center lists into continuous tracks."""
    linker = SimpleLinker()
    tracks = Tracks()
    for per_step in centers:
        linker.append_center(tracks, per_step)
    return tracks
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
def _combine_mpi_tracks(tracks: Tracks, comm: Any) -> Tracks:
    """Merge per-rank track segments onto rank 0 via a binary-tree reduction.

    At stride ``nstripe`` every rank whose index is an odd multiple of
    ``nstripe // 2`` sends its tracks down to the even multiple below it,
    which stitches the received segment onto its own with extend_track().

    Args:
        tracks: This rank's locally linked tracks.
        comm: An mpi4py communicator (MPI.COMM_WORLD in practice).

    Returns:
        The (possibly extended) tracks; only rank 0 holds the full result.
    """
    rank = comm.Get_rank()
    size = comm.Get_size()
    linker = SimpleLinker()

    nstripe = 2
    # BUGFIX: loop while a sender at offset nstripe // 2 can still exist.
    # The previous bound (nstripe <= size) ended one round too early for
    # non-power-of-two communicator sizes, stranding the top ranks' tracks
    # (e.g. with size == 3, rank 2 never sent its segment to rank 0).
    # For power-of-two sizes the schedule is unchanged.
    while nstripe // 2 < size:
        if rank % nstripe == nstripe // 2:
            comm.send(tracks, dest=rank - nstripe // 2, tag=nstripe)
        elif rank % nstripe == 0 and rank + nstripe // 2 < size:
            tracks_recv = comm.recv(source=rank + nstripe // 2, tag=nstripe)
            linker.extend_track(tracks, tracks_recv)
        nstripe *= 2
    return tracks
|
|
128
|
+
|
|
129
|
+
|
|
130
|
+
def run_tracker(
    infile: str,
    varname: str,
    outfile: str,
    trange: tuple[int, int] | None = None,
    mode: Literal["min", "max"] = "min",
    backend: Backend = "dask",
    n_workers: int | None = None,
) -> None:
    """Orchestrates the storm tracking process.

    Runs detection with the chosen backend, links the centers into tracks,
    consolidates MPI results onto rank 0, prints timing/summary stats, and
    exports the tracks to CSV.

    Args:
        infile: Input NetCDF path.
        varname: Variable to track.
        outfile: Output CSV path (".csv" appended if missing).
        trange: Optional [start, stop) time-index window.
        mode: Detect minima ("min") or maxima ("max").
        backend: "serial", "mpi" or "dask".
        n_workers: Dask worker count; None/<=0 means CPU count.
    """
    timer: dict[str, float] = {}
    use_mpi = backend == "mpi"
    use_dask = backend == "dask"

    # Under MPI only rank 0 times, reports and exports.
    rank = 0
    if use_mpi:
        from mpi4py import MPI

        rank = MPI.COMM_WORLD.Get_rank()

    if rank == 0:
        timer["detector"] = timeit.default_timer()

    # Detection Phase
    comm = None
    if use_mpi:
        centers, grid, comm = _detect_mpi(infile, varname, trange, mode)
    elif use_dask:
        centers, grid = _detect_dask(infile, varname, trange, mode, n_workers)
    else:
        centers, grid = _detect_serial(infile, varname, trange, mode)

    # Synchronize so the detector timing covers the slowest rank.
    if use_mpi and comm is not None:
        comm.Barrier()

    if rank == 0:
        timer["detector"] = timeit.default_timer() - timer["detector"]
        timer["linker"] = timeit.default_timer()

    # Linking Phase
    tracks = _link_centers(centers)

    # Consolidation Phase
    if use_mpi and comm is not None:
        timer["combiner"] = timeit.default_timer()
        tracks = _combine_mpi_tracks(tracks, comm)
        timer["combiner"] = timeit.default_timer() - timer["combiner"]

    # Export Phase
    if rank == 0:
        timer["linker"] = timeit.default_timer() - timer["linker"]

        print(f"Detector time: {timer['detector']:.4f}s")
        print(f"Linker time: {timer['linker']:.4f}s")
        if "combiner" in timer:
            print(f"Combiner time: {timer['combiner']:.4f}s")

        # Summary statistic: tracks lasting >= 8 steps that travel >= 1000 km.
        num_tracks = len(
            [t for t in tracks if len(t) >= 8 and t[0].abs_dist(t[-1]) >= 1000.0]
        )
        print(f"Number of long tracks (>= 8 steps, >= 1000km): {num_tracks}")

        export_to_csv(tracks, outfile, grid)
        final_outfile = outfile if outfile.endswith(".csv") else f"{outfile}.csv"
        print(f"Results exported to {final_outfile}")
|
|
195
|
+
|
|
196
|
+
|
|
197
|
+
def parse_args() -> Namespace:
    """Build the command-line interface and parse ``sys.argv``.

    Returns:
        Namespace with input, var, output, num, mode, backend and workers.
    """
    parser = ArgumentParser(description="PyStormTracker: A tool for tracking storms.")

    # Required I/O options.
    parser.add_argument("-i", "--input", required=True, help="Input NetCDF file.")
    parser.add_argument("-v", "--var", required=True, help="Variable to track.")
    parser.add_argument("-o", "--output", required=True, help="Output CSV file.")

    # Optional tuning knobs.
    parser.add_argument("-n", "--num", type=int, help="Number of time steps.")
    parser.add_argument(
        "-m",
        "--mode",
        choices=["min", "max"],
        default="min",
        help="Detection mode.",
    )
    parser.add_argument(
        "-b",
        "--backend",
        choices=["serial", "mpi", "dask"],
        default="dask",
        help="Parallel backend. Default is 'dask'.",
    )
    parser.add_argument(
        "-w",
        "--workers",
        type=int,
        default=None,
        help="Number of workers for Dask. Defaults to number of CPU cores.",
    )

    return parser.parse_args()
|
|
222
|
+
|
|
223
|
+
|
|
224
|
+
def main() -> None:
    """Console entry point: parse arguments and run the tracker."""
    args = parse_args()
    # --num N limits processing to the first N time steps.
    time_window = (0, args.num) if args.num is not None else None

    try:
        run_tracker(
            infile=args.input,
            varname=args.var,
            outfile=args.output,
            mode=args.mode,
            backend=args.backend,
            n_workers=args.workers,
            trange=time_window,
        )
    except Exception as e:
        # Top-level boundary: report cleanly and exit nonzero.
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(1)


if __name__ == "__main__":
    main()
|
|
@@ -0,0 +1,146 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: PyStormTracker
|
|
3
|
+
Version: 0.2.1
|
|
4
|
+
Summary: A Parallel Object-Oriented Cyclone Tracker in Python
|
|
5
|
+
Project-URL: Homepage, https://github.com/mwyau/PyStormTracker
|
|
6
|
+
Project-URL: Repository, https://github.com/mwyau/PyStormTracker.git
|
|
7
|
+
Project-URL: Issues, https://github.com/mwyau/PyStormTracker/issues
|
|
8
|
+
Author-email: "Albert M. W. Yau" <albert@mwyau.com>
|
|
9
|
+
Maintainer-email: "Albert M. W. Yau" <albert@mwyau.com>
|
|
10
|
+
License: BSD-3-Clause
|
|
11
|
+
License-File: LICENSE
|
|
12
|
+
Classifier: Development Status :: 4 - Beta
|
|
13
|
+
Classifier: Intended Audience :: Science/Research
|
|
14
|
+
Classifier: License :: OSI Approved :: BSD License
|
|
15
|
+
Classifier: Programming Language :: Python :: 3
|
|
16
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
17
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
18
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
19
|
+
Classifier: Topic :: Scientific/Engineering :: Atmospheric Science
|
|
20
|
+
Requires-Python: >=3.10
|
|
21
|
+
Requires-Dist: dask>=2024.2.0
|
|
22
|
+
Requires-Dist: distributed>=2024.2.0
|
|
23
|
+
Requires-Dist: netcdf4>=1.6.5
|
|
24
|
+
Requires-Dist: numpy>=1.24.0
|
|
25
|
+
Requires-Dist: scipy>=1.10.0
|
|
26
|
+
Provides-Extra: dev
|
|
27
|
+
Requires-Dist: mpi4py>=3.1.5; extra == 'dev'
|
|
28
|
+
Requires-Dist: mypy>=1.9.0; extra == 'dev'
|
|
29
|
+
Requires-Dist: netcdf4>=1.6.5; extra == 'dev'
|
|
30
|
+
Requires-Dist: pandas>=2.2.0; extra == 'dev'
|
|
31
|
+
Requires-Dist: pre-commit>=3.7.0; extra == 'dev'
|
|
32
|
+
Requires-Dist: pytest>=8.1.1; extra == 'dev'
|
|
33
|
+
Requires-Dist: ruff>=0.3.4; extra == 'dev'
|
|
34
|
+
Description-Content-Type: text/markdown
|
|
35
|
+
|
|
36
|
+
# PyStormTracker
|
|
37
|
+
|
|
38
|
+
[](https://doi.org/10.5281/zenodo.18764813)
|
|
39
|
+
|
|
40
|
+
PyStormTracker provides the implementation of the "Simple Tracker" algorithm used for cyclone trajectory analysis in **Yau and Chang (2020)**. It was originally developed at the **National Center for Atmospheric Research (NCAR)** as part of the **2015 Summer Internships in Parallel Computational Science (SIParCS)** program, utilizing a task-parallel strategy with temporal decomposition and a tree reduction algorithm to process large climate datasets.
|
|
41
|
+
|
|
42
|
+
## Features
|
|
43
|
+
|
|
44
|
+
- **Modern Python 3 Support**: Fully migrated from Python 2 with comprehensive type hints.
|
|
45
|
+
- **Flexible Data Support**: Works with `netCDF4` and handles various coordinate naming conventions (`lat`/`lon` vs `latitude`/`longitude`).
|
|
46
|
+
- **Parallel Backends**:
|
|
47
|
+
- **Dask (Default)**: Automatically scales to all available CPU cores on local machines.
|
|
48
|
+
- **MPI**: Supports distributed execution via `mpi4py`.
|
|
49
|
+
- **Serial**: Standard sequential execution for smaller datasets or debugging.
|
|
50
|
+
- **Robust Detection**: Handles masked/missing data correctly and includes automated unit/integration tests.
|
|
51
|
+
- **User-Friendly Output**: Results are exported directly to CSV with readable datetime strings and formatted numeric values.
|
|
52
|
+
|
|
53
|
+
## Technical Methodology
|
|
54
|
+
|
|
55
|
+
PyStormTracker treats meteorological fields as 2D images and leverages `scipy.ndimage` for robust feature detection:
|
|
56
|
+
|
|
57
|
+
- **Local Extrema Detection**: Uses `generic_filter` with a sliding window (default 5x5) to identify local minima (cyclones) or maxima (anticyclones/vorticity).
|
|
58
|
+
- **Intensity & Refinement**: Applies the **Laplacian operator** (`laplace`) to measure the "sharpness" of the field at each detected center point. This is used to resolve duplicates and ensure only the most physically intense point is kept when multiple adjacent pixels are flagged.
|
|
59
|
+
- **Spherical Continuity**: Utilizes `mode='wrap'` for all filters to correctly handle periodic boundaries across the Prime Meridian, enabling seamless tracking across the entire globe.
|
|
60
|
+
- **Heuristic Linking**: Implements a nearest-neighbor linking strategy to connect detected centers into trajectories across successive time steps.
|
|
61
|
+
|
|
62
|
+
## Installation
|
|
63
|
+
|
|
64
|
+
### Prerequisites
|
|
65
|
+
- Python 3.10+
|
|
66
|
+
- (Optional) MS-MPI or OpenMPI for MPI support.
|
|
67
|
+
|
|
68
|
+
### From PyPI (Recommended)
|
|
69
|
+
You can install the latest stable version of PyStormTracker directly from PyPI:
|
|
70
|
+
```bash
|
|
71
|
+
pip install PyStormTracker
|
|
72
|
+
```
|
|
73
|
+
|
|
74
|
+
### From Source
|
|
75
|
+
1. Clone the repository:
|
|
76
|
+
```bash
|
|
77
|
+
git clone https://github.com/mwyau/PyStormTracker.git
|
|
78
|
+
cd PyStormTracker
|
|
79
|
+
```
|
|
80
|
+
|
|
81
|
+
2. Install the package in editable mode:
|
|
82
|
+
```bash
|
|
83
|
+
pip install -e .
|
|
84
|
+
```
|
|
85
|
+
|
|
86
|
+
## Usage
|
|
87
|
+
|
|
88
|
+
Once installed, you can use the `stormtracker` command directly:
|
|
89
|
+
|
|
90
|
+
```bash
|
|
91
|
+
stormtracker -i data/test/slp.2012.nc -v slp -o my_tracks
|
|
92
|
+
```
|
|
93
|
+
|
|
94
|
+
### Command Line Arguments
|
|
95
|
+
|
|
96
|
+
| Argument | Short | Description |
|
|
97
|
+
| :--- | :--- | :--- |
|
|
98
|
+
| `--input` | `-i` | **Required.** Path to the input NetCDF file. |
|
|
99
|
+
| `--var` | `-v` | **Required.** Variable name to track (e.g., `slp`, `vo`). |
|
|
100
|
+
| `--output` | `-o` | **Required.** Path to the output CSV file (appends `.csv` if missing). |
|
|
101
|
+
| `--num` | `-n` | Number of time steps to process. |
|
|
102
|
+
| `--mode` | `-m` | `min` (default) for low pressure, `max` for vorticity/high pressure. |
|
|
103
|
+
| `--backend` | `-b` | `dask` (default), `serial`, or `mpi`. |
|
|
104
|
+
| `--workers` | `-w` | Number of Dask workers (defaults to CPU core count). |
|
|
105
|
+
|
|
106
|
+
### Examples
|
|
107
|
+
|
|
108
|
+
**Run with Dask (Auto-detected cores):**
|
|
109
|
+
```bash
|
|
110
|
+
stormtracker -i input.nc -v slp -o tracks
|
|
111
|
+
```
|
|
112
|
+
|
|
113
|
+
**Run with MPI (Distributed):**
|
|
114
|
+
```bash
|
|
115
|
+
mpiexec -n 4 stormtracker -i input.nc -v slp -o tracks -b mpi
|
|
116
|
+
```
|
|
117
|
+
|
|
118
|
+
## Project Structure
|
|
119
|
+
|
|
120
|
+
- `src/pystormtracker/models/`: Data structures (`Center`, `Grid`, `Tracks`).
|
|
121
|
+
- `src/pystormtracker/simple/`: Implementation of the Simple Tracker logic (`SimpleDetector`, `SimpleLinker`).
|
|
122
|
+
- `src/pystormtracker/stormtracker.py`: CLI orchestration and parallel backends.
|
|
123
|
+
|
|
124
|
+
## Testing
|
|
125
|
+
|
|
126
|
+
Run the full test suite (unit and integration tests) using `pytest`:
|
|
127
|
+
|
|
128
|
+
```bash
|
|
129
|
+
pytest
|
|
130
|
+
```
|
|
131
|
+
|
|
132
|
+
## Citations
|
|
133
|
+
|
|
134
|
+
If you use this software in your research, please cite the following:
|
|
135
|
+
|
|
136
|
+
- **Yau, A. M. W., and E. K. M. Chang**, 2020: Finding Storm Track Activity Metrics That Are Highly Correlated with Weather Impacts. Part I: Frameworks for Evaluation and Accumulated Track Activity. *J. Climate*, **33**, 10169–10186, https://doi.org/10.1175/JCLI-D-20-0393.1.
|
|
137
|
+
|
|
138
|
+
- **Yau, A. M. W.**, 2026: mwyau/PyStormTracker. *Zenodo*, https://doi.org/10.5281/zenodo.18764813.
|
|
139
|
+
|
|
140
|
+
## References
|
|
141
|
+
|
|
142
|
+
- **Yau, A. M. W., K. Paul and J. Dennis**, 2016: PyStormTracker: A Parallel Object-Oriented Cyclone Tracker in Python. *96th American Meteorological Society Annual Meeting*, New Orleans, LA. *Zenodo*, https://doi.org/10.5281/zenodo.18868625.
|
|
143
|
+
|
|
144
|
+
## License
|
|
145
|
+
|
|
146
|
+
This project is licensed under the terms found in the `LICENSE` file.
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
pystormtracker/__init__.py,sha256=jlK08FvQMrsH73lCnjX0UBIvdVLZqQFDap7YZw5Iz-k,164
|
|
2
|
+
pystormtracker/stormtracker.py,sha256=WzXQGfnrCSBBYPUSYu6DiDW0Bj2ImrjLdjrsWxoHXFk,7549
|
|
3
|
+
pystormtracker/models/__init__.py,sha256=ag5M_Ux68Qh3UJOyUJUvyfKIfMxYCCg4mH38DWSQUWc,117
|
|
4
|
+
pystormtracker/models/center.py,sha256=4T29JuOag7VSs56I8tIyRkaw6s0wyIZ96j8uk3ZIaYc,1735
|
|
5
|
+
pystormtracker/models/grid.py,sha256=prkkvODHukgL2A-R3IYGDWpwkXuLb6QiMAaJq8bo720,962
|
|
6
|
+
pystormtracker/models/tracks.py,sha256=QBqdaooyRA3tUaDx7q_6mE97mTPRxUr3MsMNBdIvM7o,789
|
|
7
|
+
pystormtracker/simple/__init__.py,sha256=p2Xc3gLfB3rQY3jQE3uK28qnBRxlFA8nV_OLItvdXtM,116
|
|
8
|
+
pystormtracker/simple/detector.py,sha256=LJUprquT5AAsNBzQfpHdjw1N-46WmlfMRoqf8KUjKAY,10124
|
|
9
|
+
pystormtracker/simple/linker.py,sha256=vvSJNFJTJbS0jgtwFZP6teewGIxHRHw-Eb9qIqKlQTA,3657
|
|
10
|
+
pystormtracker-0.2.1.dist-info/METADATA,sha256=MonuLFZb2mW_5xRAS8TcAXgLcPZdvnFn5X1BlGZA2CU,6313
|
|
11
|
+
pystormtracker-0.2.1.dist-info/WHEEL,sha256=QccIxa26bgl1E6uMy58deGWi-0aeIkkangHcxk2kWfw,87
|
|
12
|
+
pystormtracker-0.2.1.dist-info/entry_points.txt,sha256=3X4UgACtC5CjzcEQ2ykR2j1rgctmvHm0PoSVDBBV5JA,66
|
|
13
|
+
pystormtracker-0.2.1.dist-info/licenses/LICENSE,sha256=6X-vHoP9tzZOg2t88mmLa0kwgcS-MDPcmjHfj5nX7NU,2214
|
|
14
|
+
pystormtracker-0.2.1.dist-info/RECORD,,
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
Copyright (c) 2015, University Corporation for Atmospheric Research (UCAR)
|
|
2
|
+
Copyright (c) 2026, Albert M. W. Yau
|
|
3
|
+
All rights reserved.
|
|
4
|
+
|
|
5
|
+
Developed by: Application Scalability and Performance Group
|
|
6
|
+
University Corporation for Atmospheric Research -
|
|
7
|
+
National Center for Atmospheric Research
|
|
8
|
+
https://www.cisl.ucar.edu/
|
|
9
|
+
|
|
10
|
+
Developed by: Albert M. W. Yau
|
|
11
|
+
https://github.com/mwyau/PyStormTracker
|
|
12
|
+
|
|
13
|
+
Permission is hereby granted, free of charge, to any person obtaining
|
|
14
|
+
a copy of this software and associated documentation files (the "Software"),
|
|
15
|
+
to deal with the Software without restriction, including without limitation
|
|
16
|
+
the rights to use, copy, modify, merge, publish, distribute, sublicense,
|
|
17
|
+
and/or sell copies of the Software, and to permit persons to whom
|
|
18
|
+
the Software is furnished to do so, subject to the following conditions:
|
|
19
|
+
|
|
20
|
+
- Redistributions of source code must retain the above copyright notice,
|
|
21
|
+
this list of conditions and the following disclaimers.
|
|
22
|
+
- Redistributions in binary form must reproduce the above copyright notice,
|
|
23
|
+
this list of conditions and the following disclaimers in the documentation
|
|
24
|
+
and/or other materials provided with the distribution.
|
|
25
|
+
- Neither the names of the Application Scalability and Performance (ASAP)
|
|
26
|
+
Group, UCAR, nor the names of its contributors may be used to endorse or
|
|
27
|
+
promote products derived from this Software without specific prior written
|
|
28
|
+
permission.
|
|
29
|
+
|
|
30
|
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
31
|
+
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
32
|
+
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
33
|
+
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
34
|
+
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
35
|
+
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
36
|
+
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
37
|
+
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
38
|
+
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
39
|
+
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
40
|
+
POSSIBILITY OF SUCH DAMAGE.
|