solarviewer-1.0.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (82)
  1. solar_radio_image_viewer/__init__.py +12 -0
  2. solar_radio_image_viewer/assets/add_tab_default.png +0 -0
  3. solar_radio_image_viewer/assets/add_tab_default_light.png +0 -0
  4. solar_radio_image_viewer/assets/add_tab_hover.png +0 -0
  5. solar_radio_image_viewer/assets/add_tab_hover_light.png +0 -0
  6. solar_radio_image_viewer/assets/browse.png +0 -0
  7. solar_radio_image_viewer/assets/browse_light.png +0 -0
  8. solar_radio_image_viewer/assets/close_tab_default.png +0 -0
  9. solar_radio_image_viewer/assets/close_tab_default_light.png +0 -0
  10. solar_radio_image_viewer/assets/close_tab_hover.png +0 -0
  11. solar_radio_image_viewer/assets/close_tab_hover_light.png +0 -0
  12. solar_radio_image_viewer/assets/ellipse_selection.png +0 -0
  13. solar_radio_image_viewer/assets/ellipse_selection_light.png +0 -0
  14. solar_radio_image_viewer/assets/icons8-ellipse-90.png +0 -0
  15. solar_radio_image_viewer/assets/icons8-ellipse-90_light.png +0 -0
  16. solar_radio_image_viewer/assets/icons8-info-90.png +0 -0
  17. solar_radio_image_viewer/assets/icons8-info-90_light.png +0 -0
  18. solar_radio_image_viewer/assets/profile.png +0 -0
  19. solar_radio_image_viewer/assets/profile_light.png +0 -0
  20. solar_radio_image_viewer/assets/rectangle_selection.png +0 -0
  21. solar_radio_image_viewer/assets/rectangle_selection_light.png +0 -0
  22. solar_radio_image_viewer/assets/reset.png +0 -0
  23. solar_radio_image_viewer/assets/reset_light.png +0 -0
  24. solar_radio_image_viewer/assets/ruler.png +0 -0
  25. solar_radio_image_viewer/assets/ruler_light.png +0 -0
  26. solar_radio_image_viewer/assets/search.png +0 -0
  27. solar_radio_image_viewer/assets/search_light.png +0 -0
  28. solar_radio_image_viewer/assets/settings.png +0 -0
  29. solar_radio_image_viewer/assets/settings_light.png +0 -0
  30. solar_radio_image_viewer/assets/splash.fits +0 -0
  31. solar_radio_image_viewer/assets/zoom_60arcmin.png +0 -0
  32. solar_radio_image_viewer/assets/zoom_60arcmin_light.png +0 -0
  33. solar_radio_image_viewer/assets/zoom_in.png +0 -0
  34. solar_radio_image_viewer/assets/zoom_in_light.png +0 -0
  35. solar_radio_image_viewer/assets/zoom_out.png +0 -0
  36. solar_radio_image_viewer/assets/zoom_out_light.png +0 -0
  37. solar_radio_image_viewer/create_video.py +1345 -0
  38. solar_radio_image_viewer/dialogs.py +2665 -0
  39. solar_radio_image_viewer/from_simpl/__init__.py +184 -0
  40. solar_radio_image_viewer/from_simpl/caltable_visualizer.py +1001 -0
  41. solar_radio_image_viewer/from_simpl/dynamic_spectra_dialog.py +332 -0
  42. solar_radio_image_viewer/from_simpl/make_dynamic_spectra.py +351 -0
  43. solar_radio_image_viewer/from_simpl/pipeline_logger_gui.py +1232 -0
  44. solar_radio_image_viewer/from_simpl/simpl_theme.py +352 -0
  45. solar_radio_image_viewer/from_simpl/utils.py +984 -0
  46. solar_radio_image_viewer/from_simpl/view_dynamic_spectra_GUI.py +1975 -0
  47. solar_radio_image_viewer/helioprojective.py +1916 -0
  48. solar_radio_image_viewer/helioprojective_viewer.py +817 -0
  49. solar_radio_image_viewer/helioviewer_browser.py +1514 -0
  50. solar_radio_image_viewer/main.py +148 -0
  51. solar_radio_image_viewer/move_phasecenter.py +1269 -0
  52. solar_radio_image_viewer/napari_viewer.py +368 -0
  53. solar_radio_image_viewer/noaa_events/__init__.py +32 -0
  54. solar_radio_image_viewer/noaa_events/noaa_events.py +430 -0
  55. solar_radio_image_viewer/noaa_events/noaa_events_gui.py +1922 -0
  56. solar_radio_image_viewer/norms.py +293 -0
  57. solar_radio_image_viewer/radio_data_downloader/__init__.py +25 -0
  58. solar_radio_image_viewer/radio_data_downloader/radio_data_downloader.py +756 -0
  59. solar_radio_image_viewer/radio_data_downloader/radio_data_downloader_gui.py +528 -0
  60. solar_radio_image_viewer/searchable_combobox.py +220 -0
  61. solar_radio_image_viewer/solar_context/__init__.py +41 -0
  62. solar_radio_image_viewer/solar_context/active_regions.py +371 -0
  63. solar_radio_image_viewer/solar_context/cme_alerts.py +234 -0
  64. solar_radio_image_viewer/solar_context/context_images.py +297 -0
  65. solar_radio_image_viewer/solar_context/realtime_data.py +528 -0
  66. solar_radio_image_viewer/solar_data_downloader/__init__.py +35 -0
  67. solar_radio_image_viewer/solar_data_downloader/solar_data_downloader.py +1667 -0
  68. solar_radio_image_viewer/solar_data_downloader/solar_data_downloader_cli.py +901 -0
  69. solar_radio_image_viewer/solar_data_downloader/solar_data_downloader_gui.py +1210 -0
  70. solar_radio_image_viewer/styles.py +643 -0
  71. solar_radio_image_viewer/utils/__init__.py +32 -0
  72. solar_radio_image_viewer/utils/rate_limiter.py +255 -0
  73. solar_radio_image_viewer/utils.py +952 -0
  74. solar_radio_image_viewer/video_dialog.py +2629 -0
  75. solar_radio_image_viewer/video_utils.py +656 -0
  76. solar_radio_image_viewer/viewer.py +11174 -0
  77. solarviewer-1.0.2.dist-info/METADATA +343 -0
  78. solarviewer-1.0.2.dist-info/RECORD +82 -0
  79. solarviewer-1.0.2.dist-info/WHEEL +5 -0
  80. solarviewer-1.0.2.dist-info/entry_points.txt +8 -0
  81. solarviewer-1.0.2.dist-info/licenses/LICENSE +21 -0
  82. solarviewer-1.0.2.dist-info/top_level.txt +1 -0
solar_radio_image_viewer/radio_data_downloader/radio_data_downloader.py
@@ -0,0 +1,756 @@
+ #!/usr/bin/env python3
+ """
+ Radio Data Downloader - Core module for downloading and processing radio solar data.
+
+ This module provides functions for downloading radio solar data from various observatories
+ and converting it to FITS format compatible with the Dynamic Spectrum Viewer.
+
+ Currently supported:
+ - Learmonth Solar Observatory (Australia) - SRS spectrograph data
+ - San Vito (Italy) - RSTN SRS spectrograph data
+ - Palehua (Hawaii) - RSTN SRS spectrograph data
+ - Holloman (New Mexico) - RSTN SRS spectrograph data
+ """
+
+ import os
+ import sys
+ import numpy as np
+ import pandas as pd
+ from struct import unpack
+ from datetime import datetime
+ from scipy.signal import medfilt
+ from scipy import interpolate
+ from typing import Optional, Tuple, List
+ import urllib.request
+ import urllib.error
+
+
+ # ============================================================================
+ # RSTN Site Configuration
+ # ============================================================================
+
+ # All RSTN sites use the same 826-byte SRS binary format
+ RSTN_SITES = {
+     "Learmonth": {
+         "id": 3,
+         "file_prefix": "LM",  # Uppercase for BOM Australia
+         "location": "Australia",
+         "latitude": -22.22,
+         "longitude": 114.09,
+         "url_template": "https://downloads.sws.bom.gov.au/wdc/wdc_spec/data/learmonth/raw/{year2}/{filename}",
+         "alt_url_template": "https://www.ngdc.noaa.gov/stp/space-weather/solar-data/solar-features/solar-radio/rstn-spectral/learmonth/{year4}/{month}/{filename_lower}.gz",
+     },
+     "San Vito": {
+         "id": 4,
+         "file_prefix": "sv",  # Lowercase for NOAA NCEI
+         "location": "Italy",
+         "latitude": 40.63,
+         "longitude": 17.86,
+         "url_template": "https://www.ngdc.noaa.gov/stp/space-weather/solar-data/solar-features/solar-radio/rstn-spectral/san-vito/{year4}/{month}/{filename}.gz",
+         "alt_url_template": "https://www.ngdc.noaa.gov/stp/space-weather/solar-data/solar-features/solar-radio/rstn-spectral/san-vito/{year4}/{month}/{filename}",
+     },
+     "Palehua": {
+         "id": 1,
+         "file_prefix": "kp",  # Ka'ena Point prefix used by NOAA
+         "location": "Hawaii, USA",
+         "latitude": 21.42,
+         "longitude": -158.03,
+         "url_template": "https://www.ngdc.noaa.gov/stp/space-weather/solar-data/solar-features/solar-radio/rstn-spectral/palehua/{year4}/{month}/{filename}.gz",
+         "alt_url_template": "https://www.ngdc.noaa.gov/stp/space-weather/solar-data/solar-features/solar-radio/rstn-spectral/palehua/{year4}/{month}/{filename}",
+     },
+     "Holloman": {
+         "id": 2,
+         "file_prefix": "ho",  # Lowercase for NOAA NCEI
+         "location": "New Mexico, USA",
+         "latitude": 32.95,
+         "longitude": -106.01,
+         "url_template": "https://www.ngdc.noaa.gov/stp/space-weather/solar-data/solar-features/solar-radio/rstn-spectral/holloman/{year4}/{month}/{filename}.gz",
+         "alt_url_template": "https://www.ngdc.noaa.gov/stp/space-weather/solar-data/solar-features/solar-radio/rstn-spectral/holloman/{year4}/{month}/{filename}",
+         "note": "Data limited: approximately April 2000 to July 2004",
+     },
+ }
+
+ SITE_NAMES = {
+     1: "Palehua",
+     2: "Holloman",
+     3: "Learmonth",
+     4: "San Vito",
+ }
+
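For orientation, this is roughly how a filename and download URL are assembled from one of these entries; the date is hypothetical and the logic mirrors the construction in download_rstn_data further down (illustrative sketch, not part of the package):

    from datetime import datetime

    dt = datetime(2024, 1, 15)                   # hypothetical example date
    cfg = RSTN_SITES["Learmonth"]
    filename = f"{cfg['file_prefix']}{str(dt.year)[2:]}{dt.month:02d}{dt.day:02d}.srs"  # -> "LM240115.srs"
    url = cfg["url_template"].format(
        year2=str(dt.year)[2:], year4=str(dt.year), month=f"{dt.month:02d}",
        filename=filename, filename_lower=filename.lower(),
    )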
+ # ============================================================================
+ # SRS File Parser (adapted from learmonth-py/srs_data.py)
+ # ============================================================================
+
+ RECORD_SIZE = 826
+ RECORD_HEADER_SIZE = 24
+ RECORD_ARRAY_SIZE = 401
+
+
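A quick consistency check on these constants: each record is a 24-byte header followed by 401 A-band and 401 B-band level bytes (illustrative assertion only):

    assert RECORD_HEADER_SIZE + 2 * RECORD_ARRAY_SIZE == RECORD_SIZE  # 24 + 2 * 401 = 826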
+ class SRSRecord:
+     """Holds one 826 byte SRS Record."""
+
+     def __init__(self):
+         self.year = None
+         self.month = None
+         self.day = None
+         self.hour = None
+         self.minute = None
+         self.seconds = None
+         self.site_number = None
+         self.site_name = None
+         self.n_bands_per_record = None
+
+         self.a_start_freq = None
+         self.a_end_freq = None
+         self.a_values = {}  # frequency -> level
+
+         self.b_start_freq = None
+         self.b_end_freq = None
+         self.b_values = {}  # frequency -> level
+
+     def _parse_header(self, header_bytes):
+         """Parse the 24-byte record header."""
+         fields = unpack(
+             '>BBBBBBBB'  # Year, Month, Day, Hour, Minute, Second, Site, n_bands
+             'hHHBB'      # A-band: start, end, n_bytes, ref_level, attenuation
+             'HHHBB',     # B-band: start, end, n_bytes, ref_level, attenuation
+             header_bytes
+         )
+
+         self.year = fields[0]
+         self.month = fields[1]
+         self.day = fields[2]
+         self.hour = fields[3]
+         self.minute = fields[4]
+         self.seconds = fields[5]
+         self.site_number = fields[6]
+         self.site_name = SITE_NAMES.get(self.site_number, "Unknown")
+         self.n_bands_per_record = fields[7]
+
+         self.a_start_freq = fields[8]
+         self.a_end_freq = fields[9]
+         self.b_start_freq = fields[13]
+         self.b_end_freq = fields[14]
+
+     def _parse_a_levels(self, a_bytes):
+         """Parse the A-band (25-75 MHz) levels."""
+         for i in range(401):
+             freq_a = 25 + 50 * i / 400.0
+             level_a = unpack('>B', a_bytes[i:i+1])[0]
+             self.a_values[freq_a] = level_a
+
+     def _parse_b_levels(self, b_bytes):
+         """Parse the B-band (75-180 MHz) levels."""
+         for i in range(401):
+             freq_b = 75 + 105 * i / 400.0
+             level_b = unpack('>B', b_bytes[i:i+1])[0]
+             self.b_values[freq_b] = level_b
+
+     def get_timestamp(self) -> datetime:
+         """Get the timestamp for this record."""
+         # Handle 2-digit year
+         full_year = 2000 + self.year if self.year < 100 else self.year
+         return datetime(full_year, self.month, self.day,
+                         self.hour, self.minute, self.seconds)
+
+     def __str__(self):
+         return f"{self.day:02d}/{self.month:02d}/{self.year:02d}, {self.hour:02d}:{self.minute:02d}:{self.seconds:02d}"
+
+
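Two illustrative sanity checks on the parser above (not part of the package): the struct format string spans exactly the 24-byte header, and the channel-to-frequency mapping covers 25-75 MHz (A band) and 75-180 MHz (B band) across 401 samples each.

    from struct import calcsize

    assert calcsize('>BBBBBBBBhHHBBHHHBB') == RECORD_HEADER_SIZE             # 24-byte header
    assert (25 + 50 * 0 / 400.0, 25 + 50 * 400 / 400.0) == (25.0, 75.0)      # A band endpoints
    assert (75 + 105 * 0 / 400.0, 75 + 105 * 400 / 400.0) == (75.0, 180.0)   # B band endpoints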
+ def read_srs_file(fname: str) -> List[SRSRecord]:
+     """Parse an SRS file and return a list of SRSRecord objects."""
+     srs_records = []
+     with open(fname, "rb") as f:
+         while True:
+             record_data = f.read(RECORD_SIZE)
+             if len(record_data) == 0:
+                 break
+             if len(record_data) < RECORD_SIZE:
+                 break
+
+             header_bytes = record_data[:RECORD_HEADER_SIZE]
+             a_bytes = record_data[RECORD_HEADER_SIZE:RECORD_HEADER_SIZE + RECORD_ARRAY_SIZE]
+             b_bytes = record_data[RECORD_HEADER_SIZE + RECORD_ARRAY_SIZE:RECORD_HEADER_SIZE + 2 * RECORD_ARRAY_SIZE]
+
+             record = SRSRecord()
+             record._parse_header(header_bytes)
+             record._parse_a_levels(a_bytes)
+             record._parse_b_levels(b_bytes)
+             srs_records.append(record)
+
+     return srs_records
+
+
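A minimal usage sketch for the parser (the file path is hypothetical):

    records = read_srs_file("LM240115.srs")
    print(f"{len(records)} records, first at {records[0].get_timestamp()}, last at {records[-1].get_timestamp()}")
    print(f"{len(records[0].a_values)} A-band + {len(records[0].b_values)} B-band channels per record")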
+ # ============================================================================
+ # Download Functions
+ # ============================================================================
+
+ def download_rstn_data(
+     site: str,
+     date: str,
+     output_dir: str = ".",
+     progress_callback=None,
+ ) -> Optional[str]:
+     """
+     Download RSTN spectrograph data for a given site and date.
+
+     Args:
+         site: Station name (Learmonth, San Vito, Palehua, Holloman)
+         date: Date in format 'YYYY-MM-DD' or 'DD-MM-YYYY'
+         output_dir: Directory to save the downloaded file
+         progress_callback: Optional callback function for progress updates
+
+     Returns:
+         Path to the downloaded SRS file, or None if download failed
+     """
+     # Validate site
+     if site not in RSTN_SITES:
+         if progress_callback:
+             progress_callback(f"Unknown site: {site}. Available: {list(RSTN_SITES.keys())}")
+         return None
+
+     site_config = RSTN_SITES[site]
+
+     # Parse the date
+     try:
+         if '-' in date:
+             parts = date.split('-')
+             if len(parts[0]) == 4:  # YYYY-MM-DD
+                 dt = datetime.strptime(date, '%Y-%m-%d')
+             else:  # DD-MM-YYYY
+                 dt = datetime.strptime(date, '%d-%m-%Y')
+         else:
+             raise ValueError(f"Invalid date format: {date}")
+     except ValueError as e:
+         if progress_callback:
+             progress_callback(f"Error parsing date: {e}")
+         return None
+
+     # Construct filename
+     year2 = str(dt.year)[2:]  # Last 2 digits
+     year4 = str(dt.year)  # Full year
+     month = f"{dt.month:02d}"
+     day_stamp = f"{dt.day:02d}"
+     prefix = site_config["file_prefix"]
+     file_name = f"{prefix}{year2}{month}{day_stamp}.srs"
+     file_name_lower = file_name.lower()  # For NOAA URLs
+
+     output_path = os.path.join(output_dir, file_name)
+
+     # Check if file already exists
+     if os.path.exists(output_path):
+         if progress_callback:
+             progress_callback(f"File already exists: {file_name}")
+         return output_path
+
+     # Create output directory if needed
+     os.makedirs(output_dir, exist_ok=True)
+
+     # Construct download URL
+     url_template = site_config["url_template"]
+     download_url = url_template.format(
+         year2=year2, year4=year4, month=month,
+         filename=file_name, filename_lower=file_name_lower
+     )
+
+     if progress_callback:
+         progress_callback(f"Downloading from: {download_url}")
+
+     # Helper function to download and decompress if needed
+     def download_and_decompress(url, out_path):
+         import gzip
+         import shutil
+         import tempfile
+
+         if url.endswith('.gz'):
+             # Download to temp file, then decompress
+             with tempfile.NamedTemporaryFile(delete=False, suffix='.srs.gz') as tmp:
+                 tmp_path = tmp.name
+             urllib.request.urlretrieve(url, tmp_path)
+             with gzip.open(tmp_path, 'rb') as f_in:
+                 with open(out_path, 'wb') as f_out:
+                     shutil.copyfileobj(f_in, f_out)
+             os.remove(tmp_path)
+         else:
+             urllib.request.urlretrieve(url, out_path)
+
+     try:
+         download_and_decompress(download_url, output_path)
+         if progress_callback:
+             progress_callback(f"Downloaded: {file_name}")
+         return output_path
+     except (urllib.error.HTTPError, urllib.error.URLError) as e:
+         # Try alternate URL if available
+         if "alt_url_template" in site_config:
+             alt_url = site_config["alt_url_template"].format(
+                 year2=year2, year4=year4, month=month,
+                 filename=file_name, filename_lower=file_name_lower
+             )
+             if progress_callback:
+                 progress_callback(f"Primary URL failed, trying alternate: {alt_url}")
+             try:
+                 download_and_decompress(alt_url, output_path)
+                 if progress_callback:
+                     progress_callback(f"Downloaded: {file_name}")
+                 return output_path
+             except Exception as e2:
+                 if progress_callback:
+                     progress_callback(f"Alternate URL also failed: {e2}")
+
+         if progress_callback:
+             progress_callback(f"Download failed: {e}")
+         return None
+     except Exception as e:
+         if progress_callback:
+             progress_callback(f"Download error: {e}")
+         return None
+
+
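Illustrative call (requires network access and assumes data exists for the chosen site and date):

    srs_path = download_rstn_data("Learmonth", "2024-01-15", output_dir="./srs_data",
                                  progress_callback=print)
    if srs_path:
        print("Saved:", srs_path)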
+ def download_learmonth(
+     date: str,
+     output_dir: str = ".",
+     progress_callback=None,
+ ) -> Optional[str]:
+     """
+     Download Learmonth spectrograph data for a given date.
+
+     This is a convenience wrapper around download_rstn_data for backwards compatibility.
+
+     Args:
+         date: Date in format 'YYYY-MM-DD' or 'DD-MM-YYYY'
+         output_dir: Directory to save the downloaded file
+         progress_callback: Optional callback function for progress updates
+
+     Returns:
+         Path to the downloaded SRS file, or None if download failed
+     """
+     return download_rstn_data("Learmonth", date, output_dir, progress_callback)
+
+
+ # ============================================================================
+ # Data Processing Functions
+ # ============================================================================
+
+ def fill_nan(arr: np.ndarray) -> np.ndarray:
+     """Interpolate to fill NaN values in array."""
+     try:
+         inds = np.arange(arr.shape[0])
+         good = np.where(np.isfinite(arr))
+         if len(good[0]) == 0:
+             return arr
+         f = interpolate.interp1d(
+             inds[good], arr[good],
+             bounds_error=False, kind='linear', fill_value='extrapolate'
+         )
+         out_arr = np.where(np.isfinite(arr), arr, f(inds))
+     except Exception:
+         out_arr = arr
+     return out_arr
+
+
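For example, interior NaNs are filled by linear interpolation and edge NaNs by extrapolation (illustrative sketch):

    arr = np.array([1.0, np.nan, 3.0, np.nan, 5.0])
    print(fill_nan(arr))  # -> [1. 2. 3. 4. 5.]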
+ def srs_to_dataframe(
+     srs_file: str,
+     bkg_sub: bool = False,
+     do_flag: bool = True,
+     flag_cal_time: bool = True,
+     progress_callback=None,
+ ) -> Tuple[Optional[pd.DataFrame], Optional[np.ndarray], Optional[np.ndarray]]:
+     """
+     Convert SRS file to pandas DataFrame with processing.
+
+     Args:
+         srs_file: Path to the SRS file
+         bkg_sub: Whether to perform background subtraction
+         do_flag: Whether to flag known bad channels
+         flag_cal_time: Whether to flag calibration time periods
+         progress_callback: Optional callback for progress updates
+
+     Returns:
+         Tuple of (DataFrame, frequencies array, timestamps array)
+     """
+     if progress_callback:
+         progress_callback("Reading SRS file...")
+
+     srs_records = read_srs_file(srs_file)
+
+     if not srs_records:
+         return None, None, None
+
+     if progress_callback:
+         progress_callback(f"Read {len(srs_records)} records")
+
+     # Extract timestamps
+     timestamps = [record.get_timestamp() for record in srs_records]
+     timestamps = pd.to_datetime(timestamps)
+
+     # Get frequency arrays
+     a_freqs = list(srs_records[0].a_values.keys())
+     b_freqs = list(srs_records[0].b_values.keys())
+     freqs = np.array(a_freqs + b_freqs)
+     freqs = np.round(freqs, 1)
+
+     # Build data array
+     if progress_callback:
+         progress_callback("Building data array...")
+
+     data = []
+     for record in srs_records:
+         a_data = list(record.a_values.values())
+         b_data = list(record.b_values.values())
+         data.append(a_data + b_data)
+
+     data = np.array(data).astype('float')
+
+     # Create DataFrame
+     df = pd.DataFrame(data, index=timestamps, columns=freqs)
+     df = df.sort_index(axis=0)
+     df = df.sort_index(axis=1)
+
+     # Get sorted arrays
+     final_freqs = df.columns.values
+     final_timestamps = df.index
+     final_data = df.to_numpy().astype('float')
+
+     if progress_callback:
+         progress_callback("Processing data...")
+
+     # Flagging bad channels
+     if do_flag:
+         # Known bad frequency channel ranges (as indices)
+         bad_ranges = [
+             (488, 499), (524, 533), (540, 550), (638, 642),
+             (119, 129), (108, 111), (150, 160), (197, 199),
+             (285, 289), (621, 632), (592, 600), (700, 712),
+             (410, 416), (730, 741), (635, 645), (283, 292),
+             (216, 222), (590, 602), (663, 667), (684, 690),
+             (63, 66), (54, 59), (27, 31),
+         ]
+         for start, end in bad_ranges:
+             if start < final_data.shape[1] and end <= final_data.shape[1]:
+                 final_data[:, start:end] = np.nan
+
+     # Flag calibration times if requested
+     if flag_cal_time:
+         y = np.nanmedian(final_data, axis=1)
+         c = y / medfilt(y, min(1001, len(y) // 2 * 2 + 1))  # Ensure odd kernel size
+         c_std = np.nanstd(c)
+         pos = np.where(c > 1 + (10 * c_std))
+         final_data[pos, :] = np.nan
+
+     # Interpolate over NaNs
+     if progress_callback:
+         progress_callback("Interpolating missing data...")
+
+     for i in range(final_data.shape[0]):
+         final_data[i, :] = fill_nan(final_data[i, :])
+
+     # Flag edge channels
+     if do_flag and final_data.shape[1] > 780:
+         final_data[:, 780:] = np.nan
+
+     # Background subtraction
+     if bkg_sub:
+         if progress_callback:
+             progress_callback("Subtracting background...")
+         for ch in range(final_data.shape[1]):
+             median_val = np.nanmedian(final_data[:, ch])
+             if median_val > 0:
+                 final_data[:, ch] = final_data[:, ch] / median_val
+
+     # Create final DataFrame
+     result_df = pd.DataFrame(final_data, index=final_timestamps, columns=final_freqs)
+
+     return result_df, final_freqs, final_timestamps
+
+
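Typical use, starting from an already-downloaded SRS file (path hypothetical):

    df, freqs, times = srs_to_dataframe("LM240115.srs", bkg_sub=True, progress_callback=print)
    if df is not None:
        print(df.shape)                           # (n_times, n_freqs)
        print(freqs.min(), "-", freqs.max(), "MHz")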
+ def dataframe_to_fits(
+     df: pd.DataFrame,
+     freqs: np.ndarray,
+     timestamps: pd.DatetimeIndex,
+     output_file: str,
+     site_name: str = "Learmonth",
+     progress_callback=None,
+ ) -> Optional[str]:
+     """
+     Convert DataFrame to FITS file compatible with Dynamic Spectrum Viewer.
+
+     Args:
+         df: DataFrame with shape (n_times, n_freqs)
+         freqs: Frequency array in MHz
+         timestamps: Timestamp array
+         output_file: Output FITS file path
+         site_name: Name of the observatory
+         progress_callback: Optional callback for progress updates
+
+     Returns:
+         Path to the created FITS file, or None if failed
+     """
+     try:
+         from astropy.io import fits
+         from astropy.time import Time
+     except ImportError:
+         print("Error: astropy is required for FITS output")
+         return None
+
+     if progress_callback:
+         progress_callback("Creating FITS file...")
+
+     # Get data array (time x frequency)
+     data = df.to_numpy().astype(np.float32)
+
+     # Transpose to (frequency x time) for standard dynamic spectrum format
+     data = data.T
+
+     # Create primary HDU with the data
+     hdu = fits.PrimaryHDU(data)
+
+     # Add header keywords
+     header = hdu.header
+
+     # Basic info
+     header['TELESCOP'] = site_name
+     header['INSTRUME'] = f'{site_name} Spectrograph'
+     header['OBJECT'] = 'Sun'
+     header['BUNIT'] = 'arbitrary'
+
+     # Time info
+     t_start = Time(timestamps[0])
+     t_end = Time(timestamps[-1])
+     header['DATE-OBS'] = t_start.isot
+     header['DATE-END'] = t_end.isot
+     header['TIMESYS'] = 'UTC'
+
+     # Frequency axis (axis 1 = rows = frequency)
+     header['CTYPE1'] = 'FREQ'
+     header['CUNIT1'] = 'MHz'
+     header['CRPIX1'] = 1
+     header['CRVAL1'] = float(freqs[0])
+     if len(freqs) > 1:
+         header['CDELT1'] = float(freqs[1] - freqs[0])
+     else:
+         header['CDELT1'] = 1.0
+     header['NAXIS1'] = len(freqs)
+
+     # Time axis (axis 2 = columns = time)
+     header['CTYPE2'] = 'TIME'
+     header['CUNIT2'] = 's'
+     header['CRPIX2'] = 1
+     header['CRVAL2'] = 0.0
+     if len(timestamps) > 1:
+         dt = (timestamps[1] - timestamps[0]).total_seconds()
+         header['CDELT2'] = dt
+     else:
+         header['CDELT2'] = 3.0  # Default 3 second cadence
+     header['NAXIS2'] = len(timestamps)
+
+     # Frequency range for convenience
+     header['FREQ_MIN'] = float(np.nanmin(freqs))
+     header['FREQ_MAX'] = float(np.nanmax(freqs))
+
+     # History
+     header['HISTORY'] = 'Created by Radio Solar Data Downloader'
+     header['HISTORY'] = f'Source: {site_name} Solar Spectrograph'
+
+     # Create HDU list starting with primary
+     hdul = fits.HDUList([hdu])
+
+     # Add TIME_AXIS extension with MJD times (required by Dynamic Spectra Viewer)
+     try:
+         from astropy.table import Table
+
+         # Convert timestamps to MJD
+         time_objs = Time(list(timestamps))
+         time_mjd = time_objs.mjd
+
+         time_table = Table()
+         time_table["TIME_MJD"] = time_mjd
+         time_hdu = fits.BinTableHDU(time_table, name="TIME_AXIS")
+         hdul.append(time_hdu)
+
+         if progress_callback:
+             progress_callback("Added TIME_AXIS extension with MJD times")
+     except Exception as e:
+         if progress_callback:
+             progress_callback(f"Warning: Could not add TIME_AXIS: {e}")
+
+     # Add FREQ_AXIS extension with frequencies in MHz (required by Dynamic Spectra Viewer)
+     try:
+         freq_table = Table()
+         freq_table["FREQ_MHz"] = freqs.astype(np.float64)
+         freq_hdu = fits.BinTableHDU(freq_table, name="FREQ_AXIS")
+         hdul.append(freq_hdu)
+
+         if progress_callback:
+             progress_callback("Added FREQ_AXIS extension with MHz frequencies")
+     except Exception as e:
+         if progress_callback:
+             progress_callback(f"Warning: Could not add FREQ_AXIS: {e}")
+
+     # Ensure output directory exists
+     os.makedirs(os.path.dirname(output_file) if os.path.dirname(output_file) else '.', exist_ok=True)
+
+     hdul.writeto(output_file, overwrite=True)
+
+     if progress_callback:
+         progress_callback(f"FITS file created: {output_file}")
+
+     return output_file
+
+
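The resulting file can be checked with astropy; the extension and column names below follow the code above (the output path is hypothetical):

    from astropy.io import fits

    with fits.open("LM240115_dynamic_spectrum.fits") as hdul:
        dyn_spec = hdul[0].data                           # transposed (frequency x time) array
        times_mjd = hdul["TIME_AXIS"].data["TIME_MJD"]
        freqs_mhz = hdul["FREQ_AXIS"].data["FREQ_MHz"]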
+ # ============================================================================
+ # High-level convenience function
+ # ============================================================================
+
+ def download_and_convert_rstn(
+     site: str,
+     date: str,
+     output_dir: str = ".",
+     start_time: Optional[str] = None,
+     end_time: Optional[str] = None,
+     bkg_sub: bool = False,
+     do_flag: bool = True,
+     flag_cal_time: bool = True,
+     progress_callback=None,
+ ) -> Optional[str]:
+     """
+     Download RSTN data from any site and convert to FITS in one step.
+
+     Args:
+         site: Station name (Learmonth, San Vito, Palehua, Holloman)
+         date: Date in format 'YYYY-MM-DD' or 'DD-MM-YYYY'
+         output_dir: Directory for output files
+         start_time: Optional start time in format 'HH:MM:SS' to filter data
+         end_time: Optional end time in format 'HH:MM:SS' to filter data
+         bkg_sub: Whether to perform background subtraction
+         do_flag: Whether to flag known bad channels
+         flag_cal_time: Whether to flag calibration time periods
+         progress_callback: Optional callback for progress updates
+
+     Returns:
+         Path to the created FITS file, or None if failed
+     """
+     # Download SRS file
+     srs_file = download_rstn_data(site, date, output_dir, progress_callback)
+     if not srs_file:
+         return None
+
+     # Convert to DataFrame
+     df, freqs, timestamps = srs_to_dataframe(
+         srs_file, bkg_sub, do_flag, flag_cal_time, progress_callback
+     )
+     if df is None:
+         return None
+
+     # Filter by time range if specified
+     if start_time or end_time:
+         if progress_callback:
+             progress_callback(f"Filtering time range: {start_time or 'start'} to {end_time or 'end'}")
+
+         # Debug: show actual data time range
+         if progress_callback:
+             progress_callback(f"Data time range: {timestamps[0]} to {timestamps[-1]}")
+
+         original_len = len(df)
+
+         # The SRS data can span two calendar days (e.g., Dec 24 21:43 to Dec 25 10:56 UTC)
+         # So we need to filter by time-of-day, not by absolute datetime
+
+         if start_time and end_time:
+             # Parse times
+             start_h, start_m, start_s = map(int, start_time.split(':'))
+             end_h, end_m, end_s = map(int, end_time.split(':'))
+
+             # Create time objects for comparison
+             from datetime import time as dt_time
+             start_t = dt_time(start_h, start_m, start_s)
+             end_t = dt_time(end_h, end_m, end_s)
+
+             if progress_callback:
+                 progress_callback(f"Filtering for times between {start_t} and {end_t}")
+
+             # Filter by time-of-day
+             mask = [(idx.time() >= start_t) and (idx.time() <= end_t) for idx in df.index]
+             df = df[mask]
+
+         elif start_time:
+             start_h, start_m, start_s = map(int, start_time.split(':'))
+             from datetime import time as dt_time
+             start_t = dt_time(start_h, start_m, start_s)
+             mask = [idx.time() >= start_t for idx in df.index]
+             df = df[mask]
+
+         elif end_time:
+             end_h, end_m, end_s = map(int, end_time.split(':'))
+             from datetime import time as dt_time
+             end_t = dt_time(end_h, end_m, end_s)
+             mask = [idx.time() <= end_t for idx in df.index]
+             df = df[mask]
+
+         # Update timestamps and freqs from filtered dataframe
+         if len(df) > 0:
+             timestamps = df.index
+             freqs = df.columns.values
+
+         if progress_callback:
+             progress_callback(f"Filtered from {original_len} to {len(df)} time samples")
+
+         if len(df) == 0:
+             if progress_callback:
+                 progress_callback("Error: No data in selected time range")
+             return None
+
+     # Create FITS file with time range info in filename
+     base_name = os.path.basename(srs_file).replace('.srs', '')
+     if start_time and end_time:
+         time_suffix = f"_{start_time.replace(':', '')}-{end_time.replace(':', '')}"
+     elif start_time:
+         time_suffix = f"_{start_time.replace(':', '')}-end"
+     elif end_time:
+         time_suffix = f"_start-{end_time.replace(':', '')}"
+     else:
+         time_suffix = ""
+
+     fits_file = os.path.join(output_dir, f"{base_name}{time_suffix}_dynamic_spectrum.fits")
+     result = dataframe_to_fits(df, freqs, timestamps, fits_file, site, progress_callback)
+
+     return result
+
+
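End-to-end sketch for another site, with an optional time-of-day window (values are illustrative):

    fits_path = download_and_convert_rstn(
        "San Vito", "2023-07-12", output_dir="./rstn_data",
        start_time="08:00:00", end_time="12:00:00",
        bkg_sub=True, progress_callback=print,
    )
    print("FITS file:", fits_path)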
+ def download_and_convert_learmonth(
+     date: str,
+     output_dir: str = ".",
+     start_time: Optional[str] = None,
+     end_time: Optional[str] = None,
+     bkg_sub: bool = False,
+     do_flag: bool = True,
+     flag_cal_time: bool = True,
+     progress_callback=None,
+ ) -> Optional[str]:
+     """
+     Download Learmonth data and convert to FITS in one step.
+
+     Convenience wrapper around download_and_convert_rstn for backwards compatibility.
+     """
+     return download_and_convert_rstn(
+         "Learmonth", date, output_dir, start_time, end_time,
+         bkg_sub, do_flag, flag_cal_time, progress_callback
+     )
+
+
+ if __name__ == "__main__":
+     # Test with a sample date
+     import sys
+     if len(sys.argv) > 1:
+         date = sys.argv[1]
+     else:
+         date = "2024-01-15"
+
+     def progress(msg):
+         print(f" {msg}")
+
+     print(f"Downloading and converting Learmonth data for {date}...")
+     result = download_and_convert_learmonth(date, output_dir="./learmonth_data", progress_callback=progress)
+     if result:
+         print(f"Success! FITS file: {result}")
+     else:
+         print("Failed to download/convert data")