solarviewer-1.0.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (82)
  1. solar_radio_image_viewer/__init__.py +12 -0
  2. solar_radio_image_viewer/assets/add_tab_default.png +0 -0
  3. solar_radio_image_viewer/assets/add_tab_default_light.png +0 -0
  4. solar_radio_image_viewer/assets/add_tab_hover.png +0 -0
  5. solar_radio_image_viewer/assets/add_tab_hover_light.png +0 -0
  6. solar_radio_image_viewer/assets/browse.png +0 -0
  7. solar_radio_image_viewer/assets/browse_light.png +0 -0
  8. solar_radio_image_viewer/assets/close_tab_default.png +0 -0
  9. solar_radio_image_viewer/assets/close_tab_default_light.png +0 -0
  10. solar_radio_image_viewer/assets/close_tab_hover.png +0 -0
  11. solar_radio_image_viewer/assets/close_tab_hover_light.png +0 -0
  12. solar_radio_image_viewer/assets/ellipse_selection.png +0 -0
  13. solar_radio_image_viewer/assets/ellipse_selection_light.png +0 -0
  14. solar_radio_image_viewer/assets/icons8-ellipse-90.png +0 -0
  15. solar_radio_image_viewer/assets/icons8-ellipse-90_light.png +0 -0
  16. solar_radio_image_viewer/assets/icons8-info-90.png +0 -0
  17. solar_radio_image_viewer/assets/icons8-info-90_light.png +0 -0
  18. solar_radio_image_viewer/assets/profile.png +0 -0
  19. solar_radio_image_viewer/assets/profile_light.png +0 -0
  20. solar_radio_image_viewer/assets/rectangle_selection.png +0 -0
  21. solar_radio_image_viewer/assets/rectangle_selection_light.png +0 -0
  22. solar_radio_image_viewer/assets/reset.png +0 -0
  23. solar_radio_image_viewer/assets/reset_light.png +0 -0
  24. solar_radio_image_viewer/assets/ruler.png +0 -0
  25. solar_radio_image_viewer/assets/ruler_light.png +0 -0
  26. solar_radio_image_viewer/assets/search.png +0 -0
  27. solar_radio_image_viewer/assets/search_light.png +0 -0
  28. solar_radio_image_viewer/assets/settings.png +0 -0
  29. solar_radio_image_viewer/assets/settings_light.png +0 -0
  30. solar_radio_image_viewer/assets/splash.fits +0 -0
  31. solar_radio_image_viewer/assets/zoom_60arcmin.png +0 -0
  32. solar_radio_image_viewer/assets/zoom_60arcmin_light.png +0 -0
  33. solar_radio_image_viewer/assets/zoom_in.png +0 -0
  34. solar_radio_image_viewer/assets/zoom_in_light.png +0 -0
  35. solar_radio_image_viewer/assets/zoom_out.png +0 -0
  36. solar_radio_image_viewer/assets/zoom_out_light.png +0 -0
  37. solar_radio_image_viewer/create_video.py +1345 -0
  38. solar_radio_image_viewer/dialogs.py +2665 -0
  39. solar_radio_image_viewer/from_simpl/__init__.py +184 -0
  40. solar_radio_image_viewer/from_simpl/caltable_visualizer.py +1001 -0
  41. solar_radio_image_viewer/from_simpl/dynamic_spectra_dialog.py +332 -0
  42. solar_radio_image_viewer/from_simpl/make_dynamic_spectra.py +351 -0
  43. solar_radio_image_viewer/from_simpl/pipeline_logger_gui.py +1232 -0
  44. solar_radio_image_viewer/from_simpl/simpl_theme.py +352 -0
  45. solar_radio_image_viewer/from_simpl/utils.py +984 -0
  46. solar_radio_image_viewer/from_simpl/view_dynamic_spectra_GUI.py +1975 -0
  47. solar_radio_image_viewer/helioprojective.py +1916 -0
  48. solar_radio_image_viewer/helioprojective_viewer.py +817 -0
  49. solar_radio_image_viewer/helioviewer_browser.py +1514 -0
  50. solar_radio_image_viewer/main.py +148 -0
  51. solar_radio_image_viewer/move_phasecenter.py +1269 -0
  52. solar_radio_image_viewer/napari_viewer.py +368 -0
  53. solar_radio_image_viewer/noaa_events/__init__.py +32 -0
  54. solar_radio_image_viewer/noaa_events/noaa_events.py +430 -0
  55. solar_radio_image_viewer/noaa_events/noaa_events_gui.py +1922 -0
  56. solar_radio_image_viewer/norms.py +293 -0
  57. solar_radio_image_viewer/radio_data_downloader/__init__.py +25 -0
  58. solar_radio_image_viewer/radio_data_downloader/radio_data_downloader.py +756 -0
  59. solar_radio_image_viewer/radio_data_downloader/radio_data_downloader_gui.py +528 -0
  60. solar_radio_image_viewer/searchable_combobox.py +220 -0
  61. solar_radio_image_viewer/solar_context/__init__.py +41 -0
  62. solar_radio_image_viewer/solar_context/active_regions.py +371 -0
  63. solar_radio_image_viewer/solar_context/cme_alerts.py +234 -0
  64. solar_radio_image_viewer/solar_context/context_images.py +297 -0
  65. solar_radio_image_viewer/solar_context/realtime_data.py +528 -0
  66. solar_radio_image_viewer/solar_data_downloader/__init__.py +35 -0
  67. solar_radio_image_viewer/solar_data_downloader/solar_data_downloader.py +1667 -0
  68. solar_radio_image_viewer/solar_data_downloader/solar_data_downloader_cli.py +901 -0
  69. solar_radio_image_viewer/solar_data_downloader/solar_data_downloader_gui.py +1210 -0
  70. solar_radio_image_viewer/styles.py +643 -0
  71. solar_radio_image_viewer/utils/__init__.py +32 -0
  72. solar_radio_image_viewer/utils/rate_limiter.py +255 -0
  73. solar_radio_image_viewer/utils.py +952 -0
  74. solar_radio_image_viewer/video_dialog.py +2629 -0
  75. solar_radio_image_viewer/video_utils.py +656 -0
  76. solar_radio_image_viewer/viewer.py +11174 -0
  77. solarviewer-1.0.2.dist-info/METADATA +343 -0
  78. solarviewer-1.0.2.dist-info/RECORD +82 -0
  79. solarviewer-1.0.2.dist-info/WHEEL +5 -0
  80. solarviewer-1.0.2.dist-info/entry_points.txt +8 -0
  81. solarviewer-1.0.2.dist-info/licenses/LICENSE +21 -0
  82. solarviewer-1.0.2.dist-info/top_level.txt +1 -0
@@ -0,0 +1,1667 @@
1
+ import drms, time, os, glob, warnings
2
+ from sunpy.map import Map
3
+
4
+ # from sunpy.instr.aia import aiaprep # This import is no longer available in sunpy 6.0.5
5
+ # In newer versions of sunpy, aiaprep has been moved to the aiapy package
6
+ try:
7
+ from aiapy.calibrate import register, update_pointing, correct_degradation
8
+ from aiapy.psf import deconvolve as aia_deconvolve
9
+
10
+ HAS_AIAPY = True
11
+ except ImportError:
12
+ # If aiapy is not installed, we'll provide a helpful message
13
+ HAS_AIAPY = False
14
+ print(
15
+ "Warning: aiapy package not found. Level 1.5 calibration will not be available."
16
+ )
17
+ print("To install aiapy: pip install aiapy")
18
+ from astropy.io import fits
19
+ from datetime import datetime, timedelta
20
+ import astropy.units as u # Import astropy units for use throughout the code
21
+
22
+ """
23
+ Solar Data Download and Calibration Module
24
+
25
+ This module provides functionality to download and process data from various solar observatories:
26
+ - SDO/AIA (Atmospheric Imaging Assembly)
27
+ - SDO/HMI (Helioseismic and Magnetic Imager)
28
+ - IRIS (Interface Region Imaging Spectrograph)
29
+ - SOHO (Solar and Heliospheric Observatory)
30
+
31
+ It can be used as a standalone script or imported as a module in other Python scripts.
32
+
33
+ Functions:
34
+ - get_key: Find a key in a dictionary by its value
35
+ - aiaexport: Generate an export command for AIA data
36
+ - download_aia: Download and process AIA data for a given time range
37
+ - get_time_list: Generate a list of timestamps within a given range
38
+ - download_aia_with_fido: Download AIA data using SunPy's Fido client
39
+ - download_hmi: Download and process HMI data
40
+ - download_hmi_with_fido: Download HMI data using Fido
41
+ - download_iris: Download IRIS data
42
+ - download_soho: Download SOHO data (EIT, LASCO, MDI)
43
+ - download_goes_suvi: Download GOES SUVI data
44
+ - download_stereo: Download STEREO SECCHI data
45
+ - download_gong: Download GONG magnetogram data
46
+
47
+ Notes:
48
+ - This module uses the DRMS client to access JSOC data for SDO instruments
49
+ - An email address is technically optional for small requests but recommended
50
+ - For large requests, an email address is required for notification when data is ready
51
+ - Alternative download methods include using the SunPy Fido client or directly
52
+ downloading from https://sdo.gsfc.nasa.gov/data/
53
+ """
54
+
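For reference, a minimal usage sketch of this module (illustrative only, not part of the packaged source; the import path assumes the package layout shown in the file list above):

    from solar_radio_image_viewer.solar_data_downloader.solar_data_downloader import (
        download_aia_with_fido,
    )

    # Download one hour of AIA 171 Å data without a registered JSOC email.
    files = download_aia_with_fido(
        wavelength="171",
        start_time="2014.01.01 00:00:00",
        end_time="2014.01.01 01:00:00",
        output_dir="./solar_data",
    )
    print(f"Downloaded {len(files)} files")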
55
+ # AIA Series Constants
56
+ AIA_SERIES = {
57
+ "12s": "aia.lev1_euv_12s",
58
+ "24s": "aia.lev1_uv_24s",
59
+ "1h": "aia.lev1_vis_1h",
60
+ }
61
+
62
+ # HMI Series Constants
63
+ # Note: For Fido downloads, M_ and B_ series both use LOS_magnetic_field physobs
64
+ HMI_SERIES = {
65
+ "45s": "hmi.M_45s", # LOS magnetogram (45s cadence)
66
+ "720s": "hmi.M_720s", # LOS magnetogram (12 min cadence)
67
+ "B_45s": "hmi.B_45s", # Line-of-sight magnetogram (same as M_)
68
+ "B_720s": "hmi.B_720s", # Line-of-sight magnetogram (12 min)
69
+ "Ic_45s": "hmi.Ic_45s", # Continuum intensity
70
+ "Ic_720s": "hmi.Ic_720s", # Continuum intensity (12 min)
71
+ "V_45s": "hmi.V_45s", # LOS velocity
72
+ "V_720s": "hmi.V_720s", # LOS velocity (12 min)
73
+ }
74
+
75
+ # Wavelength Options by Cadence
76
+ WAVELENGTHS = {
77
+ "12s": ["94", "131", "171", "193", "211", "304", "335"],
78
+ "24s": ["1600", "1700"],
79
+ "1h": ["4500"],
80
+ }
81
+
82
+
83
+ def get_key(val, my_dict):
84
+ """
85
+ Find a key in a dictionary by its value.
86
+
87
+ Args:
88
+ val: The value to search for
89
+ my_dict: The dictionary to search in
90
+
91
+ Returns:
92
+ The key corresponding to the value, or None if not found
93
+ """
94
+ for key, value in my_dict.items():
95
+ if val == value:
96
+ return key
97
+ return None
98
+
99
+
100
+ def aiaexport(wavelength, cadence, time):
101
+ """
102
+ Generate an export command for AIA data.
103
+
104
+ Args:
105
+ wavelength (str): AIA wavelength (e.g., '171', '1600')
106
+ cadence (str): Time cadence ('12s', '24s', or '1h')
107
+ time (str): Start time in 'YYYY.MM.DD_HH:MM:SS' format
108
+
109
+ Returns:
110
+ str: The export command string or None if invalid parameters
111
+ """
112
+ # Validate wavelength for the given cadence
113
+ if cadence not in AIA_SERIES:
114
+ print(f"Error: Invalid cadence '{cadence}'. Use '12s', '24s', or '1h'.")
115
+ return None
116
+
117
+ if wavelength not in WAVELENGTHS[cadence]:
118
+ print(f"Error: {wavelength}Å image not available for {cadence} cadence")
119
+ return None
120
+
121
+ # Format time for the export command - ensure proper format for DRMS
122
+ # The format should be YYYY.MM.DD_HH:MM:SS_UTC with no spaces
123
+ # Input time is expected to be in YYYY.MM.DD_HH:MM:SS format
124
+ time_utc = time + "_UTC"
125
+
126
+ # Create export command
127
+ export_cmd = f"{AIA_SERIES[cadence]}[{time_utc}/1h@{cadence}][{wavelength}]"
128
+ return export_cmd
129
+
130
+
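As a worked example of the string built above, aiaexport("171", "12s", "2014.01.01_00:00:00") returns:

    aia.lev1_euv_12s[2014.01.01_00:00:00_UTC/1h@12s][171]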
131
+ def get_time_list(start_time, end_time, interval_seconds=0.5):
132
+ """
133
+ Generate a list of timestamps within a given range.
134
+
135
+ Args:
136
+ start_time (str): Start time in 'YYYY.MM.DD HH:MM:SS' format
137
+ end_time (str): End time in 'YYYY.MM.DD HH:MM:SS' format
138
+ interval_seconds (float): Time interval between timestamps in seconds
139
+
140
+ Returns:
141
+ list: List of timestamps in 'HH:MM:SS' format
142
+ """
143
+ stt = datetime.strptime(start_time, "%Y.%m.%d %H:%M:%S")
144
+ ett = datetime.strptime(end_time, "%Y.%m.%d %H:%M:%S")
145
+ time_list = []
146
+
147
+ while stt <= ett:
148
+ tm = datetime.strftime(stt, "%Y.%m.%d %H:%M:%S").split(" ")[-1]
149
+ time_list.append(tm)
150
+ stt += timedelta(seconds=interval_seconds)
151
+
152
+ return time_list
153
+
154
+
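For example, the loop above yields:

    >>> get_time_list("2014.01.01 00:00:00", "2014.01.01 00:00:02", interval_seconds=1.0)
    ['00:00:00', '00:00:01', '00:00:02']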
155
+ def download_aia(
156
+ wavelength,
157
+ cadence,
158
+ start_time,
159
+ end_time,
160
+ output_dir,
161
+ email=None,
162
+ interval_seconds=0.5,
163
+ skip_calibration=False,
164
+ ):
165
+ """
166
+ Download and process AIA data for a given time range.
167
+
168
+ Args:
169
+ wavelength (str): AIA wavelength (e.g., '171', '1600')
170
+ cadence (str): Time cadence ('12s', '24s', or '1h')
171
+ start_time (str): Start time in 'YYYY.MM.DD HH:MM:SS' format
172
+ end_time (str): End time in 'YYYY.MM.DD HH:MM:SS' format
173
+ output_dir (str): Directory to save downloaded files
174
+ email (str, optional): Email for DRMS client. Recommended for reliability.
175
+ Small requests may work without an email, but large requests
176
+ require an email for notification when data is ready.
177
+ interval_seconds (float, optional): Time interval between images
178
+ skip_calibration (bool, optional): If True, skip Level 1.5 calibration even if aiapy is available
179
+
180
+ Returns:
181
+ list: Paths to downloaded Level 1.5 FITS files (or Level 1.0 if calibration is skipped/unavailable)
182
+
183
+ Notes:
184
+ Alternative download methods if you don't want to provide an email:
185
+ 1. Use SunPy's Fido client (import sunpy.net; from sunpy.net import Fido, attrs)
186
+ 2. Download directly from https://sdo.gsfc.nasa.gov/data/
187
+ """
188
+ # Check if we can perform calibration
189
+ can_calibrate = HAS_AIAPY and not skip_calibration
190
+
191
+ # Create output directory if it doesn't exist
192
+ if not os.path.isdir(output_dir):
193
+ os.makedirs(output_dir)
194
+
195
+ # Create temp directory for downloads
196
+ temp_dir = os.path.join(output_dir, "temp")
197
+ if not os.path.isdir(temp_dir):
198
+ os.makedirs(temp_dir)
199
+
200
+ # Initialize DRMS client
201
+ # Email is technically optional for small requests but recommended for reliability
202
+ # For large export requests, an email address is required for JSOC to notify you
203
+ if email is None:
204
+ print(
205
+ "Warning: No email provided. Small requests may work, but larger requests will likely fail."
206
+ )
207
+ print(
208
+ "Consider providing an email address or using alternative download methods."
209
+ )
210
+ client = drms.Client(email=email)
211
+
212
+ # Format start time for export command - YYYY.MM.DD_HH:MM:SS format required by DRMS
213
+ # This expects start_time in format YYYY.MM.DD HH:MM:SS
214
+ start_time_fmt = start_time.replace(" ", "_")
215
+
216
+ # Create export command
217
+ export_cmd = aiaexport(wavelength=wavelength, cadence=cadence, time=start_time_fmt)
218
+ if export_cmd is None:
219
+ return []
220
+
221
+ # Request data export
222
+ print(f"Requesting data export with command: {export_cmd}")
223
+ try:
224
+ response = client.export(export_cmd)
225
+ record = response.data.record
226
+ record_list = record.values.tolist()
227
+ except Exception as e:
228
+ print(f"Error during data export: {str(e)}")
229
+ print("Try using the --use-fido option as an alternative download method.")
230
+ return []
231
+
232
+ # Process records to get timestamps
233
+ record_dict = {}
234
+ for i in range(len(record_list)):
235
+ rec = record_list[i].split("{")[-1][:-2]
236
+ timestamp = record_list[i].split("[")[1].split("T")[1][:-2]
237
+ if rec == "image_lev":
238
+ record_dict[i] = timestamp
239
+
240
+ aia_time_list = list(record_dict.values())
241
+
242
+ # Get list of times to download
243
+ time_list = get_time_list(start_time, end_time, interval_seconds)
244
+
245
+ # Download and process files
246
+ downloaded_files = []
247
+ for current_time in time_list:
248
+ if current_time in aia_time_list:
249
+ key = get_key(current_time, record_dict)
250
+ filename = f"{current_time}_{wavelength}"
251
+
252
+ # Define output files for Level 1.0 and Level 1.5
253
+ level1_file = os.path.join(output_dir, f"aia_{filename}.fits")
254
+ level1_5_file = os.path.join(output_dir, f"aia_{filename}_lev1.5.fits")
255
+
256
+ # Determine which file to check for existence and add to downloaded_files
257
+ output_file = level1_5_file if can_calibrate else level1_file
258
+
259
+ if not os.path.isfile(output_file):
260
+ # Download level 1.0 file
261
+ response.download(temp_dir, key)
262
+ temp_file = glob.glob(os.path.join(temp_dir, "*.fits"))[0]
263
+ os.rename(temp_file, level1_file)
264
+
265
+ if can_calibrate:
266
+ try:
267
+ # Convert to level 1.5 using aiapy.calibrate.register
268
+ print(
269
+ f"Processing {os.path.basename(level1_file)} to Level 1.5..."
270
+ )
271
+ aia_map = Map(level1_file)
272
+ warnings.filterwarnings("ignore")
273
+
274
+ # Use aiapy's register function (replacement for aiaprep)
275
+ lev1_5map = register(aia_map)
276
+ lev1_5map.save(level1_5_file)
277
+
278
+ # Clean up level 1.0 file if successful
279
+ os.remove(level1_file)
280
+
281
+ print(
282
+ f"Downloaded and processed: {os.path.basename(level1_5_file)}"
283
+ )
284
+ except Exception as e:
285
+ print(f"Error during Level 1.5 calibration: {str(e)}")
286
+ print(
287
+ f"Using Level 1.0 file instead: {os.path.basename(level1_file)}"
288
+ )
289
+ output_file = level1_file # Use Level 1.0 file instead
290
+ else:
291
+ print(f"Downloaded Level 1.0 file: {os.path.basename(level1_file)}")
292
+ if not HAS_AIAPY:
293
+ print(
294
+ "For Level 1.5 calibration, install aiapy: pip install aiapy"
295
+ )
296
+
297
+ downloaded_files.append(output_file)
298
+
299
+ # Clean up temp directory
300
+ if os.path.exists(temp_dir):
301
+ for file in glob.glob(os.path.join(temp_dir, "*")):
302
+ os.remove(file)
303
+ os.rmdir(temp_dir)
304
+
305
+ return downloaded_files
306
+
307
+
308
+ def main():
309
+ """
310
+ Main function to run when the script is executed directly.
311
+ """
312
+ import argparse
313
+
314
+ parser = argparse.ArgumentParser(
315
+ description="Download and process data from solar observatories",
316
+ epilog="""
317
+ Instruments and typical parameters:
318
+ - AIA: --instrument aia --wavelength 171 --cadence 12s
319
+ - HMI: --instrument hmi --series 45s (or B_45s, Ic_720s, etc.)
320
+ - IRIS: --instrument iris --obs-type SJI --wavelength 1400
321
+ - SOHO/EIT: --instrument soho --soho-instrument EIT --wavelength 195
322
+ - SOHO/LASCO: --instrument soho --soho-instrument LASCO --detector C2
323
+
324
+ Troubleshooting:
325
+ - If you get 'Bad record-set subset specification' errors, try using --use-fido
326
+ - For 'email required' errors, either provide --email or use --use-fido
327
+ - If downloads fail, try a smaller time range between start and end times
328
+ """,
329
+ )
330
+
331
+ # General arguments
332
+ parser.add_argument(
333
+ "--instrument",
334
+ type=str,
335
+ default="aia",
336
+ choices=["aia", "hmi", "iris", "soho"],
337
+ help="Observatory instrument to download data from",
338
+ )
339
+ parser.add_argument(
340
+ "--start-time",
341
+ type=str,
342
+ required=True,
343
+ help="Start time in YYYY.MM.DD HH:MM:SS format",
344
+ )
345
+ parser.add_argument(
346
+ "--end-time",
347
+ type=str,
348
+ required=True,
349
+ help="End time in YYYY.MM.DD HH:MM:SS format",
350
+ )
351
+ parser.add_argument(
352
+ "--output-dir",
353
+ type=str,
354
+ default="./solar_data",
355
+ help="Directory to save downloaded files",
356
+ )
357
+ parser.add_argument(
358
+ "--email",
359
+ type=str,
360
+ help="Email for DRMS client. Recommended for reliability. Required for large requests.",
361
+ )
362
+ parser.add_argument(
363
+ "--skip-calibration",
364
+ action="store_true",
365
+ help="Skip calibration steps even if available",
366
+ )
367
+ parser.add_argument(
368
+ "--use-fido",
369
+ action="store_true",
370
+ help="Use SunPy's Fido client instead of DRMS (no email required)",
371
+ )
372
+
373
+ # AIA-specific arguments
374
+ parser.add_argument(
375
+ "--wavelength", type=str, help="Wavelength or channel (instrument-specific)"
376
+ )
377
+ parser.add_argument(
378
+ "--cadence",
379
+ type=str,
380
+ default="12s",
381
+ help="Time cadence for AIA (12s, 24s, or 1h)",
382
+ )
383
+
384
+ # HMI-specific arguments
385
+ parser.add_argument(
386
+ "--series",
387
+ type=str,
388
+ help="Series for HMI (45s, 720s, B_45s, B_720s, Ic_45s, Ic_720s)",
389
+ )
390
+
391
+ # IRIS-specific arguments
392
+ parser.add_argument(
393
+ "--obs-type",
394
+ type=str,
395
+ default="SJI",
396
+ help="IRIS observation type (SJI or raster)",
397
+ )
398
+
399
+ # SOHO-specific arguments
400
+ parser.add_argument(
401
+ "--soho-instrument",
402
+ type=str,
403
+ choices=["EIT", "LASCO", "MDI"],
404
+ help="SOHO instrument (EIT, LASCO, or MDI)",
405
+ )
406
+ parser.add_argument(
407
+ "--detector", type=str, help="Detector for SOHO/LASCO (C1, C2, C3)"
408
+ )
409
+
410
+ args = parser.parse_args()
411
+
412
+ try:
413
+ # Handle downloading based on selected instrument
414
+ if args.instrument.lower() == "aia":
415
+ # AIA data download
416
+ if args.use_fido:
417
+ if not args.wavelength:
418
+ print("Error: --wavelength is required for AIA data")
419
+ return 1
420
+
421
+ downloaded_files = download_aia_with_fido(
422
+ wavelength=args.wavelength,
423
+ start_time=args.start_time,
424
+ end_time=args.end_time,
425
+ output_dir=args.output_dir,
426
+ skip_calibration=args.skip_calibration,
427
+ )
428
+ else:
429
+ if not args.wavelength:
430
+ print("Error: --wavelength is required for AIA data")
431
+ return 1
432
+
433
+ downloaded_files = download_aia(
434
+ wavelength=args.wavelength,
435
+ cadence=args.cadence,
436
+ start_time=args.start_time,
437
+ end_time=args.end_time,
438
+ output_dir=args.output_dir,
439
+ email=args.email,
440
+ skip_calibration=args.skip_calibration,
441
+ )
442
+
443
+ elif args.instrument.lower() == "hmi":
444
+ # HMI data download
445
+ if not args.series:
446
+ print("Error: --series is required for HMI data")
447
+ return 1
448
+
449
+ if args.use_fido:
450
+ downloaded_files = download_hmi_with_fido(
451
+ series=args.series,
452
+ start_time=args.start_time,
453
+ end_time=args.end_time,
454
+ output_dir=args.output_dir,
455
+ skip_calibration=args.skip_calibration,
456
+ )
457
+ else:
458
+ downloaded_files = download_hmi(
459
+ series=args.series,
460
+ start_time=args.start_time,
461
+ end_time=args.end_time,
462
+ output_dir=args.output_dir,
463
+ email=args.email,
464
+ skip_calibration=args.skip_calibration,
465
+ )
466
+
467
+ elif args.instrument.lower() == "iris":
468
+ # IRIS data download (only Fido is supported)
469
+ downloaded_files = download_iris(
470
+ start_time=args.start_time,
471
+ end_time=args.end_time,
472
+ output_dir=args.output_dir,
473
+ obs_type=args.obs_type,
474
+ wavelength=args.wavelength,
475
+ skip_calibration=args.skip_calibration,
476
+ )
477
+
478
+ elif args.instrument.lower() == "soho":
479
+ # SOHO data download (only Fido is supported)
480
+ if not args.soho_instrument:
481
+ print("Error: --soho-instrument is required for SOHO data")
482
+ return 1
483
+
484
+ downloaded_files = download_soho(
485
+ instrument=args.soho_instrument,
486
+ start_time=args.start_time,
487
+ end_time=args.end_time,
488
+ output_dir=args.output_dir,
489
+ wavelength=args.wavelength,
490
+ detector=args.detector,
491
+ skip_calibration=args.skip_calibration,
492
+ )
493
+
494
+ else:
495
+ print(f"Error: Unsupported instrument: {args.instrument}")
496
+ return 1
497
+
498
+ # Report download results
499
+ instrument_name = args.instrument.upper()
500
+ if args.instrument.lower() == "soho" and args.soho_instrument:
501
+ instrument_name = f"SOHO/{args.soho_instrument}"
502
+
503
+ print(
504
+ f"Download complete. Downloaded {len(downloaded_files)} {instrument_name} files to {args.output_dir}"
505
+ )
506
+
507
+ except Exception as e:
508
+ print(f"Error: {str(e)}")
509
+ print("\nTroubleshooting tips:")
510
+ print("1. Try using the --use-fido option if you're having issues with DRMS")
511
+ print("2. Make sure your time format is correct (YYYY.MM.DD HH:MM:SS)")
512
+ print("3. Try a smaller time range between start and end times")
513
+ print("4. If using DRMS, consider providing an email with --email")
514
+ print("5. If using Fido, ensure you have the latest sunpy version installed")
515
+ print(" You can update with: pip install --upgrade sunpy")
516
+ return 1
517
+
518
+ return 0
519
+
520
+
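A typical command-line invocation matching the epilog above (illustrative; this assumes the module is executed directly as a script rather than through the packaged entry point):

    python solar_data_downloader.py --instrument aia --wavelength 171 \
        --start-time "2014.01.01 00:00:00" --end-time "2014.01.01 00:10:00" \
        --output-dir ./solar_data --use-fido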
521
+ def download_aia_with_fido(
522
+ wavelength,
523
+ start_time,
524
+ end_time,
525
+ output_dir,
526
+ skip_calibration=False,
527
+ apply_psf=False,
528
+ apply_degradation=True,
529
+ apply_exposure_norm=True,
530
+ ):
531
+ """
532
+ Alternative download function using SunPy's Fido client, which doesn't require an email.
533
+
534
+ Args:
535
+ wavelength (str): AIA wavelength (e.g., '171', '1600')
536
+ start_time (str): Start time in 'YYYY.MM.DD HH:MM:SS' format
537
+ end_time (str): End time in 'YYYY.MM.DD HH:MM:SS' format
538
+ output_dir (str): Directory to save downloaded files
539
+ skip_calibration (bool, optional): If True, skip Level 1.5 calibration
540
+ apply_psf (bool, optional): If True, apply PSF deconvolution (slow, ~30-60s/image)
541
+ apply_degradation (bool, optional): If True, apply time-dependent degradation correction
542
+ apply_exposure_norm (bool, optional): If True, normalize by exposure time
543
+
544
+ Returns:
545
+ list: Paths to downloaded Level 1.5 FITS files (or Level 1.0 if calibration is skipped/unavailable)
546
+ """
547
+ try:
548
+ import sunpy.net
549
+ from sunpy.net import Fido, attrs as a
550
+ except ImportError:
551
+ print("Error: SunPy not installed or not properly configured.")
552
+ return []
553
+
554
+ # Create output directory if it doesn't exist
555
+ if not os.path.isdir(output_dir):
556
+ os.makedirs(output_dir)
557
+
558
+ # Check if we can perform calibration
559
+ can_calibrate = HAS_AIAPY and not skip_calibration
560
+
561
+ # Parse the time strings
562
+ start_dt = datetime.strptime(start_time, "%Y.%m.%d %H:%M:%S")
563
+ end_dt = datetime.strptime(end_time, "%Y.%m.%d %H:%M:%S")
564
+
565
+ # Convert wavelength string to integer
566
+ wl_int = int(wavelength)
567
+
568
+ print(f"Searching for AIA {wavelength}Å data from {start_time} to {end_time}")
569
+
570
+ try:
571
+ # Create the query with correct unit import
572
+ result = Fido.search(
573
+ a.Time(start_dt, end_dt),
574
+ a.Instrument("AIA"),
575
+ a.Wavelength(wl_int * u.angstrom), # Now using the correct astropy units
576
+ )
577
+
578
+ if len(result) == 0 or len(result[0]) == 0:
579
+ print("No data found for the specified parameters.")
580
+ return []
581
+
582
+ print(f"Found {len(result[0])} files. Downloading...")
583
+
584
+ # Download the files
585
+ downloaded = Fido.fetch(result, path=output_dir + "/{file}")
586
+ except Exception as e:
587
+ print(f"Error during Fido search/fetch: {str(e)}")
588
+ print("Check your search parameters and ensure sunpy is properly installed.")
589
+ return []
590
+
591
+ downloaded_files = []
592
+
593
+ # Process the downloaded files if calibration is requested
594
+ for file_path in downloaded:
595
+ file_path = str(file_path)
596
+ # Determine output file names
597
+ base_name = os.path.basename(file_path)
598
+ level1_5_file = os.path.join(
599
+ output_dir, f"{base_name.replace('lev1','lev1_5')}"
600
+ )
601
+ output_file = level1_5_file if can_calibrate else file_path
602
+
603
+ if can_calibrate and not os.path.isfile(level1_5_file):
604
+ try:
605
+ # Convert to level 1.5 using aiapy calibration
606
+ # Order: 1) update_pointing, 2) PSF deconvolve, 3) register, 4) correct_degradation
607
+ print(f"Processing {base_name} to Level 1.5...")
608
+ aia_map = Map(file_path)
609
+ warnings.filterwarnings("ignore")
610
+
611
+ # Step 1: Update pointing information from JSOC
612
+ try:
613
+ aia_map = update_pointing(aia_map)
614
+ print(f" - Updated pointing for {base_name}")
615
+ except Exception as e:
616
+ print(f" - Warning: Could not update pointing: {e}")
617
+
618
+ # Step 2: PSF deconvolution (MUST be done on Level 1 before registration)
619
+ if apply_psf:
620
+ try:
621
+ print(f" - Applying PSF deconvolution (this may take 30-60 seconds)...")
622
+ aia_map = aia_deconvolve(aia_map, iterations=25)
623
+ print(f" - Applied PSF deconvolution (25 iterations)")
624
+ except Exception as e:
625
+ print(f" - Warning: Could not apply PSF deconvolution: {e}")
626
+
627
+ # Step 3: Register (rotate, scale to 0.6"/px, center sun)
628
+ lev1_5map = register(aia_map)
629
+ print(f" - Registered (rotated, scaled, centered)")
630
+
631
+ # Step 4: Correct for time-dependent degradation
632
+ if apply_degradation:
633
+ try:
634
+ lev1_5map = correct_degradation(lev1_5map)
635
+ print(f" - Applied degradation correction")
636
+ except Exception as e:
637
+ print(f" - Warning: Could not apply degradation correction: {e}")
638
+
639
+ # Step 5: Normalize by exposure time
640
+ if apply_exposure_norm and lev1_5map.exposure_time.value > 0:
641
+ lev1_5map = lev1_5map / lev1_5map.exposure_time
642
+ print(f" - Normalized by exposure time")
643
+
644
+ lev1_5map.save(level1_5_file)
645
+
646
+ print(f"Successfully processed: {os.path.basename(level1_5_file)}")
647
+ output_file = level1_5_file
648
+ os.remove(file_path)
649
+ except Exception as e:
650
+ print(f"Error during Level 1.5 calibration: {str(e)}")
651
+ print(f"Using Level 1.0 file instead: {base_name}")
652
+ output_file = file_path
653
+ else:
654
+ print(f"Downloaded Level 1.0 file: {base_name}")
655
+ if not HAS_AIAPY:
656
+ print("For Level 1.5 calibration, install aiapy: pip install aiapy")
657
+
658
+ downloaded_files.append(output_file)
659
+
660
+ return downloaded_files
661
+
662
+
663
+
664
+ def hmiexport(series, time):
665
+ """
666
+ Generate an export command for HMI data.
667
+
668
+ Args:
669
+ series (str): HMI series (e.g., 'M_45s', 'B_45s', 'Ic_720s')
670
+ time (str): Start time in 'YYYY.MM.DD_HH:MM:SS' format
671
+
672
+ Returns:
673
+ str: The export command string or None if invalid parameters
674
+ """
675
+ # Validate series
676
+ if series not in HMI_SERIES.keys():
677
+ print(
678
+ f"Error: Invalid HMI series '{series}'. Use one of: {', '.join(HMI_SERIES.keys())}"
679
+ )
680
+ return None
681
+
682
+ # Format time for the export command
683
+ time_utc = time + "_UTC"
684
+
685
+ # Create export command
686
+ export_cmd = f"{HMI_SERIES[series]}[{time_utc}/1h]"
687
+ return export_cmd
688
+
689
+
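For example, hmiexport("45s", "2014.01.01_00:00:00") returns the record-set string:

    hmi.M_45s[2014.01.01_00:00:00_UTC/1h]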
690
+ def download_hmi(
691
+ series,
692
+ start_time,
693
+ end_time,
694
+ output_dir,
695
+ email=None,
696
+ interval_seconds=45.0,
697
+ skip_calibration=False,
698
+ ):
699
+ """
700
+ Download and process HMI data for a given time range.
701
+
702
+ Args:
703
+ series (str): HMI series type ('45s', '720s', 'B_45s', 'B_720s', 'Ic_45s', 'Ic_720s')
704
+ start_time (str): Start time in 'YYYY.MM.DD HH:MM:SS' format
705
+ end_time (str): End time in 'YYYY.MM.DD HH:MM:SS' format
706
+ output_dir (str): Directory to save downloaded files
707
+ email (str, optional): Email for DRMS client. Recommended for reliability.
708
+ Small requests may work without an email, but large requests
709
+ require an email for notification when data is ready.
710
+ interval_seconds (float, optional): Time interval between images.
711
+ Default is 45.0 seconds for '45s' series.
712
+ For '720s' series, consider using 720.0.
713
+ skip_calibration (bool, optional): If True, skip calibration steps
714
+
715
+ Returns:
716
+ list: Paths to downloaded FITS files
717
+
718
+ Notes:
719
+ HMI data calibration is different from AIA. For proper scientific analysis,
720
+ consider using SunPy or additional HMI-specific tools to further calibrate the data.
721
+ """
722
+ # Create output directory if it doesn't exist
723
+ if not os.path.isdir(output_dir):
724
+ os.makedirs(output_dir)
725
+
726
+ # Create temp directory for downloads
727
+ temp_dir = os.path.join(output_dir, "temp")
728
+ if not os.path.isdir(temp_dir):
729
+ os.makedirs(temp_dir)
730
+
731
+ # Initialize DRMS client
732
+ # Email is technically optional for small requests but recommended for reliability
733
+ # For large export requests, an email address is required for JSOC to notify you
734
+ if email is None:
735
+ print(
736
+ "Warning: No email provided. Small requests may work, but larger requests will likely fail."
737
+ )
738
+ print(
739
+ "Consider providing an email address or using alternative download methods."
740
+ )
741
+ client = drms.Client(email=email)
742
+
743
+ # Format start time for export command - YYYY.MM.DD_HH:MM:SS format required by DRMS
744
+ # This expects start_time in format YYYY.MM.DD HH:MM:SS
745
+ start_time_fmt = start_time.replace(" ", "_")
746
+
747
+ # Create export command
748
+ export_cmd = hmiexport(series=series, time=start_time_fmt)
749
+ if export_cmd is None:
750
+ return []
751
+
752
+ # Request data export
753
+ print(f"Requesting data export with command: {export_cmd}")
754
+ try:
755
+ response = client.export(export_cmd)
756
+ record = response.data.record
757
+ record_list = record.values.tolist()
758
+ except Exception as e:
759
+ print(f"Error during data export: {str(e)}")
760
+ print("Try using the --use-fido option as an alternative download method.")
761
+ return []
762
+
763
+ # Process records to get timestamps
764
+ record_dict = {}
765
+ for i in range(len(record_list)):
766
+ # HMI records have a different format than AIA records
767
+ try:
768
+ parts = record_list[i].split("[")
769
+ if len(parts) > 1:
770
+ timestamp_part = parts[1].split("_")[0:3] # Get the date/time part
771
+ timestamp = "_".join(timestamp_part)
772
+ record_dict[i] = timestamp
773
+ except Exception as e:
774
+ print(f"Warning: Could not parse record {i}: {str(e)}")
775
+ continue
776
+
777
+ hmi_time_list = list(record_dict.values())
778
+
779
+ # Get list of times to download
780
+ time_list = get_time_list(start_time, end_time, interval_seconds)
781
+
782
+ # Download and process files
783
+ downloaded_files = []
784
+ for current_time in time_list:
785
+ formatted_current_time = current_time.replace(":", "_").replace(".", "_")
786
+ matching_times = [t for t in hmi_time_list if formatted_current_time in t]
787
+
788
+ if matching_times:
789
+ for match_time in matching_times:
790
+ key = get_key(match_time, record_dict)
791
+ filename = f"hmi_{series}_{match_time.replace(':', '_')}"
792
+ output_file = os.path.join(output_dir, f"{filename}.fits")
793
+
794
+ if not os.path.isfile(output_file):
795
+ # Download file
796
+ try:
797
+ response.download(temp_dir, key)
798
+ temp_files = glob.glob(os.path.join(temp_dir, "*.fits"))
799
+ if temp_files:
800
+ temp_file = temp_files[0]
801
+ os.rename(temp_file, output_file)
802
+ print(f"Downloaded: {os.path.basename(output_file)}")
803
+ else:
804
+ print(f"Warning: No files downloaded for {match_time}")
805
+ continue
806
+ except Exception as e:
807
+ print(f"Error downloading file for {match_time}: {str(e)}")
808
+ continue
809
+
810
+ downloaded_files.append(output_file)
811
+
812
+ if not skip_calibration:
813
+ for file_path in downloaded_files:
814
+ lvl1_map = Map(file_path)
815
+ print(f"Processing {os.path.basename(file_path)} to Level 1.5...")
816
+ lvl1_5_map = update_hmi_pointing(lvl1_map)
817
+ lvl1_5_map_output_file = os.path.join(
818
+ output_dir, f"{os.path.basename(file_path)}_lvl1.5.fits"
819
+ )
820
+ lvl1_5_map.save(lvl1_5_map_output_file, filetype="fits")
821
+ print(f"Successfully processed {os.path.basename(file_path)} to Level 1.5")
822
+ print(f"Deleting {file_path}")
823
+ os.remove(file_path)
824
+ # Clean up temp directory
825
+ if os.path.exists(temp_dir):
826
+ for file in glob.glob(os.path.join(temp_dir, "*")):
827
+ os.remove(file)
828
+ os.rmdir(temp_dir)
829
+
830
+ return downloaded_files
831
+
832
+
833
+ def download_hmi_with_fido(
834
+ series,
835
+ start_time,
836
+ end_time,
837
+ output_dir,
838
+ skip_calibration=False,
839
+ ):
840
+ """
841
+ Alternative download function for HMI data using SunPy's Fido client, which doesn't require an email.
842
+
843
+ Args:
844
+ series (str): HMI series ('45s', '720s', 'B_45s', 'B_720s', 'Ic_45s', 'Ic_720s')
845
+ start_time (str): Start time in 'YYYY.MM.DD HH:MM:SS' format
846
+ end_time (str): End time in 'YYYY.MM.DD HH:MM:SS' format
847
+ output_dir (str): Directory to save downloaded files
848
+ skip_calibration (bool, optional): If True, skip calibration steps
849
+
850
+ Returns:
851
+ list: Paths to downloaded FITS files
852
+ """
853
+ try:
854
+ import sunpy.net
855
+ from sunpy.net import Fido, attrs as a
856
+ except ImportError:
857
+ print("Error: SunPy not installed or not properly configured.")
858
+ return []
859
+
860
+ # Create output directory if it doesn't exist
861
+ if not os.path.isdir(output_dir):
862
+ os.makedirs(output_dir)
863
+
864
+ # Parse the time strings
865
+ start_dt = datetime.strptime(start_time, "%Y.%m.%d %H:%M:%S")
866
+ end_dt = datetime.strptime(end_time, "%Y.%m.%d %H:%M:%S")
867
+
868
+ # Map series to physobs for Fido queries
869
+ # Note: M_ and B_ series both correspond to LOS_magnetic_field
870
+ # Vector magnetograms are not directly available via simple Fido queries
871
+ physobs = None
872
+ if "V_" in series:
873
+ physobs = "LOS_velocity"
874
+ elif "Ic_" in series:
875
+ physobs = "intensity" # continuum intensity
876
+ else: # M_ and B_ series both use LOS magnetic field
877
+ physobs = "LOS_magnetic_field"
878
+
879
+ # Determine cadence
880
+ if "45s" in series:
881
+ sample = 45 * u.second
882
+ else: # 720s
883
+ sample = 720 * u.second
884
+
885
+ print(f"Searching for HMI {series} data from {start_time} to {end_time}")
886
+
887
+ try:
888
+ # Create the query
889
+ if physobs:
890
+ result = Fido.search(
891
+ a.Time(start_dt, end_dt),
892
+ a.Instrument("HMI"),
893
+ a.Physobs(physobs),
894
+ a.Sample(sample),
895
+ )
896
+ else:
897
+ # Fallback to just instrument and time if physobs mapping is unclear
898
+ result = Fido.search(
899
+ a.Time(start_dt, end_dt), a.Instrument("HMI"), a.Sample(sample)
900
+ )
901
+
902
+ if len(result) == 0 or len(result[0]) == 0:
903
+ print("No data found for the specified parameters.")
904
+ return []
905
+
906
+ print(f"Found {len(result[0])} files. Downloading...")
907
+
908
+ # Download the files
909
+ downloaded = Fido.fetch(result, path=output_dir + "/{file}")
910
+ except Exception as e:
911
+ print(f"Error during Fido search/fetch: {str(e)}")
912
+ print("Check your search parameters and ensure sunpy is properly installed.")
913
+ return []
914
+
915
+ downloaded_files = [str(file_path) for file_path in downloaded]
916
+ if not skip_calibration:
917
+ for file_path in downloaded_files:
918
+ lvl1_map = Map(file_path)
919
+ print(f"Processing {os.path.basename(file_path)} to Level 1.5...")
920
+ lvl1_5_map = update_hmi_pointing(lvl1_map)
921
+ lvl1_5_map_output_file = os.path.join(
922
+ output_dir, f"{os.path.basename(file_path)}_lvl1.5.fits"
923
+ )
924
+ lvl1_5_map.save(lvl1_5_map_output_file, filetype="fits")
925
+ print(f"Successfully processed {os.path.basename(file_path)} to Level 1.5")
926
+ print(f"Deleting {file_path}")
927
+ os.remove(file_path)
928
+ print(f"Successfully downloaded {len(downloaded_files)} HMI files.")
929
+ return downloaded_files
930
+
931
+
932
+ def update_hmi_pointing(hmi_map):
933
+ """
934
+ Calibrate HMI Level-1 data to Level-1.5.
935
+
936
+ This function performs comprehensive HMI calibration including:
937
+ 1. Roll angle correction (CROTA2) - rotate to solar north up
938
+ 2. Re-centering to disk center (update CRPIX)
939
+ 3. Plate scale normalization to 0.5 arcsec/pixel (HMI standard)
940
+
941
+ For HMI Level-1 data, the basic calibrations (dark subtraction, flat-fielding,
942
+ bad pixel correction) have already been applied by the JSOC pipeline.
943
+
944
+ Parameters:
945
+ hmi_map (sunpy.map.Map): Input Level-1 HMI map.
946
+
947
+ Returns:
948
+ sunpy.map.Map: A new map with Level-1.5 calibration applied.
949
+ """
950
+ import copy
951
+
952
+ # Target plate scale for HMI Level-1.5 (0.5 arcsec/pixel)
953
+ TARGET_SCALE = 0.5 # arcsec/pixel
954
+
955
+ # Get current metadata
956
+ meta = copy.deepcopy(hmi_map.meta)
957
+
958
+ # Step 1: Get rotation angle and rotate to solar north
959
+ current_crota = float(meta.get("CROTA2", 0.0))
960
+
961
+ # Step 2: Calculate re-centering offset
962
+ # CRPIX should be at the center of the sun (CRVAL = 0, 0 in helioprojective)
963
+ # Current sun center in pixels
964
+ current_crpix1 = float(meta.get("CRPIX1", meta.get("NAXIS1", 4096) / 2))
965
+ current_crpix2 = float(meta.get("CRPIX2", meta.get("NAXIS2", 4096) / 2))
966
+
967
+ # Target center (middle of image)
968
+ naxis1 = int(meta.get("NAXIS1", 4096))
969
+ naxis2 = int(meta.get("NAXIS2", 4096))
970
+ target_crpix1 = (naxis1 + 1) / 2.0
971
+ target_crpix2 = (naxis2 + 1) / 2.0
972
+
973
+ # Step 3: Get current plate scale
974
+ current_cdelt1 = abs(float(meta.get("CDELT1", TARGET_SCALE)))
975
+ current_cdelt2 = abs(float(meta.get("CDELT2", TARGET_SCALE)))
976
+
977
+ print(f" - Current CROTA2: {current_crota:.4f}°, CDELT: {current_cdelt1:.4f}×{current_cdelt2:.4f} arcsec/px")
978
+ print(f" - Current CRPIX: ({current_crpix1:.1f}, {current_crpix2:.1f}), Target: ({target_crpix1:.1f}, {target_crpix2:.1f})")
979
+
980
+ # Perform rotation to remove roll angle
981
+ if abs(current_crota) > 0.01: # Only rotate if significant
982
+ rotated_map = hmi_map.rotate(angle=-current_crota * u.deg, recenter=True, order=3)
983
+ print(f" - Rotated by {-current_crota:.4f}° to remove roll angle")
984
+ else:
985
+ rotated_map = hmi_map
986
+ print(f" - No significant rotation needed (CROTA2 = {current_crota:.4f}°)")
987
+
988
+ # Check if plate scale normalization is needed
989
+ scale_factor = current_cdelt1 / TARGET_SCALE
990
+ if abs(scale_factor - 1.0) > 0.01: # Only rescale if different by >1%
991
+ # Calculate new dimensions
992
+ new_naxis1 = int(naxis1 * scale_factor)
993
+ new_naxis2 = int(naxis2 * scale_factor)
994
+ new_dimensions = [new_naxis1, new_naxis2] * u.pixel
995
+
996
+ # Resample to target plate scale
997
+ try:
998
+ calibrated_map = rotated_map.resample(new_dimensions)
999
+ print(f" - Rescaled to {TARGET_SCALE} arcsec/px ({naxis1}→{new_naxis1} pixels)")
1000
+ except Exception as e:
1001
+ print(f" - Warning: Could not rescale: {e}")
1002
+ calibrated_map = rotated_map
1003
+ else:
1004
+ calibrated_map = rotated_map
1005
+ print(f" - Plate scale already at ~{TARGET_SCALE} arcsec/px")
1006
+
1007
+ print(f" - HMI Level-1.5 calibration complete")
1008
+
1009
+ return calibrated_map
1010
+
1011
+
1012
+
1013
+ def download_iris(
1014
+ start_time,
1015
+ end_time,
1016
+ output_dir,
1017
+ obs_type="SJI", # "SJI" for slit-jaw images or "raster" for spectrograph data
1018
+ wavelength=None, # For SJI: 1330, 1400, 2796, 2832
1019
+ skip_calibration=False,
1020
+ ):
1021
+ """
1022
+ Download IRIS (Interface Region Imaging Spectrograph) data for a given time range.
1023
+
1024
+ IRIS data is not available through DRMS/JSOC, so this function uses SunPy's Fido client.
1025
+
1026
+ Args:
1027
+ start_time (str): Start time in 'YYYY.MM.DD HH:MM:SS' format
1028
+ end_time (str): End time in 'YYYY.MM.DD HH:MM:SS' format
1029
+ output_dir (str): Directory to save downloaded files
1030
+ obs_type (str): Type of observation - "SJI" for slit-jaw images or "raster" for spectral data
1031
+ wavelength (int, optional): For SJI, specify wavelength (1330, 1400, 2796, 2832)
1032
+ skip_calibration (bool, optional): If True, skip calibration steps
1033
+
1034
+ Returns:
1035
+ list: Paths to downloaded FITS files
1036
+ """
1037
+ try:
1038
+ import sunpy.net
1039
+ from sunpy.net import Fido, attrs as a
1040
+ except ImportError:
1041
+ print("Error: SunPy not installed or not properly configured.")
1042
+ return []
1043
+
1044
+ # Create output directory if it doesn't exist
1045
+ if not os.path.isdir(output_dir):
1046
+ os.makedirs(output_dir)
1047
+
1048
+ # Parse the time strings
1049
+ start_dt = datetime.strptime(start_time, "%Y.%m.%d %H:%M:%S")
1050
+ end_dt = datetime.strptime(end_time, "%Y.%m.%d %H:%M:%S")
1051
+
1052
+ print(f"Searching for IRIS {obs_type} data from {start_time} to {end_time}")
1053
+
1054
+ try:
1055
+ # Create the query based on observation type
1056
+ if obs_type.lower() == "sji":
1057
+ if wavelength is not None:
1058
+ # SJI with specific wavelength
1059
+ wl = int(wavelength) * u.angstrom
1060
+ result = Fido.search(
1061
+ a.Time(start_dt, end_dt),
1062
+ a.Instrument("IRIS"),
1063
+ a.Wavelength(wl),
1064
+ )
1065
+ else:
1066
+ # Any SJI
1067
+ result = Fido.search(
1068
+ a.Time(start_dt, end_dt),
1069
+ a.Instrument("IRIS"),
1070
+ a.Physobs("intensity"),
1071
+ )
1072
+ else:
1073
+ # Spectral/raster data
1074
+ result = Fido.search(
1075
+ a.Time(start_dt, end_dt),
1076
+ a.Instrument("IRIS"),
1077
+ a.Physobs("intensity"),
1078
+ a.Level(2),
1079
+ )
1080
+
1081
+ if len(result) == 0 or len(result[0]) == 0:
1082
+ print("No data found for the specified parameters.")
1083
+ return []
1084
+
1085
+ print(f"Found {len(result[0])} files. Downloading...")
1086
+
1087
+ # Download the files
1088
+ downloaded = Fido.fetch(result, path=output_dir + "/{file}")
1089
+ except Exception as e:
1090
+ print(f"Error during Fido search/fetch: {str(e)}")
1091
+ print("Check your search parameters and ensure sunpy is properly installed.")
1092
+ return []
1093
+
1094
+ downloaded_files = [str(file_path) for file_path in downloaded]
1095
+
1096
+ # Post-process IRIS files for solarviewer compatibility
1097
+ # IRIS SJI files are 3D data cubes that need to be converted to 2D FITS
1098
+ # IRIS raster files come as tar.gz archives that need extraction
1099
+ if not skip_calibration:
1100
+ import tarfile
1101
+
1102
+ # First, extract any tar.gz files (raster data comes as archives)
1103
+ extracted_files = []
1104
+ for file_path in downloaded_files:
1105
+ if file_path.endswith('.tar.gz') or file_path.endswith('.tar'):
1106
+ try:
1107
+ print(f"Extracting archive: {os.path.basename(file_path)}...")
1108
+ with tarfile.open(file_path, 'r:*') as tar:
1109
+ tar.extractall(path=output_dir)
1110
+ for member in tar.getmembers():
1111
+ if member.isfile():
1112
+ extracted_path = os.path.join(output_dir, member.name)
1113
+ extracted_files.append(extracted_path)
1114
+ print(f" - Extracted: {member.name}")
1115
+ os.remove(file_path) # Remove the archive after extraction
1116
+ except Exception as e:
1117
+ print(f"Warning: Could not extract {file_path}: {e}")
1118
+ extracted_files.append(file_path)
1119
+ else:
1120
+ extracted_files.append(file_path)
1121
+
1122
+ downloaded_files = extracted_files
1123
+
1124
+ processed_files = []
1125
+ for file_path in downloaded_files:
1126
+ try:
1127
+ base_name = os.path.basename(file_path)
1128
+ print(f"Processing {base_name}...")
1129
+
1130
+ with fits.open(file_path) as hdu:
1131
+ data = hdu[0].data
1132
+ header_orig = hdu[0].header.copy()
1133
+
1134
+ # Check if 3D (time series) and extract all frames
1135
+ if data is not None and data.ndim == 3:
1136
+ n_frames = data.shape[0]
1137
+ print(f" - Found {n_frames} frames, extracting all...")
1138
+
1139
+ base_no_ext = base_name.replace('.fits.gz', '').replace('.fits', '').replace('.gz', '')
1140
+
1141
+ for frame_idx in range(n_frames):
1142
+ header = header_orig.copy()
1143
+ data_2d = data[frame_idx]
1144
+
1145
+ # Update header for 2D data
1146
+ header['NAXIS'] = 2
1147
+ header['NAXIS1'] = data_2d.shape[1]
1148
+ header['NAXIS2'] = data_2d.shape[0]
1149
+ header['FRAME'] = frame_idx
1150
+ if 'NAXIS3' in header:
1151
+ del header['NAXIS3']
1152
+
1153
+ # Add coordinate units if missing
1154
+ if header.get('CUNIT1') is None and header.get('CTYPE1'):
1155
+ header['CUNIT1'] = 'arcsec'
1156
+ if header.get('CUNIT2') is None and header.get('CTYPE2'):
1157
+ header['CUNIT2'] = 'arcsec'
1158
+
1159
+ # Create output filename with frame number
1160
+ out_name = f"iris_{base_no_ext}_frame{frame_idx:03d}.fits"
1161
+ output_file = os.path.join(output_dir, out_name)
1162
+
1163
+ # Save as 2D FITS
1164
+ hdu_out = fits.PrimaryHDU(data_2d, header=header)
1165
+ hdu_out.writeto(output_file, overwrite=True)
1166
+
1167
+ processed_files.append(output_file)
1168
+
1169
+ print(f" - Saved {n_frames} frames as individual FITS files")
1170
+
1171
+ # Remove original compressed file
1172
+ if file_path.endswith('.gz'):
1173
+ os.remove(file_path)
1174
+ else:
1175
+ # 2D data, just decompress if needed
1176
+ if file_path.endswith('.gz'):
1177
+ out_name = base_name.replace('.gz', '')
1178
+ output_file = os.path.join(output_dir, out_name)
1179
+ hdu_out = fits.PrimaryHDU(data, header=header_orig)
1180
+ hdu_out.writeto(output_file, overwrite=True)
1181
+ os.remove(file_path)
1182
+ processed_files.append(output_file)
1183
+ else:
1184
+ processed_files.append(file_path)
1185
+ except Exception as e:
1186
+ print(f"Warning: Could not process {base_name}: {e}")
1187
+ processed_files.append(file_path)
1188
+
1189
+ downloaded_files = processed_files
1190
+ print("Note: IRIS data is Level 2 (pre-calibrated).")
1191
+ print(" - SJI files: 2D image frames extracted for solarviewer")
1192
+ print(" - Raster files: 3D spectroscopic data (requires specialized tools)")
1193
+
1194
+ print(f"Successfully downloaded {len(downloaded_files)} IRIS files.")
1195
+ return downloaded_files
1196
+
1197
+
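A minimal call sketch for this function (illustrative only; the time range and wavelength are arbitrary examples):

    sji_files = download_iris(
        start_time="2021.09.23 06:00:00",
        end_time="2021.09.23 06:30:00",
        output_dir="./iris_data",
        obs_type="SJI",
        wavelength=1400,
    )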
1198
+ def download_soho(
1199
+ instrument,
1200
+ start_time,
1201
+ end_time,
1202
+ output_dir,
1203
+ wavelength=None,
1204
+ detector=None,
1205
+ skip_calibration=False,
1206
+ ):
1207
+ """
1208
+ Download SOHO (Solar and Heliospheric Observatory) data for a given time range.
1209
+
1210
+ SOHO data is not available through DRMS/JSOC, so this function uses SunPy's Fido client.
1211
+
1212
+ Args:
1213
+ instrument (str): SOHO instrument ('EIT', 'LASCO', 'MDI')
1214
+ start_time (str): Start time in 'YYYY.MM.DD HH:MM:SS' format
1215
+ end_time (str): End time in 'YYYY.MM.DD HH:MM:SS' format
1216
+ output_dir (str): Directory to save downloaded files
1217
+ wavelength (int, optional): For EIT, wavelength in Angstroms (171, 195, 284, 304)
1218
+ detector (str, optional): For LASCO, detector name ('C1', 'C2', 'C3')
1219
+ skip_calibration (bool, optional): If True, skip calibration steps
1220
+
1221
+ Returns:
1222
+ list: Paths to downloaded FITS files
1223
+ """
1224
+ try:
1225
+ import sunpy.net
1226
+ from sunpy.net import Fido, attrs as a
1227
+ except ImportError:
1228
+ print("Error: SunPy not installed or not properly configured.")
1229
+ return []
1230
+
1231
+ # Create output directory if it doesn't exist
1232
+ if not os.path.isdir(output_dir):
1233
+ os.makedirs(output_dir)
1234
+
1235
+ # Parse the time strings
1236
+ start_dt = datetime.strptime(start_time, "%Y.%m.%d %H:%M:%S")
1237
+ end_dt = datetime.strptime(end_time, "%Y.%m.%d %H:%M:%S")
1238
+
1239
+ # Validate and normalize instrument name
1240
+ instrument = instrument.upper()
1241
+ if instrument not in ["EIT", "LASCO", "MDI"]:
1242
+ print(
1243
+ f"Error: Invalid SOHO instrument '{instrument}'. Use 'EIT', 'LASCO', or 'MDI'."
1244
+ )
1245
+ return []
1246
+
1247
+ print(f"Searching for SOHO/{instrument} data from {start_time} to {end_time}")
1248
+
1249
+ try:
1250
+ # Build query based on instrument
1251
+ query_args = [
1252
+ a.Time(start_dt, end_dt),
1253
+ a.Instrument(instrument),
1254
+ ]
1255
+
1256
+ # Add instrument-specific parameters
1257
+ if instrument == "EIT":
1258
+ # Use SDAC provider which works more reliably for EIT
1259
+ query_args.append(a.Provider("SDAC"))
1260
+ # For EIT wavelength filtering, use a range to improve matching
1261
+ if wavelength is not None:
1262
+ wl = int(wavelength)
1263
+ # Use a small tolerance range for wavelength matching
1264
+ query_args.append(a.Wavelength((wl - 1) * u.angstrom, (wl + 1) * u.angstrom))
1265
+ elif instrument == "LASCO" and detector is not None:
1266
+ query_args.append(a.Detector(detector.upper()))
1267
+
1268
+ result = Fido.search(*query_args)
1269
+
1270
+ # Count total files across all result tables
1271
+ total_files = sum(len(r) for r in result) if len(result) > 0 else 0
1272
+
1273
+ if total_files == 0:
1274
+ # Try again without wavelength filter for EIT
1275
+ if instrument == "EIT" and wavelength is not None:
1276
+ print(f"No exact wavelength match, searching all EIT data...")
1277
+ result = Fido.search(
1278
+ a.Time(start_dt, end_dt),
1279
+ a.Instrument("EIT"),
1280
+ a.Provider("SDAC"),
1281
+ )
1282
+ total_files = sum(len(r) for r in result) if len(result) > 0 else 0
1283
+
1284
+ if total_files == 0:
1285
+ print("No data found for the specified parameters.")
1286
+ return []
1287
+
1288
+ print(f"Found {total_files} files. Downloading...")
1289
+
1290
+ # Download the files
1291
+ downloaded = Fido.fetch(result, path=output_dir + "/{file}")
1292
+ except Exception as e:
1293
+ print(f"Error during Fido search/fetch: {str(e)}")
1294
+ print("Check your search parameters and ensure sunpy is properly installed.")
1295
+ return []
1296
+
1297
+ downloaded_files = [str(file_path) for file_path in downloaded]
1298
+
1299
+ # Fix files without .fits extension (SOHO/EIT files from SDAC often lack proper extension)
1300
+ fixed_files = []
1301
+ for file_path in downloaded_files:
1302
+ if not file_path.endswith('.fits') and not file_path.endswith('.fits.gz') and not file_path.endswith('.fts'):
1303
+ # Check if it's actually a FITS file
1304
+ try:
1305
+ with fits.open(file_path) as hdu:
1306
+ # It's a valid FITS file, rename it
1307
+ new_path = file_path + '.fits'
1308
+ os.rename(file_path, new_path)
1309
+ print(f"Renamed {os.path.basename(file_path)} -> {os.path.basename(new_path)}")
1310
+ fixed_files.append(new_path)
1311
+ except Exception:
1312
+ # Not a FITS file or can't open, keep original
1313
+ fixed_files.append(file_path)
1314
+ else:
1315
+ fixed_files.append(file_path)
1316
+ downloaded_files = fixed_files
1317
+
1318
+ if not skip_calibration and not downloaded_files:
1319
+ print("Warning: No files were downloaded to calibrate.")
1320
+ return []
+    # Calibration for SOHO data
+    if not skip_calibration and len(downloaded_files) > 0:
+        calibrated_files = []
+
+        if instrument == "EIT":
+            print("Performing EIT Level 1.5 calibration...")
+            for file_path in downloaded_files:
+                try:
+                    eit_map = Map(file_path)
+                    base_name = os.path.basename(file_path)
+                    print(f"Processing {base_name}...")
+
+                    # Step 1: Rotate to solar north using SC_ROLL (EIT-specific)
+                    crota = float(eit_map.meta.get("sc_roll", eit_map.meta.get("crota", eit_map.meta.get("crota2", 0.0))))
+                    if abs(crota) > 0.01:
+                        # Convert to float and use NaN for missing pixels (displays as transparent)
+                        import numpy as np
+                        float_data = eit_map.data.astype(np.float64)
+                        eit_map = Map(float_data, eit_map.meta)
+                        eit_map = eit_map.rotate(angle=-crota * u.deg, recenter=True, missing=np.nan)
+                        print(f" - Rotated {-crota:.2f}° to solar north")
+
+                    # Step 2: Fix WCS metadata for solarviewer compatibility
+                    # EIT uses "Solar-X/Solar-Y" which needs to be HPLN-TAN/HPLT-TAN
+                    meta = eit_map.meta.copy()
+                    if meta.get('ctype1', '').lower() in ['solar-x', 'solar_x', '']:
+                        meta['ctype1'] = 'HPLN-TAN'
+                        meta['ctype2'] = 'HPLT-TAN'
+                        if meta.get('cunit1') is None:
+                            meta['cunit1'] = 'arcsec'
+                            meta['cunit2'] = 'arcsec'
+                        # Negate CDELT1 to correct Solar-X direction after rotation
+                        meta['cdelt1'] = -abs(meta.get('cdelt1', 2.63))
+                        eit_map = Map(eit_map.data, meta)
+                        print(f" - Fixed WCS (HPLN-TAN, arcsec, Solar-X corrected)")
+
+                    # Step 3: Normalize by exposure time
+                    # (exposure_time can be None if EXPTIME is missing, so guard on that rather than hasattr)
+                    exptime = eit_map.exposure_time.value if eit_map.exposure_time is not None else 0
+                    if exptime > 0:
+                        eit_map = eit_map / eit_map.exposure_time
+                        print(f" - Normalized by exposure time ({exptime:.2f}s)")
+
+                    # Save calibrated file
+                    output_file = os.path.join(output_dir, f"eit_lev1_5_{base_name}")
+                    # Ensure .fits extension
+                    if not output_file.endswith('.fits'):
+                        output_file = output_file + '.fits'
+                    eit_map.save(output_file, overwrite=True)
+                    print(f" - Saved as {os.path.basename(output_file)}")
+
+                    # Remove the original and keep the calibrated file
+                    if os.path.exists(output_file) and file_path != output_file:
+                        os.remove(file_path)
+                    calibrated_files.append(output_file)
+                except Exception as e:
+                    print(f"Warning: Could not calibrate {os.path.basename(file_path)}: {e}")
+                    calibrated_files.append(file_path)
+            downloaded_files = calibrated_files
+
+        elif instrument == "LASCO":
+            print("Performing LASCO basic calibration...")
+            for file_path in downloaded_files:
+                try:
+                    lasco_map = Map(file_path)
+                    base_name = os.path.basename(file_path)
+
+                    # LASCO calibration is complex (stray light, F-corona, vignetting);
+                    # we only do basic exposure normalization here
+                    exptime = lasco_map.exposure_time.value if lasco_map.exposure_time is not None else 0
+                    if exptime > 0:
+                        lasco_map = lasco_map / lasco_map.exposure_time
+
+                        output_file = os.path.join(output_dir, f"lasco_cal_{base_name}")
+                        lasco_map.save(output_file, overwrite=True)
+                        print(f"Calibrated {base_name} (exposure normalized)")
+
+                        if os.path.exists(output_file):
+                            os.remove(file_path)
+                            calibrated_files.append(output_file)
+                        else:
+                            calibrated_files.append(file_path)
+                    else:
+                        calibrated_files.append(file_path)
+                except Exception as e:
+                    print(f"Warning: Could not process {os.path.basename(file_path)}: {e}")
+                    calibrated_files.append(file_path)
+            downloaded_files = calibrated_files
+            print("Note: For full LASCO calibration (stray light, F-corona removal),")
+            print("      additional specialized tools are required.")
+
+    print(f"Successfully downloaded {len(downloaded_files)} SOHO/{instrument} files.")
+    return downloaded_files
+
+
+
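As a quick sanity check on the EIT Level 1.5 output, a saved file can be reopened and its corrected WCS keywords inspected. This is a minimal sketch, not part of the package: the directory and filename are illustrative (only the eit_lev1_5_ prefix comes from the code above).

# Verification sketch (illustrative paths; assumes a calibrated EIT file already exists)
import os
from sunpy.map import Map

calibrated = Map(os.path.join("downloads", "eit_lev1_5_example.fits"))
# After Step 2 above, CTYPE should be HPLN-TAN/HPLT-TAN, CUNIT arcsec, and CDELT1 negative
print(calibrated.meta.get("ctype1"), calibrated.meta.get("ctype2"))
print(calibrated.meta.get("cunit1"), calibrated.meta.get("cdelt1"))
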
+def download_goes_suvi(
+    start_time,
+    end_time,
+    output_dir,
+    wavelength=None,
+    level="2",
+):
+    """
+    Download GOES SUVI (Solar Ultraviolet Imager) data for a given time range.
+
+    SUVI is the EUV imager on the GOES-16/17/18 satellites, providing
+    similar coverage to SDO/AIA but from geostationary orbit.
+
+    Args:
+        start_time (str): Start time in 'YYYY.MM.DD HH:MM:SS' format
+        end_time (str): End time in 'YYYY.MM.DD HH:MM:SS' format
+        output_dir (str): Directory to save downloaded files
+        wavelength (int, optional): Wavelength in Angstroms (94, 131, 171, 195, 284, 304)
+        level (str): Data level ('1b' or '2')
+
+    Returns:
+        list: Paths to downloaded FITS files
+    """
+    try:
+        from sunpy.net import Fido, attrs as a
+    except ImportError:
+        print("Error: SunPy not installed or not properly configured.")
+        return []
+
+    # Create output directory if it doesn't exist
+    if not os.path.isdir(output_dir):
+        os.makedirs(output_dir)
+
+    # Parse the time strings
+    start_dt = datetime.strptime(start_time, "%Y.%m.%d %H:%M:%S")
+    end_dt = datetime.strptime(end_time, "%Y.%m.%d %H:%M:%S")
+
+    print(f"Searching for GOES SUVI data from {start_time} to {end_time}")
+
+    try:
+        # Build query
+        query_args = [
+            a.Time(start_dt, end_dt),
+            a.Instrument("SUVI"),
+            a.Level(level),
+        ]
+
+        if wavelength is not None:
+            wl = int(wavelength)
+            query_args.append(a.Wavelength((wl - 1) * u.angstrom, (wl + 1) * u.angstrom))
+
+        result = Fido.search(*query_args)
+
+        # Count total files across all result tables
+        total_files = sum(len(r) for r in result) if len(result) > 0 else 0
+
+        if total_files == 0:
+            print("No data found for the specified parameters.")
+            return []
+
+        print(f"Found {total_files} files. Downloading...")
+
+        # Download the files
+        downloaded = Fido.fetch(result, path=output_dir + "/{file}")
+    except Exception as e:
+        print(f"Error during Fido search/fetch: {str(e)}")
+        return []
+
+    # Post-process SUVI files to fix WCS for solarviewer compatibility
+    # SUVI files store data in compressed HDU 1, but solarviewer reads HDU 0
+    downloaded_files = []
+    for file_path in downloaded:
+        file_path = str(file_path)
+        try:
+            # Use sunpy to load the file correctly (handles compressed HDU)
+            suvi_map = Map(file_path)
+
+            # Create output filename
+            base_name = os.path.basename(file_path)
+            processed_file = os.path.join(output_dir, f"processed_{base_name}")
+
+            # Save as standard FITS with WCS in HDU 0
+            suvi_map.save(processed_file, overwrite=True)
+
+            # Remove original compressed file
+            if os.path.exists(processed_file) and processed_file != file_path:
+                os.remove(file_path)
+                print(f"Processed {base_name} for solarviewer compatibility")
+                downloaded_files.append(processed_file)
+            else:
+                downloaded_files.append(file_path)
+        except Exception as e:
+            print(f"Warning: Could not process {os.path.basename(file_path)}: {e}")
+            downloaded_files.append(file_path)
+
+    print(f"Successfully downloaded {len(downloaded_files)} GOES SUVI files.")
+    return downloaded_files
+
+
+
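A hedged usage sketch follows: it calls download_goes_suvi for a short interval and then inspects the first processed file with astropy to confirm the data and WCS now sit in the primary HDU. The time range, wavelength, and output directory are illustrative, not package defaults.

# Illustrative call of download_goes_suvi (defined above); arguments are examples only
from astropy.io import fits

files = download_goes_suvi(
    "2023.01.15 12:00:00", "2023.01.15 12:10:00",
    output_dir="suvi_downloads", wavelength=171, level="2",
)
if files:
    with fits.open(files[0]) as hdul:
        # After post-processing, the image and WCS should live in HDU 0
        print(hdul[0].header.get("CTYPE1"), hdul[0].data.shape)
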
+def download_stereo(
+    start_time,
+    end_time,
+    output_dir,
+    spacecraft="A",
+    instrument="EUVI",
+    wavelength=None,
+):
+    """
+    Download STEREO SECCHI data for a given time range.
+
+    STEREO consists of two spacecraft (A and B) providing stereoscopic
+    views of solar activity. Contact with STEREO-B was lost in 2014.
+
+    Args:
+        start_time (str): Start time in 'YYYY.MM.DD HH:MM:SS' format
+        end_time (str): End time in 'YYYY.MM.DD HH:MM:SS' format
+        output_dir (str): Directory to save downloaded files
+        spacecraft (str): 'A' or 'B' (B only available until 2014)
+        instrument (str): 'EUVI' (EUV), 'COR1' (inner coronagraph),
+            'COR2' (outer coronagraph), 'HI1', 'HI2' (heliospheric imagers)
+        wavelength (int, optional): For EUVI - 171, 195, 284, or 304 Angstroms
+
+    Returns:
+        list: Paths to downloaded FITS files
+    """
+    try:
+        from sunpy.net import Fido, attrs as a
+    except ImportError:
+        print("Error: SunPy not installed or not properly configured.")
+        return []
+
+    # Create output directory if it doesn't exist
+    if not os.path.isdir(output_dir):
+        os.makedirs(output_dir)
+
+    # Parse the time strings
+    start_dt = datetime.strptime(start_time, "%Y.%m.%d %H:%M:%S")
+    end_dt = datetime.strptime(end_time, "%Y.%m.%d %H:%M:%S")
+
+    # Normalize inputs
+    spacecraft = spacecraft.upper()
+    instrument = instrument.upper()
+    source = f"STEREO_{spacecraft}"
+
+    print(f"Searching for {source}/{instrument} data from {start_time} to {end_time}")
+
+    try:
+        # Build query
+        query_args = [
+            a.Time(start_dt, end_dt),
+            a.Source(source),
+            a.Instrument("SECCHI"),
+            a.Detector(instrument),
+        ]
+
+        if instrument == "EUVI" and wavelength is not None:
+            wl = int(wavelength)
+            query_args.append(a.Wavelength((wl - 1) * u.angstrom, (wl + 1) * u.angstrom))
+
+        result = Fido.search(*query_args)
+
+        # Count total files across all result tables
+        total_files = sum(len(r) for r in result) if len(result) > 0 else 0
+
+        if total_files == 0:
+            print("No data found for the specified parameters.")
+            return []
+
+        print(f"Found {total_files} files. Downloading...")
+
+        # Download the files
+        downloaded = Fido.fetch(result, path=output_dir + "/{file}")
+    except Exception as e:
+        print(f"Error during Fido search/fetch: {str(e)}")
+        return []
+
+    downloaded_files = [str(file_path) for file_path in downloaded]
+    print(f"Successfully downloaded {len(downloaded_files)} {source}/{instrument} files.")
+    return downloaded_files
+
+
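A hedged usage sketch for download_stereo, requesting STEREO-A EUVI 195 Angstrom data over a short window; the date, window length, and output directory are illustrative values chosen for this example.

# Illustrative call of download_stereo (defined above); spacecraft/instrument/wavelength
# combinations follow the docstring
euvi_files = download_stereo(
    "2012.07.12 16:00:00", "2012.07.12 16:30:00",
    output_dir="stereo_downloads", spacecraft="A",
    instrument="EUVI", wavelength=195,
)
print(f"{len(euvi_files)} STEREO-A/EUVI files on disk")
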
+def download_gong(
+    start_time,
+    end_time,
+    output_dir,
+):
+    """
+    Download GONG (Global Oscillation Network Group) magnetogram data.
+
+    GONG provides ground-based magnetogram observations with continuous
+    coverage from a network of stations around the world.
+
+    Args:
+        start_time (str): Start time in 'YYYY.MM.DD HH:MM:SS' format
+        end_time (str): End time in 'YYYY.MM.DD HH:MM:SS' format
+        output_dir (str): Directory to save downloaded files
+
+    Returns:
+        list: Paths to downloaded FITS files
+    """
+    try:
+        from sunpy.net import Fido, attrs as a
+    except ImportError:
+        print("Error: SunPy not installed or not properly configured.")
+        return []
+
+    # Create output directory if it doesn't exist
+    if not os.path.isdir(output_dir):
+        os.makedirs(output_dir)
+
+    # Parse the time strings
+    start_dt = datetime.strptime(start_time, "%Y.%m.%d %H:%M:%S")
+    end_dt = datetime.strptime(end_time, "%Y.%m.%d %H:%M:%S")
+
+    print(f"Searching for GONG magnetogram data from {start_time} to {end_time}")
+
+    try:
+        # GONG data query
+        result = Fido.search(
+            a.Time(start_dt, end_dt),
+            a.Instrument("GONG"),
+            a.Physobs("LOS_magnetic_field"),
+        )
+
+        # Count total files across all result tables
+        total_files = sum(len(r) for r in result) if len(result) > 0 else 0
+
+        if total_files == 0:
+            print("No data found for the specified parameters.")
+            return []
+
+        print(f"Found {total_files} files. Downloading...")
+
+        # Download the files
+        downloaded = Fido.fetch(result, path=output_dir + "/{file}")
+    except Exception as e:
+        print(f"Error during Fido search/fetch: {str(e)}")
+        return []
+
+    downloaded_files = [str(file_path) for file_path in downloaded]
+    print(f"Successfully downloaded {len(downloaded_files)} GONG magnetogram files.")
+    return downloaded_files
+
+
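A hedged usage sketch for download_gong: GONG line-of-sight magnetograms are selected only by time range, so no wavelength argument is needed. The one-hour window and output directory are illustrative.

# Illustrative call of download_gong (defined above)
gong_files = download_gong(
    "2023.03.01 00:00:00", "2023.03.01 01:00:00",
    output_dir="gong_downloads",
)
for path in gong_files:
    print(path)
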
+if __name__ == "__main__":
+    import sys
+
+    sys.exit(main())