ECOv003-L2T-STARS 1.2.0__py3-none-any.whl → 1.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (31)
  1. ECOv003_L2T_STARS/L2T_STARS.py +9 -24
  2. ECOv003_L2T_STARS/LPDAAC/LPDAACDataPool.py +42 -40
  3. ECOv003_L2T_STARS/Project.toml +0 -4
  4. ECOv003_L2T_STARS/VIIRS/VNP09GA.py +5 -4
  5. ECOv003_L2T_STARS/VNP43NRT/VNP43NRT.py +9 -4
  6. ECOv003_L2T_STARS/__init__.py +1 -1
  7. ECOv003_L2T_STARS/cksum.py +66 -0
  8. ECOv003_L2T_STARS/constants.py +1 -0
  9. ECOv003_L2T_STARS/exceptions.py +2 -0
  10. ECOv003_L2T_STARS/generate_STARS_inputs.py +105 -103
  11. ECOv003_L2T_STARS/generate_downsampled_filename.py +20 -0
  12. ECOv003_L2T_STARS/{instantiate_STARS_jl.py → instantiate_STARSDataFusion_jl.py} +3 -3
  13. ECOv003_L2T_STARS/load_prior.py +4 -4
  14. ECOv003_L2T_STARS/login.py +61 -0
  15. ECOv003_L2T_STARS/{ECOv003_L2T_STARS.py → main.py} +7 -0
  16. ECOv003_L2T_STARS/process_ECOSTRESS_data_fusion_distributed_bias.jl +19 -26
  17. ECOv003_L2T_STARS/process_STARS_product.py +26 -40
  18. ECOv003_L2T_STARS/process_julia_data_fusion.py +10 -8
  19. {ecov003_l2t_stars-1.2.0.dist-info → ecov003_l2t_stars-1.4.0.dist-info}/METADATA +1 -2
  20. {ecov003_l2t_stars-1.2.0.dist-info → ecov003_l2t_stars-1.4.0.dist-info}/RECORD +24 -26
  21. ecov003_l2t_stars-1.4.0.dist-info/entry_points.txt +3 -0
  22. ECOv003_L2T_STARS/VIIRS/VIIRS_CMR_LOGIN.py +0 -36
  23. ECOv003_L2T_STARS/generate_NDVI_coarse_directory.py +0 -21
  24. ECOv003_L2T_STARS/generate_NDVI_fine_directory.py +0 -14
  25. ECOv003_L2T_STARS/generate_albedo_coarse_directory.py +0 -18
  26. ECOv003_L2T_STARS/generate_albedo_fine_directory.py +0 -17
  27. ECOv003_L2T_STARS/install_STARS_jl.py +0 -43
  28. ecov003_l2t_stars-1.2.0.dist-info/entry_points.txt +0 -3
  29. {ecov003_l2t_stars-1.2.0.dist-info → ecov003_l2t_stars-1.4.0.dist-info}/WHEEL +0 -0
  30. {ecov003_l2t_stars-1.2.0.dist-info → ecov003_l2t_stars-1.4.0.dist-info}/licenses/LICENSE +0 -0
  31. {ecov003_l2t_stars-1.2.0.dist-info → ecov003_l2t_stars-1.4.0.dist-info}/top_level.txt +0 -0
@@ -1,6 +1,7 @@
1
1
  from typing import Union
2
2
  from datetime import date, datetime
3
3
  from dateutil.rrule import rrule, DAILY
4
+ from os.path import exists
4
5
  import logging
5
6
 
6
7
  import colored_logging as cl
@@ -16,6 +17,7 @@ from .generate_NDVI_coarse_image import generate_NDVI_coarse_image
16
17
  from .generate_NDVI_fine_image import generate_NDVI_fine_image
17
18
  from .generate_albedo_coarse_image import generate_albedo_coarse_image
18
19
  from .generate_albedo_fine_image import generate_albedo_fine_image
20
+ from .generate_downsampled_filename import generate_downsampled_filename
19
21
  from .calibrate_fine_to_coarse import calibrate_fine_to_coarse
20
22
  from .VIIRS.VIIRSDownloader import VIIRSDownloaderAlbedo, VIIRSDownloaderNDVI
21
23
 
@@ -33,11 +35,7 @@ def generate_STARS_inputs(
33
35
  target_resolution: int,
34
36
  NDVI_coarse_geometry: RasterGeometry,
35
37
  albedo_coarse_geometry: RasterGeometry,
36
- working_directory: str,
37
- NDVI_coarse_directory: str,
38
- NDVI_fine_directory: str,
39
- albedo_coarse_directory: str,
40
- albedo_fine_directory: str,
38
+ downsampled_directory: str,
41
39
  HLS_connection: HLS2CMR,
42
40
  NDVI_VIIRS_connection: VIIRSDownloaderNDVI,
43
41
  albedo_VIIRS_connection: VIIRSDownloaderAlbedo,
@@ -80,68 +78,83 @@ def generate_STARS_inputs(
80
78
  """
81
79
  missing_coarse_dates = set() # Track dates where coarse data could not be generated
82
80
 
81
+ logger.info(f"preparing coarse and fine images for STARS at {cl.place(tile)}")
82
+
83
83
  # Process each day within the VIIRS data fusion window
84
84
  for processing_date in [
85
85
  get_date(dt) for dt in rrule(DAILY, dtstart=VIIRS_start_date, until=VIIRS_end_date)
86
86
  ]:
87
- logger.info(
88
- f"Preparing coarse image for STARS NDVI at {cl.place(tile)} on {cl.time(processing_date)}"
87
+ NDVI_coarse_filename = generate_downsampled_filename(
88
+ directory=downsampled_directory,
89
+ variable="NDVI",
90
+ date_UTC=processing_date,
91
+ tile=tile,
92
+ cell_size=NDVI_resolution
93
+ )
94
+
95
+ NDVI_fine_filename = generate_downsampled_filename(
96
+ directory=downsampled_directory,
97
+ variable="NDVI",
98
+ date_UTC=processing_date,
99
+ tile=tile,
100
+ cell_size=target_resolution
101
+ )
102
+
103
+ albedo_coarse_filename = generate_downsampled_filename(
104
+ directory=downsampled_directory,
105
+ variable="albedo",
106
+ date_UTC=processing_date,
107
+ tile=tile,
108
+ cell_size=albedo_resolution
109
+ )
110
+
111
+ albedo_fine_filename = generate_downsampled_filename(
112
+ directory=downsampled_directory,
113
+ variable="albedo",
114
+ date_UTC=processing_date,
115
+ tile=tile,
116
+ cell_size=target_resolution
89
117
  )
90
118
 
91
119
  try:
92
- # Generate coarse NDVI image
93
- NDVI_coarse_image = generate_NDVI_coarse_image(
94
- date_UTC=processing_date,
95
- VIIRS_connection=NDVI_VIIRS_connection,
96
- geometry=NDVI_coarse_geometry,
97
- )
120
+ # Cache whether the coarse NDVI file already exists, to avoid a TOCTOU (time-of-check/time-of-use) race
121
+ NDVI_coarse_exists = exists(NDVI_coarse_filename)
122
+ if not NDVI_coarse_exists:
123
+ logger.info(f"preparing coarse image for STARS NDVI at {cl.place(tile)} on {cl.time(processing_date)}")
124
+
125
+ NDVI_coarse_image = generate_NDVI_coarse_image(
126
+ date_UTC=processing_date,
127
+ VIIRS_connection=NDVI_VIIRS_connection,
128
+ geometry=NDVI_coarse_geometry
129
+ )
98
130
 
99
- # Define filename for coarse NDVI and save
100
- NDVI_coarse_filename = generate_filename(
101
- directory=NDVI_coarse_directory,
102
- variable="NDVI",
103
- date_UTC=processing_date,
104
- tile=tile,
105
- cell_size=NDVI_resolution,
106
- )
107
- logger.info(
108
- f"Saving coarse image for STARS NDVI at {cl.place(tile)} on {cl.time(processing_date)}: {NDVI_coarse_filename}"
109
- )
110
- NDVI_coarse_image.to_geotiff(NDVI_coarse_filename)
131
+ logger.info(
132
+ f"saving coarse image for STARS NDVI at {cl.place(tile)} on {cl.time(processing_date)}: {NDVI_coarse_filename}")
133
+ NDVI_coarse_image.to_geotiff(NDVI_coarse_filename)
111
134
 
112
- # If the processing date is within the HLS range, generate fine NDVI
113
135
  if processing_date >= HLS_start_date:
114
- logger.info(
115
- f"Preparing fine image for STARS NDVI at {cl.place(tile)} on {cl.time(processing_date)}"
116
- )
117
136
  try:
118
- NDVI_fine_image = generate_NDVI_fine_image(
119
- date_UTC=processing_date,
120
- tile=tile,
121
- HLS_connection=HLS_connection,
122
- )
123
-
124
- # Optionally calibrate the fine NDVI image to the coarse NDVI image
125
- if calibrate_fine:
137
+ if not exists(NDVI_fine_filename):
126
138
  logger.info(
127
- f"Calibrating fine image for STARS NDVI at {cl.place(tile)} on {cl.time(processing_date)}"
128
- )
129
- NDVI_fine_image = calibrate_fine_to_coarse(
130
- NDVI_fine_image, NDVI_coarse_image
139
+ f"preparing fine image for STARS NDVI at {cl.place(tile)} on {cl.time(processing_date)}")
140
+
141
+ NDVI_fine_image = generate_NDVI_fine_image(
142
+ date_UTC=processing_date,
143
+ tile=tile,
144
+ HLS_connection=HLS_connection
131
145
  )
132
146
 
133
- # Define filename for fine NDVI and save
134
- NDVI_fine_filename = generate_filename(
135
- directory=NDVI_fine_directory,
136
- variable="NDVI",
137
- date_UTC=processing_date,
138
- tile=tile,
139
- cell_size=target_resolution,
140
- )
141
- logger.info(
142
- f"Saving fine image for STARS NDVI at {cl.place(tile)} on {cl.time(processing_date)}: {NDVI_fine_filename}"
143
- )
144
- NDVI_fine_image.to_geotiff(NDVI_fine_filename)
147
+ if calibrate_fine:
148
+ # Ensure that the NDVI_coarse_image variable is set
149
+ if NDVI_coarse_exists:
150
+ NDVI_coarse_image = Raster.open(NDVI_coarse_filename)
151
+ logger.info(
152
+ f"calibrating fine image for STARS NDVI at {cl.place(tile)} on {cl.time(processing_date)}")
153
+ NDVI_fine_image = calibrate_fine_to_coarse(NDVI_fine_image, NDVI_coarse_image)
154
+
155
+ logger.info(
156
+ f"saving fine image for STARS NDVI at {cl.place(tile)} on {cl.time(processing_date)}: {NDVI_fine_filename}")
157
+ NDVI_fine_image.to_geotiff(NDVI_fine_filename)
145
158
  except Exception: # Catch any exception during HLS fine image generation
146
159
  logger.info(f"HLS NDVI is not available on {processing_date}")
147
160
  except Exception as e:
@@ -151,63 +164,47 @@ def generate_STARS_inputs(
151
164
  )
152
165
  missing_coarse_dates.add(processing_date) # Add date to missing set
153
166
 
154
- logger.info(
155
- f"Preparing coarse image for STARS albedo at {cl.place(tile)} on {cl.time(processing_date)}"
156
- )
157
167
  try:
158
- # Generate coarse albedo image
159
- albedo_coarse_image = generate_albedo_coarse_image(
160
- date_UTC=processing_date,
161
- VIIRS_connection=albedo_VIIRS_connection,
162
- geometry=albedo_coarse_geometry,
163
- )
168
+ # Cache whether the coarse albedo file already exists, to avoid a TOCTOU (time-of-check/time-of-use) race
169
+ albedo_coarse_exists = exists(albedo_coarse_filename)
170
+ if not albedo_coarse_exists:
171
+ logger.info(
172
+ f"preparing coarse image for STARS albedo at {cl.place(tile)} on {cl.time(processing_date)}")
164
173
 
165
- # Define filename for coarse albedo and save
166
- albedo_coarse_filename = generate_filename(
167
- directory=albedo_coarse_directory,
168
- variable="albedo",
169
- date_UTC=processing_date,
170
- tile=tile,
171
- cell_size=albedo_resolution,
172
- )
173
- logger.info(
174
- f"Saving coarse image for STARS albedo at {cl.place(tile)} on {cl.time(processing_date)}: {albedo_coarse_filename}"
175
- )
176
- albedo_coarse_image.to_geotiff(albedo_coarse_filename)
174
+ albedo_coarse_image = generate_albedo_coarse_image(
175
+ date_UTC=processing_date,
176
+ VIIRS_connection=albedo_VIIRS_connection,
177
+ geometry=albedo_coarse_geometry
178
+ )
177
179
 
178
- # If the processing date is within the HLS range, generate fine albedo
179
- if processing_date >= HLS_start_date:
180
180
  logger.info(
181
- f"Preparing fine image for STARS albedo at {cl.place(tile)} on {cl.time(processing_date)}"
182
- )
181
+ f"saving coarse image for STARS albedo at {cl.place(tile)} on {cl.time(processing_date)}: {albedo_coarse_filename}")
182
+ albedo_coarse_image.to_geotiff(albedo_coarse_filename)
183
+
184
+ if processing_date >= HLS_start_date:
183
185
  try:
184
- albedo_fine_image = generate_albedo_fine_image(
185
- date_UTC=processing_date,
186
- tile=tile,
187
- HLS_connection=HLS_connection,
188
- )
189
-
190
- # Optionally calibrate the fine albedo image to the coarse albedo image
191
- if calibrate_fine:
186
+ if not exists(albedo_fine_filename):
192
187
  logger.info(
193
- f"Calibrating fine image for STARS albedo at {cl.place(tile)} on {cl.time(processing_date)}"
194
- )
195
- albedo_fine_image = calibrate_fine_to_coarse(
196
- albedo_fine_image, albedo_coarse_image
188
+ f"preparing fine image for STARS albedo at {cl.place(tile)} on {cl.time(processing_date)}")
189
+
190
+ albedo_fine_image = generate_albedo_fine_image(
191
+ date_UTC=processing_date,
192
+ tile=tile,
193
+ HLS_connection=HLS_connection
197
194
  )
198
195
 
199
- # Define filename for fine albedo and save
200
- albedo_fine_filename = generate_filename(
201
- directory=albedo_fine_directory,
202
- variable="albedo",
203
- date_UTC=processing_date,
204
- tile=tile,
205
- cell_size=target_resolution,
206
- )
207
- logger.info(
208
- f"Saving fine image for STARS albedo at {cl.place(tile)} on {cl.time(processing_date)}: {albedo_fine_filename}"
209
- )
210
- albedo_fine_image.to_geotiff(albedo_fine_filename)
196
+ if calibrate_fine:
197
+ # Ensure that the albedo_coarse_image variable is set
198
+ if albedo_coarse_exists:
199
+ albedo_coarse_image = Raster.open(albedo_coarse_filename)
200
+
201
+ logger.info(
202
+ f"calibrating fine image for STARS albedo at {cl.place(tile)} on {cl.time(processing_date)}")
203
+ albedo_fine_image = calibrate_fine_to_coarse(albedo_fine_image, albedo_coarse_image)
204
+
205
+ logger.info(
206
+ f"saving fine image for STARS albedo at {cl.place(tile)} on {cl.time(processing_date)}: {albedo_fine_filename}")
207
+ albedo_fine_image.to_geotiff(albedo_fine_filename)
211
208
  except Exception: # Catch any exception during HLS fine image generation
212
209
  logger.info(f"HLS albedo is not available on {processing_date}")
213
210
  except Exception as e:
@@ -217,7 +214,12 @@ def generate_STARS_inputs(
217
214
  )
218
215
  missing_coarse_dates.add(processing_date) # Add date to missing set
219
216
 
220
- # Check for missing coarse dates within the give-up window
217
+ # We need to deal with the possibility that VIIRS has not yet published its data.
218
+ # VIIRS_GIVEUP_DAYS is the number of days after which we assume that missing observations aren't coming.
219
+ # If any missing days are closer to now than VIIRS_GIVEUP_DAYS, we want to retry this run later, when VIIRS
220
+ # might have uploaded the missing observations. To cause this retry, we'll throw the `AncillaryLatency` exception.
221
+ # L2T_STARS converts this exception to an exit code, and the orchestration system marks this run
222
+ # as needing a retry at a later date.
221
223
  coarse_latency_dates = [
222
224
  d
223
225
  for d in missing_coarse_dates
@@ -0,0 +1,20 @@
1
+ from os import makedirs
2
+ from os.path import join, dirname
3
+ from dateutil import parser
4
+ from datetime import date
5
+
6
+ from typing import Union
7
+
8
+ def generate_downsampled_filename(directory: str, variable: str, date_UTC: Union[date, str], tile: str, cell_size: int) -> str:
9
+ if isinstance(date_UTC, str):
10
+ date_UTC = parser.parse(date_UTC).date()
11
+
12
+ variable = str(variable)
13
+ year = str(date_UTC.year)
14
+ timestamp = date_UTC.strftime("%Y-%m-%d")
15
+ tile = str(tile)
16
+ cell_size = int(cell_size)
17
+ filename = join(directory, year, timestamp, tile, f"STARS_{variable}_{tile}_{cell_size}m.tif")
18
+ makedirs(dirname(filename), exist_ok=True)
19
+
20
+ return filename
@@ -3,11 +3,11 @@ import logging
3
3
 
4
4
  logger = logging.getLogger(__name__)
5
5
 
6
- def instantiate_STARS_jl(package_location: str) -> subprocess.CompletedProcess:
6
+ def instantiate_STARSDataFusion_jl(package_location: str) -> subprocess.CompletedProcess:
7
7
  """
8
8
  Activates a Julia project at a given location and instantiates its dependencies.
9
9
 
10
- This is necessary to ensure all required Julia packages for STARS.jl are
10
+ This is necessary to ensure all required Julia packages for STARSDataFusion.jl are
11
11
  downloaded and ready for use within the specified project environment.
12
12
 
13
13
  Args:
@@ -30,7 +30,7 @@ def instantiate_STARS_jl(package_location: str) -> subprocess.CompletedProcess:
30
30
 
31
31
  if result.returncode == 0:
32
32
  logger.info(
33
- f"STARS.jl instantiated successfully in directory '{package_location}'!"
33
+ f"STARSDataFusion.jl instantiated successfully in directory '{package_location}'!"
34
34
  )
35
35
  else:
36
36
  logger.error("Error instantiating STARS.jl:")
@@ -98,7 +98,7 @@ def load_prior(
98
98
  cell_size=target_resolution,
99
99
  )
100
100
  # Assuming L2T_STARS_prior_granule has a .NDVI_bias attribute
101
- if hasattr(L2T_STARS_prior_granule, "NDVI_bias") and L2T_STARS_prior_granule.NDVI_bias is not None:
101
+ if L2T_STARS_prior_granule.NDVI_bias is not None:
102
102
  L2T_STARS_prior_granule.NDVI_bias.to_geotiff(prior_NDVI_bias_filename)
103
103
  else:
104
104
  prior_NDVI_bias_filename = None # Set to None if not available
@@ -111,7 +111,7 @@ def load_prior(
111
111
  cell_size=target_resolution,
112
112
  )
113
113
  # Assuming L2T_STARS_prior_granule has a .NDVI_bias_UQ attribute
114
- if hasattr(L2T_STARS_prior_granule, "NDVI_bias_UQ") and L2T_STARS_prior_granule.NDVI_bias_UQ is not None:
114
+ if L2T_STARS_prior_granule.NDVI_bias_UQ is not None:
115
115
  L2T_STARS_prior_granule.NDVI_bias_UQ.to_geotiff(prior_NDVI_bias_UQ_filename)
116
116
  else:
117
117
  prior_NDVI_bias_UQ_filename = None # Set to None if not available
@@ -143,7 +143,7 @@ def load_prior(
143
143
  cell_size=target_resolution,
144
144
  )
145
145
  # Assuming L2T_STARS_prior_granule has a .albedo_bias attribute
146
- if hasattr(L2T_STARS_prior_granule, "albedo_bias") and L2T_STARS_prior_granule.albedo_bias is not None:
146
+ if L2T_STARS_prior_granule.albedo_bias is not None:
147
147
  L2T_STARS_prior_granule.albedo_bias.to_geotiff(prior_albedo_bias_filename)
148
148
  else:
149
149
  prior_albedo_bias_filename = None # Set to None if not available
@@ -156,7 +156,7 @@ def load_prior(
156
156
  cell_size=target_resolution,
157
157
  )
158
158
  # Assuming L2T_STARS_prior_granule has a .albedo_bias_UQ attribute
159
- if hasattr(L2T_STARS_prior_granule, "albedo_bias_UQ") and L2T_STARS_prior_granule.albedo_bias_UQ is not None:
159
+ if L2T_STARS_prior_granule.albedo_bias_UQ is not None:
160
160
  L2T_STARS_prior_granule.albedo_bias_UQ.to_geotiff(prior_albedo_bias_UQ_filename)
161
161
  else:
162
162
  prior_albedo_bias_UQ_filename = None # Set to None if not available
@@ -0,0 +1,61 @@
1
+ import logging
2
+ import netrc
3
+ import os
4
+
5
+ import earthaccess
6
+
7
+ from .exceptions import *
8
+
9
+ __author__ = "Evan Davis"
10
+
11
+ _AUTH = None
12
+
13
+ def login() -> earthaccess.Auth:
14
+ """
15
+ Login to Earthdata using environment variables if available, falling back to netrc credentials, then interactive login.
16
+ """
17
+ # Only login to earthaccess once
18
+ global _AUTH
19
+ if _AUTH is not None:
20
+ return _AUTH
21
+
22
+ # Check if we're in a testing environment where authentication should be skipped
23
+ if os.environ.get("SKIP_EARTHDATA_LOGIN", "").lower() in ("true", "1", "yes"):
24
+ # Return a mock auth object for testing
25
+ class MockAuth:
26
+ def __init__(self):
27
+ self.authenticated = True
28
+ _AUTH = MockAuth()
29
+ return _AUTH
30
+
31
+ # Temporarily suppress INFO logs from earthaccess during login
32
+ earthaccess_logger = logging.getLogger('earthaccess')
33
+ original_level = earthaccess_logger.level
34
+ earthaccess_logger.setLevel(logging.WARNING)
35
+
36
+ try:
37
+ # First priority: environment variables
38
+ if "EARTHDATA_USERNAME" in os.environ and "EARTHDATA_PASSWORD" in os.environ:
39
+ _AUTH = earthaccess.login(strategy="environment")
40
+ return _AUTH
41
+
42
+ # Second priority: netrc credentials
43
+ try:
44
+ secrets = netrc.netrc()
45
+ auth = secrets.authenticators("urs.earthdata.nasa.gov")
46
+ if auth:
47
+ _AUTH = earthaccess.login(strategy="netrc")
48
+ return _AUTH
49
+ except (FileNotFoundError, netrc.NetrcParseError):
50
+ # .netrc file doesn't exist or is malformed, continue to interactive login
51
+ pass
52
+
53
+ # Last resort: interactive login
54
+ _AUTH = earthaccess.login(strategy="interactive")
55
+ return _AUTH
56
+
57
+ except Exception as e:
58
+ raise CMRServerUnreachable(e)
59
+ finally:
60
+ # Restore original logging level
61
+ earthaccess_logger.setLevel(original_level)
@@ -115,6 +115,12 @@ def main():
115
115
  default=True,
116
116
  help="Do NOT remove posterior intermediate files after product generation.",
117
117
  )
118
+ parser.add_argument(
119
+ "--initialize-julia",
120
+ action="store_true",
121
+ dest="initialize_julia",
122
+ help="Initialize a julia environment before running julia.",
123
+ )
118
124
  parser.add_argument(
119
125
  "--threads",
120
126
  type=str,
@@ -157,6 +163,7 @@ def main():
157
163
  remove_input_staging=args.remove_input_staging,
158
164
  remove_prior=args.remove_prior,
159
165
  remove_posterior=args.remove_posterior,
166
+ initialize_julia=args.initialize_julia,
160
167
  threads=args.threads,
161
168
  num_workers=args.num_workers,
162
169
  overwrite=args.overwrite, # Pass the new overwrite argument
@@ -8,24 +8,8 @@ using STARSDataFusion.sentinel_tiles
8
8
  using STARSDataFusion.HLS
9
9
  using STARSDataFusion.VNP43
10
10
  using Logging
11
- using Pkg
12
11
  using Statistics
13
12
  using Distributed
14
- Pkg.add("OpenSSL")
15
- using HTTP
16
- using JSON
17
-
18
- function read_json(file::String)::Dict
19
- open(file, "r") do f
20
- return JSON.parse(f)
21
- end
22
- end
23
-
24
- function write_json(file::String, data::Dict)
25
- open(file, "w") do f
26
- JSON.print(f, data)
27
- end
28
- end
29
13
 
30
14
  @info "processing STARS data fusion"
31
15
 
@@ -78,10 +62,10 @@ HLS_start_date = Date(ARGS[7])
78
62
  @info "HLS start date: $(HLS_start_date)"
79
63
  HLS_end_date = Date(ARGS[8])
80
64
  @info "HLS end date: $(HLS_end_date)"
81
- coarse_directory = ARGS[9]
82
- @info "coarse inputs directory: $(coarse_directory)"
83
- fine_directory = ARGS[10]
84
- @info "fine inputs directory: $(fine_directory)"
65
+ downsampled_directory = ARGS[9]
66
+ @info "downsampled inputs directory: $(downsampled_directory)"
67
+ product_name = ARGS[10]
68
+ @info "Computing $(product_name) product"
85
69
  posterior_filename = ARGS[11]
86
70
  @info "posterior filename: $(posterior_filename)"
87
71
  posterior_UQ_filename = ARGS[12]
@@ -130,20 +114,29 @@ y_fine_size = size(y_fine)[1]
130
114
  @info "fine x size: $(x_fine_size)"
131
115
  @info "fine y size: $(y_fine_size)"
132
116
 
133
- coarse_image_filenames = sort(glob("*.tif", coarse_directory))
134
- coarse_dates_found = [Date(split(basename(filename), "_")[3]) for filename in coarse_image_filenames]
135
-
136
- fine_image_filenames = sort(glob("*.tif", fine_directory))
137
- fine_dates_found = [Date(split(basename(filename), "_")[3]) for filename in fine_image_filenames]
138
-
117
+ # The range of dates to check for VIIRS files
139
118
  coarse_start_date = VIIRS_start_date
140
119
  coarse_end_date = VIIRS_end_date
141
120
 
121
+ # Check each coarse date for a downsampled image
122
+ # For each day we find, convert the date directory back into a date object
123
+ coarse_dates = [coarse_start_date + Day(d - 1) for d in 1:((coarse_end_date - coarse_start_date).value + 1)]
124
+ coarse_image_filenames = [joinpath("$(downsampled_directory)", "$(year(date))", "$(Dates.format(date, dateformat"yyyy-mm-dd"))", "$(tile)", "STARS_$(product_name)_$(tile)_$(coarse_cell_size)m.tif") for date in coarse_dates]
125
+ coarse_image_filenames = [filename for filename in coarse_image_filenames if ispath(filename)]
126
+ coarse_dates_found = [Date(basename(dirname(dirname(filename)))) for filename in coarse_image_filenames]
127
+
128
+ # The range of dates to check for HLS files
142
129
  fine_flag_start_date = HLS_end_date - Day(7)
143
130
  fine_start_date = HLS_start_date
144
131
  fine_end_date = HLS_end_date
145
132
 
133
+ # Check each fine date for a downsampled image
134
+ # For each day we find, convert the date directory back into a date object
146
135
  dates = [fine_start_date + Day(d - 1) for d in 1:((fine_end_date - fine_start_date).value + 1)]
136
+ fine_image_filenames = [joinpath("$(downsampled_directory)", "$(year(date))", "$(Dates.format(date, dateformat"yyyy-mm-dd"))", "$(tile)", "STARS_$(product_name)_$(tile)_$(fine_cell_size)m.tif") for date in dates]
137
+ fine_image_filenames = [filename for filename in fine_image_filenames if ispath(filename)]
138
+ fine_dates_found = [Date(basename(dirname(dirname(filename)))) for filename in fine_image_filenames]
139
+
147
140
  t = Ti(dates)
148
141
  coarse_dims = (x_coarse, y_coarse, t)
149
142
  fine_dims = (x_fine, y_fine, t)