ECOv003-L2T-STARS 1.3.0__py3-none-any.whl → 1.4.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -35,10 +35,6 @@ from .VNP43NRT import VNP43NRT
35
35
  from .runconfig import ECOSTRESSRunConfig
36
36
  from .L2TSTARSConfig import L2TSTARSConfig
37
37
  from .load_prior import load_prior
38
- from .generate_NDVI_coarse_directory import generate_NDVI_coarse_directory
39
- from .generate_NDVI_fine_directory import generate_NDVI_fine_directory
40
- from .generate_albedo_coarse_directory import generate_albedo_coarse_directory
41
- from .generate_albedo_fine_directory import generate_albedo_fine_directory
42
38
  from .generate_STARS_inputs import generate_STARS_inputs
43
39
  from .process_STARS_product import process_STARS_product
44
40
  from .retrieve_STARS_sources import retrieve_STARS_sources
@@ -58,6 +54,7 @@ def L2T_STARS(
58
54
  remove_input_staging: bool = True,
59
55
  remove_prior: bool = True,
60
56
  remove_posterior: bool = True,
57
+ initialize_julia: bool = False,
61
58
  threads: Union[int, str] = "auto",
62
59
  num_workers: int = 4,
63
60
  overwrite: bool = False, # New parameter for overwriting existing files
@@ -240,6 +237,8 @@ def L2T_STARS(
240
237
  logger.info(f"VNP09GA products directory: {cl.dir(VNP09GA_products_directory)}")
241
238
  VNP43NRT_products_directory = join(sources_directory, DEFAULT_VNP43NRT_PRODUCTS_DIRECTORY)
242
239
  logger.info(f"VNP43NRT products directory: {cl.dir(VNP43NRT_products_directory)}")
240
+ DOWNSAMPLED_products_directory = join(sources_directory, DEFAULT_STARS_DOWNSAMPLED_DIRECTORY)
241
+ logger.info(f"DOWNSAMPLED products directory: {cl.dir(DOWNSAMPLED_products_directory)}")
243
242
 
244
243
  # Re-check for existing product (double-check in case another process created it) with overwrite option
245
244
  if not overwrite and exists(L2T_STARS_zip_filename):
@@ -283,6 +282,7 @@ def L2T_STARS(
283
282
  GEOS5FP_products=GEOS5FP_products_directory,
284
283
  VNP09GA_directory=VNP09GA_products_directory,
285
284
  VNP43NRT_directory=VNP43NRT_products_directory,
285
+ initialize_julia=initialize_julia,
286
286
  )
287
287
 
288
288
  albedo_VIIRS_connection = VNP43NRT(
@@ -293,6 +293,7 @@ def L2T_STARS(
293
293
  GEOS5FP_products=GEOS5FP_products_directory,
294
294
  VNP09GA_directory=VNP09GA_products_directory,
295
295
  VNP43NRT_directory=VNP43NRT_products_directory,
296
+ initialize_julia=initialize_julia,
296
297
  )
297
298
  except CMRServerUnreachable as e:
298
299
  logger.exception(e)
@@ -404,19 +405,6 @@ def L2T_STARS(
404
405
  NDVI_coarse_geometry = HLS_connection.grid(tile=tile, cell_size=NDVI_resolution)
405
406
  albedo_coarse_geometry = HLS_connection.grid(tile=tile, cell_size=albedo_resolution)
406
407
 
407
- NDVI_coarse_directory = generate_NDVI_coarse_directory(
408
- input_staging_directory=input_staging_directory, tile=tile
409
- )
410
- NDVI_fine_directory = generate_NDVI_fine_directory(
411
- input_staging_directory=input_staging_directory, tile=tile
412
- )
413
- albedo_coarse_directory = generate_albedo_coarse_directory(
414
- input_staging_directory=input_staging_directory, tile=tile
415
- )
416
- albedo_fine_directory = generate_albedo_fine_directory(
417
- input_staging_directory=input_staging_directory, tile=tile
418
- )
419
-
420
408
  generate_STARS_inputs(
421
409
  tile=tile,
422
410
  date_UTC=date_UTC,
@@ -429,11 +417,7 @@ def L2T_STARS(
429
417
  target_resolution=target_resolution,
430
418
  NDVI_coarse_geometry=NDVI_coarse_geometry,
431
419
  albedo_coarse_geometry=albedo_coarse_geometry,
432
- working_directory=working_directory,
433
- NDVI_coarse_directory=NDVI_coarse_directory,
434
- NDVI_fine_directory=NDVI_fine_directory,
435
- albedo_coarse_directory=albedo_coarse_directory,
436
- albedo_fine_directory=albedo_fine_directory,
420
+ downsampled_directory=DOWNSAMPLED_products_directory,
437
421
  HLS_connection=HLS_connection,
438
422
  NDVI_VIIRS_connection=NDVI_VIIRS_connection,
439
423
  albedo_VIIRS_connection=albedo_VIIRS_connection,
@@ -454,7 +438,7 @@ def L2T_STARS(
454
438
  NDVI_resolution=NDVI_resolution,
455
439
  albedo_resolution=albedo_resolution,
456
440
  target_resolution=target_resolution,
457
- working_directory=working_directory,
441
+ downsampled_directory=DOWNSAMPLED_products_directory,
458
442
  model_directory=model_directory,
459
443
  input_staging_directory=input_staging_directory,
460
444
  L2T_STARS_granule_directory=L2T_STARS_granule_directory,
@@ -470,6 +454,7 @@ def L2T_STARS(
470
454
  remove_input_staging=remove_input_staging,
471
455
  remove_prior=remove_prior,
472
456
  remove_posterior=remove_posterior,
457
+ initialize_julia=initialize_julia,
473
458
  threads=threads,
474
459
  num_workers=num_workers,
475
460
  )
@@ -3,12 +3,8 @@ Dates = "ade2ca70-3891-5945-98fb-dc099432e06a"
3
3
  DelimitedFiles = "8bb1440f-4735-579b-a4ab-409b98df4dab"
4
4
  Distributed = "8ba89e20-285c-5b6f-9357-94700520ee1b"
5
5
  Glob = "c27321d9-0574-5035-807b-f59d2c89b15c"
6
- HTTP = "cd3eb016-35fb-5094-929b-558a96fad6f3"
7
- JSON = "682c06a0-de6a-54ab-a142-c8b1cf79cde6"
8
6
  LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
9
7
  Logging = "56ddb016-857b-54e1-b83d-db4d58db5568"
10
- OpenSSL = "4d8831e6-92b7-49fb-bdf8-b643e874388c"
11
- Pkg = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
12
8
  Rasters = "a3a2b9e3-a471-40c9-b274-f788e487c689"
13
9
  STARSDataFusion = "70ccc657-289f-4534-a407-e03a16fd1153"
14
10
  Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
@@ -1096,7 +1096,7 @@ class VNP09GA:
1096
1096
 
1097
1097
  self.resampling = resampling
1098
1098
 
1099
- self._granules = pd.DataFrame({"date_UTC": {}, "tile": {}, "granule": {}})
1099
+ self._granules = pd.DataFrame(columns=["date_UTC", "tile", "granule"])
1100
1100
 
1101
1101
  if working_directory is None:
1102
1102
  working_directory = self.DEFAULT_WORKING_DIRECTORY
@@ -1233,7 +1233,7 @@ class VNP09GA:
1233
1233
  if "date_UTC" not in self._granules.columns:
1234
1234
  raise ValueError(f"date_UTC column not in granules table")
1235
1235
 
1236
- subset = self._granules[(self._granules.date_UTC == date_UTC) & (self._granules.tile == tile)]
1236
+ subset = self._granules[(self._granules['date_UTC'] == date_UTC) & (self._granules['tile'] == tile)]
1237
1237
  if len(subset) > 0:
1238
1238
  return subset.iloc[0].granule
1239
1239
 
@@ -1246,7 +1246,7 @@ class VNP09GA:
1246
1246
  if len(granules) == 0:
1247
1247
  return None
1248
1248
 
1249
- if len(granules) > 0:
1249
+ if len(granules) > 1:
1250
1250
  logger.warning("Found more VIIRS granules than expected")
1251
1251
 
1252
1252
  self.add_granules(granules)
@@ -106,12 +106,14 @@ def process_julia_BRDF(
106
106
  sensor_zenith_directory: str,
107
107
  relative_azimuth_directory: str,
108
108
  SZA_filename: str,
109
- output_directory: str):
109
+ output_directory: str,
110
+ initialize_julia: bool):
110
111
  parent_directory = abspath(join(dirname(__file__), ".."))
111
112
  julia_source_directory = join(parent_directory, "VNP43NRT_jl")
112
113
  julia_script_filename = join(abspath(dirname(__file__)), "process_VNP43NRT.jl")
113
114
 
114
- instantiate_VNP43NRT_jl(julia_source_directory)
115
+ if initialize_julia:
116
+ instantiate_VNP43NRT_jl(julia_source_directory)
115
117
 
116
118
  command = f'julia "{julia_script_filename}" "{band}" "{h}" "{v}" "{tile_width_cells}" "{start_date:%Y-%m-%d}" "{end_date:%Y-%m-%d}" "{reflectance_directory}" "{solar_zenith_directory}" "{sensor_zenith_directory}" "{relative_azimuth_directory}" "{SZA_filename}" "{output_directory}"'
117
119
  logger.info(command)
@@ -265,7 +267,8 @@ class VNP43NRT(VIIRSDownloaderAlbedo, VIIRSDownloaderNDVI):
265
267
  VNP43NRT_staging_directory: str = None,
266
268
  GEOS5FP_connection: GEOS5FP = None,
267
269
  GEOS5FP_download: str = None,
268
- GEOS5FP_products: str = None):
270
+ GEOS5FP_products: str = None,
271
+ initialize_julia: bool = False):
269
272
  if working_directory is None:
270
273
  working_directory = VNP09GA.DEFAULT_WORKING_DIRECTORY
271
274
 
@@ -302,6 +305,7 @@ class VNP43NRT(VIIRSDownloaderAlbedo, VIIRSDownloaderNDVI):
302
305
  self.VNP43NRT_directory = VNP43NRT_directory
303
306
  self.GEOS5FP = GEOS5FP_connection
304
307
  self.VNP43NRT_staging_directory = VNP43NRT_staging_directory
308
+ self.initialize_julia = initialize_julia
305
309
 
306
310
  def __repr__(self):
307
311
  display_dict = {
@@ -493,7 +497,8 @@ class VNP43NRT(VIIRSDownloaderAlbedo, VIIRSDownloaderNDVI):
493
497
  sensor_zenith_directory=sensor_zenith_directory,
494
498
  relative_azimuth_directory=relative_azimuth_directory,
495
499
  SZA_filename=SZA_filename,
496
- output_directory=output_directory
500
+ output_directory=output_directory,
501
+ initialize_julia=self.initialize_julia,
497
502
  )
498
503
 
499
504
  WSA = Raster.open(join(output_directory, f"{date_UTC:%Y-%m-%d}_WSA.tif"))
@@ -22,6 +22,7 @@ DEFAULT_GEOS5FP_DOWNLOAD_DIRECTORY = "GEOS5FP_download"
22
22
  DEFAULT_GEOS5FP_PRODUCTS_DIRECTORY = "GEOS5FP_products"
23
23
  DEFAULT_VNP09GA_PRODUCTS_DIRECTORY = "VNP09GA_products"
24
24
  DEFAULT_VNP43NRT_PRODUCTS_DIRECTORY = "VNP43NRT_products"
25
+ DEFAULT_STARS_DOWNSAMPLED_DIRECTORY = "DOWNSAMPLED_products"
25
26
 
26
27
  # Processing parameters
27
28
  VIIRS_GIVEUP_DAYS = 4 # Number of days to give up waiting for VIIRS data
@@ -1,6 +1,7 @@
1
1
  from typing import Union
2
2
  from datetime import date, datetime
3
3
  from dateutil.rrule import rrule, DAILY
4
+ from os.path import exists
4
5
  import logging
5
6
 
6
7
  import colored_logging as cl
@@ -16,6 +17,7 @@ from .generate_NDVI_coarse_image import generate_NDVI_coarse_image
16
17
  from .generate_NDVI_fine_image import generate_NDVI_fine_image
17
18
  from .generate_albedo_coarse_image import generate_albedo_coarse_image
18
19
  from .generate_albedo_fine_image import generate_albedo_fine_image
20
+ from .generate_downsampled_filename import generate_downsampled_filename
19
21
  from .calibrate_fine_to_coarse import calibrate_fine_to_coarse
20
22
  from .VIIRS.VIIRSDownloader import VIIRSDownloaderAlbedo, VIIRSDownloaderNDVI
21
23
 
@@ -33,11 +35,7 @@ def generate_STARS_inputs(
33
35
  target_resolution: int,
34
36
  NDVI_coarse_geometry: RasterGeometry,
35
37
  albedo_coarse_geometry: RasterGeometry,
36
- working_directory: str,
37
- NDVI_coarse_directory: str,
38
- NDVI_fine_directory: str,
39
- albedo_coarse_directory: str,
40
- albedo_fine_directory: str,
38
+ downsampled_directory: str,
41
39
  HLS_connection: HLS2CMR,
42
40
  NDVI_VIIRS_connection: VIIRSDownloaderNDVI,
43
41
  albedo_VIIRS_connection: VIIRSDownloaderAlbedo,
@@ -80,68 +78,83 @@ def generate_STARS_inputs(
80
78
  """
81
79
  missing_coarse_dates = set() # Track dates where coarse data could not be generated
82
80
 
81
+ logger.info(f"preparing coarse and fine images for STARS at {cl.place(tile)}")
82
+
83
83
  # Process each day within the VIIRS data fusion window
84
84
  for processing_date in [
85
85
  get_date(dt) for dt in rrule(DAILY, dtstart=VIIRS_start_date, until=VIIRS_end_date)
86
86
  ]:
87
- logger.info(
88
- f"Preparing coarse image for STARS NDVI at {cl.place(tile)} on {cl.time(processing_date)}"
87
+ NDVI_coarse_filename = generate_downsampled_filename(
88
+ directory=downsampled_directory,
89
+ variable="NDVI",
90
+ date_UTC=processing_date,
91
+ tile=tile,
92
+ cell_size=NDVI_resolution
93
+ )
94
+
95
+ NDVI_fine_filename = generate_downsampled_filename(
96
+ directory=downsampled_directory,
97
+ variable="NDVI",
98
+ date_UTC=processing_date,
99
+ tile=tile,
100
+ cell_size=target_resolution
101
+ )
102
+
103
+ albedo_coarse_filename = generate_downsampled_filename(
104
+ directory=downsampled_directory,
105
+ variable="albedo",
106
+ date_UTC=processing_date,
107
+ tile=tile,
108
+ cell_size=albedo_resolution
109
+ )
110
+
111
+ albedo_fine_filename = generate_downsampled_filename(
112
+ directory=downsampled_directory,
113
+ variable="albedo",
114
+ date_UTC=processing_date,
115
+ tile=tile,
116
+ cell_size=target_resolution
89
117
  )
90
118
 
91
119
  try:
92
- # Generate coarse NDVI image
93
- NDVI_coarse_image = generate_NDVI_coarse_image(
94
- date_UTC=processing_date,
95
- VIIRS_connection=NDVI_VIIRS_connection,
96
- geometry=NDVI_coarse_geometry,
97
- )
120
+ # Cache whether the NDVI coarse image exists to avoid TOCTOU (time-of-check to time-of-use) races
121
+ NDVI_coarse_exists = exists(NDVI_coarse_filename)
122
+ if not NDVI_coarse_exists:
123
+ logger.info(f"preparing coarse image for STARS NDVI at {cl.place(tile)} on {cl.time(processing_date)}")
124
+
125
+ NDVI_coarse_image = generate_NDVI_coarse_image(
126
+ date_UTC=processing_date,
127
+ VIIRS_connection=NDVI_VIIRS_connection,
128
+ geometry=NDVI_coarse_geometry
129
+ )
98
130
 
99
- # Define filename for coarse NDVI and save
100
- NDVI_coarse_filename = generate_filename(
101
- directory=NDVI_coarse_directory,
102
- variable="NDVI",
103
- date_UTC=processing_date,
104
- tile=tile,
105
- cell_size=NDVI_resolution,
106
- )
107
- logger.info(
108
- f"Saving coarse image for STARS NDVI at {cl.place(tile)} on {cl.time(processing_date)}: {NDVI_coarse_filename}"
109
- )
110
- NDVI_coarse_image.to_geotiff(NDVI_coarse_filename)
131
+ logger.info(
132
+ f"saving coarse image for STARS NDVI at {cl.place(tile)} on {cl.time(processing_date)}: {NDVI_coarse_filename}")
133
+ NDVI_coarse_image.to_geotiff(NDVI_coarse_filename)
111
134
 
112
- # If the processing date is within the HLS range, generate fine NDVI
113
135
  if processing_date >= HLS_start_date:
114
- logger.info(
115
- f"Preparing fine image for STARS NDVI at {cl.place(tile)} on {cl.time(processing_date)}"
116
- )
117
136
  try:
118
- NDVI_fine_image = generate_NDVI_fine_image(
119
- date_UTC=processing_date,
120
- tile=tile,
121
- HLS_connection=HLS_connection,
122
- )
123
-
124
- # Optionally calibrate the fine NDVI image to the coarse NDVI image
125
- if calibrate_fine:
137
+ if not exists(NDVI_fine_filename):
126
138
  logger.info(
127
- f"Calibrating fine image for STARS NDVI at {cl.place(tile)} on {cl.time(processing_date)}"
128
- )
129
- NDVI_fine_image = calibrate_fine_to_coarse(
130
- NDVI_fine_image, NDVI_coarse_image
139
+ f"preparing fine image for STARS NDVI at {cl.place(tile)} on {cl.time(processing_date)}")
140
+
141
+ NDVI_fine_image = generate_NDVI_fine_image(
142
+ date_UTC=processing_date,
143
+ tile=tile,
144
+ HLS_connection=HLS_connection
131
145
  )
132
146
 
133
- # Define filename for fine NDVI and save
134
- NDVI_fine_filename = generate_filename(
135
- directory=NDVI_fine_directory,
136
- variable="NDVI",
137
- date_UTC=processing_date,
138
- tile=tile,
139
- cell_size=target_resolution,
140
- )
141
- logger.info(
142
- f"Saving fine image for STARS NDVI at {cl.place(tile)} on {cl.time(processing_date)}: {NDVI_fine_filename}"
143
- )
144
- NDVI_fine_image.to_geotiff(NDVI_fine_filename)
147
+ if calibrate_fine:
148
+ # Ensure that the NDVI_coarse_image variable is set
149
+ if NDVI_coarse_exists:
150
+ NDVI_coarse_image = Raster.open(NDVI_coarse_filename)
151
+ logger.info(
152
+ f"calibrating fine image for STARS NDVI at {cl.place(tile)} on {cl.time(processing_date)}")
153
+ NDVI_fine_image = calibrate_fine_to_coarse(NDVI_fine_image, NDVI_coarse_image)
154
+
155
+ logger.info(
156
+ f"saving fine image for STARS NDVI at {cl.place(tile)} on {cl.time(processing_date)}: {NDVI_fine_filename}")
157
+ NDVI_fine_image.to_geotiff(NDVI_fine_filename)
145
158
  except Exception: # Catch any exception during HLS fine image generation
146
159
  logger.info(f"HLS NDVI is not available on {processing_date}")
147
160
  except Exception as e:
@@ -151,63 +164,47 @@ def generate_STARS_inputs(
151
164
  )
152
165
  missing_coarse_dates.add(processing_date) # Add date to missing set
153
166
 
154
- logger.info(
155
- f"Preparing coarse image for STARS albedo at {cl.place(tile)} on {cl.time(processing_date)}"
156
- )
157
167
  try:
158
- # Generate coarse albedo image
159
- albedo_coarse_image = generate_albedo_coarse_image(
160
- date_UTC=processing_date,
161
- VIIRS_connection=albedo_VIIRS_connection,
162
- geometry=albedo_coarse_geometry,
163
- )
168
+ # Cache whether the albedo coarse image exists to avoid TOCTOU (time-of-check to time-of-use) races
169
+ albedo_coarse_exists = exists(albedo_coarse_filename)
170
+ if not albedo_coarse_exists:
171
+ logger.info(
172
+ f"preparing coarse image for STARS albedo at {cl.place(tile)} on {cl.time(processing_date)}")
164
173
 
165
- # Define filename for coarse albedo and save
166
- albedo_coarse_filename = generate_filename(
167
- directory=albedo_coarse_directory,
168
- variable="albedo",
169
- date_UTC=processing_date,
170
- tile=tile,
171
- cell_size=albedo_resolution,
172
- )
173
- logger.info(
174
- f"Saving coarse image for STARS albedo at {cl.place(tile)} on {cl.time(processing_date)}: {albedo_coarse_filename}"
175
- )
176
- albedo_coarse_image.to_geotiff(albedo_coarse_filename)
174
+ albedo_coarse_image = generate_albedo_coarse_image(
175
+ date_UTC=processing_date,
176
+ VIIRS_connection=albedo_VIIRS_connection,
177
+ geometry=albedo_coarse_geometry
178
+ )
177
179
 
178
- # If the processing date is within the HLS range, generate fine albedo
179
- if processing_date >= HLS_start_date:
180
180
  logger.info(
181
- f"Preparing fine image for STARS albedo at {cl.place(tile)} on {cl.time(processing_date)}"
182
- )
181
+ f"saving coarse image for STARS albedo at {cl.place(tile)} on {cl.time(processing_date)}: {albedo_coarse_filename}")
182
+ albedo_coarse_image.to_geotiff(albedo_coarse_filename)
183
+
184
+ if processing_date >= HLS_start_date:
183
185
  try:
184
- albedo_fine_image = generate_albedo_fine_image(
185
- date_UTC=processing_date,
186
- tile=tile,
187
- HLS_connection=HLS_connection,
188
- )
189
-
190
- # Optionally calibrate the fine albedo image to the coarse albedo image
191
- if calibrate_fine:
186
+ if not exists(albedo_fine_filename):
192
187
  logger.info(
193
- f"Calibrating fine image for STARS albedo at {cl.place(tile)} on {cl.time(processing_date)}"
194
- )
195
- albedo_fine_image = calibrate_fine_to_coarse(
196
- albedo_fine_image, albedo_coarse_image
188
+ f"preparing fine image for STARS albedo at {cl.place(tile)} on {cl.time(processing_date)}")
189
+
190
+ albedo_fine_image = generate_albedo_fine_image(
191
+ date_UTC=processing_date,
192
+ tile=tile,
193
+ HLS_connection=HLS_connection
197
194
  )
198
195
 
199
- # Define filename for fine albedo and save
200
- albedo_fine_filename = generate_filename(
201
- directory=albedo_fine_directory,
202
- variable="albedo",
203
- date_UTC=processing_date,
204
- tile=tile,
205
- cell_size=target_resolution,
206
- )
207
- logger.info(
208
- f"Saving fine image for STARS albedo at {cl.place(tile)} on {cl.time(processing_date)}: {albedo_fine_filename}"
209
- )
210
- albedo_fine_image.to_geotiff(albedo_fine_filename)
196
+ if calibrate_fine:
197
+ # Ensure that the albedo_coarse_image variable is set
198
+ if albedo_coarse_exists:
199
+ albedo_coarse_image = Raster.open(albedo_coarse_filename)
200
+
201
+ logger.info(
202
+ f"calibrating fine image for STARS albedo at {cl.place(tile)} on {cl.time(processing_date)}")
203
+ albedo_fine_image = calibrate_fine_to_coarse(albedo_fine_image, albedo_coarse_image)
204
+
205
+ logger.info(
206
+ f"saving fine image for STARS albedo at {cl.place(tile)} on {cl.time(processing_date)}: {albedo_fine_filename}")
207
+ albedo_fine_image.to_geotiff(albedo_fine_filename)
211
208
  except Exception: # Catch any exception during HLS fine image generation
212
209
  logger.info(f"HLS albedo is not available on {processing_date}")
213
210
  except Exception as e:
@@ -217,7 +214,12 @@ def generate_STARS_inputs(
217
214
  )
218
215
  missing_coarse_dates.add(processing_date) # Add date to missing set
219
216
 
220
- # Check for missing coarse dates within the give-up window
217
+ # We need to deal with the possibility that VIIRS has not yet published their data yet.
218
+ # VIIRS_GIVEUP_DAYS is the number of days before we assume that missing observations aren't coming.
219
+ # If any missing days are closer to now than VIIRS_GIVEUP_DAYS, we want to retry this run later, when VIIRS
220
+ # might have uploaded the missing observations. To cause this retry, we'll throw the `AncillaryLatency` exception.
221
+ # L2T_STARS converts this exception to an exit code, and the orchestration system marks this run
222
+ # as needing a retry at a later date.
221
223
  coarse_latency_dates = [
222
224
  d
223
225
  for d in missing_coarse_dates
@@ -0,0 +1,20 @@
1
+ from os import makedirs
2
+ from os.path import join, dirname
3
+ from dateutil import parser
4
+ from datetime import date
5
+
6
+ from typing import Union
7
+
8
+ def generate_downsampled_filename(directory: str, variable: str, date_UTC: Union[date, str], tile: str, cell_size: int) -> str:
9
+ if isinstance(date_UTC, str):
10
+ date_UTC = parser.parse(date_UTC).date()
11
+
12
+ variable = str(variable)
13
+ year = str(date_UTC.year)
14
+ timestamp = date_UTC.strftime("%Y-%m-%d")
15
+ tile = str(tile)
16
+ cell_size = int(cell_size)
17
+ filename = join(directory, year, timestamp, tile, f"STARS_{variable}_{tile}_{cell_size}m.tif")
18
+ makedirs(dirname(filename), exist_ok=True)
19
+
20
+ return filename
@@ -98,7 +98,7 @@ def load_prior(
98
98
  cell_size=target_resolution,
99
99
  )
100
100
  # Assuming L2T_STARS_prior_granule has a .NDVI_bias attribute
101
- if hasattr(L2T_STARS_prior_granule, "NDVI_bias") and L2T_STARS_prior_granule.NDVI_bias is not None:
101
+ if L2T_STARS_prior_granule.NDVI_bias is not None:
102
102
  L2T_STARS_prior_granule.NDVI_bias.to_geotiff(prior_NDVI_bias_filename)
103
103
  else:
104
104
  prior_NDVI_bias_filename = None # Set to None if not available
@@ -111,7 +111,7 @@ def load_prior(
111
111
  cell_size=target_resolution,
112
112
  )
113
113
  # Assuming L2T_STARS_prior_granule has a .NDVI_bias_UQ attribute
114
- if hasattr(L2T_STARS_prior_granule, "NDVI_bias_UQ") and L2T_STARS_prior_granule.NDVI_bias_UQ is not None:
114
+ if L2T_STARS_prior_granule.NDVI_bias_UQ is not None:
115
115
  L2T_STARS_prior_granule.NDVI_bias_UQ.to_geotiff(prior_NDVI_bias_UQ_filename)
116
116
  else:
117
117
  prior_NDVI_bias_UQ_filename = None # Set to None if not available
@@ -143,7 +143,7 @@ def load_prior(
143
143
  cell_size=target_resolution,
144
144
  )
145
145
  # Assuming L2T_STARS_prior_granule has a .albedo_bias attribute
146
- if hasattr(L2T_STARS_prior_granule, "albedo_bias") and L2T_STARS_prior_granule.albedo_bias is not None:
146
+ if L2T_STARS_prior_granule.albedo_bias is not None:
147
147
  L2T_STARS_prior_granule.albedo_bias.to_geotiff(prior_albedo_bias_filename)
148
148
  else:
149
149
  prior_albedo_bias_filename = None # Set to None if not available
@@ -156,7 +156,7 @@ def load_prior(
156
156
  cell_size=target_resolution,
157
157
  )
158
158
  # Assuming L2T_STARS_prior_granule has a .albedo_bias_UQ attribute
159
- if hasattr(L2T_STARS_prior_granule, "albedo_bias_UQ") and L2T_STARS_prior_granule.albedo_bias_UQ is not None:
159
+ if L2T_STARS_prior_granule.albedo_bias_UQ is not None:
160
160
  L2T_STARS_prior_granule.albedo_bias_UQ.to_geotiff(prior_albedo_bias_UQ_filename)
161
161
  else:
162
162
  prior_albedo_bias_UQ_filename = None # Set to None if not available
ECOv003_L2T_STARS/main.py CHANGED
@@ -115,6 +115,12 @@ def main():
115
115
  default=True,
116
116
  help="Do NOT remove posterior intermediate files after product generation.",
117
117
  )
118
+ parser.add_argument(
119
+ "--initialize-julia",
120
+ action="store_true",
121
+ dest="initialize_julia",
122
+ help="Initialize a julia environment before running julia.",
123
+ )
118
124
  parser.add_argument(
119
125
  "--threads",
120
126
  type=str,
@@ -157,6 +163,7 @@ def main():
157
163
  remove_input_staging=args.remove_input_staging,
158
164
  remove_prior=args.remove_prior,
159
165
  remove_posterior=args.remove_posterior,
166
+ initialize_julia=args.initialize_julia,
160
167
  threads=args.threads,
161
168
  num_workers=args.num_workers,
162
169
  overwrite=args.overwrite, # Pass the new overwrite argument
@@ -8,24 +8,8 @@ using STARSDataFusion.sentinel_tiles
8
8
  using STARSDataFusion.HLS
9
9
  using STARSDataFusion.VNP43
10
10
  using Logging
11
- using Pkg
12
11
  using Statistics
13
12
  using Distributed
14
- Pkg.add("OpenSSL")
15
- using HTTP
16
- using JSON
17
-
18
- function read_json(file::String)::Dict
19
- open(file, "r") do f
20
- return JSON.parse(f)
21
- end
22
- end
23
-
24
- function write_json(file::String, data::Dict)
25
- open(file, "w") do f
26
- JSON.print(f, data)
27
- end
28
- end
29
13
 
30
14
  @info "processing STARS data fusion"
31
15
 
@@ -78,10 +62,10 @@ HLS_start_date = Date(ARGS[7])
78
62
  @info "HLS start date: $(HLS_start_date)"
79
63
  HLS_end_date = Date(ARGS[8])
80
64
  @info "HLS end date: $(HLS_end_date)"
81
- coarse_directory = ARGS[9]
82
- @info "coarse inputs directory: $(coarse_directory)"
83
- fine_directory = ARGS[10]
84
- @info "fine inputs directory: $(fine_directory)"
65
+ downsampled_directory = ARGS[9]
66
+ @info "downsampled inputs directory: $(downsampled_directory)"
67
+ product_name = ARGS[10]
68
+ @info "Computing $(product_name) product"
85
69
  posterior_filename = ARGS[11]
86
70
  @info "posterior filename: $(posterior_filename)"
87
71
  posterior_UQ_filename = ARGS[12]
@@ -130,20 +114,29 @@ y_fine_size = size(y_fine)[1]
130
114
  @info "fine x size: $(x_fine_size)"
131
115
  @info "fine y size: $(y_fine_size)"
132
116
 
133
- coarse_image_filenames = sort(glob("*.tif", coarse_directory))
134
- coarse_dates_found = [Date(split(basename(filename), "_")[3]) for filename in coarse_image_filenames]
135
-
136
- fine_image_filenames = sort(glob("*.tif", fine_directory))
137
- fine_dates_found = [Date(split(basename(filename), "_")[3]) for filename in fine_image_filenames]
138
-
117
+ # The range of dates to check for VIIRS files
139
118
  coarse_start_date = VIIRS_start_date
140
119
  coarse_end_date = VIIRS_end_date
141
120
 
121
+ # Check each coarse date for a downsampled image
122
+ # For each day we find, convert the date directory back into a date object
123
+ coarse_dates = [coarse_start_date + Day(d - 1) for d in 1:((coarse_end_date - coarse_start_date).value + 1)]
124
+ coarse_image_filenames = [joinpath("$(downsampled_directory)", "$(year(date))", "$(Dates.format(date, dateformat"yyyy-mm-dd"))", "$(tile)", "STARS_$(product_name)_$(tile)_$(coarse_cell_size)m.tif") for date in coarse_dates]
125
+ coarse_image_filenames = [filename for filename in coarse_image_filenames if ispath(filename)]
126
+ coarse_dates_found = [Date(basename(dirname(dirname(filename)))) for filename in coarse_image_filenames]
127
+
128
+ # The range of dates to check for HLS files
142
129
  fine_flag_start_date = HLS_end_date - Day(7)
143
130
  fine_start_date = HLS_start_date
144
131
  fine_end_date = HLS_end_date
145
132
 
133
+ # Check each fine date for a downsampled image
134
+ # For each day we find, convert the date directory back into a date object
146
135
  dates = [fine_start_date + Day(d - 1) for d in 1:((fine_end_date - fine_start_date).value + 1)]
136
+ fine_image_filenames = [joinpath("$(downsampled_directory)", "$(year(date))", "$(Dates.format(date, dateformat"yyyy-mm-dd"))", "$(tile)", "STARS_$(product_name)_$(tile)_$(fine_cell_size)m.tif") for date in dates]
137
+ fine_image_filenames = [filename for filename in fine_image_filenames if ispath(filename)]
138
+ fine_dates_found = [Date(basename(dirname(dirname(filename)))) for filename in fine_image_filenames]
139
+
147
140
  t = Ti(dates)
148
141
  coarse_dims = (x_coarse, y_coarse, t)
149
142
  fine_dims = (x_fine, y_fine, t)
@@ -17,10 +17,6 @@ from ECOv003_granules import L2TSTARS, NDVI_COLORMAP, ALBEDO_COLORMAP
17
17
  from ECOv003_exit_codes import BlankOutput
18
18
 
19
19
  from .VIIRS import VIIRSDownloaderNDVI, VIIRSDownloaderAlbedo
20
- from .generate_NDVI_coarse_directory import generate_NDVI_coarse_directory
21
- from .generate_NDVI_fine_directory import generate_NDVI_fine_directory
22
- from .generate_albedo_coarse_directory import generate_albedo_coarse_directory
23
- from .generate_albedo_fine_directory import generate_albedo_fine_directory
24
20
  from .generate_model_state_tile_date_directory import generate_model_state_tile_date_directory
25
21
  from .generate_STARS_inputs import generate_STARS_inputs
26
22
  from .generate_filename import generate_filename
@@ -43,7 +39,7 @@ def process_STARS_product(
43
39
  NDVI_resolution: int,
44
40
  albedo_resolution: int,
45
41
  target_resolution: int,
46
- working_directory: str,
42
+ downsampled_directory: str,
47
43
  model_directory: str,
48
44
  input_staging_directory: str,
49
45
  L2T_STARS_granule_directory: str,
@@ -59,6 +55,7 @@ def process_STARS_product(
59
55
  remove_input_staging: bool = True,
60
56
  remove_prior: bool = True,
61
57
  remove_posterior: bool = True,
58
+ initialize_julia: bool = False,
62
59
  threads: Union[int, str] = "auto",
63
60
  num_workers: int = 4,
64
61
  ):
@@ -103,6 +100,8 @@ def process_STARS_product(
103
100
  Defaults to True.
104
101
  remove_posterior (bool, optional): If True, remove posterior intermediate files after
105
102
  product generation. Defaults to True.
103
+ initialize_julia (bool, optional): If True, create a julia environment to run STARS in
104
+ as opposed to the default julia env. Defaults to False.
106
105
  threads (Union[int, str], optional): Number of Julia threads to use, or "auto".
107
106
  Defaults to "auto".
108
107
  num_workers (int, str): Number of Julia workers for distributed processing.
@@ -117,27 +116,6 @@ def process_STARS_product(
117
116
 
118
117
  logger.info(f"Processing the L2T_STARS product at tile {cl.place(tile)} for date {cl.time(date_UTC)}")
119
118
 
120
- # Define and create input staging directories for coarse and fine NDVI/albedo
121
- NDVI_coarse_directory = generate_NDVI_coarse_directory(
122
- input_staging_directory=input_staging_directory, tile=tile
123
- )
124
- logger.info(f"Staging coarse NDVI images: {cl.dir(NDVI_coarse_directory)}")
125
-
126
- NDVI_fine_directory = generate_NDVI_fine_directory(
127
- input_staging_directory=input_staging_directory, tile=tile
128
- )
129
- logger.info(f"Staging fine NDVI images: {cl.dir(NDVI_fine_directory)}")
130
-
131
- albedo_coarse_directory = generate_albedo_coarse_directory(
132
- input_staging_directory=input_staging_directory, tile=tile
133
- )
134
- logger.info(f"Staging coarse albedo images: {cl.dir(albedo_coarse_directory)}")
135
-
136
- albedo_fine_directory = generate_albedo_fine_directory(
137
- input_staging_directory=input_staging_directory, tile=tile
138
- )
139
- logger.info(f"Staging fine albedo images: {cl.dir(albedo_fine_directory)}")
140
-
141
119
  # Define and create the directory for storing posterior model state files
142
120
  posterior_tile_date_directory = generate_model_state_tile_date_directory(
143
121
  model_directory=model_directory, tile=tile, date_UTC=date_UTC
@@ -157,11 +135,7 @@ def process_STARS_product(
157
135
  target_resolution=target_resolution,
158
136
  NDVI_coarse_geometry=NDVI_coarse_geometry,
159
137
  albedo_coarse_geometry=albedo_coarse_geometry,
160
- working_directory=working_directory,
161
- NDVI_coarse_directory=NDVI_coarse_directory,
162
- NDVI_fine_directory=NDVI_fine_directory,
163
- albedo_coarse_directory=albedo_coarse_directory,
164
- albedo_fine_directory=albedo_fine_directory,
138
+ downsampled_directory=downsampled_directory,
165
139
  HLS_connection=HLS_connection,
166
140
  NDVI_VIIRS_connection=NDVI_VIIRS_connection,
167
141
  albedo_VIIRS_connection=albedo_VIIRS_connection,
@@ -226,8 +200,8 @@ def process_STARS_product(
226
200
  VIIRS_end_date=VIIRS_end_date,
227
201
  HLS_start_date=HLS_start_date,
228
202
  HLS_end_date=HLS_end_date,
229
- coarse_directory=NDVI_coarse_directory,
230
- fine_directory=NDVI_fine_directory,
203
+ downsampled_directory=downsampled_directory,
204
+ product_name="NDVI",
231
205
  posterior_filename=posterior_NDVI_filename,
232
206
  posterior_UQ_filename=posterior_NDVI_UQ_filename,
233
207
  posterior_flag_filename=posterior_NDVI_flag_filename,
@@ -237,6 +211,7 @@ def process_STARS_product(
237
211
  prior_UQ_filename=prior.prior_NDVI_UQ_filename,
238
212
  prior_bias_filename=prior.prior_NDVI_bias_filename,
239
213
  prior_bias_UQ_filename=prior.prior_NDVI_bias_UQ_filename,
214
+ initialize_julia=initialize_julia,
240
215
  threads=threads,
241
216
  num_workers=num_workers,
242
217
  )
@@ -250,13 +225,14 @@ def process_STARS_product(
250
225
  VIIRS_end_date=VIIRS_end_date,
251
226
  HLS_start_date=HLS_start_date,
252
227
  HLS_end_date=HLS_end_date,
253
- coarse_directory=NDVI_coarse_directory,
254
- fine_directory=NDVI_fine_directory,
228
+ downsampled_directory=downsampled_directory,
229
+ product_name="NDVI",
255
230
  posterior_filename=posterior_NDVI_filename,
256
231
  posterior_UQ_filename=posterior_NDVI_UQ_filename,
257
232
  posterior_flag_filename=posterior_NDVI_flag_filename,
258
233
  posterior_bias_filename=posterior_NDVI_bias_filename,
259
234
  posterior_bias_UQ_filename=posterior_NDVI_bias_UQ_filename,
235
+ initialize_julia=initialize_julia,
260
236
  threads=threads,
261
237
  num_workers=num_workers,
262
238
  )
@@ -264,6 +240,8 @@ def process_STARS_product(
264
240
  # Open the resulting NDVI rasters
265
241
  NDVI = Raster.open(posterior_NDVI_filename)
266
242
  NDVI_UQ = Raster.open(posterior_NDVI_UQ_filename)
243
+ NDVI_bias = Raster.open(posterior_NDVI_bias_filename)
244
+ NDVI_UQ_bias = Raster.open(posterior_NDVI_bias_UQ_filename)
267
245
  NDVI_flag = Raster.open(posterior_NDVI_flag_filename)
268
246
 
269
247
  # --- Process Albedo Data Fusion ---
@@ -324,8 +302,8 @@ def process_STARS_product(
324
302
  VIIRS_end_date=VIIRS_end_date,
325
303
  HLS_start_date=HLS_start_date,
326
304
  HLS_end_date=HLS_end_date,
327
- coarse_directory=albedo_coarse_directory,
328
- fine_directory=albedo_fine_directory,
305
+ downsampled_directory=downsampled_directory,
306
+ product_name="albedo",
329
307
  posterior_filename=posterior_albedo_filename,
330
308
  posterior_UQ_filename=posterior_albedo_UQ_filename,
331
309
  posterior_flag_filename=posterior_albedo_flag_filename,
@@ -335,6 +313,7 @@ def process_STARS_product(
335
313
  prior_UQ_filename=prior.prior_albedo_UQ_filename,
336
314
  prior_bias_filename=prior.prior_albedo_bias_filename,
337
315
  prior_bias_UQ_filename=prior.prior_albedo_bias_UQ_filename,
316
+ initialize_julia=initialize_julia,
338
317
  threads=threads,
339
318
  num_workers=num_workers,
340
319
  )
@@ -348,13 +327,14 @@ def process_STARS_product(
348
327
  VIIRS_end_date=VIIRS_end_date,
349
328
  HLS_start_date=HLS_start_date,
350
329
  HLS_end_date=HLS_end_date,
351
- coarse_directory=albedo_coarse_directory,
352
- fine_directory=albedo_fine_directory,
330
+ downsampled_directory=downsampled_directory,
331
+ product_name="albedo",
353
332
  posterior_filename=posterior_albedo_filename,
354
333
  posterior_UQ_filename=posterior_albedo_UQ_filename,
355
334
  posterior_flag_filename=posterior_albedo_flag_filename,
356
335
  posterior_bias_filename=posterior_albedo_bias_filename,
357
336
  posterior_bias_UQ_filename=posterior_albedo_bias_UQ_filename,
337
+ initialize_julia=initialize_julia,
358
338
  threads=threads,
359
339
  num_workers=num_workers,
360
340
  )
@@ -362,6 +342,8 @@ def process_STARS_product(
362
342
  # Open the resulting albedo rasters
363
343
  albedo = Raster.open(posterior_albedo_filename)
364
344
  albedo_UQ = Raster.open(posterior_albedo_UQ_filename)
345
+ albedo_bias = Raster.open(posterior_albedo_bias_filename)
346
+ albedo_UQ_bias = Raster.open(posterior_albedo_bias_UQ_filename)
365
347
  albedo_flag = Raster.open(posterior_albedo_flag_filename)
366
348
 
367
349
  # --- Validate Output and Create Final Product ---
@@ -391,9 +373,13 @@ def process_STARS_product(
391
373
  # Add the generated layers to the granule object
392
374
  granule.add_layer("NDVI", NDVI, cmap=NDVI_COLORMAP)
393
375
  granule.add_layer("NDVI-UQ", NDVI_UQ, cmap="jet")
376
+ granule.add_layer("NDVI-bias", NDVI_bias, cmap="viridis")
377
+ granule.add_layer("NDVI-UQ-bias", NDVI_UQ_bias, cmap="viridis")
394
378
  granule.add_layer("NDVI-flag", NDVI_flag, cmap="jet")
395
379
  granule.add_layer("albedo", albedo, cmap=ALBEDO_COLORMAP)
396
380
  granule.add_layer("albedo-UQ", albedo_UQ, cmap="jet")
381
+ granule.add_layer("albedo-bias", albedo_bias, cmap="viridis")
382
+ granule.add_layer("albedo-UQ-bias", albedo_UQ_bias, cmap="viridis")
397
383
  granule.add_layer("albedo-flag", albedo_flag, cmap="jet")
398
384
 
399
385
  # Update metadata and write to the granule
@@ -468,7 +454,7 @@ def process_STARS_product(
468
454
  if remove_input_staging:
469
455
  if exists(input_staging_directory):
470
456
  logger.info(f"Removing input staging directory: {cl.dir(input_staging_directory)}")
471
- shutil.rmtree(input_staging_directory)
457
+ shutil.rmtree(input_staging_directory, ignore_errors=True)
472
458
 
473
459
  if using_prior and remove_prior:
474
460
  # Remove prior intermediate files only if they exist
@@ -16,8 +16,8 @@ def process_julia_data_fusion(
16
16
  VIIRS_end_date: date,
17
17
  HLS_start_date: date,
18
18
  HLS_end_date: date,
19
- coarse_directory: str,
20
- fine_directory: str,
19
+ downsampled_directory: str,
20
+ product_name: str,
21
21
  posterior_filename: str,
22
22
  posterior_UQ_filename: str,
23
23
  posterior_flag_filename: str,
@@ -28,6 +28,7 @@ def process_julia_data_fusion(
28
28
  prior_bias_filename: str = None,
29
29
  prior_bias_UQ_filename: str = None,
30
30
  environment_name: str = "@ECOv003-L2T-STARS", # Unused in current Julia command, but kept for consistency
31
+ initialize_julia: bool = False,
31
32
  threads: Union[int, str] = "auto",
32
33
  num_workers: int = 4):
33
34
  """
@@ -46,8 +47,8 @@ def process_julia_data_fusion(
46
47
  VIIRS_end_date (date): End date for VIIRS data processing.
47
48
  HLS_start_date (date): Start date for HLS data processing.
48
49
  HLS_end_date (date): End date for HLS data processing.
49
- coarse_directory (str): Directory containing coarse resolution input images.
50
- fine_directory (str): Directory containing fine resolution input images.
50
+ downsampled_directory (str): Directory containing coarse and fine downsampled data.
51
+ product_name (str): Name of the product, e.g. "NDVI" or "albedo"
51
52
  posterior_filename (str): Output path for the fused posterior mean image.
52
53
  posterior_UQ_filename (str): Output path for the fused posterior uncertainty image.
53
54
  posterior_flag_filename (str): Output path for the fused posterior flag image.
@@ -71,15 +72,16 @@ def process_julia_data_fusion(
71
72
  STARS_source_directory = abspath(dirname(__file__))
72
73
 
73
74
  # Instantiate Julia dependencies
74
- instantiate_STARSDataFusion_jl(STARS_source_directory)
75
+ if initialize_julia:
76
+ instantiate_STARSDataFusion_jl(STARS_source_directory)
75
77
 
76
78
  # Base Julia command with required arguments
77
79
  command = (
78
80
  f'export JULIA_NUM_THREADS={threads}; julia --threads {threads} '
79
81
  f'"{julia_script_filename}" {num_workers} "{tile}" "{coarse_cell_size}" '
80
82
  f'"{fine_cell_size}" "{VIIRS_start_date}" "{VIIRS_end_date}" '
81
- f'"{HLS_start_date}" "{HLS_end_date}" "{coarse_directory}" '
82
- f'"{fine_directory}" "{posterior_filename}" "{posterior_UQ_filename}" '
83
+ f'"{HLS_start_date}" "{HLS_end_date}" "{downsampled_directory}" '
84
+ f'"{product_name}" "{posterior_filename}" "{posterior_UQ_filename}" '
83
85
  f'"{posterior_flag_filename}" "{posterior_bias_filename}" '
84
86
  f'"{posterior_bias_UQ_filename}"'
85
87
  )
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: ECOv003-L2T-STARS
3
- Version: 1.3.0
3
+ Version: 1.4.1
4
4
  Summary: ECOSTRESS Collection 3 JPL STARS Data Fusion Product Generating Executable (PGE)
5
5
  Author-email: "Gregory H. Halverson" <gregory.h.halverson@jpl.nasa.gov>
6
6
  Project-URL: Homepage, https://github.com/ECOSTRESS-Collection-3/ECOv003-L2T-STARS
@@ -2,37 +2,33 @@ ECOv003_L2T_STARS/ECOv003_DL.py,sha256=7ECuYIPMDsgptcAIcC2lgfvn-ljvPJHQ9tNPnMRxk
2
2
  ECOv003_L2T_STARS/ECOv003_DL.xml,sha256=TQxOlTJHnjcRf9RiAGVJe2mIS-ks6WL32Ze33z0_kxA,2032
3
3
  ECOv003_L2T_STARS/ECOv003_L2T_STARS.xml,sha256=Sg6GJx5PO1TVMIpGCDb4gatcShLRa6yBMwxwHKAWNKw,2084
4
4
  ECOv003_L2T_STARS/L2TSTARSConfig.py,sha256=pH5WAzjfUdlam8Ngi2hxmT8Sect5bi2D6cP9HbpM_Tw,8849
5
- ECOv003_L2T_STARS/L2T_STARS.py,sha256=4LLRJOoMhJNtmfwj9x7c6FLA7G-CLUct5TegJtIu8P8,25403
5
+ ECOv003_L2T_STARS/L2T_STARS.py,sha256=eVuKTYdjKysmoyfm60CggkWE8Dh05_GuSOauBLZzZV0,24660
6
6
  ECOv003_L2T_STARS/Manifest.toml,sha256=k6dzN8jpePGBIPqTkKFsYDv5ezV3DgO4ChHss_cI524,84043
7
- ECOv003_L2T_STARS/Project.toml,sha256=Ywo_YsreqwgpbuNP3NWMsXw3VY2m8NTUrBwKCyBUbuM,657
7
+ ECOv003_L2T_STARS/Project.toml,sha256=9-grsavASN_a3dNHcG4vc6-iulhK6Gdmmh1VkKRxnGM,471
8
8
  ECOv003_L2T_STARS/__init__.py,sha256=fdYigR4HXHd-NYUCafBD4GTqXUQV5LK337bJVN556fA,91
9
9
  ECOv003_L2T_STARS/calibrate_fine_to_coarse.py,sha256=2YQHo0hO5EuvGt6TOF_1WypiqNDAR5gjvs0cYe9F_vQ,2345
10
10
  ECOv003_L2T_STARS/cksum.py,sha256=Cw3FHD39Osrb4c9Dc1jSUB1SZaxIJsWHqsNVeWwYokA,1823
11
- ECOv003_L2T_STARS/constants.py,sha256=5-cxFiiq_zDKIvkK8Bi2iVVzST-15ytdu4QAvlLz_OY,1996
11
+ ECOv003_L2T_STARS/constants.py,sha256=-kxJuhupaxeDnMziTabwf2exFTtTm_-0Xp_eKAIaf3M,2057
12
12
  ECOv003_L2T_STARS/exceptions.py,sha256=ypahdRaZVMIoQrHUIZXiwzDNeLaCH46rUAfs_8MHKBE,48
13
13
  ECOv003_L2T_STARS/generate_L2T_STARS_runconfig.py,sha256=gyU0-xjkC0bZcK9NeoFwLaXjNJhsSrVD2YnxLVLVLJs,11142
14
- ECOv003_L2T_STARS/generate_NDVI_coarse_directory.py,sha256=i1pFHFyMuj2e6aQ2wTgrF2-rAQXWe8LE2mbCRksj3c8,590
15
14
  ECOv003_L2T_STARS/generate_NDVI_coarse_image.py,sha256=9XBBV1FFqjxkEFnr61xr9R2tnHQAlx2XN9KzLN8RApc,1215
16
- ECOv003_L2T_STARS/generate_NDVI_fine_directory.py,sha256=rCYfGd_X1fLYsOfl9LtfS3E57FLDa-a_8G46ToQFM2U,531
17
15
  ECOv003_L2T_STARS/generate_NDVI_fine_image.py,sha256=V-v2slDYEYf1scJCvxb3tZ6IvdpJTHuDPg6l_gNaoQI,911
18
- ECOv003_L2T_STARS/generate_STARS_inputs.py,sha256=aNau4pPDYzfgYfkFUpOVO5oJ0kqnqXm8wIS7QTpQ0JQ,10580
19
- ECOv003_L2T_STARS/generate_albedo_coarse_directory.py,sha256=7DLfI45L8tpXMG4CnZasJi1U7AN3HnA24Rcz8hy3Qy4,563
16
+ ECOv003_L2T_STARS/generate_STARS_inputs.py,sha256=NxKN822IPTquMeDLNSCaMai6aJUQekol8IEh_yRnFK0,11135
20
17
  ECOv003_L2T_STARS/generate_albedo_coarse_image.py,sha256=nBOKiWVsRnQ2TeOktWVfZbwVy9cOCa2QeP_8H0QDWrA,1162
21
- ECOv003_L2T_STARS/generate_albedo_fine_directory.py,sha256=0PaoccD0ktwzoUUMEd3zDWAP4Ed6nfyLRMUGaqHDrCA,554
22
18
  ECOv003_L2T_STARS/generate_albedo_fine_image.py,sha256=mXXprX9jQ5aOXFjBIXFB6j-O4X4j9V_4_BYIUAuLbpQ,923
19
+ ECOv003_L2T_STARS/generate_downsampled_filename.py,sha256=rAAEYehfME6G6k0KkJgj4H9itmQNh-vVTIXauFiTHtk,675
23
20
  ECOv003_L2T_STARS/generate_filename.py,sha256=XppMoRiKBG1Rf-uWu95UCO1klK6uv5GdK9pvg3gDmnk,1359
24
21
  ECOv003_L2T_STARS/generate_input_staging_directory.py,sha256=TlFKYliu6BbfDGLlwD0nlt0AZzDwKWeEgeTtzmicElY,800
25
22
  ECOv003_L2T_STARS/generate_model_state_tile_date_directory.py,sha256=U9d7vcuA1Udq6tuyRKUIibfoOPsJKV5dp5AWT7qLcfc,939
26
23
  ECOv003_L2T_STARS/generate_output_directory.py,sha256=Dr1zX6ljYzxFgC9XkocYBiF5aMGggHJejtpI9hbLwlM,889
27
- ECOv003_L2T_STARS/install_STARSDataFusion_jl.py,sha256=XbB_T0mzHqpmhh3cjKT0FpgqHxaTtS3iE_IZw4iZGKc,1778
28
24
  ECOv003_L2T_STARS/instantiate_STARSDataFusion_jl.py,sha256=B-N_tlSBY7DQ2gZK6mPtJ8WL8XCXA_edMDEOu2xldcs,1437
29
- ECOv003_L2T_STARS/load_prior.py,sha256=rVu3ImvReFxARalICCiZcMQ9ML_ehmQ9mc8HmXoHdm0,11335
25
+ ECOv003_L2T_STARS/load_prior.py,sha256=e26JqLhcr2V5FW_02qduUbRlEZc6pTf3jIAAT5Sog0s,11125
30
26
  ECOv003_L2T_STARS/login.py,sha256=zInQL33NibC8fNGzLw9qk0MDay71KYk87V-UsV-gkwA,1945
31
- ECOv003_L2T_STARS/main.py,sha256=Jgiwd2TRCMq37FiEA-JECJZQKLBm34oytIWS9PG_x30,5891
27
+ ECOv003_L2T_STARS/main.py,sha256=k4a1Kp51i00LuxIY3wrb4Xpwja6y9Cu_gj32AauzsEE,6131
32
28
  ECOv003_L2T_STARS/prior.py,sha256=fLE54pBIOG6sCas1G60nhn8LcHa2AqZ_eSY_J-MB4eM,2867
33
- ECOv003_L2T_STARS/process_ECOSTRESS_data_fusion_distributed_bias.jl,sha256=LrFBCQp4ovJ6wI-oIIbvpdZEiSQtt0YmScbPmxlaSoA,15400
34
- ECOv003_L2T_STARS/process_STARS_product.py,sha256=d26HdxcY9XBXa_MFCJfFm7BMCtmCaUrNdSVXiY0-D6Y,22406
35
- ECOv003_L2T_STARS/process_julia_data_fusion.py,sha256=t0178tuQDYnei3jfgx8GbgW2Q0uwfcnFNOpnhr1PZFA,5267
29
+ ECOv003_L2T_STARS/process_ECOSTRESS_data_fusion_distributed_bias.jl,sha256=OuMd-Y_zNQJStecIPRwz9_5N5Loh8JVmkhBLFu1nlNs,16089
30
+ ECOv003_L2T_STARS/process_STARS_product.py,sha256=vzve47IN1lcrxmNSVvfHAq1NnKej6QTIrBW9GhlHx1c,21882
31
+ ECOv003_L2T_STARS/process_julia_data_fusion.py,sha256=lpChy2WJBQAvaJXa3PJFLwItQzlRqj1Z13EBaQLB40s,5341
36
32
  ECOv003_L2T_STARS/retrieve_STARS_sources.py,sha256=s6026PQ5PRQTPFgjrDa4vgbHa8OqUanBqn0Wdoq0DbA,3838
37
33
  ECOv003_L2T_STARS/runconfig.py,sha256=TLaB3w6Y0qEZPqMa-YXuUzKSACrdpKmrozUNLh70aQw,1519
38
34
  ECOv003_L2T_STARS/version.py,sha256=CcCeNt2pNqb8AQ_vHLUbLJciE8hxTMeGmN79vAYObYQ,354
@@ -47,12 +43,12 @@ ECOv003_L2T_STARS/LPDAAC/__init__.py,sha256=o8qP8kTXyBp9dFKErVOwvcZuUo7BTVU0d5Uy
47
43
  ECOv003_L2T_STARS/LPDAAC/version.txt,sha256=2_CXjsK1h6XWGH_cxBzOn_LA647vrboOtR84QKtu60Y,5
48
44
  ECOv003_L2T_STARS/VIIRS/VIIRSDataPool.py,sha256=mht104y3ayLysElKf9IgaqX78I0Q-4NJJmfCxMgxDzs,8781
49
45
  ECOv003_L2T_STARS/VIIRS/VIIRSDownloader.py,sha256=NCf3ZItLIBkZOZugYHuQMurziAsH7LbhLKbg-ZbWC7g,616
50
- ECOv003_L2T_STARS/VIIRS/VNP09GA.py,sha256=9oCqSps3m8oGLcwQ-IuNV2KMiNLD1hSKYJHDIZIhCM8,44138
46
+ ECOv003_L2T_STARS/VIIRS/VNP09GA.py,sha256=cJnsOX40d2oXk788WVxkjf1XNvQcRB99SFZDceSNwDg,44140
51
47
  ECOv003_L2T_STARS/VIIRS/VNP43IA4.py,sha256=3qZbDHoLVhoiSr4hoojMxXXuDSNKkN4B9Dan-WMApNs,9881
52
48
  ECOv003_L2T_STARS/VIIRS/VNP43MA3.py,sha256=T_1mxdg_SII0vXp_D422aAU7fE0-7TY46IZzRJPGJ1Q,11043
53
49
  ECOv003_L2T_STARS/VIIRS/__init__.py,sha256=PVyb97Bg5gVMdcyC7JpErQCjJWSrOFdHJH4rNE__eL8,264
54
50
  ECOv003_L2T_STARS/VIIRS/version.txt,sha256=Ke8b6aOkBgjuceFV64cco0Yw4jvu6a8pRBq6nQ6nbVQ,5
55
- ECOv003_L2T_STARS/VNP43NRT/VNP43NRT.py,sha256=lDnj-TBbtGIGncoRtoB7X7M4NbhOK_1VYUEhxK56g8w,31508
51
+ ECOv003_L2T_STARS/VNP43NRT/VNP43NRT.py,sha256=qC6Ufvn6lQwTzchLucLgPUZsaLbMqTLPqTpHRYZBZxo,31719
56
52
  ECOv003_L2T_STARS/VNP43NRT/__init__.py,sha256=8LkfUUFW0pyaO-1S134RnimfKYAKP6UfvzO7kPAx9mo,24
57
53
  ECOv003_L2T_STARS/VNP43NRT/process_VNP43NRT.jl,sha256=pNa5Z0RcyXSul2pysPwEWwl1e6XuTX_XURqgJIsVfT8,7139
58
54
  ECOv003_L2T_STARS/VNP43NRT/version.txt,sha256=atlhOkVXmNbZLl9fOQq0uqcFlryGntaxf1zdKyhjXwY,5
@@ -67,9 +63,9 @@ ECOv003_L2T_STARS/daterange/__init__.py,sha256=54kYb9tmsm5twxMqjJKeD__5kGkNDz3Pp
67
63
  ECOv003_L2T_STARS/daterange/daterange.py,sha256=EHa2Xt9fiJ1gbX7aa_QV_br1rAXjg3pHrLSRasOsOhM,959
68
64
  ECOv003_L2T_STARS/timer/__init__.py,sha256=I_MQKp_aamBLUzZv0psEbRgs6GZLOJd4mmJ7bli0Ikc,21
69
65
  ECOv003_L2T_STARS/timer/timer.py,sha256=tn5e3NQmsh55Jp9Fstjf-8KJW4F8UIJs-d_ZLooFYE8,1610
70
- ecov003_l2t_stars-1.3.0.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
71
- ecov003_l2t_stars-1.3.0.dist-info/METADATA,sha256=5QCrdCDDlS4j4S2Hy_9Og6g7sQdhj6iI_8J09erRWCU,13283
72
- ecov003_l2t_stars-1.3.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
73
- ecov003_l2t_stars-1.3.0.dist-info/entry_points.txt,sha256=EVVKltKsqXBc94JIu4IjVrMP0DPqaNEdQoAgcZOApQQ,106
74
- ecov003_l2t_stars-1.3.0.dist-info/top_level.txt,sha256=lRivA5MjbrabH4sv-LUstMGaLZ865wRQPpz9Kh6-plg,18
75
- ecov003_l2t_stars-1.3.0.dist-info/RECORD,,
66
+ ecov003_l2t_stars-1.4.1.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
67
+ ecov003_l2t_stars-1.4.1.dist-info/METADATA,sha256=wGMPi6A5PxEpwIKlMxOT1URP8yIx3eQR03Xbe2Ng55w,13283
68
+ ecov003_l2t_stars-1.4.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
69
+ ecov003_l2t_stars-1.4.1.dist-info/entry_points.txt,sha256=EVVKltKsqXBc94JIu4IjVrMP0DPqaNEdQoAgcZOApQQ,106
70
+ ecov003_l2t_stars-1.4.1.dist-info/top_level.txt,sha256=lRivA5MjbrabH4sv-LUstMGaLZ865wRQPpz9Kh6-plg,18
71
+ ecov003_l2t_stars-1.4.1.dist-info/RECORD,,
@@ -1,21 +0,0 @@
1
- from .generate_input_staging_directory import generate_input_staging_directory
2
-
3
- def generate_NDVI_coarse_directory(
4
- input_staging_directory: str,
5
- tile: str) -> str:
6
- """
7
- Generates the specific staging directory for coarse NDVI images.
8
-
9
- Args:
10
- input_staging_directory (str): The base input staging directory.
11
- tile (str): The HLS tile ID.
12
-
13
- Returns:
14
- str: The full path to the coarse NDVI staging directory.
15
- """
16
- return generate_input_staging_directory(
17
- input_staging_directory,
18
- tile,
19
- "NDVI_coarse"
20
- )
21
-
@@ -1,14 +0,0 @@
1
- from .generate_input_staging_directory import generate_input_staging_directory
2
-
3
- def generate_NDVI_fine_directory(input_staging_directory: str, tile: str) -> str:
4
- """
5
- Generates the specific staging directory for fine NDVI images.
6
-
7
- Args:
8
- input_staging_directory (str): The base input staging directory.
9
- tile (str): The HLS tile ID.
10
-
11
- Returns:
12
- str: The full path to the fine NDVI staging directory.
13
- """
14
- return generate_input_staging_directory(input_staging_directory, tile, "NDVI_fine")
@@ -1,18 +0,0 @@
1
-
2
- from .generate_input_staging_directory import generate_input_staging_directory
3
-
4
-
5
- def generate_albedo_coarse_directory(input_staging_directory: str, tile: str) -> str:
6
- """
7
- Generates the specific staging directory for coarse albedo images.
8
-
9
- Args:
10
- input_staging_directory (str): The base input staging directory.
11
- tile (str): The HLS tile ID.
12
-
13
- Returns:
14
- str: The full path to the coarse albedo staging directory.
15
- """
16
- return generate_input_staging_directory(
17
- input_staging_directory, tile, "albedo_coarse"
18
- )
@@ -1,17 +0,0 @@
1
-
2
- from .generate_input_staging_directory import generate_input_staging_directory
3
-
4
- def generate_albedo_fine_directory(input_staging_directory: str, tile: str) -> str:
5
- """
6
- Generates the specific staging directory for fine albedo images.
7
-
8
- Args:
9
- input_staging_directory (str): The base input staging directory.
10
- tile (str): The HLS tile ID.
11
-
12
- Returns:
13
- str: The full path to the fine albedo staging directory.
14
- """
15
- return generate_input_staging_directory(
16
- input_staging_directory, tile, "albedo_fine"
17
- )
@@ -1,43 +0,0 @@
1
- import subprocess
2
- import logging
3
-
4
- logger = logging.getLogger(__name__)
5
-
6
- def install_STARSDataFusion_jl(
7
- github_URL: str = "https://github.com/STARS-Data-Fusion/STARSDataFusion.jl",
8
- environment_name: str = "@ECOv003-L2T-STARS") -> subprocess.CompletedProcess:
9
- """
10
- Installs the STARSDataFusion.jl Julia package from GitHub into a specified Julia environment.
11
-
12
- This function executes a Julia command to activate a given environment and
13
- then develops (installs in editable mode) the STARS.jl package from its
14
- GitHub repository.
15
-
16
- Args:
17
- github_URL (str, optional): The URL of the GitHub repository containing STARS.jl.
18
- Defaults to "https://github.com/STARS-Data-Fusion/STARS.jl".
19
- environment_name (str, optional): The name of the Julia environment to install
20
- the package into. Defaults to "@ECOv003-L2T-STARS".
21
-
22
- Returns:
23
- subprocess.CompletedProcess: An object containing information about the
24
- execution of the Julia command (return code, stdout, stderr).
25
- """
26
- # Julia command to activate an environment and then add/develop a package from URL
27
- julia_command = [
28
- "julia",
29
- "-e",
30
- f'using Pkg; Pkg.activate("{environment_name}"); Pkg.develop(url="{github_URL}")',
31
- ]
32
-
33
- # Execute the Julia command as a subprocess
34
- result = subprocess.run(julia_command, capture_output=True, text=True, check=False)
35
-
36
- if result.returncode == 0:
37
- logger.info(
38
- f"STARSDataFusion.jl installed successfully in environment '{environment_name}'!"
39
- )
40
- else:
41
- logger.error("Error installing STARS.jl:")
42
- logger.error(result.stderr)
43
- return result