ECOv003-L2T-STARS 1.0.1__py3-none-any.whl → 1.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (75)
  1. ECOv003_L2T_STARS/BRDF/BRDF.py +57 -0
  2. ECOv003_L2T_STARS/BRDF/SZA.py +65 -0
  3. ECOv003_L2T_STARS/BRDF/__init__.py +1 -0
  4. ECOv003_L2T_STARS/BRDF/statistical_radiative_transport.txt +90 -0
  5. ECOv003_L2T_STARS/BRDF/version.txt +1 -0
  6. ECOv003_L2T_STARS/ECOv003_DL.py +527 -0
  7. ECOv003_L2T_STARS/ECOv003_DL.xml +47 -0
  8. ECOv003_L2T_STARS/ECOv003_L2T_STARS.py +169 -0
  9. ECOv003_L2T_STARS/ECOv003_L2T_STARS.xml +47 -0
  10. ECOv003_L2T_STARS/L2TSTARSConfig.py +190 -0
  11. ECOv003_L2T_STARS/L2T_STARS.py +503 -0
  12. ECOv003_L2T_STARS/LPDAAC/LPDAACDataPool.py +444 -0
  13. ECOv003_L2T_STARS/LPDAAC/__init__.py +9 -0
  14. ECOv003_L2T_STARS/LPDAAC/version.txt +1 -0
  15. ECOv003_L2T_STARS/Manifest.toml +2332 -0
  16. ECOv003_L2T_STARS/Project.toml +14 -0
  17. ECOv003_L2T_STARS/VIIRS/VIIRSDataPool.py +294 -0
  18. ECOv003_L2T_STARS/VIIRS/VIIRSDownloader.py +26 -0
  19. ECOv003_L2T_STARS/VIIRS/VIIRS_CMR_LOGIN.py +36 -0
  20. ECOv003_L2T_STARS/VIIRS/VNP09GA.py +1278 -0
  21. ECOv003_L2T_STARS/VIIRS/VNP43IA4.py +288 -0
  22. ECOv003_L2T_STARS/VIIRS/VNP43MA3.py +323 -0
  23. ECOv003_L2T_STARS/VIIRS/__init__.py +9 -0
  24. ECOv003_L2T_STARS/VIIRS/version.txt +1 -0
  25. ECOv003_L2T_STARS/VNP43NRT/VNP43NRT.py +863 -0
  26. ECOv003_L2T_STARS/VNP43NRT/__init__.py +1 -0
  27. ECOv003_L2T_STARS/VNP43NRT/process_VNP43NRT.jl +169 -0
  28. ECOv003_L2T_STARS/VNP43NRT/version.txt +1 -0
  29. ECOv003_L2T_STARS/VNP43NRT_jl/Manifest.toml +995 -0
  30. ECOv003_L2T_STARS/VNP43NRT_jl/Project.toml +15 -0
  31. ECOv003_L2T_STARS/VNP43NRT_jl/__init__.py +0 -0
  32. ECOv003_L2T_STARS/VNP43NRT_jl/instantiate.jl +25 -0
  33. ECOv003_L2T_STARS/VNP43NRT_jl/instantiate.py +13 -0
  34. ECOv003_L2T_STARS/VNP43NRT_jl/src/VNP43NRT.jl +411 -0
  35. ECOv003_L2T_STARS/VNP43NRT_jl/src/__init__.py +0 -0
  36. ECOv003_L2T_STARS/__init__.py +3 -0
  37. ECOv003_L2T_STARS/calibrate_fine_to_coarse.py +60 -0
  38. ECOv003_L2T_STARS/constants.py +38 -0
  39. ECOv003_L2T_STARS/daterange/__init__.py +1 -0
  40. ECOv003_L2T_STARS/daterange/daterange.py +35 -0
  41. ECOv003_L2T_STARS/generate_L2T_STARS_runconfig.py +249 -0
  42. ECOv003_L2T_STARS/generate_NDVI_coarse_directory.py +21 -0
  43. ECOv003_L2T_STARS/generate_NDVI_coarse_image.py +30 -0
  44. ECOv003_L2T_STARS/generate_NDVI_fine_directory.py +14 -0
  45. ECOv003_L2T_STARS/generate_NDVI_fine_image.py +28 -0
  46. ECOv003_L2T_STARS/generate_STARS_inputs.py +231 -0
  47. ECOv003_L2T_STARS/generate_albedo_coarse_directory.py +18 -0
  48. ECOv003_L2T_STARS/generate_albedo_coarse_image.py +30 -0
  49. ECOv003_L2T_STARS/generate_albedo_fine_directory.py +17 -0
  50. ECOv003_L2T_STARS/generate_albedo_fine_image.py +30 -0
  51. ECOv003_L2T_STARS/generate_filename.py +37 -0
  52. ECOv003_L2T_STARS/generate_input_staging_directory.py +23 -0
  53. ECOv003_L2T_STARS/generate_model_state_tile_date_directory.py +28 -0
  54. ECOv003_L2T_STARS/generate_output_directory.py +28 -0
  55. ECOv003_L2T_STARS/install_STARS_jl.py +43 -0
  56. ECOv003_L2T_STARS/instantiate_STARS_jl.py +38 -0
  57. ECOv003_L2T_STARS/load_prior.py +250 -0
  58. ECOv003_L2T_STARS/prior.py +56 -0
  59. ECOv003_L2T_STARS/process_ECOSTRESS_data_fusion_distributed_bias.jl +420 -0
  60. ECOv003_L2T_STARS/process_STARS_product.py +507 -0
  61. ECOv003_L2T_STARS/process_julia_data_fusion.py +110 -0
  62. ECOv003_L2T_STARS/retrieve_STARS_sources.py +101 -0
  63. ECOv003_L2T_STARS/runconfig.py +70 -0
  64. ECOv003_L2T_STARS/timer/__init__.py +1 -0
  65. ECOv003_L2T_STARS/timer/timer.py +77 -0
  66. ECOv003_L2T_STARS/version.py +8 -0
  67. ECOv003_L2T_STARS/version.txt +1 -0
  68. {ECOv003_L2T_STARS-1.0.1.dist-info → ecov003_l2t_stars-1.2.0.dist-info}/METADATA +31 -24
  69. ecov003_l2t_stars-1.2.0.dist-info/RECORD +73 -0
  70. {ECOv003_L2T_STARS-1.0.1.dist-info → ecov003_l2t_stars-1.2.0.dist-info}/WHEEL +1 -1
  71. ecov003_l2t_stars-1.2.0.dist-info/entry_points.txt +3 -0
  72. ecov003_l2t_stars-1.2.0.dist-info/top_level.txt +1 -0
  73. ECOv003_L2T_STARS-1.0.1.dist-info/RECORD +0 -5
  74. ECOv003_L2T_STARS-1.0.1.dist-info/top_level.txt +0 -1
  75. {ECOv003_L2T_STARS-1.0.1.dist-info → ecov003_l2t_stars-1.2.0.dist-info/licenses}/LICENSE +0 -0
ECOv003_L2T_STARS/process_STARS_product.py
@@ -0,0 +1,507 @@
+ import logging
+ import shutil
+ from datetime import datetime, date
+ from os import remove
+ from os.path import basename, exists
+ from typing import Union
+ import logging
+
+ import numpy as np
+
+ import colored_logging as cl
+ from rasters import Raster
+
+ from harmonized_landsat_sentinel import HLS2CMR
+
+ from ECOv003_granules import L2TSTARS, NDVI_COLORMAP, ALBEDO_COLORMAP
+ from ECOv003_exit_codes import BlankOutput
+
+ from .VIIRS import VIIRSDownloaderNDVI, VIIRSDownloaderAlbedo
+ from .generate_NDVI_coarse_directory import generate_NDVI_coarse_directory
+ from .generate_NDVI_fine_directory import generate_NDVI_fine_directory
+ from .generate_albedo_coarse_directory import generate_albedo_coarse_directory
+ from .generate_albedo_fine_directory import generate_albedo_fine_directory
+ from .generate_model_state_tile_date_directory import generate_model_state_tile_date_directory
+ from .generate_STARS_inputs import generate_STARS_inputs
+ from .generate_filename import generate_filename
+ from .process_julia_data_fusion import process_julia_data_fusion
+
+ from .prior import Prior
+
+ logger = logging.getLogger(__name__)
+
+ def process_STARS_product(
+     tile: str,
+     date_UTC: date,
+     time_UTC: datetime,
+     build: str,
+     product_counter: int,
+     HLS_start_date: date,
+     HLS_end_date: date,
+     VIIRS_start_date: date,
+     VIIRS_end_date: date,
+     NDVI_resolution: int,
+     albedo_resolution: int,
+     target_resolution: int,
+     working_directory: str,
+     model_directory: str,
+     input_staging_directory: str,
+     L2T_STARS_granule_directory: str,
+     L2T_STARS_zip_filename: str,
+     L2T_STARS_browse_filename: str,
+     metadata: dict,
+     prior: Prior,
+     HLS_connection: HLS2CMR,
+     NDVI_VIIRS_connection: VIIRSDownloaderNDVI,
+     albedo_VIIRS_connection: VIIRSDownloaderAlbedo,
+     using_prior: bool = False,
+     calibrate_fine: bool = False,
+     remove_input_staging: bool = True,
+     remove_prior: bool = True,
+     remove_posterior: bool = True,
+     threads: Union[int, str] = "auto",
+     num_workers: int = 4,
+ ):
+     """
+     Orchestrates the generation of the L2T_STARS product for a given tile and date.
+
+     This function handles the staging of input data, execution of the Julia data
+     fusion model for both NDVI and albedo, and the final assembly, metadata
+     writing, and archiving of the L2T_STARS product. It also manages cleanup
+     of intermediate and prior files.
+
+     Args:
+         tile (str): The HLS tile ID.
+         date_UTC (date): The UTC date for the current L2T_STARS product.
+         time_UTC (datetime): The UTC time for the current L2T_STARS product.
+         build (str): The build ID of the PGE.
+         product_counter (int): The product counter for the current run.
+         HLS_start_date (date): The start date for HLS data used in fusion.
+         HLS_end_date (date): The end date for HLS data used in fusion.
+         VIIRS_start_date (date): The start date for VIIRS data used in fusion.
+         VIIRS_end_date (date): The end date for VIIRS data used in fusion.
+         NDVI_resolution (int): The resolution of the coarse NDVI data.
+         albedo_resolution (int): The resolution of the coarse albedo data.
+         target_resolution (int): The desired output resolution of the fused product.
+         working_directory (str): The main working directory.
+         model_directory (str): Directory for model state files (priors, posteriors).
+         input_staging_directory (str): Directory for temporary input images for Julia.
+         L2T_STARS_granule_directory (str): Temporary directory for the unzipped product.
+         L2T_STARS_zip_filename (str): Final path for the zipped L2T_STARS product.
+         L2T_STARS_browse_filename (str): Final path for the browse image.
+         metadata (dict): Dictionary containing product metadata.
+         prior (Prior): An object containing information about the prior product.
+         HLS_connection (HLS2CMR): An initialized HLS data connection object.
+         NDVI_VIIRS_connection (VIIRSDownloaderNDVI): An initialized VIIRS NDVI downloader.
+         albedo_VIIRS_connection (VIIRSDownloaderAlbedo): An initialized VIIRS albedo downloader.
+         using_prior (bool, optional): If True, use the prior product in fusion. Defaults to False.
+         calibrate_fine (bool, optional): If True, calibrate fine images to coarse images.
+             Defaults to False.
+         remove_input_staging (bool, optional): If True, remove the input staging directory
+             after processing. Defaults to True.
+         remove_prior (bool, optional): If True, remove prior intermediate files after use.
+             Defaults to True.
+         remove_posterior (bool, optional): If True, remove posterior intermediate files after
+             product generation. Defaults to True.
+         threads (Union[int, str], optional): Number of Julia threads to use, or "auto".
+             Defaults to "auto".
+         num_workers (int, optional): Number of Julia workers for distributed processing.
+             Defaults to 4.
+
+     Raises:
+         BlankOutput: If any of the final fused output rasters (NDVI, albedo, UQ, flag) are empty.
+     """
+     # Get the target geometries for coarse NDVI and albedo based on the HLS grid
+     NDVI_coarse_geometry = HLS_connection.grid(tile=tile, cell_size=NDVI_resolution)
+     albedo_coarse_geometry = HLS_connection.grid(tile=tile, cell_size=albedo_resolution)
+
+     logger.info(f"Processing the L2T_STARS product at tile {cl.place(tile)} for date {cl.time(date_UTC)}")
+
+     # Define and create input staging directories for coarse and fine NDVI/albedo
+     NDVI_coarse_directory = generate_NDVI_coarse_directory(
+         input_staging_directory=input_staging_directory, tile=tile
+     )
+     logger.info(f"Staging coarse NDVI images: {cl.dir(NDVI_coarse_directory)}")
+
+     NDVI_fine_directory = generate_NDVI_fine_directory(
+         input_staging_directory=input_staging_directory, tile=tile
+     )
+     logger.info(f"Staging fine NDVI images: {cl.dir(NDVI_fine_directory)}")
+
+     albedo_coarse_directory = generate_albedo_coarse_directory(
+         input_staging_directory=input_staging_directory, tile=tile
+     )
+     logger.info(f"Staging coarse albedo images: {cl.dir(albedo_coarse_directory)}")
+
+     albedo_fine_directory = generate_albedo_fine_directory(
+         input_staging_directory=input_staging_directory, tile=tile
+     )
+     logger.info(f"Staging fine albedo images: {cl.dir(albedo_fine_directory)}")
+
+     # Define and create the directory for storing posterior model state files
+     posterior_tile_date_directory = generate_model_state_tile_date_directory(
+         model_directory=model_directory, tile=tile, date_UTC=date_UTC
+     )
+     logger.info(f"Posterior directory: {cl.dir(posterior_tile_date_directory)}")
+
+     # Generate the actual input raster files (coarse and fine images)
+     generate_STARS_inputs(
+         tile=tile,
+         date_UTC=date_UTC,
+         HLS_start_date=HLS_start_date,
+         HLS_end_date=HLS_end_date,
+         VIIRS_start_date=VIIRS_start_date,
+         VIIRS_end_date=VIIRS_end_date,
+         NDVI_resolution=NDVI_resolution,
+         albedo_resolution=albedo_resolution,
+         target_resolution=target_resolution,
+         NDVI_coarse_geometry=NDVI_coarse_geometry,
+         albedo_coarse_geometry=albedo_coarse_geometry,
+         working_directory=working_directory,
+         NDVI_coarse_directory=NDVI_coarse_directory,
+         NDVI_fine_directory=NDVI_fine_directory,
+         albedo_coarse_directory=albedo_coarse_directory,
+         albedo_fine_directory=albedo_fine_directory,
+         HLS_connection=HLS_connection,
+         NDVI_VIIRS_connection=NDVI_VIIRS_connection,
+         albedo_VIIRS_connection=albedo_VIIRS_connection,
+         calibrate_fine=calibrate_fine,
+     )
+
+     # --- Process NDVI Data Fusion ---
+     # Define output filenames for NDVI posterior products
+     posterior_NDVI_filename = generate_filename(
+         directory=posterior_tile_date_directory,
+         variable="NDVI",
+         date_UTC=date_UTC,
+         tile=tile,
+         cell_size=target_resolution,
+     )
+     logger.info(f"Posterior NDVI file: {cl.file(posterior_NDVI_filename)}")
+
+     posterior_NDVI_UQ_filename = generate_filename(
+         directory=posterior_tile_date_directory,
+         variable="NDVI.UQ",
+         date_UTC=date_UTC,
+         tile=tile,
+         cell_size=target_resolution,
+     )
+     logger.info(f"Posterior NDVI UQ file: {cl.file(posterior_NDVI_UQ_filename)}")
+
+     posterior_NDVI_flag_filename = generate_filename(
+         directory=posterior_tile_date_directory,
+         variable="NDVI.flag",
+         date_UTC=date_UTC,
+         tile=tile,
+         cell_size=target_resolution,
+     )
+     logger.info(f"Posterior NDVI flag file: {cl.file(posterior_NDVI_flag_filename)}")
+
+     posterior_NDVI_bias_filename = generate_filename(
+         directory=posterior_tile_date_directory,
+         variable="NDVI.bias",
+         date_UTC=date_UTC,
+         tile=tile,
+         cell_size=target_resolution,
+     )
+     logger.info(f"Posterior NDVI bias file: {cl.file(posterior_NDVI_bias_filename)}")
+
+     posterior_NDVI_bias_UQ_filename = generate_filename(
+         directory=posterior_tile_date_directory,
+         variable="NDVI.bias.UQ",
+         date_UTC=date_UTC,
+         tile=tile,
+         cell_size=target_resolution,
+     )
+     logger.info(f"Posterior NDVI bias UQ file: {cl.file(posterior_NDVI_bias_UQ_filename)}")
+
+     # Run Julia data fusion for NDVI, conditionally including prior data
+     if using_prior:
+         logger.info("Running Julia data fusion for NDVI with prior data.")
+         process_julia_data_fusion(
+             tile=tile,
+             coarse_cell_size=NDVI_resolution,
+             fine_cell_size=target_resolution,
+             VIIRS_start_date=VIIRS_start_date,
+             VIIRS_end_date=VIIRS_end_date,
+             HLS_start_date=HLS_start_date,
+             HLS_end_date=HLS_end_date,
+             coarse_directory=NDVI_coarse_directory,
+             fine_directory=NDVI_fine_directory,
+             posterior_filename=posterior_NDVI_filename,
+             posterior_UQ_filename=posterior_NDVI_UQ_filename,
+             posterior_flag_filename=posterior_NDVI_flag_filename,
+             posterior_bias_filename=posterior_NDVI_bias_filename,
+             posterior_bias_UQ_filename=posterior_NDVI_bias_UQ_filename,
+             prior_filename=prior.prior_NDVI_filename,
+             prior_UQ_filename=prior.prior_NDVI_UQ_filename,
+             prior_bias_filename=prior.prior_NDVI_bias_filename,
+             prior_bias_UQ_filename=prior.prior_NDVI_bias_UQ_filename,
+             threads=threads,
+             num_workers=num_workers,
+         )
+     else:
+         logger.info("Running Julia data fusion for NDVI without prior data.")
+         process_julia_data_fusion(
+             tile=tile,
+             coarse_cell_size=NDVI_resolution,
+             fine_cell_size=target_resolution,
+             VIIRS_start_date=VIIRS_start_date,
+             VIIRS_end_date=VIIRS_end_date,
+             HLS_start_date=HLS_start_date,
+             HLS_end_date=HLS_end_date,
+             coarse_directory=NDVI_coarse_directory,
+             fine_directory=NDVI_fine_directory,
+             posterior_filename=posterior_NDVI_filename,
+             posterior_UQ_filename=posterior_NDVI_UQ_filename,
+             posterior_flag_filename=posterior_NDVI_flag_filename,
+             posterior_bias_filename=posterior_NDVI_bias_filename,
+             posterior_bias_UQ_filename=posterior_NDVI_bias_UQ_filename,
+             threads=threads,
+             num_workers=num_workers,
+         )
+
+     # Open the resulting NDVI rasters
+     NDVI = Raster.open(posterior_NDVI_filename)
+     NDVI_UQ = Raster.open(posterior_NDVI_UQ_filename)
+     NDVI_flag = Raster.open(posterior_NDVI_flag_filename)
+
+     # --- Process Albedo Data Fusion ---
+     # Define output filenames for albedo posterior products
+     posterior_albedo_filename = generate_filename(
+         directory=posterior_tile_date_directory,
+         variable="albedo",
+         date_UTC=date_UTC,
+         tile=tile,
+         cell_size=target_resolution,
+     )
+     logger.info(f"Posterior albedo file: {cl.file(posterior_albedo_filename)}")
+
+     posterior_albedo_UQ_filename = generate_filename(
+         directory=posterior_tile_date_directory,
+         variable="albedo.UQ",
+         date_UTC=date_UTC,
+         tile=tile,
+         cell_size=target_resolution,
+     )
+     logger.info(f"Posterior albedo UQ file: {cl.file(posterior_albedo_UQ_filename)}")
+
+     posterior_albedo_flag_filename = generate_filename(
+         directory=posterior_tile_date_directory,
+         variable="albedo.flag",
+         date_UTC=date_UTC,
+         tile=tile,
+         cell_size=target_resolution,
+     )
+     logger.info(f"Posterior albedo flag file: {cl.file(posterior_albedo_flag_filename)}")
+
+     posterior_albedo_bias_filename = generate_filename(
+         directory=posterior_tile_date_directory,
+         variable="albedo.bias",
+         date_UTC=date_UTC,
+         tile=tile,
+         cell_size=target_resolution,
+     )
+     logger.info(f"Posterior albedo bias file: {cl.file(posterior_albedo_bias_filename)}")
+
+     posterior_albedo_bias_UQ_filename = generate_filename(
+         directory=posterior_tile_date_directory,
+         variable="albedo.bias.UQ",
+         date_UTC=date_UTC,
+         tile=tile,
+         cell_size=target_resolution,
+     )
+     logger.info(f"Posterior albedo bias UQ file: {cl.file(posterior_albedo_bias_UQ_filename)}")
+
+     # Run Julia data fusion for albedo, conditionally including prior data
+     if using_prior:
+         logger.info("Running Julia data fusion for albedo with prior data.")
+         process_julia_data_fusion(
+             tile=tile,
+             coarse_cell_size=albedo_resolution,
+             fine_cell_size=target_resolution,
+             VIIRS_start_date=VIIRS_start_date,
+             VIIRS_end_date=VIIRS_end_date,
+             HLS_start_date=HLS_start_date,
+             HLS_end_date=HLS_end_date,
+             coarse_directory=albedo_coarse_directory,
+             fine_directory=albedo_fine_directory,
+             posterior_filename=posterior_albedo_filename,
+             posterior_UQ_filename=posterior_albedo_UQ_filename,
+             posterior_flag_filename=posterior_albedo_flag_filename,
+             posterior_bias_filename=posterior_albedo_bias_filename,
+             posterior_bias_UQ_filename=posterior_albedo_bias_UQ_filename,
+             prior_filename=prior.prior_albedo_filename,
+             prior_UQ_filename=prior.prior_albedo_UQ_filename,
+             prior_bias_filename=prior.prior_albedo_bias_filename,
+             prior_bias_UQ_filename=prior.prior_albedo_bias_UQ_filename,
+             threads=threads,
+             num_workers=num_workers,
+         )
+     else:
+         logger.info("Running Julia data fusion for albedo without prior data.")
+         process_julia_data_fusion(
+             tile=tile,
+             coarse_cell_size=albedo_resolution,
+             fine_cell_size=target_resolution,
+             VIIRS_start_date=VIIRS_start_date,
+             VIIRS_end_date=VIIRS_end_date,
+             HLS_start_date=HLS_start_date,
+             HLS_end_date=HLS_end_date,
+             coarse_directory=albedo_coarse_directory,
+             fine_directory=albedo_fine_directory,
+             posterior_filename=posterior_albedo_filename,
+             posterior_UQ_filename=posterior_albedo_UQ_filename,
+             posterior_flag_filename=posterior_albedo_flag_filename,
+             posterior_bias_filename=posterior_albedo_bias_filename,
+             posterior_bias_UQ_filename=posterior_albedo_bias_UQ_filename,
+             threads=threads,
+             num_workers=num_workers,
+         )
+
+     # Open the resulting albedo rasters
+     albedo = Raster.open(posterior_albedo_filename)
+     albedo_UQ = Raster.open(posterior_albedo_UQ_filename)
+     albedo_flag = Raster.open(posterior_albedo_flag_filename)
+
+     # --- Validate Output and Create Final Product ---
+     # Check if the output rasters are valid (not None, indicating a problem during Julia processing)
+     if NDVI is None:
+         raise BlankOutput("Unable to generate STARS NDVI")
+     if NDVI_UQ is None:
+         raise BlankOutput("Unable to generate STARS NDVI UQ")
+     if NDVI_flag is None:
+         raise BlankOutput("Unable to generate STARS NDVI flag")
+     if albedo is None:
+         raise BlankOutput("Unable to generate STARS albedo")
+     if albedo_UQ is None:
+         raise BlankOutput("Unable to generate STARS albedo UQ")
+     if albedo_flag is None:
+         raise BlankOutput("Unable to generate STARS albedo flag")
+
+     # Initialize the L2TSTARS granule object for the current product
+     granule = L2TSTARS(
+         product_location=L2T_STARS_granule_directory,
+         tile=tile,
+         time_UTC=time_UTC,
+         build=build,
+         process_count=product_counter,
+     )
+
+     # Add the generated layers to the granule object
+     granule.add_layer("NDVI", NDVI, cmap=NDVI_COLORMAP)
+     granule.add_layer("NDVI-UQ", NDVI_UQ, cmap="jet")
+     granule.add_layer("NDVI-flag", NDVI_flag, cmap="jet")
+     granule.add_layer("albedo", albedo, cmap=ALBEDO_COLORMAP)
+     granule.add_layer("albedo-UQ", albedo_UQ, cmap="jet")
+     granule.add_layer("albedo-flag", albedo_flag, cmap="jet")
+
+     # Update metadata and write to the granule
+     metadata["StandardMetadata"]["LocalGranuleID"] = basename(L2T_STARS_zip_filename)
+     metadata["StandardMetadata"]["SISName"] = "Level 2 STARS Product Specification Document"
+     granule.write_metadata(metadata)
+
+     # Write the zipped product and browse image
+     logger.info(f"Writing L2T STARS product zip: {cl.file(L2T_STARS_zip_filename)}")
+     granule.write_zip(L2T_STARS_zip_filename)
+     logger.info(f"Writing L2T STARS browse image: {cl.file(L2T_STARS_browse_filename)}")
+     granule.write_browse_image(PNG_filename=L2T_STARS_browse_filename)
+     logger.info(
+         f"Removing L2T STARS tile granule directory: {cl.dir(L2T_STARS_granule_directory)}"
+     )
+     shutil.rmtree(L2T_STARS_granule_directory)
+
+     # Re-check and regenerate browse image if it somehow didn't generate (e.g. if the granule dir was already deleted)
+     if not exists(L2T_STARS_browse_filename):
+         logger.info(
+             f"Browse image not found after initial creation attempt. Regenerating: {cl.file(L2T_STARS_browse_filename)}"
+         )
+         # Re-load granule from zip to create browse image if necessary
+         granule_for_browse = L2TSTARS(L2T_STARS_zip_filename)
+         granule_for_browse.write_browse_image(PNG_filename=L2T_STARS_browse_filename)
+
+     # Re-write posterior files (often done to ensure proper compression/color maps after processing)
+     # This step might be redundant if Julia already writes them correctly, but ensures consistency.
+     logger.info(f"Re-writing posterior NDVI: {posterior_NDVI_filename}")
+     Raster.open(posterior_NDVI_filename, cmap=NDVI_COLORMAP).to_geotiff(
+         posterior_NDVI_filename
+     )
+     logger.info(f"Re-writing posterior NDVI UQ: {posterior_NDVI_UQ_filename}")
+     Raster.open(posterior_NDVI_UQ_filename, cmap="jet").to_geotiff(
+         posterior_NDVI_UQ_filename
+     )
+     logger.info(f"Re-writing posterior NDVI flag: {posterior_NDVI_flag_filename}")
+     Raster.open(posterior_NDVI_flag_filename, cmap="jet").to_geotiff(
+         posterior_NDVI_flag_filename
+     )
+     logger.info(f"Re-writing posterior NDVI bias: {posterior_NDVI_bias_filename}")
+     Raster.open(posterior_NDVI_bias_filename, cmap=NDVI_COLORMAP).to_geotiff(
+         posterior_NDVI_bias_filename
+     )
+     logger.info(f"Re-writing posterior NDVI bias UQ: {posterior_NDVI_bias_UQ_filename}")
+     Raster.open(posterior_NDVI_bias_UQ_filename, cmap=NDVI_COLORMAP).to_geotiff(
+         posterior_NDVI_bias_UQ_filename
+     )
+
+     logger.info(f"Re-writing posterior albedo: {posterior_albedo_filename}")
+     Raster.open(posterior_albedo_filename, cmap=ALBEDO_COLORMAP).to_geotiff(
+         posterior_albedo_filename
+     )
+     logger.info(f"Re-writing posterior albedo UQ: {posterior_albedo_UQ_filename}")
+     Raster.open(posterior_albedo_UQ_filename, cmap="jet").to_geotiff(
+         posterior_albedo_UQ_filename
+     )
+     logger.info(f"Re-writing posterior albedo flag: {posterior_albedo_flag_filename}")
+     Raster.open(posterior_albedo_flag_filename, cmap="jet").to_geotiff(
+         posterior_albedo_flag_filename
+     )
+     logger.info(f"Re-writing posterior albedo bias: {posterior_albedo_bias_filename}")
+     Raster.open(posterior_albedo_bias_filename, cmap=ALBEDO_COLORMAP).to_geotiff(
+         posterior_albedo_bias_filename
+     )
+     logger.info(f"Re-writing posterior albedo bias UQ: {posterior_albedo_bias_UQ_filename}")
+     Raster.open(posterior_albedo_bias_UQ_filename, cmap=ALBEDO_COLORMAP).to_geotiff(
+         posterior_albedo_bias_UQ_filename
+     )
+
+     # --- Cleanup ---
+     if remove_input_staging:
+         if exists(input_staging_directory):
+             logger.info(f"Removing input staging directory: {cl.dir(input_staging_directory)}")
+             shutil.rmtree(input_staging_directory)
+
+     if using_prior and remove_prior:
+         # Remove prior intermediate files only if they exist
+         prior_files = [
+             prior.prior_NDVI_filename,
+             prior.prior_NDVI_UQ_filename,
+             prior.prior_NDVI_bias_filename,
+             prior.prior_NDVI_bias_UQ_filename,
+             prior.prior_albedo_filename,
+             prior.prior_albedo_UQ_filename,
+             prior.prior_albedo_bias_filename,
+             prior.prior_albedo_bias_UQ_filename,
+         ]
+         for f in prior_files:
+             if f is not None and exists(f):
+                 logger.info(f"Removing prior file: {cl.file(f)}")
+                 remove(f)
+
+     if remove_posterior:
+         # Remove posterior intermediate files only if they exist
+         posterior_files = [
+             posterior_NDVI_filename,
+             posterior_NDVI_UQ_filename,
+             posterior_NDVI_flag_filename,
+             posterior_NDVI_bias_filename,
+             posterior_NDVI_bias_UQ_filename,
+             posterior_albedo_filename,
+             posterior_albedo_UQ_filename,
+             posterior_albedo_flag_filename,
+             posterior_albedo_bias_filename,
+             posterior_albedo_bias_UQ_filename,
+         ]
+         for f in posterior_files:
+             if f is not None and exists(f):
+                 logger.info(f"Removing posterior file: {cl.file(f)}")
+                 remove(f)
ECOv003_L2T_STARS/process_julia_data_fusion.py
@@ -0,0 +1,110 @@
+ import subprocess
+ from typing import Union
+ from datetime import date
+ from os.path import abspath, dirname, join, exists
+ import logging
+
+ from .instantiate_STARS_jl import instantiate_STARS_jl
+
+ logger = logging.getLogger(__name__)
+
+ def process_julia_data_fusion(
+     tile: str,
+     coarse_cell_size: int,
+     fine_cell_size: int,
+     VIIRS_start_date: date,
+     VIIRS_end_date: date,
+     HLS_start_date: date,
+     HLS_end_date: date,
+     coarse_directory: str,
+     fine_directory: str,
+     posterior_filename: str,
+     posterior_UQ_filename: str,
+     posterior_flag_filename: str,
+     posterior_bias_filename: str,
+     posterior_bias_UQ_filename: str,
+     prior_filename: str = None,
+     prior_UQ_filename: str = None,
+     prior_bias_filename: str = None,
+     prior_bias_UQ_filename: str = None,
+     environment_name: str = "@ECOv003-L2T-STARS", # Unused in current Julia command, but kept for consistency
+     threads: Union[int, str] = "auto",
+     num_workers: int = 4):
+     """
+     Executes the Julia-based data fusion process for NDVI or albedo.
+
+     This function prepares and runs a Julia script that performs the core
+     STARS data fusion. It passes all necessary input and output paths,
+     date ranges, and resolution parameters to the Julia script. Optionally,
+     it can also pass prior information to the Julia system.
+
+     Args:
+         tile (str): The HLS tile ID.
+         coarse_cell_size (int): The cell size of the coarse resolution data (e.g., VIIRS).
+         fine_cell_size (int): The cell size of the fine resolution data (e.g., HLS and target).
+         VIIRS_start_date (date): Start date for VIIRS data processing.
+         VIIRS_end_date (date): End date for VIIRS data processing.
+         HLS_start_date (date): Start date for HLS data processing.
+         HLS_end_date (date): End date for HLS data processing.
+         coarse_directory (str): Directory containing coarse resolution input images.
+         fine_directory (str): Directory containing fine resolution input images.
+         posterior_filename (str): Output path for the fused posterior mean image.
+         posterior_UQ_filename (str): Output path for the fused posterior uncertainty image.
+         posterior_flag_filename (str): Output path for the fused posterior flag image.
+         posterior_bias_filename (str): Output path for the fused posterior bias image.
+         posterior_bias_UQ_filename (str): Output path for the fused posterior bias uncertainty image.
+         prior_filename (str, optional): Path to the prior mean image. Defaults to None.
+         prior_UQ_filename (str, optional): Path to the prior uncertainty image. Defaults to None.
+         prior_bias_filename (str, optional): Path to the prior bias image. Defaults to None.
+         prior_bias_UQ_filename (str, optional): Path to the prior bias uncertainty image. Defaults to None.
+         environment_name (str, optional): Julia environment name. Defaults to "@ECOv003-L2T-STARS".
+         threads (Union[int, str], optional): Number of Julia threads to use, or "auto".
+             Defaults to "auto".
+         num_workers (int, optional): Number of Julia workers for distributed processing.
+             Defaults to 4.
+     """
+     # Construct the path to the Julia processing script
+     julia_script_filename = join(
+         abspath(dirname(__file__)), "process_ECOSTRESS_data_fusion_distributed_bias.jl"
+     )
+     # The directory where the Julia Project.toml is located
+     STARS_source_directory = abspath(dirname(__file__))
+
+     # Instantiate Julia dependencies
+     instantiate_STARS_jl(STARS_source_directory)
+
+     # Base Julia command with required arguments
+     command = (
+         f'export JULIA_NUM_THREADS={threads}; julia --threads {threads} '
+         f'"{julia_script_filename}" {num_workers} "{tile}" "{coarse_cell_size}" '
+         f'"{fine_cell_size}" "{VIIRS_start_date}" "{VIIRS_end_date}" '
+         f'"{HLS_start_date}" "{HLS_end_date}" "{coarse_directory}" '
+         f'"{fine_directory}" "{posterior_filename}" "{posterior_UQ_filename}" '
+         f'"{posterior_flag_filename}" "{posterior_bias_filename}" '
+         f'"{posterior_bias_UQ_filename}"'
+     )
+
+     # Conditionally add prior arguments if all prior filenames are provided and exist
+     if all(
+         [
+             filename is not None and exists(filename)
+             for filename in [
+                 prior_filename,
+                 prior_UQ_filename,
+                 prior_bias_filename,
+                 prior_bias_UQ_filename,
+             ]
+         ]
+     ):
+         logger.info("Passing prior into Julia data fusion system")
+         command += (
+             f' "{prior_filename}" "{prior_UQ_filename}" "{prior_bias_filename}" '
+             f'"{prior_bias_UQ_filename}"'
+         )
+     else:
+         logger.info("No complete prior set found; running Julia data fusion without prior.")
+
+     logger.info(f"Executing Julia command: {command}")
+     # Execute the Julia command. Using shell=True as the command string includes shell syntax (export).
+     # This assumes the Julia executable is in the system's PATH.
+     subprocess.run(command, shell=True, check=False)
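
For orientation, here is a minimal sketch of how the new process_julia_data_fusion helper could be called on its own, based only on the signature and docstring added above. The tile ID, dates, cell sizes, and file paths are hypothetical placeholders rather than values taken from the package, and running it requires a Julia installation on the PATH.

    from datetime import date
    from ECOv003_L2T_STARS.process_julia_data_fusion import process_julia_data_fusion

    # Hypothetical example: fuse coarse VIIRS-scale NDVI with fine HLS-scale NDVI for one tile.
    # All values below are placeholders for illustration only.
    process_julia_data_fusion(
        tile="11SPS",
        coarse_cell_size=490,
        fine_cell_size=70,
        VIIRS_start_date=date(2025, 5, 1),
        VIIRS_end_date=date(2025, 5, 16),
        HLS_start_date=date(2025, 5, 1),
        HLS_end_date=date(2025, 5, 16),
        coarse_directory="staging/NDVI_coarse/11SPS",
        fine_directory="staging/NDVI_fine/11SPS",
        posterior_filename="model/11SPS/2025-05-16/NDVI.tif",
        posterior_UQ_filename="model/11SPS/2025-05-16/NDVI.UQ.tif",
        posterior_flag_filename="model/11SPS/2025-05-16/NDVI.flag.tif",
        posterior_bias_filename="model/11SPS/2025-05-16/NDVI.bias.tif",
        posterior_bias_UQ_filename="model/11SPS/2025-05-16/NDVI.bias.UQ.tif",
        threads="auto",
        num_workers=4,
    )

Because the wrapper shells out to Julia with check=False, a caller should verify that the posterior files were actually written before opening them, which is what process_STARS_product does by raising BlankOutput when any fused raster is missing.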