ECOv003-L2T-STARS 1.1.0-py3-none-any.whl → 1.3.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ECOv003_L2T_STARS/ECOv003_DL.py +14 -14
- ECOv003_L2T_STARS/ECOv003_DL.xml +1 -1
- ECOv003_L2T_STARS/ECOv003_L2T_STARS.xml +1 -1
- ECOv003_L2T_STARS/L2TSTARSConfig.py +17 -15
- ECOv003_L2T_STARS/L2T_STARS.py +20 -6
- ECOv003_L2T_STARS/LPDAAC/LPDAACDataPool.py +43 -41
- ECOv003_L2T_STARS/VIIRS/VNP09GA.py +5 -3
- ECOv003_L2T_STARS/__init__.py +1 -1
- ECOv003_L2T_STARS/cksum.py +66 -0
- ECOv003_L2T_STARS/exceptions.py +2 -0
- ECOv003_L2T_STARS/generate_L2T_STARS_runconfig.py +1 -1
- ECOv003_L2T_STARS/{install_STARS_jl.py → install_STARSDataFusion_jl.py} +4 -4
- ECOv003_L2T_STARS/{instantiate_STARS_jl.py → instantiate_STARSDataFusion_jl.py} +3 -3
- ECOv003_L2T_STARS/load_prior.py +4 -2
- ECOv003_L2T_STARS/login.py +61 -0
- ECOv003_L2T_STARS/{ECOv003_L2T_STARS.py → main.py} +8 -1
- ECOv003_L2T_STARS/process_julia_data_fusion.py +2 -2
- ECOv003_L2T_STARS/version.txt +1 -1
- {ecov003_l2t_stars-1.1.0.dist-info → ecov003_l2t_stars-1.3.0.dist-info}/METADATA +2 -3
- {ecov003_l2t_stars-1.1.0.dist-info → ecov003_l2t_stars-1.3.0.dist-info}/RECORD +24 -22
- ecov003_l2t_stars-1.3.0.dist-info/entry_points.txt +3 -0
- ECOv003_L2T_STARS/VIIRS/VIIRS_CMR_LOGIN.py +0 -36
- ecov003_l2t_stars-1.1.0.dist-info/entry_points.txt +0 -3
- {ecov003_l2t_stars-1.1.0.dist-info → ecov003_l2t_stars-1.3.0.dist-info}/WHEEL +0 -0
- {ecov003_l2t_stars-1.1.0.dist-info → ecov003_l2t_stars-1.3.0.dist-info}/licenses/LICENSE +0 -0
- {ecov003_l2t_stars-1.1.0.dist-info → ecov003_l2t_stars-1.3.0.dist-info}/top_level.txt +0 -0
ECOv003_L2T_STARS/ECOv003_DL.py
CHANGED
@@ -265,33 +265,33 @@ class ECOv003DLConfig(ECOSTRESSRunConfig):
  runconfig = read_runconfig(filename)

  # Validate and extract working directory
- if "
- raise MissingRunConfigValue(f"missing
- if "ECOv003_DL_WORKING" not in runconfig["
+ if "StaticAncillaryFileGroup" not in runconfig:
+ raise MissingRunConfigValue(f"missing StaticAncillaryFileGroup in ECOv003_DL run-config: {filename}")
+ if "ECOv003_DL_WORKING" not in runconfig["StaticAncillaryFileGroup"]:
  raise MissingRunConfigValue(
- f"missing
- working_directory = abspath(runconfig["
+ f"missing StaticAncillaryFileGroup/ECOv003_DL_WORKING in ECOv003_DL run-config: {filename}")
+ working_directory = abspath(runconfig["StaticAncillaryFileGroup"]["ECOv003_DL_WORKING"])
  logger.info(f"working directory: {cl.dir(working_directory)}")

  # Validate and extract L2T STARS sources directory
- if "L2T_STARS_SOURCES" not in runconfig["
+ if "L2T_STARS_SOURCES" not in runconfig["StaticAncillaryFileGroup"]:
  raise MissingRunConfigValue(
- f"missing
- L2T_STARS_sources_directory = abspath(runconfig["
+ f"missing StaticAncillaryFileGroup/L2T_STARS_SOURCES in ECOv003_DL run-config: {filename}")
+ L2T_STARS_sources_directory = abspath(runconfig["StaticAncillaryFileGroup"]["L2T_STARS_SOURCES"])
  logger.info(f"L2T STARS sources directory: {cl.dir(L2T_STARS_sources_directory)}")

  # Validate and extract L2T STARS indices directory
- if "L2T_STARS_INDICES" not in runconfig["
+ if "L2T_STARS_INDICES" not in runconfig["StaticAncillaryFileGroup"]:
  raise MissingRunConfigValue(
- f"missing
- L2T_STARS_indices_directory = abspath(runconfig["
+ f"missing StaticAncillaryFileGroup/L2T_STARS_INDICES in ECOv003_DL run-config: {filename}")
+ L2T_STARS_indices_directory = abspath(runconfig["StaticAncillaryFileGroup"]["L2T_STARS_INDICES"])
  logger.info(f"L2T STARS indices directory: {cl.dir(L2T_STARS_indices_directory)}")

  # Validate and extract L2T STARS model directory
- if "L2T_STARS_MODEL" not in runconfig["
+ if "L2T_STARS_MODEL" not in runconfig["StaticAncillaryFileGroup"]:
  raise MissingRunConfigValue(
- f"missing
- L2T_STARS_model_directory = abspath(runconfig["
+ f"missing StaticAncillaryFileGroup/L2T_STARS_MODEL in ECOv003_DL run-config: {filename}")
+ L2T_STARS_model_directory = abspath(runconfig["StaticAncillaryFileGroup"]["L2T_STARS_MODEL"])
  logger.info(f"L2T STARS model directory: {cl.dir(L2T_STARS_model_directory)}")

  # Validate ProductPathGroup
ECOv003_L2T_STARS/ECOv003_DL.xml
CHANGED
@@ -28,7 +28,7 @@
  <element>L2T_LSTE_filename1</element>
  </vector>
  </group>
- <group name="
+ <group name="StaticAncillaryFileGroup">
  <scalar name="ECOv003_DL_WORKING">working_directory</scalar>
  <scalar name="L2T_STARS_SOURCES">L2T_STARS_sources_directory</scalar>
  <scalar name="L2T_STARS_INDICES">L2T_STARS_indices_directory</scalar>
ECOv003_L2T_STARS/ECOv003_L2T_STARS.xml
CHANGED
@@ -22,7 +22,7 @@
  <scalar name="L2T_LSTE">L2T_LSTE_filename</scalar>
  <scalar name="L2T_STARS_PRIOR">prior_L2T_STARS_filename</scalar>
  </group>
- <group name="
+ <group name="StaticAncillaryFileGroup">
  <scalar name="L2T_STARS_MODEL">model_directory</scalar>
  <scalar name="L2T_STARS_INDICES">indices_directory</scalar>
  <scalar name="L2T_STARS_SOURCES">sources_directory</scalar>
ECOv003_L2T_STARS/L2TSTARSConfig.py
CHANGED
@@ -37,48 +37,50 @@ class L2TSTARSConfig(ECOSTRESSRunConfig):
  # Read the run-config XML into a dictionary
  runconfig = self.read_runconfig(filename)

+ # reverting to StaticAncillaryFileGroup for now instead of StaticAuxiliaryFileGroup
+
  try:
- # Validate and extract working directory from
- if "
+ # Validate and extract working directory from StaticAncillaryFileGroup
+ if "StaticAncillaryFileGroup" not in runconfig:
  raise MissingRunConfigValue(
- f"Missing
+ f"Missing StaticAncillaryFileGroup in L2T_STARS run-config: {filename}"
  )
- if "L2T_STARS_WORKING" not in runconfig["
+ if "L2T_STARS_WORKING" not in runconfig["StaticAncillaryFileGroup"]:
  raise MissingRunConfigValue(
- f"Missing
+ f"Missing StaticAncillaryFileGroup/L2T_STARS_WORKING in L2T_STARS run-config: {filename}"
  )
  self.working_directory = abspath(
- runconfig["
+ runconfig["StaticAncillaryFileGroup"]["L2T_STARS_WORKING"]
  )
  logger.info(f"Working directory: {cl.dir(self.working_directory)}")

  # Validate and extract sources directory
- if "L2T_STARS_SOURCES" not in runconfig["
+ if "L2T_STARS_SOURCES" not in runconfig["StaticAncillaryFileGroup"]:
  raise MissingRunConfigValue(
- f"Missing
+ f"Missing StaticAncillaryFileGroup/L2T_STARS_SOURCES in L2T_STARS run-config: {filename}"
  )
  self.sources_directory = abspath(
- runconfig["
+ runconfig["StaticAncillaryFileGroup"]["L2T_STARS_SOURCES"]
  )
  logger.info(f"Sources directory: {cl.dir(self.sources_directory)}")

  # Validate and extract indices directory
- if "L2T_STARS_INDICES" not in runconfig["
+ if "L2T_STARS_INDICES" not in runconfig["StaticAncillaryFileGroup"]:
  raise MissingRunConfigValue(
- f"Missing
+ f"Missing StaticAncillaryFileGroup/L2T_STARS_INDICES in L2T_STARS run-config: {filename}"
  )
  self.indices_directory = abspath(
- runconfig["
+ runconfig["StaticAncillaryFileGroup"]["L2T_STARS_INDICES"]
  )
  logger.info(f"Indices directory: {cl.dir(self.indices_directory)}")

  # Validate and extract model directory
- if "L2T_STARS_MODEL" not in runconfig["
+ if "L2T_STARS_MODEL" not in runconfig["StaticAncillaryFileGroup"]:
  raise MissingRunConfigValue(
- f"Missing
+ f"Missing StaticAncillaryFileGroup/L2T_STARS_MODEL in L2T_STARS run-config: {filename}"
  )
  self.model_directory = abspath(
- runconfig["
+ runconfig["StaticAncillaryFileGroup"]["L2T_STARS_MODEL"]
  )
  logger.info(f"Model directory: {cl.dir(self.model_directory)}")
ECOv003_L2T_STARS/L2T_STARS.py
CHANGED
@@ -60,6 +60,7 @@ def L2T_STARS(
  remove_posterior: bool = True,
  threads: Union[int, str] = "auto",
  num_workers: int = 4,
+ overwrite: bool = False,  # New parameter for overwriting existing files
  ) -> int:
  """
  ECOSTRESS Collection 3 L2T_STARS PGE (Product Generation Executive).
@@ -98,6 +99,8 @@ def L2T_STARS(
  Defaults to "auto".
  num_workers (int, optional): Number of Julia workers for distributed processing.
  Defaults to 4.
+ overwrite (bool, optional): If True, existing output files will be overwritten.
+ Defaults to False.

  Returns:
  int: An exit code indicating the success or failure of the PGE execution.
@@ -127,11 +130,17 @@ def L2T_STARS(
  L2T_STARS_browse_filename = runconfig.L2T_STARS_browse_filename
  logger.info(f"Browse filename: " + cl.file(L2T_STARS_browse_filename))

- # Check if the final product already exists
- if exists(L2T_STARS_zip_filename) and exists(L2T_STARS_browse_filename):
+ # Check if the final product already exists and 'overwrite' is not enabled
+ if not overwrite and exists(L2T_STARS_zip_filename) and exists(L2T_STARS_browse_filename):
  logger.info(f"Found existing L2T STARS file: {L2T_STARS_zip_filename}")
  logger.info(f"Found existing L2T STARS preview: {L2T_STARS_browse_filename}")
+ logger.info("Overwrite option is not enabled, skipping reprocessing.")
  return SUCCESS_EXIT_CODE
+ elif overwrite and exists(L2T_STARS_zip_filename) and exists(L2T_STARS_browse_filename):
+ logger.info(f"Found existing L2T STARS file: {L2T_STARS_zip_filename}")
+ logger.info(f"Found existing L2T STARS preview: {L2T_STARS_browse_filename}")
+ logger.info("Overwrite option is enabled, proceeding with reprocessing.")
+

  logger.info(f"Working directory: {cl.dir(working_directory)}")
  logger.info(f"Log file: {cl.file(log_filename)}")
@@ -232,12 +241,17 @@ def L2T_STARS(
  VNP43NRT_products_directory = join(sources_directory, DEFAULT_VNP43NRT_PRODUCTS_DIRECTORY)
  logger.info(f"VNP43NRT products directory: {cl.dir(VNP43NRT_products_directory)}")

- # Re-check for existing product (double-check in case another process created it)
- if exists(L2T_STARS_zip_filename):
+ # Re-check for existing product (double-check in case another process created it) with overwrite option
+ if not overwrite and exists(L2T_STARS_zip_filename):
  logger.info(
- f"Found L2T STARS product zip: {cl.file(L2T_STARS_zip_filename)}"
+ f"Found L2T STARS product zip: {cl.file(L2T_STARS_zip_filename)}. Overwrite is False, returning."
  )
  return exit_code
+ elif overwrite and exists(L2T_STARS_zip_filename):
+ logger.info(
+ f"Found L2T STARS product zip: {cl.file(L2T_STARS_zip_filename)}. Overwrite is True, proceeding."
+ )
+

  # Initialize HLS data connection
  logger.info(f"Connecting to CMR Search server: {CMR_SEARCH_URL}")
@@ -245,7 +259,7 @@ def L2T_STARS(
  HLS_connection = HLS2CMR(
  working_directory=working_directory,
  download_directory=HLS_download_directory,
- products_directory=HLS_products_directory,
+ # products_directory=HLS_products_directory,
  target_resolution=target_resolution,
  )
  except CMRServerUnreachable as e:
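A minimal sketch of how the new overwrite option might be driven from Python rather than the command line; the positional run-config argument and its path are illustrative assumptions, while threads, num_workers, and overwrite are the parameters documented above:

    from ECOv003_L2T_STARS.L2T_STARS import L2T_STARS

    # assumed run-config argument; the path is a placeholder
    exit_code = L2T_STARS(
        "/path/to/RunConfig.xml",
        threads="auto",
        num_workers=4,
        overwrite=True,  # reprocess even if the product zip and browse image already exist
    )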
ECOv003_L2T_STARS/LPDAAC/LPDAACDataPool.py
CHANGED
@@ -1,6 +1,5 @@
- import
+ import netrc
  import hashlib
- import json
  import logging
  import os
  import posixpath
@@ -11,24 +10,27 @@ from datetime import date
  from fnmatch import fnmatch
  from http.cookiejar import CookieJar
  from os import makedirs, remove
- from os.path import abspath
  from os.path import dirname
  from os.path import exists
  from os.path import getsize
  from os.path import isdir
  from os.path import join
+ from os.path import abspath
+ from os.path import expanduser
  from time import sleep
  from typing import List, OrderedDict
-
+
  import requests
  import xmltodict
  from bs4 import BeautifulSoup
  from dateutil import parser
- from
+ from ..cksum import cksum

  import colored_logging as cl

-
+
+ class DownloadFailed(Exception):
+     pass

  CONNECTION_CLOSE = {
  "Connection": "close",
@@ -54,10 +56,10 @@ class LPDAACServerUnreachable(ConnectionError):
  class LPDAACDataPool:
  logger = logging.getLogger(__name__)
  DEFAULT_CHUNK_SIZE = 2 ** 20
- DATE_REGEX = re.compile('^(19|20)\d\d[- /.](0[1-9]|1[012])[- /.](0[1-9]|[12][0-9]|3[01])$')
+ DATE_REGEX = re.compile(r'^(19|20)\d\d[- /.](0[1-9]|1[012])[- /.](0[1-9]|[12][0-9]|3[01])$')
  DEFAULT_REMOTE = DEFAULT_REMOTE

- def __init__(self, username: str = None, password: str = None, remote: str = None, offline_ok: bool =
+ def __init__(self, username: str = None, password: str = None, remote: str = None, offline_ok: bool = True):
  if remote is None:
  remote = DEFAULT_REMOTE

@@ -66,15 +68,14 @@ class LPDAACDataPool:
  netrc_file = netrc.netrc()
  username, _, password = netrc_file.authenticators("urs.earthdata.nasa.gov")
  except Exception as e:
- logger.exception(e)
  logger.warning("netrc credentials not found for urs.earthdata.nasa.gov")

  if username is None or password is None:
  if not "LPDAAC_USERNAME" in os.environ or not "LPDAAC_PASSWORD" in os.environ:
-
-
-
-
+ logger.warning("missing environment variable 'LPDAAC_USERNAME' or 'LPDAAC_PASSWORD'")
+ else:
+ username = os.environ["LPDAAC_USERNAME"]
+ password = os.environ["LPDAAC_PASSWORD"]

  self._remote = remote
  self._username = username
@@ -86,14 +87,15 @@ class LPDAACDataPool:

  self._listings = {}

-
-
-
-
-
-
-
-
+ if not self.offline_ok:
+ try:
+ self._authenticate()
+ self._check_remote()
+ except Exception as e:
+ if self.offline_ok:
+ logger.warning("unable to connect to LP-DAAC data pool")
+ else:
+ raise e

  def _authenticate(self):
  try:
@@ -128,7 +130,7 @@ class LPDAACDataPool:
  raise ConnectionError(message)

  def _check_remote(self):
- logger.
+ logger.debug(f"checking URL: {cl.URL(self.remote)}")

  try:
  response = requests.head(self.remote, headers=CONNECTION_CLOSE)
@@ -145,7 +147,7 @@ class LPDAACDataPool:
  raise LPDAACServerUnreachable(message)

  if status == 200:
- logger.
+ logger.debug(
  "remote verified with status " + cl.val(200) +
  " in " + cl.time(f"{duration:0.2f}") +
  " seconds: " + cl.URL(self.remote))
@@ -307,7 +309,7 @@ class LPDAACDataPool:
  else:
  metadata_filename = f"{download_location}.xml"

- makedirs(dirname(metadata_filename), exist_ok=True)
+ makedirs(abspath(dirname(expanduser(metadata_filename))), exist_ok=True)

  if XML_retries is None:
  XML_retries = XML_RETRIES
@@ -325,8 +327,8 @@ class LPDAACDataPool:

  while XML_retries > 0:
  XML_retries -= 1
- command = f"wget -nc -c --user {self._username} --password {self._password} -O {metadata_filename} {metadata_URL}"
- logger.info(command)
+ command = f"wget -nc -c --user {self._username} --password {self._password} -O {abspath(expanduser(metadata_filename))} {metadata_URL}"
+ # logger.info(command)
  os.system(command)

  if not exists(metadata_filename):
@@ -350,12 +352,12 @@ class LPDAACDataPool:
  continue

  try:
- with open(metadata_filename, "r") as file:
+ with open(abspath(expanduser(metadata_filename)), "r") as file:
  metadata = xmltodict.parse(file.read())
  except Exception as e:
  logger.warning(e)
  logger.warning(f"unable to parse metadata file: {metadata_filename}")
- os.remove(metadata_filename)
+ os.remove(abspath(expanduser(metadata_filename)))
  logger.warning(f"waiting {XML_timeout_seconds} for retry")
  sleep(XML_timeout_seconds)
  continue
@@ -372,7 +374,7 @@ class LPDAACDataPool:

  logger.info(
  f"metadata retrieved {checksum_type} checksum: {cl.val(remote_checksum)} size: {cl.val(remote_filesize)} URL: {cl.URL(metadata_URL)}")
- makedirs(dirname(filename), exist_ok=True)
+ makedirs(abspath(dirname(expanduser(filename))), exist_ok=True)
  logger.info(f"downloading {cl.URL(URL)} -> {cl.file(filename)}")

  # Use a temporary file for downloading
@@ -382,8 +384,8 @@ class LPDAACDataPool:
  download_retries -=1

  try:
- if exists(temporary_filename):
- temporary_filesize = self.get_local_filesize(temporary_filename)
+ if exists(abspath(expanduser(temporary_filename))):
+ temporary_filesize = self.get_local_filesize(abspath(expanduser(temporary_filename)))

  if temporary_filesize > remote_filesize:
  logger.warning(
@@ -391,11 +393,11 @@ class LPDAACDataPool:
  remove(temporary_filename)

  elif temporary_filesize == remote_filesize:
- local_checksum = self.get_local_checksum(temporary_filename, checksum_type=checksum_type)
+ local_checksum = self.get_local_checksum(abspath(expanduser(temporary_filename)), checksum_type=checksum_type)

  if local_checksum == remote_checksum:
  try:
- shutil.move(temporary_filename, filename)
+ shutil.move(abspath(expanduser(temporary_filename)), abspath(expanduser(filename)))
  except Exception as e:
  if exists(filename):
  logger.warning(f"unable to move temporary file: {temporary_filename}")
@@ -408,27 +410,27 @@ class LPDAACDataPool:
  else:
  logger.warning(
  f"removing corrupted file with local checksum {local_checksum} and remote checksum {remote_checksum}: {temporary_filename}")
- remove(temporary_filename)
+ remove(abspath(expanduser(temporary_filename)))
  else:
  logger.info(f"resuming incomplete download: {cl.file(temporary_filename)}")

- command = f"wget -nc -c --user {self._username} --password {self._password} -O {temporary_filename} {URL}"
- logger.info(command)
+ command = f"wget -nc -c --user {self._username} --password {self._password} -O {abspath(expanduser(temporary_filename))} {URL}"
+ # logger.info(command)
  os.system(command)

- if not exists(temporary_filename):
+ if not exists(abspath(expanduser(temporary_filename))):
  raise ConnectionError(f"unable to download URL: {URL}")

- local_filesize = self.get_local_filesize(temporary_filename)
- local_checksum = self.get_local_checksum(temporary_filename, checksum_type=checksum_type)
+ local_filesize = self.get_local_filesize(abspath(expanduser(temporary_filename)))
+ local_checksum = self.get_local_checksum(abspath(expanduser(temporary_filename)), checksum_type=checksum_type)

  if local_filesize != remote_filesize or local_checksum != remote_checksum:
- os.remove(temporary_filename)
+ os.remove(abspath(expanduser(temporary_filename)))
  raise ConnectionError(
  f"removing corrupted file with local filesize {local_filesize} remote filesize {remote_filesize} local checksum {local_checksum} remote checksum {remote_checksum}: {temporary_filename}")

  # Download successful, rename the temporary file to its proper name
- shutil.move(temporary_filename, filename)
+ shutil.move(abspath(expanduser(temporary_filename)), abspath(expanduser(filename)))

  logger.info(
  f"successful download with filesize {cl.val(local_filesize)} checksum {cl.val(local_checksum)}: {cl.file(filename)}")
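A minimal sketch of how credentials might reach the revised constructor: it first looks for a ~/.netrc entry for urs.earthdata.nasa.gov, then falls back to the LPDAAC_USERNAME and LPDAAC_PASSWORD environment variables, and offline_ok now defaults to True so the authentication and remote check are deferred. The credential values below are placeholders:

    import os
    from ECOv003_L2T_STARS.LPDAAC.LPDAACDataPool import LPDAACDataPool

    # placeholder Earthdata Login credentials; a ~/.netrc entry works as well
    os.environ.setdefault("LPDAAC_USERNAME", "my_earthdata_user")
    os.environ.setdefault("LPDAAC_PASSWORD", "my_earthdata_password")

    # offline_ok=True (the new default) skips connecting to the LP-DAAC data pool at construction time
    pool = LPDAACDataPool(offline_ok=True)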
ECOv003_L2T_STARS/VIIRS/VNP09GA.py
CHANGED
@@ -24,10 +24,11 @@ from modland import generate_modland_grid

  from ECOv003_exit_codes import *

+ from ..login import login
  from ..daterange import get_date
  from ..LPDAAC.LPDAACDataPool import RETRIES
  from .VIIRSDataPool import VIIRSGranule
- from
+ from ..exceptions import *

  NDVI_COLORMAP = LinearSegmentedColormap.from_list(
  name="NDVI",
@@ -1019,7 +1020,8 @@ def latest_datetime(date_in: Union[date, str]) -> datetime:
  return parser.parse(f"{date_string}T23:59:59Z")


- VIIRS_FILENAME_REGEX = re.compile("^VNP09GA\.[^.]+\.([^.]+)\.002\.\d+\.h5$")
+ VIIRS_FILENAME_REGEX = re.compile(r"^VNP09GA\.[^.]+\.([^.]+)\.002\.\d+\.h5$")
+
  def modland_tile_from_filename(filename: str) -> str:
  match = VIIRS_FILENAME_REGEX.match(filename)
  if match is None:
@@ -1121,7 +1123,7 @@ class VNP09GA:
  self.products_directory = products_directory
  self.mosaic_directory = mosaic_directory

- self.auth =
+ self.auth = login()

  def add_granules(self, granules: List[earthaccess.search.DataGranule]):
  data = pd.DataFrame([
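The raw-string fix above is also a convenient place to see what the pattern extracts: its single capture group is the land-grid tile ID embedded in a VNP09GA Collection 002 granule filename. A small sketch with an illustrative (not real) filename:

    import re

    VIIRS_FILENAME_REGEX = re.compile(r"^VNP09GA\.[^.]+\.([^.]+)\.002\.\d+\.h5$")

    # illustrative granule name following the VNP09GA.<date>.<tile>.002.<production timestamp>.h5 layout
    filename = "VNP09GA.A2024123.h09v05.002.2024131123456.h5"
    match = VIIRS_FILENAME_REGEX.match(filename)
    if match is not None:
        print(match.group(1))  # prints "h09v05"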
ECOv003_L2T_STARS/__init__.py
CHANGED
ECOv003_L2T_STARS/cksum.py
@@ -0,0 +1,66 @@
+ """
+ Pure Python implementation of POSIX cksum algorithm.
+
+ This module provides a replacement for the pycksum package that is compatible
+ with Python 3.12 and later versions.
+ """
+
+
+ def cksum(data_or_file):
+     """
+     Calculate POSIX cksum checksum for data or file-like object.
+
+     Args:
+         data_or_file: Either bytes data or a file-like object opened in binary mode
+
+     Returns:
+         int: The POSIX cksum checksum value
+     """
+     # Handle file-like objects
+     if hasattr(data_or_file, 'read'):
+         data = data_or_file.read()
+     else:
+         data = data_or_file
+
+     # Ensure we have bytes
+     if isinstance(data, str):
+         data = data.encode('utf-8')
+
+     # Initialize CRC with 0
+     crc = 0
+
+     # Process each byte of data
+     for byte in data:
+         # XOR the byte with the current CRC (shifted left 8 bits)
+         crc ^= byte << 24
+
+         # Process 8 bits
+         for _ in range(8):
+             if crc & 0x80000000:  # If MSB is set
+                 crc = (crc << 1) ^ 0x04c11db7  # CRC-32 polynomial
+             else:
+                 crc = crc << 1
+             crc &= 0xffffffff  # Keep it 32-bit
+
+     # Append the length in bytes as a big-endian value
+     length = len(data)
+     length_bytes = []
+     while length > 0:
+         length_bytes.insert(0, length & 0xff)
+         length >>= 8
+
+     # Process the length bytes
+     for byte in length_bytes:
+         crc ^= byte << 24
+         for _ in range(8):
+             if crc & 0x80000000:
+                 crc = (crc << 1) ^ 0x04c11db7
+             else:
+                 crc = crc << 1
+             crc &= 0xffffffff
+
+     # Final XOR and return as signed 32-bit integer equivalent
+     result = crc ^ 0xffffffff
+
+     # Convert to match expected return type (unsigned 32-bit integer)
+     return result
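A small usage sketch of this drop-in replacement for pycksum; the byte string and filename are arbitrary examples, and the returned value is intended to match what the POSIX cksum utility reports:

    from ECOv003_L2T_STARS.cksum import cksum

    # checksum of in-memory bytes
    print(cksum(b"hello world\n"))

    # checksum of a file opened in binary mode (placeholder filename)
    with open("example.dat", "rb") as f:
        print(cksum(f))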
ECOv003_L2T_STARS/{install_STARS_jl.py → install_STARSDataFusion_jl.py}
@@ -3,11 +3,11 @@ import logging

  logger = logging.getLogger(__name__)

- def
- github_URL: str = "https://github.com/STARS-Data-Fusion/
+ def install_STARSDataFusion_jl(
+ github_URL: str = "https://github.com/STARS-Data-Fusion/STARSDataFusion.jl",
  environment_name: str = "@ECOv003-L2T-STARS") -> subprocess.CompletedProcess:
  """
- Installs the
+ Installs the STARSDataFusion.jl Julia package from GitHub into a specified Julia environment.

  This function executes a Julia command to activate a given environment and
  then develops (installs in editable mode) the STARS.jl package from its
@@ -35,7 +35,7 @@ def install_STARS_jl(

  if result.returncode == 0:
  logger.info(
- f"
+ f"STARSDataFusion.jl installed successfully in environment '{environment_name}'!"
  )
  else:
  logger.error("Error installing STARS.jl:")
ECOv003_L2T_STARS/{instantiate_STARS_jl.py → instantiate_STARSDataFusion_jl.py}
@@ -3,11 +3,11 @@ import logging

  logger = logging.getLogger(__name__)

- def
+ def instantiate_STARSDataFusion_jl(package_location: str) -> subprocess.CompletedProcess:
  """
  Activates a Julia project at a given location and instantiates its dependencies.

- This is necessary to ensure all required Julia packages for
+ This is necessary to ensure all required Julia packages for STARSDataFusion.jl are
  downloaded and ready for use within the specified project environment.

  Args:
@@ -30,7 +30,7 @@ def instantiate_STARS_jl(package_location: str) -> subprocess.CompletedProcess:

  if result.returncode == 0:
  logger.info(
- f"
+ f"STARSDataFusion.jl instantiated successfully in directory '{package_location}'!"
  )
  else:
  logger.error("Error instantiating STARS.jl:")
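A minimal sketch of how the renamed Julia helpers might be used when preparing the data-fusion environment; the defaults shown are the ones in the signatures above, and the project path is a placeholder:

    from ECOv003_L2T_STARS.install_STARSDataFusion_jl import install_STARSDataFusion_jl
    from ECOv003_L2T_STARS.instantiate_STARSDataFusion_jl import instantiate_STARSDataFusion_jl

    # develop STARSDataFusion.jl from GitHub into the shared "@ECOv003-L2T-STARS" Julia environment
    install_STARSDataFusion_jl()

    # then resolve and download its dependencies for a local Julia project directory (placeholder path)
    instantiate_STARSDataFusion_jl("/path/to/julia/project")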
ECOv003_L2T_STARS/load_prior.py
CHANGED
@@ -1,6 +1,8 @@
  from os.path import exists
  import logging

+ import colored_logging as cl
+
  from ECOv003_granules import L2TSTARS

  from .prior import Prior
@@ -235,12 +237,12 @@ def load_prior(
  L2T_STARS_prior_filename=L2T_STARS_prior_filename,
  prior_NDVI_filename=prior_NDVI_filename,
  prior_NDVI_UQ_filename=prior_NDVI_UQ_filename,
- prior_NDVI_flag_filename=prior_NDVI_flag_filename,
+ # prior_NDVI_flag_filename=prior_NDVI_flag_filename,
  prior_NDVI_bias_filename=prior_NDVI_bias_filename,
  prior_NDVI_bias_UQ_filename=prior_NDVI_bias_UQ_filename,
  prior_albedo_filename=prior_albedo_filename,
  prior_albedo_UQ_filename=prior_albedo_UQ_filename,
- prior_albedo_flag_filename=prior_albedo_flag_filename,
+ # prior_albedo_flag_filename=prior_albedo_flag_filename,
  prior_albedo_bias_filename=prior_albedo_bias_filename,
  prior_albedo_bias_UQ_filename=prior_albedo_bias_UQ_filename,
  )
ECOv003_L2T_STARS/login.py
@@ -0,0 +1,61 @@
+ import logging
+ import netrc
+ import os
+
+ import earthaccess
+
+ from .exceptions import *
+
+ __author__ = "Evan Davis"
+
+ _AUTH = None
+
+ def login() -> earthaccess.Auth:
+     """
+     Login to Earthdata using environment variables if available, falling back to netrc credentials, then interactive login.
+     """
+     # Only login to earthaccess once
+     global _AUTH
+     if _AUTH is not None:
+         return _AUTH
+
+     # Check if we're in a testing environment where authentication should be skipped
+     if os.environ.get("SKIP_EARTHDATA_LOGIN", "").lower() in ("true", "1", "yes"):
+         # Return a mock auth object for testing
+         class MockAuth:
+             def __init__(self):
+                 self.authenticated = True
+         _AUTH = MockAuth()
+         return _AUTH
+
+     # Temporarily suppress INFO logs from earthaccess during login
+     earthaccess_logger = logging.getLogger('earthaccess')
+     original_level = earthaccess_logger.level
+     earthaccess_logger.setLevel(logging.WARNING)
+
+     try:
+         # First priority: environment variables
+         if "EARTHDATA_USERNAME" in os.environ and "EARTHDATA_PASSWORD" in os.environ:
+             _AUTH = earthaccess.login(strategy="environment")
+             return _AUTH
+
+         # Second priority: netrc credentials
+         try:
+             secrets = netrc.netrc()
+             auth = secrets.authenticators("urs.earthdata.nasa.gov")
+             if auth:
+                 _AUTH = earthaccess.login(strategy="netrc")
+                 return _AUTH
+         except (FileNotFoundError, netrc.NetrcParseError):
+             # .netrc file doesn't exist or is malformed, continue to interactive login
+             pass
+
+         # Last resort: interactive login
+         _AUTH = earthaccess.login(strategy="interactive")
+         return _AUTH
+
+     except Exception as e:
+         raise CMRServerUnreachable(e)
+     finally:
+         # Restore original logging level
+         earthaccess_logger.setLevel(original_level)
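A minimal sketch of the new shared Earthdata login that VNP09GA now uses in place of the removed VIIRS_CMR_login: environment variables take priority, then ~/.netrc, then an interactive prompt, and the result is cached in _AUTH so repeated calls reuse the same session. The credentials below are placeholders:

    import os
    from ECOv003_L2T_STARS.login import login

    # placeholder Earthdata Login credentials
    os.environ["EARTHDATA_USERNAME"] = "my_earthdata_user"
    os.environ["EARTHDATA_PASSWORD"] = "my_earthdata_password"

    auth = login()

    # in tests, set SKIP_EARTHDATA_LOGIN=true to receive a mock auth object instead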
ECOv003_L2T_STARS/{ECOv003_L2T_STARS.py → main.py}
@@ -26,6 +26,7 @@ def main():
  " python {sys.argv[0]} --runconfig /path/to/RunConfig.xml\n"
  " python {sys.argv[0]} --runconfig /path/to/RunConfig.xml --date 2023-01-15\n"
  " python {sys.argv[0]} --runconfig /path/to/RunConfig.xml --sources-only\n"
+ " python {sys.argv[0]} --runconfig /path/to/RunConfig.xml --overwrite\n"  # Added example usage
  )

  # Positional argument for the runconfig file
@@ -128,6 +129,11 @@ def main():
  help=f"Number of Julia workers for distributed processing. Defaults to 4.",
  metavar="COUNT"
  )
+ parser.add_argument(
+ "--overwrite",  # New argument for overwrite option
+ action="store_true",
+ help="Reproduce the output files even if they already exist.",
+ )
  parser.add_argument(
  "--version",
  action="version",
@@ -153,10 +159,11 @@ def main():
  remove_posterior=args.remove_posterior,
  threads=args.threads,
  num_workers=args.num_workers,
+ overwrite=args.overwrite,  # Pass the new overwrite argument
  )

  sys.exit(exit_code)


  if __name__ == "__main__":
- main()
+ main()
ECOv003_L2T_STARS/process_julia_data_fusion.py
CHANGED
@@ -4,7 +4,7 @@ from datetime import date
  from os.path import abspath, dirname, join, exists
  import logging

- from .
+ from .instantiate_STARSDataFusion_jl import instantiate_STARSDataFusion_jl

  logger = logging.getLogger(__name__)

@@ -71,7 +71,7 @@ def process_julia_data_fusion(
  STARS_source_directory = abspath(dirname(__file__))

  # Instantiate Julia dependencies
-
+ instantiate_STARSDataFusion_jl(STARS_source_directory)

  # Base Julia command with required arguments
  command = (
ECOv003_L2T_STARS/version.txt
CHANGED
@@ -1 +1 @@
- 1.
+ 1.2.0
{ecov003_l2t_stars-1.1.0.dist-info → ecov003_l2t_stars-1.3.0.dist-info}/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
- Name:
- Version: 1.
+ Name: ECOv003-L2T-STARS
+ Version: 1.3.0
  Summary: ECOSTRESS Collection 3 JPL STARS Data Fusion Product Generating Executable (PGE)
  Author-email: "Gregory H. Halverson" <gregory.h.halverson@jpl.nasa.gov>
  Project-URL: Homepage, https://github.com/ECOSTRESS-Collection-3/ECOv003-L2T-STARS
@@ -21,7 +21,6 @@ Requires-Dist: matplotlib
  Requires-Dist: modland
  Requires-Dist: numpy
  Requires-Dist: pandas
- Requires-Dist: pycksum
  Requires-Dist: pytictoc
  Requires-Dist: rasters
  Requires-Dist: scikit-image
{ecov003_l2t_stars-1.1.0.dist-info → ecov003_l2t_stars-1.3.0.dist-info}/RECORD
@@ -1,15 +1,16 @@
- ECOv003_L2T_STARS/ECOv003_DL.py,sha256=
- ECOv003_L2T_STARS/ECOv003_DL.xml,sha256=
- ECOv003_L2T_STARS/ECOv003_L2T_STARS.
- ECOv003_L2T_STARS/
- ECOv003_L2T_STARS/
- ECOv003_L2T_STARS/L2T_STARS.py,sha256=zlOCPP2Gs9FmgXqEzvhCqWh6KPi_3LbnVNjz_TMRrxw,24429
+ ECOv003_L2T_STARS/ECOv003_DL.py,sha256=7ECuYIPMDsgptcAIcC2lgfvn-ljvPJHQ9tNPnMRxkIQ,24590
+ ECOv003_L2T_STARS/ECOv003_DL.xml,sha256=TQxOlTJHnjcRf9RiAGVJe2mIS-ks6WL32Ze33z0_kxA,2032
+ ECOv003_L2T_STARS/ECOv003_L2T_STARS.xml,sha256=Sg6GJx5PO1TVMIpGCDb4gatcShLRa6yBMwxwHKAWNKw,2084
+ ECOv003_L2T_STARS/L2TSTARSConfig.py,sha256=pH5WAzjfUdlam8Ngi2hxmT8Sect5bi2D6cP9HbpM_Tw,8849
+ ECOv003_L2T_STARS/L2T_STARS.py,sha256=4LLRJOoMhJNtmfwj9x7c6FLA7G-CLUct5TegJtIu8P8,25403
  ECOv003_L2T_STARS/Manifest.toml,sha256=k6dzN8jpePGBIPqTkKFsYDv5ezV3DgO4ChHss_cI524,84043
  ECOv003_L2T_STARS/Project.toml,sha256=Ywo_YsreqwgpbuNP3NWMsXw3VY2m8NTUrBwKCyBUbuM,657
- ECOv003_L2T_STARS/__init__.py,sha256=
+ ECOv003_L2T_STARS/__init__.py,sha256=fdYigR4HXHd-NYUCafBD4GTqXUQV5LK337bJVN556fA,91
  ECOv003_L2T_STARS/calibrate_fine_to_coarse.py,sha256=2YQHo0hO5EuvGt6TOF_1WypiqNDAR5gjvs0cYe9F_vQ,2345
+ ECOv003_L2T_STARS/cksum.py,sha256=Cw3FHD39Osrb4c9Dc1jSUB1SZaxIJsWHqsNVeWwYokA,1823
  ECOv003_L2T_STARS/constants.py,sha256=5-cxFiiq_zDKIvkK8Bi2iVVzST-15ytdu4QAvlLz_OY,1996
- ECOv003_L2T_STARS/
+ ECOv003_L2T_STARS/exceptions.py,sha256=ypahdRaZVMIoQrHUIZXiwzDNeLaCH46rUAfs_8MHKBE,48
+ ECOv003_L2T_STARS/generate_L2T_STARS_runconfig.py,sha256=gyU0-xjkC0bZcK9NeoFwLaXjNJhsSrVD2YnxLVLVLJs,11142
  ECOv003_L2T_STARS/generate_NDVI_coarse_directory.py,sha256=i1pFHFyMuj2e6aQ2wTgrF2-rAQXWe8LE2mbCRksj3c8,590
  ECOv003_L2T_STARS/generate_NDVI_coarse_image.py,sha256=9XBBV1FFqjxkEFnr61xr9R2tnHQAlx2XN9KzLN8RApc,1215
  ECOv003_L2T_STARS/generate_NDVI_fine_directory.py,sha256=rCYfGd_X1fLYsOfl9LtfS3E57FLDa-a_8G46ToQFM2U,531
@@ -23,29 +24,30 @@ ECOv003_L2T_STARS/generate_filename.py,sha256=XppMoRiKBG1Rf-uWu95UCO1klK6uv5GdK9
  ECOv003_L2T_STARS/generate_input_staging_directory.py,sha256=TlFKYliu6BbfDGLlwD0nlt0AZzDwKWeEgeTtzmicElY,800
  ECOv003_L2T_STARS/generate_model_state_tile_date_directory.py,sha256=U9d7vcuA1Udq6tuyRKUIibfoOPsJKV5dp5AWT7qLcfc,939
  ECOv003_L2T_STARS/generate_output_directory.py,sha256=Dr1zX6ljYzxFgC9XkocYBiF5aMGggHJejtpI9hbLwlM,889
- ECOv003_L2T_STARS/
- ECOv003_L2T_STARS/
- ECOv003_L2T_STARS/load_prior.py,sha256=
+ ECOv003_L2T_STARS/install_STARSDataFusion_jl.py,sha256=XbB_T0mzHqpmhh3cjKT0FpgqHxaTtS3iE_IZw4iZGKc,1778
+ ECOv003_L2T_STARS/instantiate_STARSDataFusion_jl.py,sha256=B-N_tlSBY7DQ2gZK6mPtJ8WL8XCXA_edMDEOu2xldcs,1437
+ ECOv003_L2T_STARS/load_prior.py,sha256=rVu3ImvReFxARalICCiZcMQ9ML_ehmQ9mc8HmXoHdm0,11335
+ ECOv003_L2T_STARS/login.py,sha256=zInQL33NibC8fNGzLw9qk0MDay71KYk87V-UsV-gkwA,1945
+ ECOv003_L2T_STARS/main.py,sha256=Jgiwd2TRCMq37FiEA-JECJZQKLBm34oytIWS9PG_x30,5891
  ECOv003_L2T_STARS/prior.py,sha256=fLE54pBIOG6sCas1G60nhn8LcHa2AqZ_eSY_J-MB4eM,2867
  ECOv003_L2T_STARS/process_ECOSTRESS_data_fusion_distributed_bias.jl,sha256=LrFBCQp4ovJ6wI-oIIbvpdZEiSQtt0YmScbPmxlaSoA,15400
  ECOv003_L2T_STARS/process_STARS_product.py,sha256=d26HdxcY9XBXa_MFCJfFm7BMCtmCaUrNdSVXiY0-D6Y,22406
- ECOv003_L2T_STARS/process_julia_data_fusion.py,sha256=
+ ECOv003_L2T_STARS/process_julia_data_fusion.py,sha256=t0178tuQDYnei3jfgx8GbgW2Q0uwfcnFNOpnhr1PZFA,5267
  ECOv003_L2T_STARS/retrieve_STARS_sources.py,sha256=s6026PQ5PRQTPFgjrDa4vgbHa8OqUanBqn0Wdoq0DbA,3838
  ECOv003_L2T_STARS/runconfig.py,sha256=TLaB3w6Y0qEZPqMa-YXuUzKSACrdpKmrozUNLh70aQw,1519
  ECOv003_L2T_STARS/version.py,sha256=CcCeNt2pNqb8AQ_vHLUbLJciE8hxTMeGmN79vAYObYQ,354
- ECOv003_L2T_STARS/version.txt,sha256=
+ ECOv003_L2T_STARS/version.txt,sha256=n3cDSx61fSTyGsfetlycyZ9jtyd14xHx8oCrK2wuBqI,5
  ECOv003_L2T_STARS/BRDF/BRDF.py,sha256=41MCtcddMEd8cmW2K9wWyg71nkKjnom3Z64KX5NMehg,1532
  ECOv003_L2T_STARS/BRDF/SZA.py,sha256=_J5ybmrg8ASVglXp4OvSoVutkDVeSxDBygfTe848sww,2529
  ECOv003_L2T_STARS/BRDF/__init__.py,sha256=7HYw9eTuudpkgtpXAjFN1CpPjr6R9JsnW3bLtNBdRSM,20
  ECOv003_L2T_STARS/BRDF/statistical_radiative_transport.txt,sha256=KHIGS6afnY7m0CzHki4zeuwqttjK38jgZg8-oYDn68o,26999
  ECOv003_L2T_STARS/BRDF/version.txt,sha256=atlhOkVXmNbZLl9fOQq0uqcFlryGntaxf1zdKyhjXwY,5
- ECOv003_L2T_STARS/LPDAAC/LPDAACDataPool.py,sha256=
+ ECOv003_L2T_STARS/LPDAAC/LPDAACDataPool.py,sha256=WqIy0wcHhje1Dx8Uq0e8W_BsEJOJIchO4DroaGMsKes,16887
  ECOv003_L2T_STARS/LPDAAC/__init__.py,sha256=o8qP8kTXyBp9dFKErVOwvcZuUo7BTVU0d5UyIsOKG8g,235
  ECOv003_L2T_STARS/LPDAAC/version.txt,sha256=2_CXjsK1h6XWGH_cxBzOn_LA647vrboOtR84QKtu60Y,5
  ECOv003_L2T_STARS/VIIRS/VIIRSDataPool.py,sha256=mht104y3ayLysElKf9IgaqX78I0Q-4NJJmfCxMgxDzs,8781
  ECOv003_L2T_STARS/VIIRS/VIIRSDownloader.py,sha256=NCf3ZItLIBkZOZugYHuQMurziAsH7LbhLKbg-ZbWC7g,616
- ECOv003_L2T_STARS/VIIRS/
- ECOv003_L2T_STARS/VIIRS/VNP09GA.py,sha256=bh8DDVZb9naD2Uk8sf2CIr5os7z3n4nReltYgdpmxBE,44160
+ ECOv003_L2T_STARS/VIIRS/VNP09GA.py,sha256=9oCqSps3m8oGLcwQ-IuNV2KMiNLD1hSKYJHDIZIhCM8,44138
  ECOv003_L2T_STARS/VIIRS/VNP43IA4.py,sha256=3qZbDHoLVhoiSr4hoojMxXXuDSNKkN4B9Dan-WMApNs,9881
  ECOv003_L2T_STARS/VIIRS/VNP43MA3.py,sha256=T_1mxdg_SII0vXp_D422aAU7fE0-7TY46IZzRJPGJ1Q,11043
  ECOv003_L2T_STARS/VIIRS/__init__.py,sha256=PVyb97Bg5gVMdcyC7JpErQCjJWSrOFdHJH4rNE__eL8,264
@@ -65,9 +67,9 @@ ECOv003_L2T_STARS/daterange/__init__.py,sha256=54kYb9tmsm5twxMqjJKeD__5kGkNDz3Pp
  ECOv003_L2T_STARS/daterange/daterange.py,sha256=EHa2Xt9fiJ1gbX7aa_QV_br1rAXjg3pHrLSRasOsOhM,959
  ECOv003_L2T_STARS/timer/__init__.py,sha256=I_MQKp_aamBLUzZv0psEbRgs6GZLOJd4mmJ7bli0Ikc,21
  ECOv003_L2T_STARS/timer/timer.py,sha256=tn5e3NQmsh55Jp9Fstjf-8KJW4F8UIJs-d_ZLooFYE8,1610
- ecov003_l2t_stars-1.
- ecov003_l2t_stars-1.
- ecov003_l2t_stars-1.
- ecov003_l2t_stars-1.
- ecov003_l2t_stars-1.
- ecov003_l2t_stars-1.
+ ecov003_l2t_stars-1.3.0.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ ecov003_l2t_stars-1.3.0.dist-info/METADATA,sha256=5QCrdCDDlS4j4S2Hy_9Og6g7sQdhj6iI_8J09erRWCU,13283
+ ecov003_l2t_stars-1.3.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ ecov003_l2t_stars-1.3.0.dist-info/entry_points.txt,sha256=EVVKltKsqXBc94JIu4IjVrMP0DPqaNEdQoAgcZOApQQ,106
+ ecov003_l2t_stars-1.3.0.dist-info/top_level.txt,sha256=lRivA5MjbrabH4sv-LUstMGaLZ865wRQPpz9Kh6-plg,18
+ ecov003_l2t_stars-1.3.0.dist-info/RECORD,,
ECOv003_L2T_STARS/VIIRS/VIIRS_CMR_LOGIN.py
@@ -1,36 +0,0 @@
- import netrc
- import os
-
- import earthaccess
-
- _AUTH = None
-
- class CMRServerUnreachable(Exception):
-     pass
-
- def VIIRS_CMR_login() -> earthaccess.Auth:
-     """
-     Login to Earthdata using netrc credentials if available, falling back to environment variables.
-     """
-     # Only login to earthaccess once
-     global _AUTH
-     if _AUTH is not None:
-         return _AUTH
-
-     try:
-         # Attempt to use netrc for credentials
-         secrets = netrc.netrc()
-         auth = secrets.authenticators("urs.earthdata.nasa.gov")
-         if auth:
-             _AUTH = earthaccess.login(strategy="netrc")  # Use strategy="netrc"
-             return _AUTH
-
-         # Fallback to environment variables if netrc fails
-         if "EARTHDATA_USERNAME" in os.environ and "EARTHDATA_PASSWORD" in os.environ:
-             _AUTH = earthaccess.login(strategy="environment")
-             return _AUTH
-         else:
-             raise CMRServerUnreachable("Missing netrc credentials or environment variables 'EARTHDATA_USERNAME' and 'EARTHDATA_PASSWORD'")
-
-     except Exception as e:
-         raise CMRServerUnreachable(e)
{ecov003_l2t_stars-1.1.0.dist-info → ecov003_l2t_stars-1.3.0.dist-info}/WHEEL
File without changes
{ecov003_l2t_stars-1.1.0.dist-info → ecov003_l2t_stars-1.3.0.dist-info}/licenses/LICENSE
File without changes
{ecov003_l2t_stars-1.1.0.dist-info → ecov003_l2t_stars-1.3.0.dist-info}/top_level.txt
File without changes