bess_jpl-1.26.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. BESS_JPL/BESS_JPL.py +54 -0
  2. BESS_JPL/C3_photosynthesis.py +165 -0
  3. BESS_JPL/C4_fraction.jpeg +0 -0
  4. BESS_JPL/C4_fraction.tif +0 -0
  5. BESS_JPL/C4_fraction.tif.aux.xml +11 -0
  6. BESS_JPL/C4_photosynthesis.py +133 -0
  7. BESS_JPL/ECOv002-cal-val-BESS-JPL-GEOS5FP-inputs.csv +1066 -0
  8. BESS_JPL/ECOv002-cal-val-BESS-JPL-inputs.csv +1066 -0
  9. BESS_JPL/ECOv002-cal-val-BESS-JPL-outputs.csv +1066 -0
  10. BESS_JPL/ECOv002-cal-val-FLiESANN-inputs.csv +1066 -0
  11. BESS_JPL/ECOv002-static-tower-BESS-JPL-inputs.csv +122 -0
  12. BESS_JPL/ECOv002_calval_BESS_inputs.py +30 -0
  13. BESS_JPL/ECOv002_static_tower_BESS_inputs.py +19 -0
  14. BESS_JPL/FVC_from_NDVI.py +22 -0
  15. BESS_JPL/LAI_from_NDVI.py +28 -0
  16. BESS_JPL/NDVI_maximum.jpeg +0 -0
  17. BESS_JPL/NDVI_maximum.tif +0 -0
  18. BESS_JPL/NDVI_minimum.jpeg +0 -0
  19. BESS_JPL/NDVI_minimum.tif +0 -0
  20. BESS_JPL/__init__.py +5 -0
  21. BESS_JPL/ball_berry_intercept_C3.jpeg +0 -0
  22. BESS_JPL/ball_berry_intercept_C3.tif +0 -0
  23. BESS_JPL/ball_berry_slope_C3.jpeg +0 -0
  24. BESS_JPL/ball_berry_slope_C3.tif +0 -0
  25. BESS_JPL/ball_berry_slope_C4.jpeg +0 -0
  26. BESS_JPL/ball_berry_slope_C4.tif +0 -0
  27. BESS_JPL/calculate_VCmax.py +90 -0
  28. BESS_JPL/calculate_bulk_aerodynamic_resistance.py +119 -0
  29. BESS_JPL/calculate_friction_velocity.py +111 -0
  30. BESS_JPL/canopy_energy_balance.py +110 -0
  31. BESS_JPL/canopy_longwave_radiation.py +117 -0
  32. BESS_JPL/canopy_shortwave_radiation.py +276 -0
  33. BESS_JPL/carbon_uptake_efficiency.jpeg +0 -0
  34. BESS_JPL/carbon_uptake_efficiency.tif +0 -0
  35. BESS_JPL/carbon_water_fluxes.py +313 -0
  36. BESS_JPL/colors.py +33 -0
  37. BESS_JPL/constants.py +25 -0
  38. BESS_JPL/exceptions.py +3 -0
  39. BESS_JPL/generate_BESS_GEOS5FP_inputs.py +58 -0
  40. BESS_JPL/generate_BESS_inputs_table.py +186 -0
  41. BESS_JPL/generate_input_dataset.py +243 -0
  42. BESS_JPL/generate_output_dataset.py +26 -0
  43. BESS_JPL/interpolate_C3_C4.py +12 -0
  44. BESS_JPL/kn.jpeg +0 -0
  45. BESS_JPL/kn.tif +0 -0
  46. BESS_JPL/load_C4_fraction.py +20 -0
  47. BESS_JPL/load_NDVI_maximum.py +17 -0
  48. BESS_JPL/load_NDVI_minimum.py +17 -0
  49. BESS_JPL/load_ball_berry_intercept_C3.py +10 -0
  50. BESS_JPL/load_ball_berry_slope_C3.py +10 -0
  51. BESS_JPL/load_ball_berry_slope_C4.py +10 -0
  52. BESS_JPL/load_carbon_uptake_efficiency.py +10 -0
  53. BESS_JPL/load_kn.py +10 -0
  54. BESS_JPL/load_peakVCmax_C3.py +12 -0
  55. BESS_JPL/load_peakVCmax_C4.py +12 -0
  56. BESS_JPL/meteorology.py +429 -0
  57. BESS_JPL/model.py +594 -0
  58. BESS_JPL/peakVCmax_C3.jpeg +0 -0
  59. BESS_JPL/peakVCmax_C3.tif +0 -0
  60. BESS_JPL/peakVCmax_C4.jpeg +0 -0
  61. BESS_JPL/peakVCmax_C4.tif +0 -0
  62. BESS_JPL/process_BESS_table.py +365 -0
  63. BESS_JPL/process_paw_and_gao_LE.py +50 -0
  64. BESS_JPL/retrieve_BESS_JPL_GEOS5FP_inputs.py +257 -0
  65. BESS_JPL/retrieve_BESS_inputs.py +279 -0
  66. BESS_JPL/soil_energy_balance.py +35 -0
  67. BESS_JPL/verify.py +127 -0
  68. BESS_JPL/version.py +3 -0
  69. bess_jpl-1.26.0.dist-info/METADATA +102 -0
  70. bess_jpl-1.26.0.dist-info/RECORD +73 -0
  71. bess_jpl-1.26.0.dist-info/WHEEL +5 -0
  72. bess_jpl-1.26.0.dist-info/licenses/LICENSE +201 -0
  73. bess_jpl-1.26.0.dist-info/top_level.txt +1 -0
BESS_JPL/generate_input_dataset.py ADDED
@@ -0,0 +1,243 @@
+ from os.path import join, abspath, dirname
+ import pandas as pd
+ import numpy as np
+ from ECOv002_calval_tables import load_calval_table
+ from FLiESANN import process_FLiESANN_table, load_ECOv002_calval_FLiESANN_inputs
+ from .ECOv002_static_tower_BESS_inputs import load_ECOv002_static_tower_BESS_inputs
+ from .process_BESS_table import process_BESS_table
+ from .retrieve_BESS_JPL_GEOS5FP_inputs import retrieve_BESS_JPL_GEOS5FP_inputs
+
+ import logging
+ import warnings
+ import os
+
+ # Suppress TensorFlow warnings
+ os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
+ os.environ['TF_ENABLE_ONEDNN_OPTS'] = '0'
+
+ # Suppress pandas warnings
+ warnings.filterwarnings('ignore', category=UserWarning)
+
+ logger = logging.getLogger(__name__)
+
+ # Configure GEOS5FP logging to be visible
+ geos5fp_logger = logging.getLogger('GEOS5FP')
+ geos5fp_logger.setLevel(logging.INFO)
+ if not geos5fp_logger.handlers:
+     handler = logging.StreamHandler()
+     handler.setLevel(logging.INFO)
+     formatter = logging.Formatter('[%(asctime)s %(levelname)s] %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
+     handler.setFormatter(formatter)
+     geos5fp_logger.addHandler(handler)
+
+ def generate_input_dataset():
+     logger.info("Generating BESS-JPL input dataset from ECOv002 cal/val FLiESANN inputs")
+     # calval_df = load_calval_table()
+     inputs_df = load_ECOv002_calval_FLiESANN_inputs()
+
+     # Ensure `time_UTC` is in datetime format
+     inputs_df['time_UTC'] = pd.to_datetime(inputs_df['time_UTC'], errors='coerce')
+
+     # Create a `date_UTC` column by extracting the date from `time_UTC`
+     inputs_df['date_UTC'] = inputs_df['time_UTC'].dt.date
+
+     # Convert any array-like values to scalars by extracting first element if needed
+     def extract_scalar(x):
+         if isinstance(x, pd.DataFrame):
+             # Handle DataFrame - extract first value
+             return x.iloc[0, 0] if not x.empty else x
+         elif isinstance(x, pd.Series):
+             # Handle Series - extract first value
+             return x.iloc[0] if len(x) > 0 else x
+         elif isinstance(x, np.ndarray):
+             # Handle numpy arrays
+             return x.item() if x.size == 1 else x.flat[0] if x.size > 0 else x
+         elif isinstance(x, list):
+             # Handle lists
+             return x[0] if len(x) > 0 else x
+         else:
+             # Return as-is for scalars
+             return x
+
+     # Apply extraction to all columns
+     for col in inputs_df.columns:
+         inputs_df[col] = inputs_df[col].apply(extract_scalar)
+
+     # Load static tower BESS inputs
+     static_inputs_df = load_ECOv002_static_tower_BESS_inputs()
+
+     # Merge FLiESANN outputs with static BESS inputs on Site ID
+     # FLiESANN outputs contain time-varying atmospheric and radiation inputs
+     # Static inputs contain vegetation parameters
+     inputs_df = inputs_df.merge(
+         static_inputs_df,
+         left_on='ID',
+         right_on='ID',
+         how='left',
+         suffixes=('', '_static')
+     )
+
+
+
+     # Remove duplicate columns from the merge (keep non-static versions)
+     duplicate_cols = [col for col in inputs_df.columns if col.endswith('_static')]
+     inputs_df = inputs_df.drop(columns=duplicate_cols)
+
+     # Extract required parameters from inputs_df for retrieve_BESS_inputs
+     ST_C = np.array(inputs_df.ST_C).astype(np.float64)
+     NDVI = np.array(inputs_df.NDVI).astype(np.float64)
+     NDVI = np.where(NDVI > 0.06, NDVI, np.nan).astype(np.float64)
+     albedo = np.array(inputs_df.albedo).astype(np.float64)
+
+     # Extract time and geometry
+     from rasters import Point
+     from solar_apparent_time import calculate_solar_day_of_year, calculate_solar_hour_of_day
+     from geopandas import GeoSeries
+     from shapely.geometry import Point as ShapelyPoint
+
+     # Handle geometry construction
+     if "geometry" in inputs_df:
+         if isinstance(inputs_df.geometry.iloc[0], str):
+             def parse_geom(s):
+                 s = s.strip()
+                 if s.startswith("POINT"):
+                     coords = s.replace("POINT", "").replace("(", "").replace(")", "").strip().split()
+                     return Point(float(coords[0]), float(coords[1]))
+                 elif "," in s:
+                     coords = [float(c) for c in s.split(",")]
+                     return Point(coords[0], coords[1])
+                 else:
+                     coords = [float(c) for c in s.split()]
+                     return Point(coords[0], coords[1])
+             inputs_df = inputs_df.copy()
+             inputs_df['geometry'] = inputs_df['geometry'].apply(parse_geom)
+         geometry = [Point(pt.x, pt.y) for pt in inputs_df.geometry]
+     elif "lat" in inputs_df and "lon" in inputs_df:
+         lat = np.array(inputs_df.lat).astype(np.float64)
+         lon = np.array(inputs_df.lon).astype(np.float64)
+         geometry = [Point(lon[i], lat[i]) for i in range(len(lat))]
+     else:
+         raise KeyError("Input DataFrame must contain either 'geometry' or both 'lat' and 'lon' columns.")
+
+     # Extract time
+     time_UTC_list = pd.to_datetime(inputs_df.time_UTC).tolist()
+
+     # Calculate solar time
+     day_of_year_list = []
+     hour_of_day_list = []
+
+     for i, (time_utc, geom) in enumerate(zip(time_UTC_list, geometry)):
+         shapely_point = ShapelyPoint(geom.x, geom.y)
+         geoseries = GeoSeries([shapely_point])
+         doy = calculate_solar_day_of_year(time_UTC=time_utc, geometry=geoseries)
+         hod = calculate_solar_hour_of_day(time_UTC=time_utc, geometry=geoseries)
+         doy_scalar = doy[0] if hasattr(doy, '__getitem__') else doy
+         hod_scalar = hod[0] if hasattr(hod, '__getitem__') else hod
+         day_of_year_list.append(doy_scalar)
+         hour_of_day_list.append(hod_scalar)
+
+     day_of_year = np.array(day_of_year_list)
+     hour_of_day = np.array(hour_of_day_list)
+
+     # Keep geometry as list of Points - do NOT convert to MultiPoint
+     # This allows proper matching of each point with its corresponding time
+     time_UTC = time_UTC_list
+
+     # Extract optional inputs if present
+     Ta_C = np.array(inputs_df.Ta_C).astype(np.float64) if "Ta_C" in inputs_df else (np.array(inputs_df.Ta).astype(np.float64) if "Ta" in inputs_df else None)
+     RH = np.array(inputs_df.RH).astype(np.float64) if "RH" in inputs_df else None
+     elevation_m = np.array(inputs_df.elevation_m).astype(np.float64) if "elevation_m" in inputs_df else (np.array(inputs_df.elevation_km).astype(np.float64) * 1000 if "elevation_km" in inputs_df else None)
+     COT = np.array(inputs_df.COT).astype(np.float64) if "COT" in inputs_df else None
+     AOT = np.array(inputs_df.AOT).astype(np.float64) if "AOT" in inputs_df else None
+     vapor_gccm = np.array(inputs_df.vapor_gccm).astype(np.float64) if "vapor_gccm" in inputs_df else None
+     ozone_cm = np.array(inputs_df.ozone_cm).astype(np.float64) if "ozone_cm" in inputs_df else None
+     PAR_albedo = np.array(inputs_df.PAR_albedo).astype(np.float64) if "PAR_albedo" in inputs_df else None
+     NIR_albedo = np.array(inputs_df.NIR_albedo).astype(np.float64) if "NIR_albedo" in inputs_df else None
+     Ca = np.array(inputs_df.Ca).astype(np.float64) if "Ca" in inputs_df else None
+     wind_speed_mps = np.array(inputs_df.wind_speed_mps).astype(np.float64) if "wind_speed_mps" in inputs_df else None
+     NDVI_minimum = np.array(inputs_df.NDVI_minimum).astype(np.float64) if "NDVI_minimum" in inputs_df else None
+     NDVI_maximum = np.array(inputs_df.NDVI_maximum).astype(np.float64) if "NDVI_maximum" in inputs_df else None
+     C4_fraction = np.array(inputs_df.C4_fraction).astype(np.float64) if "C4_fraction" in inputs_df else None
+     carbon_uptake_efficiency = np.array(inputs_df.carbon_uptake_efficiency).astype(np.float64) if "carbon_uptake_efficiency" in inputs_df else None
+     kn = np.array(inputs_df.kn).astype(np.float64) if "kn" in inputs_df else None
+     peakVCmax_C3 = np.array(inputs_df.peakVCmax_C3).astype(np.float64) if "peakVCmax_C3" in inputs_df else None
+     peakVCmax_C4 = np.array(inputs_df.peakVCmax_C4).astype(np.float64) if "peakVCmax_C4" in inputs_df else None
+     ball_berry_slope_C3 = np.array(inputs_df.ball_berry_slope_C3).astype(np.float64) if "ball_berry_slope_C3" in inputs_df else None
+     ball_berry_slope_C4 = np.array(inputs_df.ball_berry_slope_C4).astype(np.float64) if "ball_berry_slope_C4" in inputs_df else None
+     ball_berry_intercept_C3 = np.array(inputs_df.ball_berry_intercept_C3).astype(np.float64) if "ball_berry_intercept_C3" in inputs_df else None
+     KG_climate = np.array(inputs_df.KG_climate) if "KG_climate" in inputs_df else None
+     CI = np.array(inputs_df.CI).astype(np.float64) if "CI" in inputs_df else None
+     canopy_height_meters = np.array(inputs_df.canopy_height_meters).astype(np.float64) if "canopy_height_meters" in inputs_df else None
+
+     logger.info("Retrieving GEOS-5 FP meteorological inputs")
+     logger.info(f"Calling retrieve_BESS_JPL_GEOS5FP_inputs with {len(time_UTC)} time points and {len(geometry)} geometry points")
+
+     # Retrieve only GEOS-5 FP meteorological inputs (vegetation params already in inputs_df)
+     # Pass geometry as list of Points to match each time with its corresponding location
+     GEOS5FP_inputs_dict = retrieve_BESS_JPL_GEOS5FP_inputs(
+         time_UTC=time_UTC,
+         geometry=geometry,
+         albedo=albedo,
+         Ta_C=Ta_C,
+         RH=RH,
+         COT=COT,
+         AOT=AOT,
+         vapor_gccm=vapor_gccm,
+         ozone_cm=ozone_cm,
+         PAR_albedo=PAR_albedo,
+         NIR_albedo=NIR_albedo,
+         Ca=Ca,
+         wind_speed_mps=wind_speed_mps
+     )
+
+     logger.info("Completed retrieving GEOS-5 FP meteorological inputs")
+
+     # Create complete inputs dataframe by starting with original inputs_df and updating with retrieved values
+     complete_inputs_df = inputs_df.copy()
+
+     # Add primary inputs
+     complete_inputs_df['ST_C'] = ST_C
+     complete_inputs_df['NDVI'] = NDVI
+     complete_inputs_df['albedo'] = albedo
+     complete_inputs_df['time_UTC'] = time_UTC_list
+     complete_inputs_df['day_of_year'] = day_of_year
+     complete_inputs_df['hour_of_day'] = hour_of_day
+
+     # Add geometry as lat/lon if not already present
+     if 'lat' not in complete_inputs_df:
+         complete_inputs_df['lat'] = [pt.y for pt in geometry]
+     if 'lon' not in complete_inputs_df:
+         complete_inputs_df['lon'] = [pt.x for pt in geometry]
+
+     # Add all retrieved GEOS5FP inputs to complete_inputs_df
+     for key, value in GEOS5FP_inputs_dict.items():
+         if hasattr(value, '__len__') and not isinstance(value, str):
+             try:
+                 complete_inputs_df[key] = value
+             except (ValueError, TypeError) as e:
+                 logger.warning(f"Skipping assignment of key '{key}' to inputs DataFrame: {e}")
+                 continue
+         elif isinstance(value, (int, float, np.number)):
+             complete_inputs_df[key] = value
+
+     logger.info("Processing BESS model to generate outputs")
+
+     # Process with BESS-JPL model to get outputs
+     outputs_df = process_BESS_table(inputs_df)
+
+     inputs_filename = join(abspath(dirname(__file__)), "ECOv002-cal-val-BESS-JPL-inputs.csv")
+     outputs_filename = join(abspath(dirname(__file__)), "ECOv002-cal-val-BESS-JPL-outputs.csv")
+
+     # Save the complete input dataset to a CSV file
+     complete_inputs_df.to_csv(inputs_filename, index=False)
+
+     # Save the processed results to a CSV file
+     outputs_df.to_csv(outputs_filename, index=False)
+
+     logger.info(f"Processed {len(outputs_df)} records from the full cal/val dataset")
+     logger.info(f"Complete input dataset saved to: {inputs_filename}")
+     logger.info(f" - Contains {len(complete_inputs_df.columns)} input columns")
+     logger.info(f"Output dataset saved to: {outputs_filename}")
+     logger.info(f" - Contains {len(outputs_df.columns)} total columns")
+
+     return outputs_df
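The generator above ties the whole cal/val pipeline together, so a short usage sketch may help. This is only a minimal example under the assumption that BESS_JPL and its dependencies (ECOv002_calval_tables, FLiESANN, GEOS5FP access, rasters, solar_apparent_time, geopandas, shapely) are installed and importable; it is not a documented entry point of the package.

import logging

from BESS_JPL.generate_input_dataset import generate_input_dataset

# Surface the module's INFO-level progress messages
logging.basicConfig(level=logging.INFO)

# Builds the complete cal/val input table, runs the BESS-JPL model on it,
# writes ECOv002-cal-val-BESS-JPL-inputs.csv and
# ECOv002-cal-val-BESS-JPL-outputs.csv next to the installed module,
# and returns the outputs table.
outputs_df = generate_input_dataset()
print(outputs_df.head())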
BESS_JPL/generate_output_dataset.py ADDED
@@ -0,0 +1,26 @@
+ import os
+ from .ECOv002_calval_BESS_inputs import load_ECOv002_calval_BESS_inputs
+ from .process_BESS_table import process_BESS_table
+
+ def generate_output_dataset():
+     """
+     Generate the output dataset for the BESS-JPL model.
+     """
+     # Load the input data
+     inputs_df = load_ECOv002_calval_BESS_inputs()
+
+     # Perform any necessary processing to generate the outputs
+     outputs_df = process_BESS_table(inputs_df)
+
+     # Determine the directory of the current script
+     script_dir = os.path.dirname(os.path.abspath(__file__))
+
+     # Save the outputs to a CSV file in the same directory as this script
+     output_file_path = os.path.join(script_dir, "ECOv002-cal-val-BESS-JPL-outputs.csv")
+     outputs_df.to_csv(output_file_path, index=False)
+
+ def main():
+     generate_output_dataset()
+
+ if __name__ == "__main__":
+     main()
BESS_JPL/interpolate_C3_C4.py ADDED
@@ -0,0 +1,12 @@
+ import numpy as np
+
+
+ def interpolate_C3_C4(C3: np.ndarray, C4: np.ndarray, C4_fraction: np.ndarray) -> np.ndarray:
+     """
+     Interpolate between C3 and C4 plants based on C4 fraction
+     :param C3: value for C3 plants
+     :param C4: value for C4 plants
+     :param C4_fraction: fraction of C4 plants
+     :return: interpolated value
+     """
+     return C3 * (1 - C4_fraction) + C4 * C4_fraction
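A quick worked example of the linear blend, with illustrative numbers only (not values taken from the package's parameter rasters):

import numpy as np

from BESS_JPL.interpolate_C3_C4 import interpolate_C3_C4

# With a 25% C4 fraction, a C3 value of 60 and a C4 value of 28 blend to
# 60 * 0.75 + 28 * 0.25 = 52.0
blended = interpolate_C3_C4(np.array(60.0), np.array(28.0), np.array(0.25))
print(blended)  # 52.0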
BESS_JPL/kn.jpeg ADDED
Binary file
BESS_JPL/kn.tif ADDED
Binary file
BESS_JPL/load_C4_fraction.py ADDED
@@ -0,0 +1,20 @@
+ from os.path import join, abspath, dirname
+
+ import rasters as rt
+ from rasters import Raster, RasterGeometry
+
+ import numpy as np
+
+ from .constants import *
+
+ def load_C4_fraction(
+         geometry: RasterGeometry = None,
+         resampling: str = "nearest",
+         scale_factor: float = C4_FRACTION_SCALE_FACTOR) -> Raster:
+     filename = join(abspath(dirname(__file__)), "C4_fraction.tif")
+     image = Raster.open(filename, geometry=geometry, resampling=resampling, nodata=np.nan)
+     image = rt.clip(image, 0, 100)
+     # Scale image to be between 0 and 1 using a multiplicative scale factor
+     image *= scale_factor
+
+     return image
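A brief loading sketch. C4_FRACTION_SCALE_FACTOR comes from BESS_JPL.constants and is not shown in this diff (a value of 0.01 would map the clipped 0-100 percentages to a 0-1 fraction), and target_geometry is a hypothetical RasterGeometry describing a grid of interest:

from BESS_JPL.load_C4_fraction import load_C4_fraction

# Native grid of the bundled C4_fraction.tif, clipped to 0-100 and scaled
C4_fraction = load_C4_fraction()

# Hypothetical: resample onto another grid at load time
# C4_fraction = load_C4_fraction(geometry=target_geometry, resampling="nearest")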
BESS_JPL/load_NDVI_maximum.py ADDED
@@ -0,0 +1,17 @@
+ from os.path import join, abspath, dirname
+
+ import rasters as rt
+ from rasters import Raster, RasterGeometry
+
+ import numpy as np
+
+ from .colors import NDVI_COLORMAP_ABSOLUTE
+
+ def load_NDVI_maximum(geometry: RasterGeometry = None, resampling: str = "nearest") -> Raster:
+     filename = join(abspath(dirname(__file__)), "NDVI_maximum.tif")
+     image = Raster.open(filename, geometry=geometry, resampling=resampling, nodata=np.nan)
+
+     if isinstance(image, Raster):
+         image.cmap = NDVI_COLORMAP_ABSOLUTE
+
+     return image
BESS_JPL/load_NDVI_minimum.py ADDED
@@ -0,0 +1,17 @@
+ from os.path import join, abspath, dirname
+
+ import rasters as rt
+ from rasters import Raster, RasterGeometry
+
+ import numpy as np
+
+ from .colors import NDVI_COLORMAP_ABSOLUTE
+
+ def load_NDVI_minimum(geometry: RasterGeometry = None, resampling: str = "nearest") -> Raster:
+     filename = join(abspath(dirname(__file__)), "NDVI_minimum.tif")
+     image = Raster.open(filename, geometry=geometry, resampling=resampling, nodata=np.nan)
+
+     if isinstance(image, Raster):
+         image.cmap = NDVI_COLORMAP_ABSOLUTE
+
+     return image
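The NDVI minimum and maximum rasters loaded above are the ingredients of fractional vegetation cover. The package's FVC_from_NDVI.py is not part of this diff, so the following is only a hypothetical illustration of the standard formulation, not the package's implementation:

import numpy as np

def fvc_from_ndvi(NDVI, NDVI_minimum, NDVI_maximum):
    # FVC = (NDVI - NDVI_min) / (NDVI_max - NDVI_min), clipped to [0, 1]
    FVC = (NDVI - NDVI_minimum) / (NDVI_maximum - NDVI_minimum)
    return np.clip(FVC, 0.0, 1.0)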
BESS_JPL/load_ball_berry_intercept_C3.py ADDED
@@ -0,0 +1,10 @@
+ from os.path import join, abspath, dirname
+
+ import rasters as rt
+ from rasters import Raster, RasterGeometry
+
+ def load_ball_berry_intercept_C3(geometry: RasterGeometry = None, resampling: str = "nearest") -> Raster:
+     filename = join(abspath(dirname(__file__)), "ball_berry_intercept_C3.tif")
+     image = Raster.open(filename, geometry=geometry, resampling=resampling)
+
+     return image
BESS_JPL/load_ball_berry_slope_C3.py ADDED
@@ -0,0 +1,10 @@
+ from os.path import join, abspath, dirname
+
+ import rasters as rt
+ from rasters import Raster, RasterGeometry
+
+ def load_ball_berry_slope_C3(geometry: RasterGeometry = None, resampling: str = "nearest") -> Raster:
+     filename = join(abspath(dirname(__file__)), "ball_berry_slope_C3.tif")
+     image = Raster.open(filename, geometry=geometry, resampling=resampling)
+
+     return image
BESS_JPL/load_ball_berry_slope_C4.py ADDED
@@ -0,0 +1,10 @@
+ from os.path import join, abspath, dirname
+
+ import rasters as rt
+ from rasters import Raster, RasterGeometry
+
+ def load_ball_berry_slope_C4(geometry: RasterGeometry = None, resampling: str = "nearest") -> Raster:
+     filename = join(abspath(dirname(__file__)), "ball_berry_slope_C4.tif")
+     image = Raster.open(filename, geometry=geometry, resampling=resampling)
+
+     return image
BESS_JPL/load_carbon_uptake_efficiency.py ADDED
@@ -0,0 +1,10 @@
+ from os.path import join, abspath, dirname
+
+ import rasters as rt
+ from rasters import Raster, RasterGeometry
+
+ def load_carbon_uptake_efficiency(geometry: RasterGeometry = None, resampling: str = "nearest") -> Raster:
+     filename = join(abspath(dirname(__file__)), "carbon_uptake_efficiency.tif")
+     image = Raster.open(filename, geometry=geometry, resampling=resampling)
+
+     return image
BESS_JPL/load_kn.py ADDED
@@ -0,0 +1,10 @@
+ from os.path import join, abspath, dirname
+
+ import rasters as rt
+ from rasters import Raster, RasterGeometry
+
+ def load_kn(geometry: RasterGeometry = None, resampling: str = "nearest") -> Raster:
+     filename = join(abspath(dirname(__file__)), "kn.tif")
+     image = Raster.open(filename, geometry=geometry, resampling=resampling)
+
+     return image
BESS_JPL/load_peakVCmax_C3.py ADDED
@@ -0,0 +1,12 @@
+ from os.path import join, abspath, dirname
+
+ import rasters as rt
+ from rasters import Raster, RasterGeometry
+
+ import numpy as np
+
+ def load_peakVCmax_C3(geometry: RasterGeometry = None, resampling: str = "nearest") -> Raster:
+     filename = join(abspath(dirname(__file__)), "peakVCmax_C3.tif")
+     image = Raster.open(filename, geometry=geometry, resampling=resampling, nodata=np.nan)
+
+     return image
BESS_JPL/load_peakVCmax_C4.py ADDED
@@ -0,0 +1,12 @@
+ from os.path import join, abspath, dirname
+
+ import rasters as rt
+ from rasters import Raster, RasterGeometry
+
+ import numpy as np
+
+ def load_peakVCmax_C4(geometry: RasterGeometry = None, resampling: str = "nearest") -> Raster:
+     filename = join(abspath(dirname(__file__)), "peakVCmax_C4.tif")
+     image = Raster.open(filename, geometry=geometry, resampling=resampling, nodata=np.nan)
+
+     return image
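The paired C3/C4 loaders suggest how the bundled parameter rasters can be combined. The sketch below is under stated assumptions: that rasters.Raster supports NumPy-style arithmetic, and that blending by C4 fraction mirrors what calculate_VCmax.py does, which is not shown in this diff.

from BESS_JPL.load_C4_fraction import load_C4_fraction
from BESS_JPL.load_peakVCmax_C3 import load_peakVCmax_C3
from BESS_JPL.load_peakVCmax_C4 import load_peakVCmax_C4
from BESS_JPL.interpolate_C3_C4 import interpolate_C3_C4

C4_fraction = load_C4_fraction()      # 0-1 C4 fraction
peakVCmax_C3 = load_peakVCmax_C3()    # peak VCmax for C3 vegetation
peakVCmax_C4 = load_peakVCmax_C4()    # peak VCmax for C4 vegetation

# Weighted blend: C3 * (1 - f) + C4 * f
peakVCmax = interpolate_C3_C4(peakVCmax_C3, peakVCmax_C4, C4_fraction)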