pyesper-1.0.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- PyESPER/__init__.py +28 -0
- PyESPER/adjust_pH_DIC.py +73 -0
- PyESPER/coefs_AAinds.py +53 -0
- PyESPER/defaults.py +71 -0
- PyESPER/define_polygons.py +68 -0
- PyESPER/emlr_estimate.py +156 -0
- PyESPER/emlr_nn.py +54 -0
- PyESPER/errors.py +39 -0
- PyESPER/fetch_data.py +54 -0
- PyESPER/fetch_polys_NN.py +16 -0
- PyESPER/final_formatting.py +25 -0
- PyESPER/input_AAinds.py +113 -0
- PyESPER/inputdata_organize.py +56 -0
- PyESPER/interpolate.py +62 -0
- PyESPER/iterations.py +254 -0
- PyESPER/lir.py +191 -0
- PyESPER/lir_uncertainties.py +99 -0
- PyESPER/mixed.py +37 -0
- PyESPER/nn.py +134 -0
- PyESPER/organize_data.py +378 -0
- PyESPER/organize_nn_output.py +266 -0
- PyESPER/pH_DIC_nn_adjustment.py +189 -0
- PyESPER/pH_adjcalc.py +36 -0
- PyESPER/pH_adjustment.py +179 -0
- PyESPER/process_netresults.py +105 -0
- PyESPER/run_nets.py +85 -0
- PyESPER/simplecantestimatelr.py +43 -0
- PyESPER/temperature_define.py +48 -0
- pyesper-1.0.0.dist-info/METADATA +16 -0
- pyesper-1.0.0.dist-info/RECORD +33 -0
- pyesper-1.0.0.dist-info/WHEEL +5 -0
- pyesper-1.0.0.dist-info/entry_points.txt +4 -0
- pyesper-1.0.0.dist-info/top_level.txt +1 -0
PyESPER/__init__.py
ADDED
@@ -0,0 +1,28 @@
+from .final_formatting import final_formatting
+from .organize_nn_output import organize_nn_output
+from .adjust_pH_DIC import adjust_pH_DIC
+from .input_AAinds import input_AAinds
+from .pH_adjcalc import pH_adjcalc
+from .coefs_AAinds import coefs_AAinds
+from .inputdata_organize import inputdata_organize
+from .pH_adjustment import pH_adjustment
+from .defaults import defaults
+from .interpolate import interpolate
+from .pH_DIC_nn_adjustment import pH_DIC_nn_adjustment
+from .define_polygons import define_polygons
+from .iterations import iterations
+from .process_netresults import process_netresults
+from .emlr_estimate import emlr_estimate
+from .lir_uncertainties import process_uncertainties
+from .lir_uncertainties import measurement_uncertainty_defaults
+from .run_nets import run_nets
+from .emlr_nn import emlr_nn
+from .simplecantestimatelr import simplecantestimatelr
+from .errors import errors
+from .temperature_define import temperature_define
+from .fetch_data import fetch_data
+from .fetch_polys_NN import fetch_polys_NN
+from .organize_data import organize_data
+from .lir import lir
+from .nn import nn
+from .mixed import mixed
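The package re-exports its whole pipeline at the top level, including the user-facing entry points lir, nn, and mixed. A minimal, hypothetical usage sketch follows; the exact signature of lir is not part of this diff excerpt, so the call shape below is an assumption modeled on the dictionaries these modules pass around:

# Hypothetical sketch; lir()'s real signature is not shown in this excerpt.
from PyESPER import lir

OutputCoordinates = {"longitude": [210.0], "latitude": [30.0], "depth": [500.0]}
PredictorMeasurements = {"salinity": [34.5], "temperature": [8.0]}

# Assumed call shape: desired variables, path to the trained LIR files,
# then the coordinate and measurement dictionaries.
results = lir(["DIC"], "/path/to/ESPER_files",
              OutputCoordinates=OutputCoordinates,
              PredictorMeasurements=PredictorMeasurements)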
PyESPER/adjust_pH_DIC.py
ADDED
@@ -0,0 +1,73 @@
+def adjust_pH_DIC(DesiredVariables, VerboseTF, Dates, Est_pre={}, PredictorMeasurements={}, OutputCoordinates={}, **kwargs):
+
+    """
+    Adjusts pH and DIC, if present, for anthropogenic carbon (Cant) within LIRs. The Cant
+    adjustment methods are based on those from ESPERv1, which makes a TTD-based
+    assumption/simplification but does not use the Tracer-based Rapid Anthropogenic Carbon
+    Estimation data product, TRACE. Rather, interpolation from a gridded product is used to
+    produce estimates for the year 2002, and data are adjusted to/from this reference year.
+    This is the first of three steps for the Cant adjustment.
+
+    Inputs:
+        DesiredVariables: List of desired variables to estimate
+        VerboseTF: Boolean indicating whether the user wants suppression of warnings
+        Dates: List of dates for estimates
+        Est_pre: Dictionary of preliminary estimates for each variable-equation case scenario
+        PredictorMeasurements: Dictionary of input measurements for each variable-equation case scenario
+        OutputCoordinates: Dictionary of coordinates for locations of estimates
+        **kwargs: Please see README for full information
+
+    Outputs:
+        Cant_adjusted: Dictionary of values adjusted for anthropogenic carbon for each combination
+        Cant: Numpy array of estimates for anthropogenic carbon for each estimate
+        Cant2002: Numpy array of estimates for anthropogenic carbon in the year 2002 for each estimate
+    """
+
+    import numpy as np
+    from PyESPER.simplecantestimatelr import simplecantestimatelr
+
+    # Predefining output dictionary and formatting estimates
+    Cant_adjusted = {}
+    combos2 = list(Est_pre.keys())
+    values2 = []
+    for v in Est_pre.values():
+        vals = np.array([v]).flatten()
+        values2.append(vals)
+    values2 = np.array(values2)
+
+    # Predefining anthropogenic carbon numpy arrays
+    n = len(Dates)
+    Cant, Cant2002 = np.zeros(n), np.zeros(n)
+
+    # Only proceed if adjustment is needed
+    if "EstDates" in kwargs and ("DIC" in DesiredVariables or "pH" in DesiredVariables):
+        if VerboseTF:
+            print("Estimating anthropogenic carbon for PyESPER_LIR.")
+
+        # Normalize longitude to [0, 360]
+        longitude = np.mod(np.array(OutputCoordinates["longitude"]), 360)
+        latitude = np.array(OutputCoordinates["latitude"])
+        depth = np.array(OutputCoordinates["depth"])
+
+        # Estimate anthropogenic carbon (Cant) and anthropogenic carbon for the year 2002 (Cant2002)
+        Cant, Cant2002 = simplecantestimatelr(Dates, longitude, latitude, depth)
+        Cant, Cant2002 = np.array(Cant), np.array(Cant2002)
+
+        for combo in range(len(combos2)):
+            comb = combos2[combo]
+            val = values2[combo]
+            est1 = []
+
+            # Only adjust if the combination is DIC; otherwise just replace "nan" markers
+            if "dic" in comb.lower():
+                adjusted = np.where(val == "nan", np.nan, val + Cant - Cant2002)
+                est1.append(adjusted)
+            else:
+                nanfix = np.where(val == "nan", np.nan, val)
+                est1.append(nanfix)
+
+            Cant_adjusted[comb] = est1
+
+    return Cant_adjusted, Cant, Cant2002
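A self-contained illustration of the DIC adjustment rule above: estimates referenced to 2002 are shifted by (Cant - Cant2002). In the package the preliminary estimates can carry "nan" string markers; the inputs here are plain floats, so only the arithmetic path is exercised, and the numbers are illustrative only.

import numpy as np

val = np.array([2100.0, 2150.0])  # preliminary DIC estimates (toy values)
Cant, Cant2002 = np.array([55.0, 60.0]), np.array([50.0, 50.0])
adjusted = np.where(val == "nan", np.nan, val + Cant - Cant2002)
print(adjusted)  # [2105. 2160.]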
PyESPER/coefs_AAinds.py
ADDED
@@ -0,0 +1,53 @@
+def coefs_AAinds(Equations, LIR_data):
+
+    """
+    Separates coefficients from MATLAB ESPERv1 into Atlantic and Arctic (AA) or other regions.
+
+    Inputs:
+        Equations: List of equations for use in ESPERs
+        LIR_data: List of dictionaries of data from MATLAB trainings (pre-processed)
+
+    Outputs:
+        Gdf: Dictionary of pre-trained and processed LIR data for grid of coordinates
+        CsDesired: Dictionary of equation coefficients based on user-defined output
+    """
+
+    import numpy as np
+
+    # Use boolean for AA or Else to separate coefficients into Atlantic or not
+    GridCoords, Cs, AAInds = LIR_data[:3]
+    DVs, CsVs = list(Cs.keys()), list(Cs.values())
+    ListVars = np.arange(len(AAInds))
+    GridValues = np.array(list(GridCoords.values())[0], dtype=float)
+    AAIndValues = np.array(list(AAInds.values())[0], dtype=float)
+
+    lon_grid = np.array(GridValues[:, 0])
+    lat_grid = np.array(GridValues[:, 1])
+    d2d_grid = np.array(GridValues[:, 2]) / 25
+    aainds = np.array(AAIndValues[:, 0])
+
+    names = ['lon', 'lat', 'd2d', "C_alpha", "C_S", "C_T", "C_A", "C_B", "C_C", 'AAInds']
+    Gdf, CsDesired = {}, {}
+
+    # Moving data into pre-defined dictionaries
+    for lvar, name in zip(ListVars, DVs):
+        Cs2 = CsVs[lvar]
+        for e in Equations:
+            CsName = f'Cs{name}{e}'
+            CsDesired[CsName] = Cs2[e-1][:]
+            Cs3 = Cs2[e-1][:]
+            C_alpha, C_S, C_T = np.array(Cs3[:, 0]), np.array(Cs3[:, 1]), np.array(Cs3[:, 2])
+            C_A, C_B, C_C = np.array(Cs3[:, 3]), np.array(Cs3[:, 4]), np.array(Cs3[:, 5])
+            Gdf[f"{name}{e}"] = {
+                names[0]: lon_grid,
+                names[1]: lat_grid,
+                names[2]: d2d_grid,
+                names[3]: C_alpha,
+                names[4]: C_S,
+                names[5]: C_T,
+                names[6]: C_A,
+                names[7]: C_B,
+                names[8]: C_C,
+                names[9]: aainds,
+            }
+
+    return Gdf, CsDesired
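A toy sketch of the LIR_data layout this function indexes (the shapes are an assumption inferred from the code above, not documented in this diff): GridCoords maps a variable name to an (N, 3) grid of [lon, lat, depth], Cs maps it to 16 per-equation (N, 6) coefficient arrays, and AAInds maps it to an (N, 1) Atlantic/Arctic flag.

import numpy as np
from PyESPER.coefs_AAinds import coefs_AAinds

N = 4  # tiny grid for illustration
GridCoords = {"DIC": np.column_stack([np.full(N, 200.0), np.full(N, 10.0), np.arange(N) * 25.0])}
Cs = {"DIC": [np.ones((N, 6)) * e for e in range(1, 17)]}  # 16 equations
AAInds = {"DIC": np.zeros((N, 1))}

Gdf, CsDesired = coefs_AAinds([1, 16], [GridCoords, Cs, AAInds])
print(sorted(Gdf.keys()))          # ['DIC1', 'DIC16']
print(CsDesired["CsDIC16"].shape)  # (4, 6)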
PyESPER/defaults.py
ADDED
@@ -0,0 +1,71 @@
+def defaults(DesiredVariables, PredictorMeasurements={}, OutputCoordinates={}, **kwargs):
+
+    """
+    Set default values and bookkeep inputs.
+
+    Inputs:
+        DesiredVariables: List of desired output variables (user-requested)
+        PredictorMeasurements: Dictionary of user-provided predictor measurements (salinity, etc.)
+        OutputCoordinates: Dictionary of user-provided coordinates
+        **kwargs: Please see README for more information
+
+    Outputs:
+        Equations: List of equations (either user-defined or default)
+        n: Scalar representing the number of required estimates for each variable-equation combination
+        VerboseTF: Boolean read-in of whether the user wants to suppress optional warnings
+        EstDates: Numpy array of estimate dates (user-defined, or the 2002.0 default)
+        C: Dictionary of processed geographic coordinates
+        PerKgSwTF: Boolean representing whether user input is in molal or molar units
+        MeasUncerts: Dictionary of user input measurement uncertainty values, or an empty
+            dictionary if not provided
+    """
+
+    import numpy as np
+
+    # Local exception type for input-validation failures
+    class CustomError(Exception):
+        pass
+
+    # Check and define Equations based on user-defined kwargs, or use default values
+    Equations = kwargs.get("Equations", list(range(1, 17)))
+
+    # Reading dimensions of user input
+    n = max(len(v) for v in OutputCoordinates.values())
+
+    # Checking kwargs for presence of VerboseTF and EstDates, and Equations, and defining defaults, as needed
+    VerboseTF = kwargs.get("VerboseTF", True)
+
+    # Set EstDates based on kwargs, defaulting to 2002.0 if not provided
+    if "EstDates" in kwargs:
+        d = np.array(kwargs["EstDates"])
+        if len(d) != n:
+            EstDates = np.tile(d, (n + 1, 1)).reshape(-1)
+        else:
+            EstDates = d
+    else:
+        EstDates = np.full(n, 2002.0)
+
+    # Bookkeeping coordinates: wrap longitudes into [0, 360)
+    C = {}
+    longitude = np.array(OutputCoordinates["longitude"])
+    longitude[longitude > 360] = np.remainder(longitude[longitude > 360], 360)
+    longitude[longitude < 0] = longitude[longitude < 0] + 360
+    C["longitude"] = longitude
+    C["latitude"] = OutputCoordinates["latitude"]
+    C["depth"] = OutputCoordinates["depth"]
+
+    # Defining or reading in PerKgSwTF
+    PerKgSwTF = kwargs.get("PerKgSwTF", True)
+
+    # Defining Measurement Uncertainties
+    MeasUncerts = kwargs.get("MeasUncerts", {})
+
+    # Validate MeasUncerts dimensions
+    if MeasUncerts:
+        if max(len(v) for v in MeasUncerts.values()) != n:
+            if min(len(v) for v in MeasUncerts.values()) != 1:
+                raise CustomError(
+                    "MeasUncerts must be undefined, a vector with the same number of elements as "
+                    "PredictorMeasurements has columns, or a matrix of identical dimension to PredictorMeasurements."
+                )
+        if len(MeasUncerts) != len(PredictorMeasurements):
+            print("Warning: Different numbers of input uncertainties and input measurements.")
+
+    return Equations, n, VerboseTF, EstDates, C, PerKgSwTF, MeasUncerts
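A self-contained check of the longitude bookkeeping above: values are wrapped into [0, 360) so that, for example, -20 and 340 refer to the same meridian.

import numpy as np

longitude = np.array([-20.0, 370.0, 180.0])
longitude[longitude > 360] = np.remainder(longitude[longitude > 360], 360)
longitude[longitude < 0] = longitude[longitude < 0] + 360
print(longitude)  # [340.  10. 180.]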
PyESPER/define_polygons.py
ADDED
@@ -0,0 +1,68 @@
+def define_polygons(C={}):
+
+    """
+    Defining and structuring indexing within ocean region polygons.
+    First defines the polygons, then assesses the location of user-provided
+    coordinates within polygons.
+
+    Inputs:
+        C: Dictionary of adjusted coordinates
+
+    Output:
+        df: Dictionary of adjusted coordinates with boolean indicators for specific
+            ocean regions
+    """
+
+    import numpy as np
+    import matplotlib.path as mpltPath
+
+    # Define polygons for Atlantic and Arctic (AA) or other (Else) ocean basins
+    LNAPoly = np.array([[300, 0], [260, 20], [240, 67], [260, 40], [361, 40], [361, 0], [298, 0]])
+    LSAPoly = np.array([[298, 0], [292, -40.01], [361, -40.01], [361, 0], [298, 0]])
+    LNAPolyExtra = np.array([[-1, 50], [40, 50], [40, 0], [-1, 0], [-1, 50]])
+    LSAPolyExtra = np.array([[-1, 0], [20, 0], [20, -40], [-1, -40], [-1, 0]])
+    LNOPoly = np.array([[361, 40], [361, 91], [-1, 91], [-1, 50], [40, 50], [40, 40], [104, 40], [104, 67],
+                        [240, 67], [280, 40], [361, 40]])
+    xtra = np.array([[0.5, -39.9], [0.99, -39.9], [0.99, -40.001], [0.5, -40.001]])
+    polygons = [LNAPoly, LSAPoly, LNAPolyExtra, LSAPolyExtra, LNOPoly, xtra]
+
+    # Create Paths
+    paths = [mpltPath.Path(poly) for poly in polygons]
+
+    # Extract coordinates
+    longitude, latitude, depth = np.array(C["longitude"]), np.array(C["latitude"]), np.array(C["depth"])
+
+    # Check if coordinates are within each polygon
+    conditions = [path.contains_points(np.column_stack((longitude, latitude))) for path in paths]
+
+    # Combine conditions
+    AAIndsM = np.logical_or.reduce(conditions)
+
+    # Adding Bering Sea, S. Atlantic, and S. African polygons separately
+    Bering = np.array([[173, 70], [210, 70], [210, 62.5], [173, 62.5], [173, 70]])
+    beringpath = mpltPath.Path(Bering)
+    beringconditions = beringpath.contains_points(np.column_stack((longitude, latitude)))
+    SAtlInds, SoAfrInds = [], []
+    for i, z in zip(longitude, latitude):
+        # Check if the conditions are met for the Southern Atlantic
+        if -34 > z > -44:  # Check latitude first to reduce unnecessary checks
+            if i > 290 or i < 20:
+                SAtlInds.append('True')
+            else:
+                SAtlInds.append('False')
+
+            # Check if the condition is met for Southern Africa
+            if 19 < i < 27:
+                SoAfrInds.append('True')
+            else:
+                SoAfrInds.append('False')
+        else:
+            SAtlInds.append('False')
+            SoAfrInds.append('False')
+
+    # Create dictionary with boolean indicators (note: SAtlInds and SoAfrInds
+    # are stored as 'True'/'False' strings rather than booleans)
+    df = {'AAInds': AAIndsM, 'BeringInds': beringconditions, 'SAtlInds': SAtlInds,
+          'SoAfrInds': SoAfrInds, 'Lat': latitude, 'Lon': longitude, 'Depth': depth}
+
+    return df
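A minimal point-in-polygon sketch using the same matplotlib.path API the module relies on; the square here is illustrative, not one of the basin polygons.

import numpy as np
import matplotlib.path as mpltPath

square = mpltPath.Path(np.array([[0, 0], [10, 0], [10, 10], [0, 10]]))
pts = np.column_stack(([5.0, 15.0], [5.0, 5.0]))  # (lon, lat) pairs
print(square.contains_points(pts))  # [ True False]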
PyESPER/emlr_estimate.py
ADDED
@@ -0,0 +1,156 @@
+def emlr_estimate(Equations, DesiredVariables, Path, OutputCoordinates={}, PredictorMeasurements={}, UDict={}, DUDict={}, Coefficients={}, **kwargs):
+
+    """
+    Uncertainty estimation step 1 for LIRs.
+
+    Inputs:
+        Equations: List of equations
+        DesiredVariables: List of variables to estimate
+        Path: User-defined computer path
+        OutputCoordinates: Dictionary of locations where estimates are requested
+        PredictorMeasurements: Dictionary of measurements provided by user
+        UDict: Dictionary of user-defined measurement uncertainties
+        DUDict: Dictionary of default measurement uncertainties
+        Coefficients: Dictionary of dictionaries of coefficients for each
+            variable-equation scenario
+        **kwargs: Please see README for full description
+
+    Output:
+        EMLR: Dictionary of uncertainty values for each desired variable-equation
+            case scenario and estimate
+    """
+
+    import numpy as np
+    from scipy.interpolate import griddata
+    from PyESPER.fetch_data import fetch_data
+
+    # Predefine dictionary and lists to fill
+    EMLR, varnames, EqM2 = {}, [], []
+
+    # Iterating over variables to fetch data this time
+    for dv in DesiredVariables:
+        # Fetch LIR data and construct the uncertainty grid array
+        LIR_data = fetch_data([dv], Path)
+
+        # Some formatting of the uncertainties from the import
+        arritem = np.array(LIR_data)[3].item()
+
+        UGridArray = np.array([
+            np.nan_to_num([arritem[i][c][b][a] for a in range(16) for b in range(11) for c in range(8)])
+            for i in range(len(arritem))
+        ]).T
+
+        # Grid columns: UDepth, USal, Eqn, RMSE
+        UDepth, USal, Eqn, RMSE = UGridArray.T
+        UGridPoints = (UDepth, USal, Eqn)
+        UGridValues = RMSE
+
+        # Function to replace nan entries with a marker value
+        def safe_fill(arr, fill_val):
+            arr = np.array(arr, dtype=float)
+            arr[np.isnan(arr)] = fill_val
+            return arr
+
+        # Iterating over equations within variables to interpolate the
+        # uncertainties to desired locations
+        for eq in Equations:
+            varname = dv + str(eq)
+            varnames.append(varname)
+
+            eq_repeated = np.full_like(OutputCoordinates['depth'], eq)
+            UGridPointsOut = (
+                np.array(OutputCoordinates['depth']),
+                np.array(PredictorMeasurements['salinity']),
+                eq_repeated
+            )
+            emlr = griddata(UGridPoints, UGridValues, UGridPointsOut, method='linear')
+
+            combo = f"{dv}{eq}"
+            Coefs = {
+                k: np.nan_to_num(np.array(UDict[combo][k]))
+                for k in ["US", "UT", "UA", "UB", "UC"]
+            }
+
+            uncdfs, duncdfs = UDict[combo], DUDict[combo]
+
+            # Extract keys
+            keys = list(uncdfs.keys())
+
+            # Fill nans with -9999 so missing inputs can be masked below
+            USu2 = [safe_fill(uncdfs[k], -9999.0) for k in keys]
+            UTu2 = [safe_fill(uncdfs[k], -9999.0) for k in keys]
+            UAu2 = [safe_fill(uncdfs[k], -9999.0) for k in keys]
+            UBu2 = [safe_fill(uncdfs[k], -9999.0) for k in keys]
+            UCu2 = [safe_fill(uncdfs[k], -9999.0) for k in keys]
+
+            DUSu2 = [safe_fill(duncdfs[k], -9999.0) for k in keys]
+            DUTu2 = [safe_fill(duncdfs[k], -9999.0) for k in keys]
+            DUAu2 = [safe_fill(duncdfs[k], -9999.0) for k in keys]
+            DUBu2 = [safe_fill(duncdfs[k], -9999.0) for k in keys]
+            DUCu2 = [safe_fill(duncdfs[k], -9999.0) for k in keys]
+
+            # Compute uncertainty estimates
+            EM = []
+            for cucombo in range(len(Coefs["US"])):
+                # Grab each coefficient
+                s = Coefs["US"][cucombo]
+                t = Coefs["UT"][cucombo]
+                a = Coefs["UA"][cucombo]
+                b = Coefs["UB"][cucombo]
+                c = Coefs["UC"][cucombo]
+
+                # Main uncertainty components
+                s1 = (s * USu2[0][cucombo]) ** 2
+                t1 = (t * UTu2[1][cucombo]) ** 2
+                a1 = (a * UAu2[2][cucombo]) ** 2
+                b1 = (b * UBu2[3][cucombo]) ** 2
+                c1 = (c * UCu2[4][cucombo]) ** 2
+                sum2 = s1 + t1 + a1 + b1 + c1
+
+                # Delta uncertainties
+                ds1 = (s * DUSu2[0][cucombo]) ** 2
+                dt1 = (t * DUTu2[1][cucombo]) ** 2
+                da1 = (a * DUAu2[2][cucombo]) ** 2
+                db1 = (b * DUBu2[3][cucombo]) ** 2
+                dc1 = (c * DUCu2[4][cucombo]) ** 2
+                dsum2 = ds1 + dt1 + da1 + db1 + dc1
+
+                # Final uncertainty
+                uncestimate = np.sqrt(sum2 - dsum2 + emlr[cucombo] ** 2)
+                EM.append(uncestimate)
+
+            # Post-process: convert -9999 markers to np.nan for the terms
+            # each equation actually uses
+            UncertEst = np.array(EM, dtype=float)
+            UncertEst[USu2[0] == -9999] = np.nan
+            if eq in [1, 2, 3, 4, 5, 6, 7, 8]:
+                UncertEst[UTu2[1] == -9999] = np.nan
+            if eq in [1, 2, 5, 6, 9, 10, 13, 14]:
+                UncertEst[UAu2[2] == -9999] = np.nan
+            if eq in [1, 3, 5, 7, 9, 11, 13, 15]:
+                UncertEst[UBu2[3] == -9999] = np.nan
+            if eq in [1, 2, 3, 4, 9, 10, 11, 12]:
+                UncertEst[UCu2[4] == -9999] = np.nan
+
+            EqM2.append(UncertEst)
+
+    # Final assembly into dictionary
+    for i, key in enumerate(varnames):
+        EMLR[key] = EqM2[i]
+
+    return EMLR
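A standalone sketch of the scipy.interpolate.griddata call pattern used above: scattered (depth, salinity, equation) points carrying RMSE values, linearly interpolated to query points. The numbers are illustrative only.

import numpy as np
from scipy.interpolate import griddata

rng = np.random.default_rng(0)
points = rng.uniform([0, 30, 1], [500, 37, 16], size=(200, 3))  # depth, sal, eqn
values = points[:, 0] * 0.001 + 0.01                            # fake RMSE surface
query = (np.array([100.0, 250.0]), np.array([35.0, 34.0]), np.full(2, 7.0))
print(griddata(points, values, query, method='linear'))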
PyESPER/emlr_nn.py
ADDED
@@ -0,0 +1,54 @@
+def emlr_nn(Path, DesiredVariables, Equations, OutputCoordinates={}, PredictorMeasurements={}, **kwargs):
+
+    """
+    Estimating EMLR for neural networks.
+    Returns a dictionary keyed by DesiredVariable+Equation (e.g., "DIC1"),
+    with uncertainties as values.
+    """
+
+    from PyESPER.fetch_polys_NN import fetch_polys_NN
+    import numpy as np
+    from scipy.interpolate import griddata
+
+    EMLR = {}
+
+    for dv in DesiredVariables:
+        NN_data = fetch_polys_NN(Path, [dv])
+
+        data_arrays = [
+            np.nan_to_num(np.array([
+                NN_data[1][i][c][b][a]
+                for a in range(16)
+                for b in range(11)
+                for c in range(8)
+            ]))
+            for i in range(4)
+        ]
+
+        # Create dictionary of predetermined uncertainties
+        UGridArray = {
+            'UDepth': data_arrays[0],
+            'USal': data_arrays[1],
+            'Eqn': data_arrays[2],
+            'RMSE': data_arrays[3],
+        }
+
+        UGridPoints = (UGridArray['UDepth'], UGridArray['USal'], UGridArray['Eqn'])
+        UGridValues = UGridArray['RMSE']
+
+        for eq in Equations:
+            name = dv + str(eq)
+            eq_array = np.full_like(OutputCoordinates['depth'], eq, dtype=float)
+
+            # Perform estimation for each equation
+            EM = griddata(
+                UGridPoints,
+                UGridValues,
+                (OutputCoordinates['depth'], PredictorMeasurements['salinity'], eq_array),
+                method='linear'
+            )
+
+            EMLR[name] = EM
+
+    return EMLR
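A quick check of the flattening pattern used above: the nested comprehension walks a structure indexed [c][b][a] (8 x 11 x 16) into one 1408-element vector, equivalent to a transpose-then-ravel. The stand-in array below is illustrative only.

import numpy as np

grid = np.arange(8 * 11 * 16).reshape(8, 11, 16)  # stand-in for NN_data[1][i]
flat = np.array([grid[c][b][a] for a in range(16) for b in range(11) for c in range(8)])
print(flat.shape)                                          # (1408,)
print(np.array_equal(flat, grid.transpose(2, 1, 0).ravel()))  # True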
PyESPER/errors.py
ADDED
@@ -0,0 +1,39 @@
+def errors(OutputCoordinates={}, PredictorMeasurements={}):
+
+    """
+    Custom error messages for PyESPER that check inputs and ensure that formatting and other requirements are met.
+    """
+
+    # Checking for presence of required input parameters and raising a custom error message if needed
+    class CustomError(Exception):
+        pass
+
+    required_coords = ("longitude", "latitude", "depth")
+    for coord_name in required_coords:
+        if coord_name not in OutputCoordinates:
+            raise CustomError(f"Warning: Missing {coord_name} in OutputCoordinates.")
+
+    if "salinity" not in PredictorMeasurements:
+        raise CustomError("Warning: Missing salinity measurements. Salinity is a required input.")
+
+    if "oxygen" in PredictorMeasurements and "temperature" not in PredictorMeasurements:
+        raise CustomError("Warning: Missing temperature measurements. Temperature is required when oxygen is provided.")
+
+    # Check temperature sanity and print a warning for out-of-range values
+    if "temperature" in PredictorMeasurements and any(t < -5 or t > 50 for t in PredictorMeasurements["temperature"]):
+        print("Warning: Temperatures below -5°C or above 50°C found. PyESPER is not designed for seawater with these properties. Ensure temperatures are in Celsius.")
+
+    if any(s < 5 or s > 50 for s in PredictorMeasurements["salinity"]):
+        print("Warning: Salinities less than 5 or greater than 50 have been found. ESPER is not intended for seawater with these properties.")
+
+    if any(d < 0 for d in OutputCoordinates["depth"]):
+        print("Warning: Depth cannot be negative.")
+
+    if any(l > 90 for l in OutputCoordinates["latitude"]):
+        print("Warning: A latitude >90 deg (N or S) has been detected. Verify latitude is entered correctly as an input.")
+
+    # Checking for commonly used missing data indicator flags. Consider adding your commonly used flags here.
+    if any(l == -9999 or l == -9 or l == -1e20 for l in OutputCoordinates["latitude"]):
+        print("Warning: A common non-NaN missing data indicator (e.g., -9999, -9, -1e20) was detected in the input measurements provided. Missing data should be replaced with NaNs. Otherwise, ESPER will interpret your inputs at face value and give terrible estimates.")
+
+    print("Please note that, for consistency with MATLAB ESPERv1, the now-deprecated sw package is used. This will be replaced with gsw in future updates.")
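A hedged usage sketch, assuming the package is installed: errors() raises a CustomError defined locally inside the function, so callers can only catch it generically.

from PyESPER.errors import errors

coords = {"longitude": [210.0], "latitude": [30.0], "depth": [500.0]}
try:
    errors(OutputCoordinates=coords, PredictorMeasurements={})  # no salinity
except Exception as exc:
    print(type(exc).__name__, exc)  # CustomError Warning: Missing salinity ...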
PyESPER/fetch_data.py
ADDED
@@ -0,0 +1,54 @@
+def fetch_data(DesiredVariables, Path):
+
+    """
+    Gathers the necessary LIR files that were pre-trained in MATLAB ESPERs.
+
+    Inputs:
+        DesiredVariables: List of desired output estimate variables
+        Path: User-defined computer path of locations of files
+
+    Outputs:
+        LIR_data: List of dictionaries of LIR data
+    """
+
+    from scipy.io import loadmat
+    import os
+    import numpy as np
+
+    # Predefine dictionaries of output
+    AAIndsCs, GridCoords, Cs = {}, {}, {}
+
+    # Load necessary files
+    for v in DesiredVariables:
+        fname1 = os.path.join(Path, f"Mat_fullgrid/LIR_files_{v}_fullCs1.mat")
+        fname2 = os.path.join(Path, f"Mat_fullgrid/LIR_files_{v}_fullCs2.mat")
+        fname3 = os.path.join(Path, f"Mat_fullgrid/LIR_files_{v}_fullCs3.mat")
+        fname4 = os.path.join(Path, f"Mat_fullgrid/LIR_files_{v}_fullGrids.mat")
+
+        Cs1 = loadmat(fname1)
+        Cs2 = loadmat(fname2)
+        Cs3 = loadmat(fname3)
+        Grid = loadmat(fname4)
+
+        # Extract and store all arrays
+        UncGrid = np.array(Grid["UncGrid"][0][0])
+        GridCoodata = np.array(Grid["GridCoords"])
+        AAInds = np.array(Grid["AAIndsM"])
+
+        Csdata1 = np.array(Cs1["Cs1"])
+        Csdata2 = np.array(Cs2["Cs2"])
+        Csdata3 = np.array(Cs3["Cs3"])
+
+        # Store as NumPy arrays
+        AAIndsCs[v] = AAInds
+        GridCoords[v] = GridCoodata
+
+        # Combine along axis 1, then store each layer in list
+        Csdata = np.concatenate((Csdata1, Csdata2, Csdata3), axis=1)
+        Cs[v] = [Csdata[:, :, i] for i in range(Csdata.shape[2])]
+
+    # Store all in one list
+    LIR_data = [GridCoords, Cs, AAIndsCs, UncGrid]
+
+    return LIR_data
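A sanity-check sketch for the expected on-disk layout before calling fetch_data; the directory and file names come straight from the code above, while the Path value is a placeholder.

import os

Path = "/path/to/ESPER_files"  # placeholder
for v in ["DIC"]:
    for stem in ("fullCs1", "fullCs2", "fullCs3", "fullGrids"):
        fname = os.path.join(Path, f"Mat_fullgrid/LIR_files_{v}_{stem}.mat")
        print(fname, "exists" if os.path.exists(fname) else "MISSING")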
PyESPER/fetch_polys_NN.py
ADDED
@@ -0,0 +1,16 @@
+def fetch_polys_NN(Path, DesiredVariables):
+
+    """
+    Loads the uncertainty polygons for NNs.
+    """
+
+    from scipy.io import loadmat
+    import os
+
+    for v in DesiredVariables:
+        fname = os.path.join(Path, f"Uncertainty_Polys/NN_files_{v}_Unc_Poly.mat")
+        NNs = loadmat(fname)
+        Polys, UncGrid = NNs["Polys"][0][0], NNs["UncGrid"][0][0]
+
+    NN_data = [Polys, UncGrid]
+    return NN_data
PyESPER/final_formatting.py
ADDED
@@ -0,0 +1,25 @@
+def final_formatting(DesiredVariables, Cant_adjusted={}, Est_pre={}):
+
+    """
+    Formatting the final data output for estimates.
+
+    Inputs:
+        DesiredVariables: List of desired variables to estimate
+        Cant_adjusted: Dictionary of estimates adjusted for anthropogenic
+            carbon for each combination
+        Est_pre: Dictionary of estimates for each combination
+
+    Output:
+        Estimates: Dictionary of estimates for each combination
+    """
+
+    # Conditional on whether anthropogenic carbon was needed or not
+    if "pH" in DesiredVariables or "DIC" in DesiredVariables:
+        Estimates = Cant_adjusted
+        print("anthropogenic carbon has been incorporated into some estimates")
+    else:
+        Estimates = Est_pre
+        print("anthropogenic carbon is not considered for these estimates")
+
+    return Estimates
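A toy call, assuming the package is installed; it shows which dictionary is passed through depending on DesiredVariables. The dictionary contents are illustrative only.

from PyESPER.final_formatting import final_formatting

est_pre = {"TA1": [2300.0]}
cant_adj = {"DIC1": [2150.0]}
print(final_formatting(["TA"], cant_adj, est_pre))   # falls back to Est_pre
print(final_formatting(["DIC"], cant_adj, est_pre))  # uses Cant_adjusted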