hestia-earth-models 0.64.4__py3-none-any.whl → 0.64.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of hestia-earth-models might be problematic. Click here for more details.
- hestia_earth/models/blonkConsultants2016/ch4ToAirNaturalVegetationBurning.py +5 -9
- hestia_earth/models/blonkConsultants2016/co2ToAirAboveGroundBiomassStockChangeLandUseChange.py +5 -9
- hestia_earth/models/blonkConsultants2016/n2OToAirNaturalVegetationBurningDirect.py +6 -13
- hestia_earth/models/cycle/animal/input/properties.py +6 -0
- hestia_earth/models/cycle/completeness/soilAmendment.py +3 -2
- hestia_earth/models/cycle/concentrateFeed.py +10 -4
- hestia_earth/models/cycle/input/properties.py +6 -0
- hestia_earth/models/cycle/liveAnimal.py +2 -2
- hestia_earth/models/cycle/milkYield.py +3 -3
- hestia_earth/models/cycle/otherSitesArea.py +59 -0
- hestia_earth/models/cycle/otherSitesUnusedDuration.py +9 -8
- hestia_earth/models/cycle/pastureSystem.py +3 -2
- hestia_earth/models/cycle/product/properties.py +6 -0
- hestia_earth/models/cycle/siteArea.py +83 -0
- hestia_earth/models/cycle/stockingDensityAnimalHousingAverage.py +28 -16
- hestia_earth/models/cycle/utils.py +1 -1
- hestia_earth/models/environmentalFootprintV3/soilQualityIndexLandOccupation.py +128 -0
- hestia_earth/models/environmentalFootprintV3/utils.py +17 -0
- hestia_earth/models/ipcc2006/co2ToAirOrganicSoilCultivation.py +17 -6
- hestia_earth/models/ipcc2006/n2OToAirOrganicSoilCultivationDirect.py +17 -6
- hestia_earth/models/ipcc2019/co2ToAirCarbonStockChange_utils.py +904 -0
- hestia_earth/models/ipcc2019/co2ToAirSoilOrganicCarbonStockChangeManagementChange.py +70 -618
- hestia_earth/models/mocking/search-results.json +395 -323
- hestia_earth/models/pooreNemecek2018/saplings.py +10 -7
- hestia_earth/models/site/management.py +18 -14
- hestia_earth/models/utils/__init__.py +38 -0
- hestia_earth/models/utils/array_builders.py +63 -52
- hestia_earth/models/utils/blank_node.py +137 -82
- hestia_earth/models/utils/descriptive_stats.py +3 -239
- hestia_earth/models/utils/feedipedia.py +15 -2
- hestia_earth/models/utils/landCover.py +9 -0
- hestia_earth/models/utils/lookup.py +13 -2
- hestia_earth/models/utils/measurement.py +3 -28
- hestia_earth/models/utils/stats.py +429 -0
- hestia_earth/models/utils/term.py +15 -3
- hestia_earth/models/utils/time_series.py +90 -0
- hestia_earth/models/version.py +1 -1
- {hestia_earth_models-0.64.4.dist-info → hestia_earth_models-0.64.5.dist-info}/METADATA +1 -1
- {hestia_earth_models-0.64.4.dist-info → hestia_earth_models-0.64.5.dist-info}/RECORD +62 -48
- tests/models/blonkConsultants2016/test_ch4ToAirNaturalVegetationBurning.py +2 -2
- tests/models/blonkConsultants2016/test_co2ToAirAboveGroundBiomassStockChangeLandUseChange.py +2 -2
- tests/models/blonkConsultants2016/test_n2OToAirNaturalVegetationBurningDirect.py +2 -2
- tests/models/cycle/completeness/test_soilAmendment.py +1 -1
- tests/models/cycle/test_liveAnimal.py +1 -1
- tests/models/cycle/test_milkYield.py +1 -1
- tests/models/cycle/test_otherSitesArea.py +68 -0
- tests/models/cycle/test_siteArea.py +51 -0
- tests/models/cycle/test_stockingDensityAnimalHousingAverage.py +2 -2
- tests/models/environmentalFootprintV3/test_soilQualityIndexLandOccupation.py +136 -0
- tests/models/ipcc2019/test_co2ToAirCarbonStockChange_utils.py +50 -0
- tests/models/ipcc2019/test_co2ToAirSoilOrganicCarbonStockChangeManagementChange.py +1 -39
- tests/models/pooreNemecek2018/test_saplings.py +1 -1
- tests/models/site/test_management.py +3 -153
- tests/models/utils/test_array_builders.py +67 -6
- tests/models/utils/test_blank_node.py +191 -7
- tests/models/utils/test_descriptive_stats.py +2 -86
- tests/models/utils/test_measurement.py +1 -22
- tests/models/utils/test_stats.py +186 -0
- tests/models/utils/test_time_series.py +88 -0
- {hestia_earth_models-0.64.4.dist-info → hestia_earth_models-0.64.5.dist-info}/LICENSE +0 -0
- {hestia_earth_models-0.64.4.dist-info → hestia_earth_models-0.64.5.dist-info}/WHEEL +0 -0
- {hestia_earth_models-0.64.4.dist-info → hestia_earth_models-0.64.5.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,429 @@
|
|
|
1
|
+
from functools import reduce
|
|
2
|
+
from numpy import abs, array, concatenate, exp, float64, inf, pi, prod, random, sign, sqrt
|
|
3
|
+
from numpy.typing import NDArray
|
|
4
|
+
from typing import Union
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
def calc_z_critical(
    confidence_interval: float,
    n_sided: int = 2
) -> float64:
    """
    Convert a confidence interval into the corresponding z-critical value.

    Parameters
    ----------
    confidence_interval : float
        The confidence interval as a percentage between 0 and 100%.
    n_sided : int, optional
        The number of tails (default value = `2`).

    Returns
    -------
    float64
        The z-critical value as a floating point between 0 and infinity.
    """
    # Significance level split across the requested number of tails.
    alpha = 1 - confidence_interval / 100
    upper_tail_quantile = 1 - alpha / n_sided
    return _normal_ppf(upper_tail_quantile)
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
def _normal_ppf(q: float64, tol: float64 = 1e-10) -> float64:
    """
    Calculate the percent point function (PPF), also known as the inverse
    cumulative distribution function (CDF), of a standard normal distribution
    using the Newton-Raphson method.

    Parameters
    ----------
    q : float64
        The quantile at which to evaluate the PPF.
    tol : float64, optional
        The tolerance for the Newton-Raphson method. Defaults to 1e-10.

    Returns
    -------
    float64
        The PPF value at the given quantile; `-inf` for `q == 0` and `inf`
        for `q == 1`.
    """
    INITIAL_GUESS = 0
    MAX_ITER = 100

    # Degenerate quantiles map to the distribution's open bounds.
    if q == 1:
        return inf
    if q == 0:
        return -inf

    x = INITIAL_GUESS
    for _ in range(MAX_ITER):
        # Newton-Raphson step: f(x) = CDF(x) - q, f'(x) = PDF(x).
        x_new = x - (_normal_cdf(x) - q) / _normal_pdf(x)
        if abs(x_new - x) < tol:
            # Converged: further iterations would return the same value, so
            # stop early instead of burning the remaining iterations.
            break
        x = x_new
    return x
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
def _normal_cdf(x: float64) -> float64:
    """
    Calculate the cumulative distribution function (CDF) of a standard normal
    distribution for a single value, via the custom error-function
    approximation `_erf`.

    Parameters
    ----------
    x : float64
        The point at which to evaluate the CDF.

    Returns
    -------
    float64
        The CDF value at the given point.
    """
    # Standard identity: Phi(x) = (1 + erf(x / sqrt(2))) / 2.
    scaled = x / sqrt(2)
    return 0.5 * (1 + _erf(scaled))
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
def _erf(x: float64) -> float64:
|
|
81
|
+
"""
|
|
82
|
+
Approximates the error function of a standard normal distribution using a numerical approximation based on
|
|
83
|
+
Abramowitz and Stegun formula 7.1.26.
|
|
84
|
+
|
|
85
|
+
Parameters
|
|
86
|
+
----------
|
|
87
|
+
x : float64
|
|
88
|
+
The input value.
|
|
89
|
+
|
|
90
|
+
Returns
|
|
91
|
+
-------
|
|
92
|
+
float64
|
|
93
|
+
The approximated value of the error function.
|
|
94
|
+
"""
|
|
95
|
+
# constants
|
|
96
|
+
A_1 = 0.254829592
|
|
97
|
+
A_2 = -0.284496736
|
|
98
|
+
A_3 = 1.421413741
|
|
99
|
+
A_4 = -1.453152027
|
|
100
|
+
A_5 = 1.061405429
|
|
101
|
+
P = 0.3275911
|
|
102
|
+
|
|
103
|
+
# Save the sign of x
|
|
104
|
+
sign_ = sign(x)
|
|
105
|
+
x_ = abs(x)
|
|
106
|
+
|
|
107
|
+
# A&S formula 7.1.26
|
|
108
|
+
t = 1.0 / (1.0 + P * x_)
|
|
109
|
+
y = 1.0 - (((((A_5 * t + A_4) * t) + A_3) * t + A_2) * t + A_1) * t * exp(-x_ * x_)
|
|
110
|
+
|
|
111
|
+
return sign_ * y
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
def _normal_pdf(x: float64) -> float64:
|
|
115
|
+
"""
|
|
116
|
+
Calculates the probability density function (PDF) of a standard normal distribution for a single value.
|
|
117
|
+
|
|
118
|
+
Parameters
|
|
119
|
+
----------
|
|
120
|
+
x : float64
|
|
121
|
+
The point at which to evaluate the PDF.
|
|
122
|
+
|
|
123
|
+
Returns
|
|
124
|
+
-------
|
|
125
|
+
float64
|
|
126
|
+
The PDF value at the given point.
|
|
127
|
+
"""
|
|
128
|
+
return 1 / sqrt(2 * pi) * exp(-0.5 * x**2)
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
def _calc_confidence_level(
    z_critical: float64,
    n_sided: int = 2
) -> float64:
    """
    Convert a z-critical value back into a confidence level.

    Parameters
    ----------
    z_critical : float64
        The z-critical value as a floating point number between 0 and
        infinity.
    n_sided : int, optional
        The number of tails (default value = `2`).

    Returns
    -------
    float64
        The confidence level as a percentage between 0 and 100%.
    """
    # Total significance is the single-tail probability times the tail count.
    alpha = (1 - _normal_cdf(z_critical)) * n_sided
    confidence = 1 - alpha
    return confidence * 100
|
|
152
|
+
|
|
153
|
+
|
|
154
|
+
def calc_required_iterations_monte_carlo(
    confidence_level: float,
    precision: float,
    sd: float
) -> int:
    """
    Calculate the number of iterations a Monte Carlo simulation needs to reach
    a desired precision at a given confidence level.

    Parameters
    ----------
    confidence_level : float
        The confidence level, as a percentage out of 100, that the precision
        should be subject to (i.e., we are x% sure that the sample mean
        deviates from the true population mean by less than the desired
        precision).
    precision : float
        The desired precision as a floating point value, in the same units as
        the estimated quantity (e.g., 100 when estimating `organicCarbonPerHa`
        to a precision of 100 kg C ha-1).
    sd : float
        The standard deviation of the sample. Can be estimated from a pilot
        run of ~500 iterations — quick to run, yet large enough for the sample
        standard deviation to converge reasonably well.

    Returns
    -------
    int
        The required number of iterations.
    """
    z_critical = calc_z_critical(confidence_level)
    # From the standard error of the mean: n = (z * sd / precision)^2.
    return round(((sd * z_critical) / precision) ** 2)
|
|
183
|
+
|
|
184
|
+
|
|
185
|
+
def calc_confidence_level_monte_carlo(
    n_iterations: int,
    precision: float,
    sd: float
) -> float:
    """
    Calculate the confidence level that the sample mean produced by a Monte
    Carlo simulation deviates from the true population mean by less than the
    desired precision.

    Parameters
    ----------
    n_iterations : int
        The number of iterations that the Monte Carlo simulation was run for.
    precision : float
        The desired precision as a floating point value, in the same units as
        the estimated quantity (e.g., 100 when estimating `organicCarbonPerHa`
        to a precision of 100 kg C ha-1).
    sd : float
        The standard deviation of the sample.

    Returns
    -------
    float
        The confidence level as a percentage out of 100.
    """
    # Invert precision = z * sd / sqrt(n) to recover the implied z value,
    # then map that z value back to a confidence level.
    implied_z = precision*sqrt(n_iterations)/sd
    return _calc_confidence_level(implied_z)
|
|
211
|
+
|
|
212
|
+
|
|
213
|
+
def calc_precision_monte_carlo(
    confidence_level: float,
    n_iterations: int,
    sd: float
) -> float:
    """
    Calculate the +/- precision achieved by a Monte Carlo simulation for a
    desired confidence level.

    Parameters
    ----------
    confidence_level : float
        The confidence level, as a percentage out of 100, that the precision
        should be subject to (i.e., we are x% sure that the sample mean
        deviates from the true population mean by less than the returned
        precision).
    n_iterations : int
        The number of iterations that the Monte Carlo simulation was run for.
    sd : float
        The standard deviation of the sample.

    Returns
    -------
    float
        The precision of the estimated sample mean, as a floating point value
        in the same units as the estimated mean.
    """
    # From the standard error of the mean: precision = z * sd / sqrt(n).
    z_value = calc_z_critical(confidence_level)
    return (sd*z_value)/sqrt(n_iterations)
|
|
239
|
+
|
|
240
|
+
|
|
241
|
+
def truncnorm_rvs(
    a: float,
    b: float,
    loc: float,
    scale: float,
    shape: Union[int, tuple[int, ...]],
    seed: Union[int, random.Generator, None] = None
) -> NDArray:
    """
    Draw random samples from a truncated normal distribution via rejection
    sampling. Unlike the `scipy` equivalent, `a` and `b` are the abscissae at
    which the distribution is truncated (not the number of standard deviations
    from `loc`).

    Parameters
    ----------
    a : float
        The lower bound of the distribution.
    b : float
        The upper bound of the distribution.
    loc : float
        Mean ("centre") of the underlying normal distribution.
    scale : float
        Standard deviation (spread or "width") of the underlying normal
        distribution. Must be non-negative.
    shape : int | tuple[int, ...]
        Output shape. If the given shape is, e.g., (m, n, k), then m * n * k
        samples are drawn.
    seed : int | Generator | None, optional
        A seed to initialize the BitGenerator. If passed a Generator, it will
        be returned unaltered. If `None`, then fresh, unpredictable entropy
        will be pulled from the OS.

    Returns
    -------
    NDArray
        Array of samples with the requested shape.
    """
    n_required = prod(shape)
    rng = random.default_rng(seed)
    accepted = array([])

    # Rejection sampling: each pass oversamples by a factor of 2 relative to
    # the shortfall, keeps the draws inside [a, b], and repeats until enough
    # samples have been accepted.
    while accepted.size < n_required:
        draws = rng.normal(loc, scale, (n_required - accepted.size) * 2)
        in_bounds = draws[(a <= draws) & (draws <= b)]
        accepted = concatenate([accepted, in_bounds])

    return accepted[:n_required].reshape(shape)
|
|
285
|
+
|
|
286
|
+
|
|
287
|
+
def add_normal_distributions(
    mu_1: float, sigma_1: float, mu_2: float, sigma_2: float, rho: float = 0
) -> tuple[float, float]:
    """
    Add together two normal distributions, with optional correlation.

    Given X1 ~ N(mu_1, sigma_1^2) and X2 ~ N(mu_2, sigma_2^2), compute the
    mean and standard deviation of the sum Z = X1 + X2, taking the
    correlation between the two distributions into account.

    n.b. Positive correlations (`rho` > 0) increase the standard deviation of
    Z because positively correlated variables tend to move together,
    increasing combined uncertainty. Negative correlations (`rho` < 0) reduce
    it since the variables move in opposite directions, cancelling out some
    of the variability. Independent variables (`rho` = 0) give an
    intermediate level of uncertainty.

    Parameters
    ----------
    mu_1 : float
        Mean of the first normal distribution (X1).
    sigma_1 : float
        Standard deviation of the first normal distribution (X1).
    mu_2 : float
        Mean of the second normal distribution (X2).
    sigma_2 : float
        Standard deviation of the second normal distribution (X2).
    rho : float, optional
        Correlation coefficient between X1 and X2, between -1 (perfectly
        negative) and 1 (perfectly positive). Default is 0 (independent).

    Returns
    -------
    tuple[float, float]
        A tuple `(mu_sum, sigma_sum)` with the mean and standard deviation of
        Z = X1 + X2.
    """
    combined_mean = mu_1 + mu_2
    # Var(Z) = var1 + var2 + 2 * cov, where cov = rho * sigma_1 * sigma_2.
    combined_variance = (
        sigma_1 ** 2
        + sigma_2 ** 2
        + 2 * rho * sigma_1 * sigma_2
    )
    return combined_mean, sqrt(combined_variance)
|
|
330
|
+
|
|
331
|
+
|
|
332
|
+
def subtract_normal_distributions(
    mu_1: float, sigma_1: float, mu_2: float, sigma_2: float, rho: float = 0
) -> tuple[float, float]:
    """
    Subtract a normal distribution from another, with optional correlation.

    Given X1 ~ N(mu_1, sigma_1^2) and X2 ~ N(mu_2, sigma_2^2), compute the
    mean and standard deviation of the difference Z = X1 - X2, taking the
    correlation between the two distributions into account.

    n.b. Positive correlations (`rho` > 0) reduce the standard deviation of Z
    because positively correlated variables tend to move together, cancelling
    out some of the variability when subtracted. Negative correlations
    (`rho` < 0) increase it since the variables move in opposite directions,
    amplifying the variability when subtracted. Independent variables
    (`rho` = 0) give an intermediate level of uncertainty.

    Parameters
    ----------
    mu_1 : float
        Mean of the first normal distribution (X1).
    sigma_1 : float
        Standard deviation of the first normal distribution (X1).
    mu_2 : float
        Mean of the second normal distribution (X2).
    sigma_2 : float
        Standard deviation of the second normal distribution (X2).
    rho : float, optional
        Correlation coefficient between X1 and X2, between -1 (perfectly
        negative) and 1 (perfectly positive). Default is 0 (independent).

    Returns
    -------
    tuple[float, float]
        A tuple `(mu_diff, sigma_diff)` with the mean and standard deviation
        of Z = X1 - X2.
    """
    difference_mean = mu_1 - mu_2
    # Var(Z) = var1 + var2 - 2 * cov, where cov = rho * sigma_1 * sigma_2.
    difference_variance = (
        sigma_1 ** 2
        + sigma_2 ** 2
        - 2 * rho * sigma_1 * sigma_2
    )
    return difference_mean, sqrt(difference_variance)
|
|
375
|
+
|
|
376
|
+
|
|
377
|
+
def lerp_normal_distributions(
    mu_1: float,
    sigma_1: float,
    mu_2: float,
    sigma_2: float,
    alpha: float,
    rho: float = 0
) -> tuple[float, float]:
    """
    Linearly interpolate between two normal distributions, with optional
    correlation.

    Given X1 ~ N(mu_1, sigma_1^2) and X2 ~ N(mu_2, sigma_2^2), compute the
    mean and standard deviation of the interpolated distribution
    Z = (1 - alpha) * X1 + alpha * X2, taking the correlation between the
    two distributions into account.

    n.b. Positive correlations (`rho` > 0) increase the standard deviation of
    Z because positively correlated variables tend to move together,
    increasing combined uncertainty. Negative correlations (`rho` < 0) reduce
    it since the variables move in opposite directions, cancelling out some
    of the variability. Independent variables (`rho` = 0) give an
    intermediate level of uncertainty.

    Parameters
    ----------
    mu_1 : float
        Mean of the first normal distribution (X1).
    sigma_1 : float
        Standard deviation of the first normal distribution (X1).
    mu_2 : float
        Mean of the second normal distribution (X2).
    sigma_2 : float
        Standard deviation of the second normal distribution (X2).
    alpha : float
        Interpolation factor (0 <= alpha <= 1). A value of 0 yields X1, a
        value of 1 yields X2, and intermediate values interpolate between
        them; values outside [0, 1] extrapolate beyond X1 or X2 respectively.
    rho : float, optional
        Correlation coefficient between X1 and X2, between -1 (perfectly
        negative) and 1 (perfectly positive). Default is 0 (independent).

    Returns
    -------
    tuple[float, float]
        A tuple `(mu_Z, sigma_Z)` with the mean and standard deviation of
        Z = (1 - alpha) * X1 + alpha * X2.
    """
    weight_1 = 1 - alpha
    interpolated_mean = weight_1 * mu_1 + alpha * mu_2
    # Var(Z) = w1^2 var1 + w2^2 var2 + 2 w1 w2 cov, with cov = rho*s1*s2.
    interpolated_variance = (
        (weight_1 ** 2) * sigma_1 ** 2
        + (alpha ** 2) * sigma_2 ** 2
        + 2 * alpha * weight_1 * rho * sigma_1 * sigma_2
    )
    return interpolated_mean, sqrt(interpolated_variance)
|
|
@@ -1,9 +1,9 @@
|
|
|
1
1
|
from hestia_earth.schema import SchemaType, TermTermType, SiteSiteType
|
|
2
|
-
from hestia_earth.utils.lookup import download_lookup, get_table_value, column_name
|
|
3
2
|
from hestia_earth.utils.api import find_node, search
|
|
3
|
+
from hestia_earth.utils.lookup import download_lookup, get_table_value, column_name
|
|
4
4
|
|
|
5
|
-
from ..log import debugMissingLookup
|
|
6
5
|
from .constant import Units
|
|
6
|
+
from ..log import debugMissingLookup
|
|
7
7
|
|
|
8
8
|
LIMIT = 1000
|
|
9
9
|
|
|
@@ -679,4 +679,16 @@ def get_land_cover_siteTypes():
|
|
|
679
679
|
"should": [{"match": {"name": siteType.value}} for siteType in SiteSiteType],
|
|
680
680
|
"minimum_should_match": 1
|
|
681
681
|
},
|
|
682
|
-
})
|
|
682
|
+
}, limit=LIMIT)
|
|
683
|
+
|
|
684
|
+
|
|
685
|
+
def download_all_land_cover_terms():
    """
    Download every `landCover` Term from the HESTIA search API, returning only
    the `@id`, `nameNormalized` and `subClassOf` fields (up to 10000 results).
    """
    query = {
        "bool": {
            "must": [
                {"match": {"@type": "Term"}},
                {"match": {"termType": "landCover"}}
            ]
        },
    }
    return search(query, fields=['@id', 'nameNormalized', 'subClassOf'], limit=10000)
|
|
@@ -0,0 +1,90 @@
|
|
|
1
|
+
from hestia_earth.utils.date import diff_in_days
|
|
2
|
+
|
|
3
|
+
from itertools import product
|
|
4
|
+
from numpy import array, exp, log
|
|
5
|
+
from numpy.typing import NDArray
|
|
6
|
+
from typing import Callable
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
def exponential_decay(t: float, tau: float = 1, initial_value: float = 1, final_value: float = 0) -> float:
    """
    Compute the value of an exponential decay curve at elapsed time `t`.

    Parameters
    ----------
    t : float
        The time elapsed.
    tau : float, optional
        The decay constant, related to the half-life (default = 1).
    initial_value : float, optional
        The value at time t = 0 (default = 1).
    final_value : float, optional
        The value as time approaches infinity (default = 0).

    Returns
    -------
    float
        The exponentially decaying value based on the given parameters.
    """
    # Decays from `initial_value` towards the `final_value` asymptote.
    decay_fraction = exp(-t / tau)
    return final_value + (initial_value - final_value) * decay_fraction
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
def calc_tau(half_life: float) -> float:
    """
    Calculate the decay constant (tau) for an `exponential_decay` function
    from the half-life.

    Parameters
    ----------
    half_life : float
        The half-life period over which the value transitions to half its
        initial value.

    Returns
    -------
    float
        The decay constant tau corresponding to the specified half-life.
    """
    # tau = half_life / ln(2), since exp(-half_life / tau) must equal 0.5.
    ln_2 = log(2)
    return half_life / ln_2
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
def compute_time_series_correlation_matrix(
    datestrs: list[str],
    delta_time_fn: Callable[[str, str], float] = diff_in_days,
    decay_fn: Callable[[float], float] = exponential_decay
) -> NDArray:
    """
    Compute a correlation matrix for a list of time points (dates), where
    correlations decay as the time difference between dates increases. Both
    the time-difference calculation and the decay function can be customized.

    n.b. The default decay function produces correlations between 0 and 1.
    Alternative decay functions may allow for negative correlations, giving
    values between -1 and 1.

    n.b. The function assumes that `delta_time_fn` and `decay_fn` are
    appropriate for the format of the provided date strings.

    Parameters
    ----------
    datestrs : list[str]
        List of date strings representing time points in the time series.
    delta_time_fn : Callable[[str, str], float], optional
        Function to calculate the time difference between two date strings.
        Defaults to `diff_in_days`, which returns the difference in days. Must
        have the signature `f(date_1: str, date_2: str) -> float`.
    decay_fn : Callable[[float], float], optional
        Function to apply decay to the time differences. Defaults to
        `exponential_decay`, which models an exponential decay in correlation.
        Must have the signature `f(delta_time: float) -> float`.

    Returns
    -------
    NDArray
        A symmetric 2D array with shape `(len(datestrs), len(datestrs))`
        containing correlation values between time points, all between
        -1 and 1.
    """
    n_dates = len(datestrs)

    # Every ordered pair of dates, row-major, then reshaped into a square
    # matrix; abs() makes the correlation independent of date order.
    pairwise_correlations = [
        decay_fn(abs(delta_time_fn(date_1, date_2)))
        for date_1 in datestrs
        for date_2 in datestrs
    ]

    return array(pairwise_correlations).reshape(n_dates, n_dates)
|
hestia_earth/models/version.py
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
VERSION = '0.64.
|
|
1
|
+
VERSION = '0.64.5'
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.1
|
|
2
2
|
Name: hestia-earth-models
|
|
3
|
-
Version: 0.64.
|
|
3
|
+
Version: 0.64.5
|
|
4
4
|
Summary: HESTIA's set of modules for filling gaps in the activity data using external datasets (e.g. populating soil properties with a geospatial dataset using provided coordinates) and internal lookups (e.g. populating machinery use from fuel use). Includes rules for when gaps should be filled versus not (e.g. never gap fill yield, gap fill crop residue if yield provided etc.).
|
|
5
5
|
Home-page: https://gitlab.com/hestia-earth/hestia-engine-models
|
|
6
6
|
Author: HESTIA Team
|