hestia-earth-models 0.60.0__py3-none-any.whl → 0.61.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of hestia-earth-models might be problematic according to the registry.
- hestia_earth/models/cache_sites.py +41 -19
- hestia_earth/models/cycle/cycleDuration.py +61 -22
- hestia_earth/models/cycle/startDate.py +16 -11
- hestia_earth/models/cycle/startDateDefinition.py +69 -0
- hestia_earth/models/impact_assessment/emissions.py +1 -3
- hestia_earth/models/ipcc2019/pastureGrass.py +1 -1
- hestia_earth/models/linkedImpactAssessment/__init__.py +8 -162
- hestia_earth/models/linkedImpactAssessment/emissions.py +168 -0
- hestia_earth/models/{impact_assessment → linkedImpactAssessment}/utils.py +1 -1
- hestia_earth/models/mocking/search-results.json +257 -229
- hestia_earth/models/{impact_assessment → pooreNemecek2018}/freshwaterWithdrawalsDuringCycle.py +1 -1
- hestia_earth/models/site/brackishWater.py +43 -0
- hestia_earth/models/site/freshWater.py +43 -0
- hestia_earth/models/site/pre_checks/cache_geospatialDatabase.py +2 -2
- hestia_earth/models/site/salineWater.py +43 -0
- hestia_earth/models/utils/__init__.py +3 -3
- hestia_earth/models/version.py +1 -1
- {hestia_earth_models-0.60.0.dist-info → hestia_earth_models-0.61.0.dist-info}/METADATA +2 -2
- {hestia_earth_models-0.60.0.dist-info → hestia_earth_models-0.61.0.dist-info}/RECORD +52 -44
- tests/models/cycle/test_cycleDuration.py +15 -27
- tests/models/cycle/test_startDate.py +7 -7
- tests/models/cycle/test_startDateDefinition.py +31 -0
- tests/models/linkedImpactAssessment/__init__.py +0 -0
- tests/models/linkedImpactAssessment/test_emissions.py +22 -0
- tests/models/{impact_assessment → linkedImpactAssessment}/test_freshwaterWithdrawalsInputsProduction.py +4 -4
- tests/models/{impact_assessment → linkedImpactAssessment}/test_landOccupationInputsProduction.py +4 -4
- tests/models/{impact_assessment → linkedImpactAssessment}/test_landTransformationFromCropland100YearAverageInputsProduction.py +4 -4
- tests/models/{impact_assessment → linkedImpactAssessment}/test_landTransformationFromCropland20YearAverageInputsProduction.py +4 -4
- tests/models/{impact_assessment → linkedImpactAssessment}/test_landTransformationFromForest100YearAverageInputsProduction.py +4 -4
- tests/models/{impact_assessment → linkedImpactAssessment}/test_landTransformationFromForest20YearAverageInputsProduction.py +4 -4
- tests/models/{impact_assessment → linkedImpactAssessment}/test_landTransformationFromOtherNaturalVegetation100YearAverageInputsProduction.py +4 -4
- tests/models/{impact_assessment → linkedImpactAssessment}/test_landTransformationFromOtherNaturalVegetation20YearAverageInputsProduction.py +4 -4
- tests/models/linkedImpactAssessment/test_landTransformationFromPermanentPasture100YearAverageInputsProduction.py +24 -0
- tests/models/linkedImpactAssessment/test_landTransformationFromPermanentPasture20YearAverageInputsProduction.py +24 -0
- tests/models/{impact_assessment → pooreNemecek2018}/test_freshwaterWithdrawalsDuringCycle.py +1 -1
- tests/models/site/test_brackishWater.py +32 -0
- tests/models/site/test_freshWater.py +32 -0
- tests/models/site/test_salineWater.py +32 -0
- tests/models/test_cache_sites.py +0 -12
- hestia_earth/models/cycle/endDate.py +0 -50
- tests/models/cycle/test_endDate.py +0 -24
- tests/models/impact_assessment/test_landTransformationFromPermanentPasture100YearAverageInputsProduction.py +0 -23
- tests/models/impact_assessment/test_landTransformationFromPermanentPasture20YearAverageInputsProduction.py +0 -23
- tests/models/test_linkedImpactAssessment.py +0 -33
- /hestia_earth/models/{impact_assessment → linkedImpactAssessment}/freshwaterWithdrawalsInputsProduction.py +0 -0
- /hestia_earth/models/{impact_assessment → linkedImpactAssessment}/landOccupationInputsProduction.py +0 -0
- /hestia_earth/models/{impact_assessment → linkedImpactAssessment}/landTransformationFromCropland100YearAverageInputsProduction.py +0 -0
- /hestia_earth/models/{impact_assessment → linkedImpactAssessment}/landTransformationFromCropland20YearAverageInputsProduction.py +0 -0
- /hestia_earth/models/{impact_assessment → linkedImpactAssessment}/landTransformationFromForest100YearAverageInputsProduction.py +0 -0
- /hestia_earth/models/{impact_assessment → linkedImpactAssessment}/landTransformationFromForest20YearAverageInputsProduction.py +0 -0
- /hestia_earth/models/{impact_assessment → linkedImpactAssessment}/landTransformationFromOtherNaturalVegetation100YearAverageInputsProduction.py +0 -0
- /hestia_earth/models/{impact_assessment → linkedImpactAssessment}/landTransformationFromOtherNaturalVegetation20YearAverageInputsProduction.py +0 -0
- /hestia_earth/models/{impact_assessment → linkedImpactAssessment}/landTransformationFromPermanentPasture100YearAverageInputsProduction.py +0 -0
- /hestia_earth/models/{impact_assessment → linkedImpactAssessment}/landTransformationFromPermanentPasture20YearAverageInputsProduction.py +0 -0
- {hestia_earth_models-0.60.0.dist-info → hestia_earth_models-0.61.0.dist-info}/LICENSE +0 -0
- {hestia_earth_models-0.60.0.dist-info → hestia_earth_models-0.61.0.dist-info}/WHEEL +0 -0
- {hestia_earth_models-0.60.0.dist-info → hestia_earth_models-0.61.0.dist-info}/top_level.txt +0 -0

hestia_earth/models/cache_sites.py

@@ -1,5 +1,6 @@
 from functools import reduce
 from enum import Enum
+from pydash.objects import merge
 from hestia_earth.utils.api import download_hestia
 from hestia_earth.utils.tools import flatten
 
@@ -64,12 +65,13 @@ def _run_values(sites: list, param_type: ParamType, rasters: list = [], vectors:
             **_cache_results(raster_results, rasters, index),
             **_cache_results(vector_results, vectors, index)
         } | ({CACHE_AREA_SIZE: area_size} if area_size is not None else {})
-
-
-CACHE_KEY
-
-
-
+        cached_data = merge(cached_value(site, CACHE_GEOSPATIAL_KEY, {}), cached_data)
+        site_cache = merge(
+            site.get(CACHE_KEY, {}),
+            {CACHE_GEOSPATIAL_KEY: cached_data},
+            ({CACHE_YEARS_KEY: cached_value(site, CACHE_YEARS_KEY, []) + years} if years else {})
+        )
+        return merge(site, {CACHE_KEY: site_cache})
 
     return reduce(lambda prev, curr: prev + [_process_site(curr)], sites, [])
 
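
The new caching code above merges freshly queried values into whatever is already cached on the Site, relying on pydash's recursive merge rather than a plain dict union. A minimal standalone sketch of that behaviour (the keys below are illustrative, not the exact HESTIA cache keys):

    from pydash.objects import merge

    existing = {'cache': {'geospatial': {'awareWaterBasinId': '123'}, 'years': [2019]}}
    incoming = {'cache': {'geospatial': {'histosol': 0.2}, 'years': [2019, 2020]}}

    # merge is recursive, so previously cached geospatial values survive a second caching pass
    combined = merge({}, existing, incoming)
    print(combined)
    # {'cache': {'geospatial': {'awareWaterBasinId': '123', 'histosol': 0.2}, 'years': [2019, 2020]}}
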
@@ -82,7 +84,7 @@ def _preload_regions_area_size(sites: dict):
     return {term_id: download_hestia(term_id).get('area') for term_id in region_ids}
 
 
-def _group_sites(sites: dict):
+def _group_sites(sites: dict, check_has_cache: bool = True):
     # preload area size for all regions
     regions_area_size = _preload_regions_area_size(sites)
 
@@ -90,7 +92,7 @@ def _group_sites(sites: dict):
         return regions_area_size.get(_site_gadm_id(site)) if _should_preload_region_area_size(site) else None
 
     sites = [
-        (n, ) + (_should_run(n, get_region_area_size(n))) for n in sites
+        (n, ) + (_should_run(n, area_size=get_region_area_size(n), check_has_cache=check_has_cache)) for n in sites
     ]
     # restrict sites based on should_cache result
     sites = [(site, area_size) for site, should_cache, area_size in sites if should_cache]
@@ -112,7 +114,25 @@ def _group_sites(sites: dict):
     }
 
 
-def
+def _run(sites: list, years: list, include_region: bool, years_only: bool = False):
+    rasters, vectors = list_collections(years, include_region, years_only)
+    filtered_data = _group_sites(sites, not years_only)
+    return flatten([
+        _run_values(filtered_data.get(param_type), param_type, rasters, vectors, years)
+        for param_type in [e for e in ParamType] if len(filtered_data.get(param_type)) > 0
+    ])
+
+
+def _group_years(years: list, years_range: int):
+    batches = sorted(list(set(list(range(years[0], years[-1] + 1, years_range)) + [years[0], years[-1]])))
+    grouped_batches = [batches[i:i+2] for i in range(0, len(batches))]
+    return [
+        # make sure we don't overlap
+        [v[0] + (0 if v[0] == years[0] else 1), v[1]] for v in grouped_batches if len(v) == 2
+    ]
+
+
+def run(sites: list, years: list = None, include_region: bool = False):
     """
     Run all queries at once for the list of provided Sites.
     Note: Earth Engine needs to be initiliased with `init_gee()` before running this function.
@@ -126,14 +146,16 @@ def run(sites: list, years: list = None, include_region: bool = False, years_onl
     include_region : bool
         Prefecth region IDs.
         This will cache region-level data and will make the request slower. Only use if needed.
-    years_only : bool
-        Run only the collections that depend on the years (if provided).
     """
-
-
-
-
-
-
-
-
+    try:
+        return _run(sites, years, include_region)
+    except Exception as e:
+        # when querying with multiple years, we can reach a compute memory limit, so run the years separately
+        if str(e) == 'User memory limit exceeded.' and years:
+            sites = _run(sites, [], include_region)
+            # query for subranges
+            for sub_years in _group_years(years, years_range=5):
+                sites = _run(sites, sub_years, include_region, years_only=True)
+            return sites
+
+    return []
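
When Earth Engine rejects a multi-year query with 'User memory limit exceeded.', run() now falls back to year batches produced by _group_years. A standalone copy of that helper shows how a year range is split into non-overlapping sub-ranges:

    def _group_years(years: list, years_range: int):
        # same logic as the new cache_sites._group_years
        batches = sorted(list(set(list(range(years[0], years[-1] + 1, years_range)) + [years[0], years[-1]])))
        grouped_batches = [batches[i:i+2] for i in range(0, len(batches))]
        return [
            [v[0] + (0 if v[0] == years[0] else 1), v[1]] for v in grouped_batches if len(v) == 2
        ]

    print(_group_years(list(range(2010, 2024)), years_range=5))
    # [[2010, 2015], [2016, 2020], [2021, 2023]]
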

hestia_earth/models/cycle/cycleDuration.py

@@ -1,50 +1,89 @@
 """
 Cycle duration
 
-This model calculates the cycle duration using
+This model calculates the cycle duration using:
+* the `endDate` and the `startDate` if both are provided with a day precision;
+* for temporary crops, using the `croppingIntensity` if provided;
+* for permanent crops, the duration is set to `365`.
 """
-from hestia_earth.
+from hestia_earth.schema import TermTermType
+from hestia_earth.utils.model import find_term_match, find_primary_product
+from hestia_earth.utils.date import diff_in_days
 
 from hestia_earth.models.log import logRequirements, logShouldRun
-from hestia_earth.models.utils.
+from hestia_earth.models.utils.crop import get_crop_grouping_fao
 from . import MODEL
 
 REQUIREMENTS = {
     "Cycle": {
-        "cycleDuration": "365",
         "endDate": "",
-        "
-
-        "
-
+        "optional": {
+            "startDate": "",
+            "products": {
+                "@type": "Product",
+                "primary": "True",
+                "term.termType": "crop"
+            },
+            "practices": [{"@type": "Practice", "value": "", "term.@id": "croppingIntensity"}]
         }
     }
 }
 RETURNS = {
     "a `number` or `None` if requirements are not met": ""
 }
+LOOKUPS = {
+    "crop": "cropGroupingFAO"
+}
 MODEL_KEY = 'cycleDuration'
 DEFAULT_DURATION = 365
 
 
-def
+def _run_by_dates(cycle: dict):
+    start_date = cycle.get('startDate')
+    end_date = cycle.get('endDate')
+    return diff_in_days(start_date, end_date)
+
+
+def _should_run_by_dates(cycle: dict):
+    start_date = cycle.get('startDate', '')
+    start_date_has_day = len(start_date) == 10
+    end_date = cycle.get('endDate', '')
+    end_date_has_day = len(start_date) == 10
+
+    logRequirements(cycle, model=MODEL, key=MODEL_KEY, by='dates',
+                    start_date_has_day=start_date_has_day,
+                    start_date=start_date,
+                    end_date_has_day=end_date_has_day,
+                    end_date=end_date)
+
+    should_run = all([start_date_has_day, end_date_has_day])
+    logShouldRun(cycle, MODEL, None, should_run, key=MODEL_KEY, by='dates')
+    return should_run
+
+
+def _run_by_crop(cycle: dict):
+    product = find_primary_product(cycle)
+    grouping = get_crop_grouping_fao(MODEL, MODEL_KEY, product.get('term', {}))
+    is_permanent_crop = grouping == 'Permanent crops'
+    croppingIntensity = find_term_match(cycle.get('practices', []), 'croppingIntensity').get('value', [1])[0]
+    return DEFAULT_DURATION * (1 if is_permanent_crop else croppingIntensity)
 
 
-def
-
-
-
+def _should_run_by_crop(cycle: dict):
+    product = find_primary_product(cycle) or {}
+    product_term_type = product.get('term', {}).get('termType')
+    primary_product_is_crop = product_term_type == TermTermType.CROP.value
 
-    logRequirements(cycle, model=MODEL, key=MODEL_KEY,
-
-                    croppingIntensity=croppingIntensity,
-                    site_type_valid=site_type_valid)
+    logRequirements(cycle, model=MODEL, key=MODEL_KEY, by='product',
+                    primary_product_is_crop=primary_product_is_crop)
 
-    should_run = all([
-    logShouldRun(cycle, MODEL, None, should_run, key=MODEL_KEY)
-    return should_run
+    should_run = all([primary_product_is_crop])
+    logShouldRun(cycle, MODEL, None, should_run, key=MODEL_KEY, by='product')
+    return should_run
 
 
 def run(cycle: dict):
-
-
+    return _run_by_dates(cycle) if _should_run_by_dates(cycle) else (
+        _run_by_crop(cycle) if _should_run_by_crop(cycle)
+        else None
+    )
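
For the date-based branch, the duration is simply the number of days between startDate and endDate. A standalone approximation using the standard library (the model itself calls hestia_earth.utils.date.diff_in_days, whose exact convention, e.g. whether the end day is counted, may differ):

    from datetime import date

    def days_between(start_date: str, end_date: str) -> int:
        # both dates must be at day precision ('YYYY-MM-DD'), as _should_run_by_dates requires
        return (date.fromisoformat(end_date) - date.fromisoformat(start_date)).days

    print(days_between('2019-03-01', '2019-10-15'))  # 228

When the dates are missing or imprecise, the crop branch falls back to 365 days for permanent crops, or 365 * croppingIntensity for temporary crops.
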

hestia_earth/models/cycle/startDate.py

@@ -1,18 +1,20 @@
 """
 Start Date
 
-This model
-`
+This model sets the [Cycle startDate](https://hestia.earth/schema/Cycle#startDate) based on the `endDate` and the
+`cycleDuration`. This only works when the `endDate` has been provided to a day precision (`2000-01-01`).
 """
+from datetime import timedelta
 from hestia_earth.utils.date import is_in_days
-from hestia_earth.utils.tools import
+from hestia_earth.utils.tools import safe_parse_date
 
 from hestia_earth.models.log import logRequirements, logShouldRun
 from . import MODEL
 
 REQUIREMENTS = {
     "Cycle": {
-        "
+        "endDate": "to day precision",
+        "cycleDuration": ""
     }
 }
 RETURNS = {
@@ -22,19 +24,22 @@ MODEL_KEY = 'startDate'
 
 
 def _run(cycle: dict):
-
-
+    endDate = safe_parse_date(cycle.get('endDate'))
+    cycleDuration = cycle.get('cycleDuration')
+    return (endDate - timedelta(days=cycleDuration)).strftime('%Y-%m-%d')
 
 
 def _should_run(cycle: dict):
-
-
+    has_endDate = cycle.get('endDate') is not None
+    has_day_precision = has_endDate and is_in_days(cycle.get('endDate'))
+    has_cycleDuration = cycle.get('cycleDuration') is not None
 
     logRequirements(cycle, model=MODEL, key=MODEL_KEY,
-
-
+                    has_endDate=has_endDate,
+                    has_day_precision=has_day_precision,
+                    has_cycleDuration=has_cycleDuration)
 
-    should_run = all([
+    should_run = all([has_endDate, has_day_precision, has_cycleDuration])
     logShouldRun(cycle, MODEL, None, should_run, key=MODEL_KEY)
     return should_run
 
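
The gap-filled startDate is just the endDate shifted back by cycleDuration days. A standalone sketch of the same arithmetic (the model parses the date with safe_parse_date rather than strptime):

    from datetime import datetime, timedelta

    def start_date(end_date: str, cycle_duration: float) -> str:
        # mirrors _run: subtract the duration from a day-precision endDate
        return (datetime.strptime(end_date, '%Y-%m-%d') - timedelta(days=cycle_duration)).strftime('%Y-%m-%d')

    print(start_date('2000-12-31', 120))  # 2000-09-02
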

hestia_earth/models/cycle/startDateDefinition.py (new file)

@@ -0,0 +1,69 @@
+"""
+Start Date Definition
+
+This model sets the [Cycle startDateDefinition](https://hestia.earth/schema/Cycle#startDateDefinition)
+based on the result of the `cycleDuration` gap-filling model.
+"""
+from hestia_earth.schema import TermTermType, CycleStartDateDefinition
+from hestia_earth.utils.model import find_primary_product
+
+from hestia_earth.models.log import logRequirements, logShouldRun
+from hestia_earth.models.utils.crop import get_crop_grouping_fao
+from . import MODEL
+
+REQUIREMENTS = {
+    "Cycle": {
+        "cycleDuration": "",
+        "products": {
+            "@type": "Product",
+            "primary": "True",
+            "term.termType": "crop"
+        },
+        "optional": {
+            "endDate": ""
+        }
+    }
+}
+RETURNS = {
+    "The startDateDefinition as a string": ""
+}
+MODEL_KEY = 'startDateDefinition'
+
+
+def _is_last_day_of_month(date: str):
+    date_parts = date.split('-')
+    return len(date_parts) > 1 and date_parts[1] == '12' and any([
+        len(date_parts) == 3 and date_parts[2] == '31',
+        len(date_parts) == 2
+    ])
+
+
+def _run(cycle: dict):
+    product = find_primary_product(cycle)
+    grouping = get_crop_grouping_fao(MODEL, MODEL_KEY, product.get('term', {}))
+    is_permanent_crop = grouping == 'Permanent crops'
+    print(is_permanent_crop)
+    return (
+        CycleStartDateDefinition.START_OF_YEAR.value if _is_last_day_of_month(cycle.get('endDate'))
+        else CycleStartDateDefinition.ONE_YEAR_PRIOR.value
+    ) if is_permanent_crop else (
+        CycleStartDateDefinition.HARVEST_OF_PREVIOUS_CROP.value
+    )
+
+
+def _should_run(cycle: dict):
+    cycleDuration_added = 'cycleDuration' in cycle.get('added', [])
+    product = find_primary_product(cycle) or {}
+    product_term_type = product.get('term', {}).get('termType')
+    primary_product_is_crop = product_term_type == TermTermType.CROP.value
+
+    logRequirements(cycle, model=MODEL, key=MODEL_KEY,
+                    cycleDuration_added=cycleDuration_added,
+                    primary_product_is_crop=primary_product_is_crop)
+
+    should_run = all([cycleDuration_added, primary_product_is_crop])
+    logShouldRun(cycle, MODEL, None, should_run, key=MODEL_KEY)
+    return should_run
+
+
+def run(cycle: dict): return _run(cycle) if _should_run(cycle) else None
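
Despite its name, _is_last_day_of_month as written only matches end dates that close a year: either '-12-31' at day precision or a bare '-12' at month precision. A standalone check of that behaviour:

    def _is_last_day_of_month(date: str):
        # copied from the new model for illustration
        date_parts = date.split('-')
        return len(date_parts) > 1 and date_parts[1] == '12' and any([
            len(date_parts) == 3 and date_parts[2] == '31',
            len(date_parts) == 2
        ])

    for d in ('2000-12-31', '2000-12', '2000-06-30', '2000'):
        print(d, _is_last_day_of_month(d))
    # 2000-12-31 True, 2000-12 True, 2000-06-30 False, 2000 False

For permanent crops this selects between START_OF_YEAR and ONE_YEAR_PRIOR; other crops always get HARVEST_OF_PREVIOUS_CROP.
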

hestia_earth/models/impact_assessment/emissions.py

@@ -42,11 +42,9 @@ def _indicator(product: dict):
     term_id = emission.get('term', {}).get('@id')
     value = convert_value_from_cycle(product, list_sum(emission.get('value', [0])), model=MODEL, term_id=term_id)
 
-    indicator = _new_indicator(emission.get('term', {}))
+    indicator = _new_indicator(emission.get('term', {}), emission.get('methodModel'))
     indicator['value'] = value
 
-    if 'methodModel' in emission:
-        indicator['methodModel'] = emission['methodModel']
     if len(emission.get('inputs', [])):
         indicator['inputs'] = emission['inputs']
     if emission.get('operation'):

hestia_earth/models/ipcc2019/pastureGrass.py

@@ -147,7 +147,7 @@ def _input(term_id: str, value: float):
     return node
 
 
-def _sum_values(values: list, index=0): return
+def _sum_values(values: list, index=0): return list_sum([v[index] for v in values])
 
 
 def calculate_NEwool(cycle: dict) -> float:
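
The completed _sum_values sums one column of a list of tuples; a sketch with the builtin sum standing in for hestia_earth.utils.tools.list_sum:

    def _sum_values(values: list, index=0):
        # equivalent behaviour using the builtin sum instead of list_sum
        return sum(v[index] for v in values)

    pairs = [(1.0, 10.0), (2.5, 20.0)]
    print(_sum_values(pairs))           # 3.5  (sum of the first elements)
    print(_sum_values(pairs, index=1))  # 30.0 (sum of the second elements)
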

hestia_earth/models/linkedImpactAssessment/__init__.py

@@ -1,167 +1,13 @@
-
-
+from os.path import dirname, abspath
+import sys
+from importlib import import_module
 
-
-"""
-from functools import reduce
-from hestia_earth.schema import EmissionMethodTier
-from hestia_earth.utils.lookup import download_lookup, get_table_value, column_name
-from hestia_earth.utils.tools import flatten, list_sum
+from hestia_earth.models.utils.blank_node import run_if_required
 
-
-
-from hestia_earth.models.utils.input import load_impacts
-from hestia_earth.models.utils.blank_node import group_by_keys
-
-REQUIREMENTS = {
-    "Cycle": {
-        "inputs": [{
-            "@type": "Input",
-            "value": "> 0",
-            "impactAssessment": {
-                "@type": "ImpactAssessment",
-                "emissionsResourceUse": [{"@type": "Indicator", "value": ""}]
-            }
-        }],
-        "optional": {
-            "animals": [{
-                "@type": "Animal",
-                "inputs": [{
-                    "@type": "Input",
-                    "value": "> 0",
-                    "impactAssessment": {
-                        "@type": "ImpactAssessment",
-                        "emissionsResourceUse": [{"@type": "Indicator", "value": ""}]
-                    }
-                }]
-            }]
-        }
-    }
-}
-RETURNS = {
-    "Emission": [{
-        "value": "",
-        "methodTier": "background",
-        "inputs": "",
-        "operation": "",
-        "animals": ""
-    }]
-}
+CURRENT_DIR = dirname(abspath(__file__)) + '/'
+sys.path.append(CURRENT_DIR)
 MODEL = 'linkedImpactAssessment'
-
-TIER = EmissionMethodTier.BACKGROUND.value
-
-
-def _emission(model: str, term_id: str, value: float, input: dict, operation={}, animal={}):
-    emission = _new_emission(term_id, model)
-    emission['value'] = [value]
-    emission['methodTier'] = TIER
-    emission['inputs'] = [input]
-    if operation:
-        emission['operation'] = operation
-    if animal:
-        emission['animals'] = [animal]
-    return emission
-
-
-def _run_emission(cycle: dict, term_id: str, data: dict):
-    def run_input(values: dict):
-        value = values.get('value', 0)
-        term = values.get('term', {})
-        operation = values.get('operation', {})
-        animal = values.get('animal', {})
-        is_aggregated = any(values.get('aggregated', []))
-        model = MODEL_AGGREGATED if is_aggregated else MODEL
-
-        details = values.get('details', {})
-        logRequirements(cycle, model=model, term=term_id,
-                        values=log_as_table([{'impact-assessment-id': key} | value for key, value in details.items()]))
-
-        logShouldRun(cycle, model, term_id, True, methodTier=TIER,
-                     input=term.get('@id'),
-                     operation=operation.get('@id'),
-                     animal=animal.get('@id'))
-
-        return _emission(model, term_id, value, input=term, operation=operation, animal=animal)
-
-    return list(map(run_input, data.values()))
-
-
-def _emission_group(term_id: str):
-    lookup = download_lookup('emission.csv', True)
-    return get_table_value(lookup, 'termid', term_id, column_name('inputProductionGroupId'))
-
-
-def _group_emissions(impact: dict):
-    def _group_by(group: dict, emission: dict):
-        term_id = emission.get('term', {}).get('@id')
-        grouping = _emission_group(term_id)
-        value = emission.get('value') or 0
-        if grouping:
-            group[grouping] = group.get(grouping, 0) + value
-        return group
-
-    emissions = impact.get('emissionsResourceUse', [])
-    return reduce(_group_by, emissions, {})
-
-
-def _animal_inputs(animal: dict):
-    inputs = load_impacts(animal.get('inputs', []))
-    return [(input | {'animal': animal.get('term', {})}) for input in inputs]
-
-
-def _group_input_emissions(input: dict):
-    impact = input.get('impactAssessment')
-    emissions = _group_emissions(impact)
-    return input | {'emissions': emissions}
-
-
-def _group_inputs(group: dict, values: tuple):
-    # input_group_key = 'group-id'
-    # inputs = [{'term': {}, 'value':[], 'impactAssessment': {}, 'emissions': {'co2ToAirInputsProduction': 10}}]
-    input_group_key, inputs = values
-    for input in inputs:
-        input_value = list_sum(input.get('value'))
-        emissions = input.get('emissions', {})
-        for emission_term_id, emission_value in emissions.items():
-            group[emission_term_id] = group.get(emission_term_id, {})
-
-            grouped_inputs = group[emission_term_id].get(input_group_key, {
-                'term': input.get('term', {}),
-                'operation': input.get('operation', {}),
-                'animal': input.get('animal', {}),
-                'value': 0,
-                'aggregated': [],
-                'details': {}
-            })
-            grouped_inputs['aggregated'].append(input.get('impactAssessment', {}).get('agregated', False))
-            grouped_inputs['value'] = grouped_inputs['value'] + (emission_value * input_value)
-            # for logging
-            grouped_inputs['details'][input.get('impactAssessment', {}).get('@id')] = {
-                'emission-value': emission_value,
-                'input-value': input_value
-            }
-            group[emission_term_id][input_group_key] = grouped_inputs
-    return group
-
-
-def run(_, cycle: dict):
-    inputs = flatten(
-        load_impacts(cycle.get('inputs', [])) +
-        list(map(_animal_inputs, cycle.get('animals', [])))
-    )
-    inputs = [i for i in inputs if list_sum(i.get('value', [])) > 0]
-
-    # group inputs with same term/operation/animal to avoid adding emissions twice
-    # inputs = {'group-id': [{'term': {},'value':[10],'impactAssessment': {}}]}
-    inputs = reduce(group_by_keys(['term', 'operation', 'animal']), inputs, {})
-    inputs = {key: list(map(_group_input_emissions, value)) for key, value in inputs.items()}
-
-    debugValues(cycle, model=MODEL,
-                nb_inputs=len(inputs))
+PKG = '.'.join(['hestia_earth', 'models', MODEL])
 
-    # finally group everything by emission so we can log inputs together
-    # emissions = {'co2ToAirInputsProduct': {'group-id':{'term':{},'value':10,'details':{}}}}
-    emissions = reduce(_group_inputs, inputs.items(), {})
 
-
+def run(model: str, data): return run_if_required(MODEL, model, data, import_module(f".{model}", package=PKG))
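
The slimmed-down __init__.py turns linkedImpactAssessment into a dispatcher: run(model, data) resolves the requested sub-model module (for example the new emissions.py) by name and hands it to run_if_required, which presumably gates whether the sub-model should execute. A simplified sketch of the import_module pattern, assuming each sub-module exposes a run(data) callable (the real entry point goes through run_if_required rather than calling it directly, and run_model is a hypothetical name):

    from importlib import import_module

    PKG = 'hestia_earth.models.linkedImpactAssessment'

    def run_model(model: str, data):
        # e.g. run_model('emissions', cycle) loads hestia_earth.models.linkedImpactAssessment.emissions
        module = import_module(f".{model}", package=PKG)
        return module.run(data)
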