hestia-earth-models 0.70.2__py3-none-any.whl → 0.70.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35) hide show
  1. hestia_earth/models/config/Cycle.json +8 -0
  2. hestia_earth/models/cycle/practice/landCover.py +181 -0
  3. hestia_earth/models/emepEea2019/nh3ToAirExcreta.py +1 -1
  4. hestia_earth/models/geospatialDatabase/altitude.py +1 -0
  5. hestia_earth/models/geospatialDatabase/clayContent.py +3 -3
  6. hestia_earth/models/geospatialDatabase/sandContent.py +3 -3
  7. hestia_earth/models/geospatialDatabase/utils.py +1 -2
  8. hestia_earth/models/hestia/landCover.py +25 -12
  9. hestia_earth/models/hestia/management.py +11 -107
  10. hestia_earth/models/ipcc2019/ch4ToAirOrganicSoilCultivation.py +7 -7
  11. hestia_earth/models/ipcc2019/co2ToAirOrganicSoilCultivation.py +5 -4
  12. hestia_earth/models/ipcc2019/n2OToAirOrganicSoilCultivationDirect.py +7 -7
  13. hestia_earth/models/ipcc2019/no3ToGroundwaterExcreta.py +1 -1
  14. hestia_earth/models/ipcc2019/organicCarbonPerHa_tier_1.py +2 -2
  15. hestia_earth/models/ipcc2019/organicCarbonPerHa_tier_2.py +15 -4
  16. hestia_earth/models/ipcc2019/organicSoilCultivation_utils.py +11 -3
  17. hestia_earth/models/mocking/search-results.json +1295 -1309
  18. hestia_earth/models/pooreNemecek2018/excretaKgN.py +3 -1
  19. hestia_earth/models/utils/blank_node.py +2 -2
  20. hestia_earth/models/utils/excretaManagement.py +2 -2
  21. hestia_earth/models/utils/property.py +14 -9
  22. hestia_earth/models/utils/site.py +2 -1
  23. hestia_earth/models/version.py +1 -1
  24. {hestia_earth_models-0.70.2.dist-info → hestia_earth_models-0.70.4.dist-info}/METADATA +2 -2
  25. {hestia_earth_models-0.70.2.dist-info → hestia_earth_models-0.70.4.dist-info}/RECORD +35 -33
  26. tests/models/cycle/practice/test_landCover.py +27 -0
  27. tests/models/hestia/test_landCover.py +24 -1
  28. tests/models/ipcc2019/test_ch4ToAirEntericFermentation.py +2 -2
  29. tests/models/ipcc2019/test_ch4ToAirOrganicSoilCultivation.py +2 -1
  30. tests/models/ipcc2019/test_co2ToAirOrganicSoilCultivation.py +2 -1
  31. tests/models/ipcc2019/test_n2OToAirOrganicSoilCultivationDirect.py +2 -1
  32. tests/models/ipcc2019/test_organicCarbonPerHa_tier_1.py +8 -3
  33. {hestia_earth_models-0.70.2.dist-info → hestia_earth_models-0.70.4.dist-info}/LICENSE +0 -0
  34. {hestia_earth_models-0.70.2.dist-info → hestia_earth_models-0.70.4.dist-info}/WHEEL +0 -0
  35. {hestia_earth_models-0.70.2.dist-info → hestia_earth_models-0.70.4.dist-info}/top_level.txt +0 -0
@@ -182,6 +182,14 @@
182
182
  "mergeStrategy": "list",
183
183
  "stage": 1
184
184
  },
185
+ {
186
+ "key": "practices",
187
+ "model": "cycle",
188
+ "value": "practice.landCover",
189
+ "runStrategy": "always",
190
+ "mergeStrategy": "list",
191
+ "stage": 1
192
+ },
185
193
  {
186
194
  "key": "products",
187
195
  "model": "hestia",
@@ -0,0 +1,181 @@
1
+ from functools import reduce
2
+ from hestia_earth.schema import TermTermType, SiteSiteType
3
+ from hestia_earth.utils.model import filter_list_term_type
4
+ from hestia_earth.utils.tools import non_empty_list, flatten, list_sum
5
+
6
+ from hestia_earth.models.log import logRequirements, logShouldRun
7
+ from hestia_earth.models.utils import _omit, _include
8
+ from hestia_earth.models.utils.practice import _new_practice
9
+ from hestia_earth.models.utils.term import get_lookup_value
10
+ from hestia_earth.models.utils.blank_node import condense_nodes
11
+ from hestia_earth.models.utils.crop import get_landCover_term_id
12
+ from .. import MODEL
13
+
14
+ REQUIREMENTS = {
15
+ "Cycle": {
16
+ "endDate": "",
17
+ "products": [
18
+ {
19
+ "@type": "Product",
20
+ "term.termType": ["crop", "forage"],
21
+ "optional": {
22
+ "startDate": "",
23
+ "endDate": ""
24
+ }
25
+ }
26
+ ],
27
+ "site": {
28
+ "@type": "Site",
29
+ "siteType": "cropland"
30
+ },
31
+ "none": {
32
+ "practices": [{"@type": "Practice", "term.termType": "landCover"}]
33
+ },
34
+ "optional": {
35
+ "startDate": ""
36
+ }
37
+ }
38
+ }
39
+ RETURNS = {
40
+ "Practice": [{
41
+ "term.termType": "landCover",
42
+ "value": "",
43
+ "endDate": "",
44
+ "startDate": ""
45
+ }]
46
+ }
47
+ LOOKUPS = {
48
+ "crop": ["landCoverTermId", "maximumCycleDuration"],
49
+ "forage": ["landCoverTermId"],
50
+ "property": ["GAP_FILL_TO_MANAGEMENT", "CALCULATE_TOTAL_LAND_COVER_SHARE_SEPARATELY"]
51
+ }
52
+ MODEL_KEY = 'landCover'
53
+
54
+
55
def practice(data: dict):
    """Build a landCover Practice node from the intermediate value mapping."""
    node = _new_practice(data.get('id'))
    node['value'] = [data['value']]
    node['endDate'] = data['endDate']
    start_date = data.get('startDate')
    if start_date:
        node['startDate'] = start_date
    properties = data.get('properties')
    if properties:
        node['properties'] = properties
    return node
64
+
65
+
66
def _should_gap_fill(term: dict):
    """A term is gap-filled when its `GAP_FILL_TO_MANAGEMENT` lookup value is truthy."""
    return bool(get_lookup_value(lookup_term=term, column='GAP_FILL_TO_MANAGEMENT'))
69
+
70
+
71
def _filter_properties(blank_node: dict):
    """Return the node with only the properties whose term should be gap-filled."""
    kept = [p for p in blank_node.get('properties', []) if _should_gap_fill(p.get('term', {}))]
    stripped = _omit(blank_node, ['properties'])
    # re-attach the filtered list only when something survived the filter
    return stripped | {'properties': kept} if kept else stripped
74
+
75
+
76
+ def _map_to_value(value: dict):
77
+ return {
78
+ 'id': value.get('term', {}).get('@id'),
79
+ 'value': value.get('value'),
80
+ 'startDate': value.get('startDate'),
81
+ 'endDate': value.get('endDate'),
82
+ 'properties': value.get('properties')
83
+ }
84
+
85
+
86
+ def _copy_item_if_exists(source: dict, keys: list[str] = None, dest: dict = None) -> dict:
87
+ return reduce(lambda p, c: p | ({c: source[c]} if source.get(c) else {}), keys or [], dest or {})
88
+
89
+
90
def _run(cycle: dict, products: list, total: float):
    """Build landCover practices, splitting the unallocated share (100 - total) evenly."""
    # remove any properties that should not get gap-filled
    filtered = [_filter_properties(p) for p in products]
    cycle_dates = _include(cycle, ["startDate", "endDate"])
    # share is loop-invariant; guard against an empty list (loop body never runs then)
    share = round((100 - total) / len(filtered), 2) if filtered else None

    values = []
    for product in filtered:
        base = {
            "term": {'@id': product.get('land-cover-id')},
            "value": share
        }
        merged = cycle_dates | _copy_item_if_exists(
            source=product,
            keys=['properties', 'startDate', 'endDate'],
            dest=base
        )
        values.append(_map_to_value(merged))

    return condense_nodes([practice(value) for value in values])
107
+
108
+
109
def _should_group_landCover(term: dict):
    """True when the term's `CALCULATE_TOTAL_LAND_COVER_SHARE_SEPARATELY` lookup is truthy."""
    return bool(get_lookup_value(lookup_term=term, column='CALCULATE_TOTAL_LAND_COVER_SHARE_SEPARATELY'))
112
+
113
+
114
def _has_prop_grouped_with_landCover(product: dict):
    """True if any of the product's property terms groups with landCover for the share total."""
    return any(
        _should_group_landCover(prop.get('term', {}))
        for prop in product.get('properties', [])
    )
122
+
123
+
124
def _product_wit_landCover_id(product: dict):
    """Attach the matching landCover term id to the product, or return None when no mapping exists."""
    # NOTE(review): "wit" looks like a typo for "with"; name kept so call sites stay valid.
    term_id = get_landCover_term_id(product.get('term', {}))
    return None if not term_id else product | {'land-cover-id': term_id}
127
+
128
+
129
def _should_run(cycle: dict):
    """
    Decide whether landCover practices can be generated for the cycle.

    Returns a (should_run, groups) tuple where groups is a list of
    (products, already-allocated-total) pairs to be processed by `_run`.
    """
    is_cropland = cycle.get('site', {}).get('siteType') == SiteSiteType.CROPLAND.value

    existing = filter_list_term_type(cycle.get('practices', []), TermTermType.LANDCOVER)
    # split existing practices by whether their properties group with landCover
    grouped_practices = [p for p in existing if _has_prop_grouped_with_landCover(p)]
    ungrouped_practices = [p for p in existing if not _has_prop_grouped_with_landCover(p)]
    grouped_total = list_sum([list_sum(p.get('value', [])) for p in grouped_practices])

    products = filter_list_term_type(cycle.get('products', []), [TermTermType.CROP, TermTermType.FORAGE])
    # only keep products that map to a landCover term
    products = non_empty_list(map(_product_wit_landCover_id, products))

    # grouped products may sum up to 100% => only run while the existing total is below 100%
    products_max_100 = (
        [p for p in products if _has_prop_grouped_with_landCover(p)]
        if grouped_total < 100 else []
    )
    # ungrouped products must sum to exactly 100% => skip when such practices already exist
    products_is_100 = (
        [] if ungrouped_practices
        else [p for p in products if not _has_prop_grouped_with_landCover(p)]
    )

    has_crop_forage_products = bool(products_max_100 or products_is_100)

    logRequirements(cycle, model=MODEL, model_key=MODEL_KEY,
                    is_cropland=is_cropland,
                    has_crop_forage_products=has_crop_forage_products)

    should_run = all([is_cropland, has_crop_forage_products])
    logShouldRun(cycle, MODEL, None, should_run, model_key=MODEL_KEY)

    return should_run, [
        (products_max_100, grouped_total),
        (products_is_100, 0)
    ]
174
+
175
+
176
def run(cycle: dict):
    """Generate landCover practices for the cycle when all requirements are met."""
    should_run, groups = _should_run(cycle)
    if not should_run:
        return []
    return flatten([
        _run(cycle, products, total)
        for products, total in groups
        if products
    ])
@@ -46,7 +46,7 @@ def _run(excreta_EF_input: float):
46
46
  def _should_run(cycle: dict):
47
47
  excreta_complete = _is_term_type_complete(cycle, TermTermType.EXCRETA)
48
48
  excreta_EF_input = get_excreta_inputs_with_factor(
49
- cycle, f"{list(LOOKUPS.keys())[0]}.csv", excreta_convertion_func=total_excreta_tan, model=MODEL, term=TERM_ID
49
+ cycle, f"{list(LOOKUPS.keys())[0]}.csv", excreta_conversion_func=total_excreta_tan, model=MODEL, term=TERM_ID
50
50
  )
51
51
 
52
52
  logRequirements(cycle, model=MODEL, term=TERM_ID,
@@ -27,6 +27,7 @@ EE_PARAMS = {
27
27
  'collection': 'USGS/GMTED2010_FULL',
28
28
  'ee_type': 'raster',
29
29
  'band_name': 'med',
30
+ 'reducer': 'mode',
30
31
  'is_image': True
31
32
  }
32
33
  BIBLIO_TITLE = 'An Enhanced Global Elevation Model Generalized From Multiple Higher Resolution Source Datasets'
@@ -92,14 +92,14 @@ def _run(site: dict):
92
92
  def _should_run(site: dict):
93
93
  contains_geospatial_data = has_geospatial_data(site)
94
94
  below_max_area_size = should_download(TERM_ID, site)
95
- has_original_texture_measurements = has_original_by_ids(site.get('measurements', []), SOIL_TEXTURE_IDS)
95
+ has_no_original_texture_measurements = not has_original_by_ids(site.get('measurements', []), SOIL_TEXTURE_IDS)
96
96
 
97
97
  logRequirements(site, model=MODEL, term=TERM_ID,
98
98
  contains_geospatial_data=contains_geospatial_data,
99
99
  below_max_area_size=below_max_area_size,
100
- has_original_texture_measurements=has_original_texture_measurements)
100
+ has_no_original_texture_measurements=has_no_original_texture_measurements)
101
101
 
102
- should_run = all([contains_geospatial_data, below_max_area_size, not has_original_texture_measurements])
102
+ should_run = all([contains_geospatial_data, below_max_area_size, has_no_original_texture_measurements])
103
103
  logShouldRun(site, MODEL, TERM_ID, should_run)
104
104
  return should_run
105
105
 
@@ -92,14 +92,14 @@ def _run(site: dict):
92
92
  def _should_run(site: dict):
93
93
  contains_geospatial_data = has_geospatial_data(site)
94
94
  below_max_area_size = should_download(TERM_ID, site)
95
- has_original_texture_measurements = has_original_by_ids(site.get('measurements', []), SOIL_TEXTURE_IDS)
95
+ has_no_original_texture_measurements = not has_original_by_ids(site.get('measurements', []), SOIL_TEXTURE_IDS)
96
96
 
97
97
  logRequirements(site, model=MODEL, term=TERM_ID,
98
98
  contains_geospatial_data=contains_geospatial_data,
99
99
  below_max_area_size=below_max_area_size,
100
- has_original_texture_measurements=has_original_texture_measurements)
100
+ has_no_original_texture_measurements=has_no_original_texture_measurements)
101
101
 
102
- should_run = all([contains_geospatial_data, below_max_area_size, not has_original_texture_measurements])
102
+ should_run = all([contains_geospatial_data, below_max_area_size, has_no_original_texture_measurements])
103
103
  logShouldRun(site, MODEL, TERM_ID, should_run)
104
104
  return should_run
105
105
 
@@ -157,8 +157,7 @@ def _get_cached_data(term: str, site: dict, data: dict):
157
157
  isinstance(cache, dict),
158
158
  cache_sub_key
159
159
  ]) else cache
160
- if value is not None:
161
- debugValues(site, model=MODEL, term=term, value_from_cache=value)
160
+ debugValues(site, model=MODEL, term=term, value_from_cache=value)
162
161
  return value
163
162
 
164
163
 
@@ -16,7 +16,7 @@ from hestia_earth.models.utils.constant import DAYS_IN_YEAR
16
16
  from hestia_earth.models.utils.management import _new_management
17
17
  from hestia_earth.models.utils.term import get_lookup_value
18
18
  from hestia_earth.models.utils.lookup import get_region_lookup_value
19
- from hestia_earth.models.utils.blank_node import _node_date, DatestrFormat, _gapfill_datestr, DatestrGapfillMode
19
+ from hestia_earth.models.utils.blank_node import DatestrFormat, _gapfill_datestr, DatestrGapfillMode
20
20
  from .utils import (
21
21
  IPCC_LAND_USE_CATEGORY_ANNUAL,
22
22
  IPCC_LAND_USE_CATEGORY_PERENNIAL,
@@ -410,8 +410,15 @@ def _get_faostat_name(term: dict) -> str:
410
410
  return _get_lookup_with_cache(term, "cropGroupingFaostatArea")
411
411
 
412
412
 
413
+ def _get_most_common_or_alphabetically_first(crop_terms: list) -> str:
414
+ histogram = {term: crop_terms.count(term) for term in crop_terms}
415
+ max_freq = max(histogram.values())
416
+ # Sorted; to be deterministic
417
+ return sorted([term for term, freq in histogram.items() if freq == max_freq])[0]
418
+
419
+
413
420
  def _get_complete_faostat_to_crop_mapping() -> dict:
414
- """Returns mapping in the format: {faostat_name: IPPC_LAND_USE_CATEGORY, ...}"""
421
+ """Returns mapping in the format: {faostat_name: IPCC_LAND_USE_CATEGORY, ...}"""
415
422
  lookup = download_lookup("crop.csv")
416
423
  mappings = defaultdict(list)
417
424
  for crop_term_id in [row[0] for row in lookup]:
@@ -421,7 +428,7 @@ def _get_complete_faostat_to_crop_mapping() -> dict:
421
428
  if key:
422
429
  mappings[key].append(crop_ipcc_land_use_category(crop_term_id=crop_term_id, lookup_term_type="crop"))
423
430
  return {
424
- fao_name: max(set(crop_terms), key=crop_terms.count)
431
+ fao_name: _get_most_common_or_alphabetically_first(crop_terms)
425
432
  for fao_name, crop_terms in mappings.items()
426
433
  }
427
434
 
@@ -758,26 +765,32 @@ def _collect_land_use_types(nodes: list) -> list:
758
765
  "id": node.get("term", {}).get("@id"),
759
766
  "land-use-type": _get_land_use_term_from_node(node),
760
767
  "endDate": _gapfill_datestr(datestr=node.get("endDate"), mode=DatestrGapfillMode.END)[:10],
761
- "startDate": _gapfill_datestr(
762
- datestr=node.get("startDate"), mode=DatestrGapfillMode.START
763
- )[:10] if node.get("startDate") else None
768
+ "startDate": _gapfill_datestr(datestr=node.get("startDate"), mode=DatestrGapfillMode.START)[:10]
764
769
  } for node in nodes
765
770
  ]
766
771
 
767
772
 
768
- def _no_prior_land_cover_data(nodes: list, node: dict) -> bool:
773
+ def _no_prior_land_cover_data(nodes: list, target_node: dict) -> bool:
774
+ """
775
+ Returns true if there are no nodes whose start & end dates the target_node falls within,
776
+ including a tolerance.
777
+ """
769
778
  target_date = (
770
- datetime.strptime(node.get('startDate') or node.get('endDate'), DatestrFormat.YEAR_MONTH_DAY.value)
779
+ datetime.strptime(target_node.get('startDate') or target_node.get('endDate'),
780
+ DatestrFormat.YEAR_MONTH_DAY.value)
771
781
  - timedelta(days=DEFAULT_WINDOW_IN_YEARS * DAYS_IN_YEAR)
772
782
  )
783
+ tolerance = timedelta(days=DATE_TOLERANCE_IN_YEARS * DAYS_IN_YEAR)
773
784
  previous_nodes = [
774
785
  node for node in nodes
775
- if abs(_node_date(node) - target_date) < timedelta(days=DATE_TOLERANCE_IN_YEARS * DAYS_IN_YEAR)
786
+ if datetime.strptime(node.get("startDate"), DatestrFormat.YEAR_MONTH_DAY.value) - tolerance
787
+ < target_date <
788
+ datetime.strptime(node.get("endDate"), DatestrFormat.YEAR_MONTH_DAY.value) + tolerance
776
789
  ]
777
790
  return len(previous_nodes) == 0
778
791
 
779
792
 
780
- def _should_run(site: dict) -> tuple[bool, dict]:
793
+ def _should_run(site: dict) -> tuple[bool, list, dict]:
781
794
  management_nodes = _collect_land_use_types(
782
795
  [
783
796
  node for node in filter_list_term_type(site.get("management", []), TermTermType.LANDCOVER)
@@ -795,8 +808,8 @@ def _should_run(site: dict) -> tuple[bool, dict]:
795
808
  land_use_type = relevant_nodes[0].get("land-use-type") if relevant_nodes else None
796
809
 
797
810
  has_no_prior_land_cover_data = _no_prior_land_cover_data(
798
- nodes=relevant_nodes,
799
- node=relevant_nodes[-1:][0]
811
+ nodes=management_nodes,
812
+ target_node=relevant_nodes[-1:][0]
800
813
  ) if relevant_nodes else None
801
814
 
802
815
  should_run_nodes, site_area = _should_run_historical_land_use_change(
@@ -1,6 +1,5 @@
1
1
  from typing import List
2
2
  from datetime import timedelta, datetime
3
- from functools import reduce
4
3
  from hestia_earth.schema import SchemaType, TermTermType, SiteSiteType, COMPLETENESS_MAPPING
5
4
  from hestia_earth.utils.lookup import column_name, get_table_value, download_lookup
6
5
  from hestia_earth.utils.model import filter_list_term_type
@@ -8,7 +7,7 @@ from hestia_earth.utils.tools import safe_parse_float, flatten
8
7
  from hestia_earth.utils.blank_node import get_node_value
9
8
 
10
9
  from hestia_earth.models.log import logRequirements, logShouldRun, log_as_table
11
- from hestia_earth.models.utils import _include, _omit, group_by
10
+ from hestia_earth.models.utils import _include, group_by
12
11
  from hestia_earth.models.utils.management import _new_management
13
12
  from hestia_earth.models.utils.term import get_lookup_value
14
13
  from hestia_earth.models.utils.blank_node import condense_nodes, DatestrFormat, _gapfill_datestr, DatestrGapfillMode
@@ -24,20 +23,16 @@ REQUIREMENTS = {
24
23
  "Cycle": [{
25
24
  "@type": "Cycle",
26
25
  "endDate": "",
27
- "products": [
28
- {
29
- "@type": "Product",
30
- "term.termType": ["crop", "forage", "landCover"]
31
- }
32
- ],
33
26
  "practices": [
34
27
  {
28
+ "@type": "Practice",
35
29
  "term.termType": [
36
30
  "waterRegime",
37
31
  "tillage",
38
32
  "cropResidueManagement",
39
33
  "landUseManagement",
40
- "system"
34
+ "system",
35
+ "landCover"
41
36
  ],
42
37
  "value": ""
43
38
  }
@@ -62,7 +57,6 @@ REQUIREMENTS = {
62
57
  }
63
58
  RETURNS = {
64
59
  "Management": [{
65
- "@type": "Management",
66
60
  "term.termType": [
67
61
  "landCover", "waterRegime", "tillage", "cropResidueManagement", "landUseManagement", "system"
68
62
  ],
@@ -78,7 +72,7 @@ LOOKUPS = {
78
72
  "organicFertiliser": "ANIMAL_MANURE",
79
73
  "soilAmendment": "PRACTICE_INCREASING_C_INPUT",
80
74
  "landUseManagement": "GAP_FILL_TO_MANAGEMENT",
81
- "property": ["GAP_FILL_TO_MANAGEMENT", "CALCULATE_TOTAL_LAND_COVER_SHARE_SEPARATELY"]
75
+ "property": "GAP_FILL_TO_MANAGEMENT"
82
76
  }
83
77
  MODEL_KEY = 'management'
84
78
 
@@ -87,7 +81,8 @@ _PRACTICES_TERM_TYPES = [
87
81
  TermTermType.TILLAGE,
88
82
  TermTermType.CROPRESIDUEMANAGEMENT,
89
83
  TermTermType.LANDUSEMANAGEMENT,
90
- TermTermType.SYSTEM
84
+ TermTermType.SYSTEM,
85
+ TermTermType.LANDCOVER
91
86
  ]
92
87
  _PRACTICES_COMPLETENESS_MAPPING = COMPLETENESS_MAPPING.get(SchemaType.PRACTICE.value, {})
93
88
  _ANIMAL_MANURE_USED_TERM_ID = "animalManureUsed"
@@ -125,7 +120,6 @@ _INPUT_RULES = {
125
120
  _SKIP_LAND_COVER_SITE_TYPES = [
126
121
  SiteSiteType.CROPLAND.value
127
122
  ]
128
- _CYCLE_DATE_TERM_TYPES = {TermTermType.CROP.value, TermTermType.FORAGE.value}
129
123
 
130
124
 
131
125
  def management(data: dict):
@@ -188,11 +182,6 @@ def _should_gap_fill(term: dict):
188
182
  return bool(value)
189
183
 
190
184
 
191
- def _filter_properties(blank_node: dict):
192
- properties = list(filter(lambda p: _should_gap_fill(p.get('term', {})), blank_node.get('properties', [])))
193
- return _omit(blank_node, ['properties']) | ({'properties': properties} if properties else {})
194
-
195
-
196
185
  def _map_to_value(value: dict):
197
186
  return {
198
187
  'id': value.get('term', {}).get('@id'),
@@ -207,10 +196,6 @@ def _extract_node_value(node: dict) -> dict:
207
196
  return node | {'value': get_node_value(node)}
208
197
 
209
198
 
210
- def _copy_item_if_exists(source: dict, keys: list[str] = None, dest: dict = None) -> dict:
211
- return reduce(lambda p, c: p | ({c: source[c]} if source.get(c) else {}), keys or [], dest or {})
212
-
213
-
214
199
  def _get_relevant_items(cycle: dict, item_name: str, term_types: List[TermTermType], completeness_mapping: dict = {}):
215
200
  """
216
201
  Get items from the list of cycles with any of the relevant terms.
@@ -278,84 +263,6 @@ def _run_from_siteType(site: dict, cycle: dict):
278
263
  }] if should_run else []
279
264
 
280
265
 
281
- def _run_products(cycle: dict, products: list, total_products: int = None, use_cycle_dates: bool = False):
282
- default_dates = _include_with_date_gap_fill(cycle, ["startDate", "endDate"])
283
- return [
284
- _map_to_value(default_dates | _copy_item_if_exists(
285
- source=product,
286
- keys=['properties', 'startDate', 'endDate'],
287
- dest={
288
- "term": {'@id': get_landCover_term_id(product.get('term', {}))},
289
- "value": round(100 / (total_products or len(products)), 2)
290
- }
291
- ) | (
292
- default_dates if use_cycle_dates or product.get("term", {}).get("termType") in _CYCLE_DATE_TERM_TYPES
293
- else {}
294
- ))
295
- for product in products
296
- ]
297
-
298
-
299
- def _run_from_landCover(cycle: dict, crop_forage_products: list):
300
- """
301
- Copy landCover items, and include crop/forage landCover items with properties to count in ratio.
302
- """
303
- land_cover_products = [
304
- _map_to_value(_extract_node_value(
305
- _include_with_date_gap_fill(
306
- value=product,
307
- keys=["term", "value", "startDate", "endDate", "properties"]
308
- )
309
- )) for product in _get_relevant_items(
310
- cycle=cycle,
311
- item_name="products",
312
- term_types=[TermTermType.LANDCOVER]
313
- )
314
- ]
315
- return land_cover_products + _run_products(
316
- cycle,
317
- crop_forage_products,
318
- total_products=len(crop_forage_products) + len(land_cover_products),
319
- use_cycle_dates=True
320
- )
321
-
322
-
323
- def _should_group_landCover(term: dict):
324
- value = get_lookup_value(lookup_term=term, column='CALCULATE_TOTAL_LAND_COVER_SHARE_SEPARATELY')
325
- return bool(value)
326
-
327
-
328
- def _has_prop_grouped_with_landCover(product: dict):
329
- return bool(
330
- next((
331
- p
332
- for p in product.get('properties', [])
333
- if _should_group_landCover(p.get('term', {}))
334
- ), None)
335
- )
336
-
337
-
338
- def _run_from_crop_forage(cycle: dict, site: dict):
339
- products = _get_relevant_items(
340
- cycle=cycle,
341
- item_name="products",
342
- term_types=[TermTermType.CROP, TermTermType.FORAGE]
343
- ) if site.get("siteType", "") == SiteSiteType.CROPLAND.value else []
344
- # only take products with a matching landCover term
345
- products = [p for p in products if get_landCover_term_id(p.get('term', {}))]
346
- # remove any properties that should not get gap-filled
347
- products = list(map(_filter_properties, products))
348
-
349
- # split products with properties that group with landCover
350
- products_with_gap_filled_props = [p for p in products if _has_prop_grouped_with_landCover(p)]
351
- products_without_gap_filled_props = [p for p in products if not _has_prop_grouped_with_landCover(p)]
352
-
353
- return _run_from_landCover(
354
- cycle=cycle,
355
- crop_forage_products=products_with_gap_filled_props
356
- ) + _run_products(cycle, products_without_gap_filled_props, use_cycle_dates=False)
357
-
358
-
359
266
  def _should_run_practice(practice: dict):
360
267
  """
361
268
  Include only landUseManagement practices where GAP_FILL_TO_MANAGEMENT = True
@@ -369,7 +276,7 @@ def _run_from_practices(cycle: dict):
369
276
  _extract_node_value(
370
277
  _include_with_date_gap_fill(
371
278
  value=practice,
372
- keys=["term", "value", "startDate", "endDate"]
279
+ keys=["term", "value", "startDate", "endDate", "properties"]
373
280
  )
374
281
  ) for practice in _get_relevant_items(
375
282
  cycle=cycle,
@@ -378,18 +285,16 @@ def _run_from_practices(cycle: dict):
378
285
  completeness_mapping=_PRACTICES_COMPLETENESS_MAPPING
379
286
  )
380
287
  ]
381
- practices = list(map(_map_to_value, filter(_should_run_practice, practices)))
382
- return practices
288
+ return list(map(_map_to_value, filter(_should_run_practice, practices)))
383
289
 
384
290
 
385
291
  def _run_cycle(site: dict, cycle: dict):
386
292
  inputs = _run_from_inputs(site, cycle)
387
- products = _run_from_crop_forage(site=site, cycle=cycle)
388
293
  site_types = _run_from_siteType(site=site, cycle=cycle)
389
294
  practices = _run_from_practices(cycle)
390
295
  return [
391
296
  node | {'cycle-id': cycle.get('@id')}
392
- for node in inputs + products + site_types + practices
297
+ for node in inputs + site_types + practices
393
298
  ]
394
299
 
395
300
 
@@ -409,5 +314,4 @@ def run(site: dict):
409
314
  )
410
315
  logShouldRun(site, MODEL, id, True, model_key=MODEL_KEY)
411
316
 
412
- management_nodes = condense_nodes(list(map(management, nodes)))
413
- return management_nodes
317
+ return condense_nodes(list(map(management, nodes)))
@@ -16,8 +16,8 @@ from hestia_earth.models.utils.measurement import most_relevant_measurement_valu
16
16
  from hestia_earth.models.utils.site import valid_site_type
17
17
 
18
18
  from .organicSoilCultivation_utils import (
19
- assign_ditch_category, assign_organic_soil_category, calc_emission, DitchCategory, get_ditch_frac,
20
- get_emission_factor, OrganicSoilCategory, remap_categories, valid_eco_climate_zone
19
+ assign_ditch_category, assign_organic_soil_category, calc_emission, DitchCategory, format_nd_array, format_number,
20
+ get_ditch_frac, get_emission_factor, OrganicSoilCategory, remap_categories, valid_eco_climate_zone
21
21
  )
22
22
  from . import MODEL
23
23
 
@@ -224,11 +224,11 @@ def _should_run(cycle: dict):
224
224
  eco_climate_zone=eco_climate_zone,
225
225
  organic_soil_category=organic_soil_category,
226
226
  ditch_category=ditch_category,
227
- emission_factor=f"{np.mean(emission_factor):.3f}",
228
- ditch_factor=f"{np.mean(ditch_factor):.3f}",
229
- ditch_frac=f"{np.mean(ditch_frac):.3f}",
230
- land_occupation=land_occupation,
231
- histosol=histosol
227
+ emission_factor=format_nd_array(emission_factor),
228
+ ditch_factor=format_nd_array(ditch_factor),
229
+ ditch_frac=format_nd_array(ditch_frac),
230
+ land_occupation=format_number(land_occupation),
231
+ histosol=format_number(histosol)
232
232
  )
233
233
 
234
234
  should_run = all([
@@ -14,7 +14,8 @@ from hestia_earth.models.utils.measurement import most_relevant_measurement_valu
14
14
  from hestia_earth.models.utils.site import valid_site_type
15
15
 
16
16
  from .organicSoilCultivation_utils import (
17
- assign_organic_soil_category, calc_emission, get_emission_factor, OrganicSoilCategory, valid_eco_climate_zone
17
+ assign_organic_soil_category, calc_emission, format_nd_array, format_number, get_emission_factor,
18
+ OrganicSoilCategory, valid_eco_climate_zone
18
19
  )
19
20
  from . import MODEL
20
21
 
@@ -182,9 +183,9 @@ def _should_run(cycle: dict):
182
183
  cycle, model=MODEL, term=TERM_ID,
183
184
  eco_climate_zone=eco_climate_zone,
184
185
  organic_soil_category=organic_soil_category,
185
- emission_factor=f"{np.mean(emission_factor):.3f}",
186
- land_occupation=land_occupation,
187
- histosol=histosol
186
+ emission_factor=format_nd_array(emission_factor),
187
+ land_occupation=format_number(land_occupation),
188
+ histosol=format_number(histosol)
188
189
  )
189
190
 
190
191
  should_run = all([
@@ -8,8 +8,8 @@ from hestia_earth.models.utils.measurement import most_relevant_measurement_valu
8
8
  from hestia_earth.models.utils.site import valid_site_type
9
9
 
10
10
  from .organicSoilCultivation_utils import (
11
- assign_organic_soil_category, calc_emission, get_emission_factor, OrganicSoilCategory, remap_categories,
12
- valid_eco_climate_zone
11
+ assign_organic_soil_category, calc_emission, format_number, get_emission_factor, OrganicSoilCategory,
12
+ remap_categories, valid_eco_climate_zone
13
13
  )
14
14
  from . import MODEL
15
15
 
@@ -127,9 +127,9 @@ def _should_run(cycle: dict):
127
127
  cycle, model=MODEL, term=TERM_ID,
128
128
  eco_climate_zone=eco_climate_zone,
129
129
  organic_soil_category=organic_soil_category,
130
- emission_factor=f"{emission_factor_mean} ± {emission_factor_sd}",
131
- land_occupation=land_occupation,
132
- histosol=histosol
130
+ emission_factor=f"{format_number(emission_factor_mean)} ± {format_number(emission_factor_sd)}",
131
+ land_occupation=format_number(land_occupation),
132
+ histosol=format_number(histosol)
133
133
  )
134
134
 
135
135
  should_run = all([
@@ -151,8 +151,8 @@ def _should_run(cycle: dict):
151
151
 
152
152
 
153
153
  def _run(emission_factor_mean: float, emission_factor_sd: float, histosol: float, land_occupation: float):
154
- value = calc_emission(TERM_ID, emission_factor_mean, histosol, land_occupation)
155
- sd = calc_emission(TERM_ID, emission_factor_sd, histosol, land_occupation)
154
+ value = round(calc_emission(TERM_ID, emission_factor_mean, histosol, land_occupation), 6)
155
+ sd = round(calc_emission(TERM_ID, emission_factor_sd, histosol, land_occupation), 6)
156
156
  return [_emission(value, sd)]
157
157
 
158
158
 
@@ -98,7 +98,7 @@ def _should_run(cycle: dict):
98
98
 
99
99
  use_excreta_management = _should_run_with_excreta_management(cycle)
100
100
  excreta_EF_input = get_excreta_inputs_with_factor(
101
- cycle, f"{list(LOOKUPS.keys())[0]}.csv", excreta_convertion_func=total_excreta, model=MODEL, term=TERM_ID
101
+ cycle, f"{list(LOOKUPS.keys())[0]}.csv", excreta_conversion_func=total_excreta, model=MODEL, term=TERM_ID
102
102
  ) if use_excreta_management else None
103
103
 
104
104
  logRequirements(cycle, model=MODEL, term=TERM_ID,