hestia-earth-models 0.62.0__py3-none-any.whl → 0.62.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (68)
  1. hestia_earth/models/blonkConsultants2016/utils.py +3 -2
  2. hestia_earth/models/cycle/coldCarcassWeightPerHead.py +4 -2
  3. hestia_earth/models/cycle/coldDressedCarcassWeightPerHead.py +2 -2
  4. hestia_earth/models/cycle/concentrateFeed.py +3 -3
  5. hestia_earth/models/cycle/feedConversionRatio/feedConversionRatioNitrogen.py +2 -1
  6. hestia_earth/models/cycle/post_checks/__init__.py +3 -2
  7. hestia_earth/models/cycle/post_checks/otherSites.py +40 -0
  8. hestia_earth/models/cycle/pre_checks/__init__.py +2 -1
  9. hestia_earth/models/cycle/pre_checks/otherSites.py +42 -0
  10. hestia_earth/models/cycle/pre_checks/site.py +1 -1
  11. hestia_earth/models/cycle/readyToCookWeightPerHead.py +2 -2
  12. hestia_earth/models/ecoinventV3AndEmberClimate/utils.py +1 -1
  13. hestia_earth/models/emepEea2019/utils.py +4 -3
  14. hestia_earth/models/geospatialDatabase/heavyWinterPrecipitation.py +1 -1
  15. hestia_earth/models/ipcc2019/animal/pastureGrass.py +53 -43
  16. hestia_earth/models/ipcc2019/co2ToAirSoilOrganicCarbonStockChangeManagementChange.py +30 -4
  17. hestia_earth/models/ipcc2019/n2OToAirExcretaDirect.py +6 -2
  18. hestia_earth/models/ipcc2019/n2OToAirExcretaIndirect.py +1 -1
  19. hestia_earth/models/ipcc2019/n2OToAirInorganicFertiliserDirect.py +1 -1
  20. hestia_earth/models/ipcc2019/n2OToAirInorganicFertiliserIndirect.py +1 -1
  21. hestia_earth/models/ipcc2019/n2OToAirOrganicFertiliserIndirect.py +1 -1
  22. hestia_earth/models/ipcc2019/organicCarbonPerHa_tier_1_utils.py +4 -2
  23. hestia_earth/models/ipcc2019/organicCarbonPerHa_tier_2_utils.py +210 -40
  24. hestia_earth/models/ipcc2019/organicCarbonPerHa_utils.py +2 -6
  25. hestia_earth/models/ipcc2019/pastureGrass.py +44 -42
  26. hestia_earth/models/ipcc2019/pastureGrass_utils.py +46 -92
  27. hestia_earth/models/mocking/search-results.json +378 -234
  28. hestia_earth/models/schererPfister2015/utils.py +2 -2
  29. hestia_earth/models/site/brackishWater.py +1 -1
  30. hestia_earth/models/site/flowingWater.py +1 -1
  31. hestia_earth/models/site/freshWater.py +1 -1
  32. hestia_earth/models/site/management.py +79 -38
  33. hestia_earth/models/site/pre_checks/cache_sources.py +9 -13
  34. hestia_earth/models/site/salineWater.py +1 -1
  35. hestia_earth/models/stehfestBouwman2006/n2OToAirCropResidueDecompositionDirect.py +12 -2
  36. hestia_earth/models/stehfestBouwman2006/n2OToAirExcretaDirect.py +12 -2
  37. hestia_earth/models/stehfestBouwman2006/n2OToAirInorganicFertiliserDirect.py +11 -1
  38. hestia_earth/models/stehfestBouwman2006/n2OToAirOrganicFertiliserDirect.py +11 -1
  39. hestia_earth/models/stehfestBouwman2006/noxToAirCropResidueDecomposition.py +12 -2
  40. hestia_earth/models/stehfestBouwman2006/noxToAirExcreta.py +12 -2
  41. hestia_earth/models/stehfestBouwman2006/noxToAirInorganicFertiliser.py +11 -1
  42. hestia_earth/models/stehfestBouwman2006/noxToAirOrganicFertiliser.py +11 -1
  43. hestia_earth/models/stehfestBouwman2006GisImplementation/noxToAirCropResidueDecomposition.py +12 -2
  44. hestia_earth/models/stehfestBouwman2006GisImplementation/noxToAirExcreta.py +12 -2
  45. hestia_earth/models/stehfestBouwman2006GisImplementation/noxToAirInorganicFertiliser.py +11 -1
  46. hestia_earth/models/stehfestBouwman2006GisImplementation/noxToAirOrganicFertiliser.py +11 -1
  47. hestia_earth/models/utils/blank_node.py +144 -110
  48. hestia_earth/models/utils/constant.py +2 -0
  49. hestia_earth/models/utils/lookup.py +19 -6
  50. hestia_earth/models/utils/property.py +6 -6
  51. hestia_earth/models/utils/site.py +7 -0
  52. hestia_earth/models/utils/source.py +1 -1
  53. hestia_earth/models/utils/term.py +21 -1
  54. hestia_earth/models/version.py +1 -1
  55. {hestia_earth_models-0.62.0.dist-info → hestia_earth_models-0.62.2.dist-info}/METADATA +2 -2
  56. {hestia_earth_models-0.62.0.dist-info → hestia_earth_models-0.62.2.dist-info}/RECORD +68 -63
  57. tests/models/cycle/post_checks/test_otherSites.py +15 -0
  58. tests/models/cycle/pre_checks/test_otherSites.py +21 -0
  59. tests/models/ipcc2019/test_co2ToAirSoilOrganicCarbonStockChangeManagementChange.py +5 -3
  60. tests/models/ipcc2019/test_organicCarbonPerHa.py +10 -20
  61. tests/models/ipcc2019/test_organicCarbonPerHa_tier_2_utils.py +0 -8
  62. tests/models/site/pre_checks/test_cache_sources.py +6 -10
  63. tests/models/site/test_management.py +192 -4
  64. tests/models/utils/test_blank_node.py +0 -281
  65. tests/models/utils/test_lookup.py +10 -0
  66. {hestia_earth_models-0.62.0.dist-info → hestia_earth_models-0.62.2.dist-info}/LICENSE +0 -0
  67. {hestia_earth_models-0.62.0.dist-info → hestia_earth_models-0.62.2.dist-info}/WHEEL +0 -0
  68. {hestia_earth_models-0.62.0.dist-info → hestia_earth_models-0.62.2.dist-info}/top_level.txt +0 -0

hestia_earth/models/utils/blank_node.py

@@ -1,11 +1,13 @@
+import calendar
 from calendar import monthrange
 from collections import defaultdict
 from collections.abc import Iterable
-from datetime import datetime
+from datetime import datetime, timedelta
+from uuid import uuid4
+
 from dateutil.relativedelta import relativedelta
 from enum import Enum
 from functools import reduce
-from statistics import mode, mean
 from typing import (
     Any,
     List,
@@ -24,9 +26,10 @@ from hestia_earth.utils.tools import (
     safe_parse_float,
     non_empty_list
 )
+from hestia_earth.utils.blank_node import ArrayTreatment, get_node_value

 from ..log import debugValues, log_as_table
-from . import is_from_model, _filter_list_term_unit, is_iterable
+from . import is_from_model, _filter_list_term_unit, is_iterable, _omit
 from .constant import Units
 from .property import get_node_property, get_node_property_value
 from .lookup import (
@@ -38,6 +41,44 @@ from .lookup import (
 from .term import get_lookup_value


+def lookups_logs(model: str, blank_nodes: list, lookups_per_termType: dict, **log_args):
+    def mapper(blank_node: dict):
+        term = blank_node.get('term', {})
+        term_id = term.get('@id')
+        term_type = term.get('termType')
+        lookups = lookups_per_termType.get(term_type, [])
+        lookups = lookups if isinstance(lookups, list) else [lookups]
+
+        def _reduce_lookups_logs(logs: dict, column: str):
+            lookup_value = get_lookup_value(term, column, model=model, **log_args)
+            return logs | {column: lookup_value}
+
+        return reduce(_reduce_lookups_logs, lookups, {'id': term_id})
+
+    logs = list(map(mapper, blank_nodes))
+
+    return log_as_table(logs)
+
+
+def properties_logs(blank_nodes: list, properties: Union[dict, list]):
+    def mapper(blank_node: dict):
+        term = blank_node.get('term', {})
+        term_id = term.get('@id')
+        term_type = term.get('termType')
+        props = properties.get(term_type, []) if isinstance(properties, dict) else properties
+        props = props if isinstance(props, list) else [props]
+
+        def _reduce_properties_logs(logs: dict, prop: str):
+            value = get_node_property(term, prop).get('value')
+            return logs | {prop: value}
+
+        return reduce(_reduce_properties_logs, properties, {'id': term_id})
+
+    logs = list(map(mapper, blank_nodes))
+
+    return log_as_table(logs)
+
+
 def group_by_keys(group_keys: list = ['term']):
     def run(group: dict, node: dict):
         group_key = '-'.join(non_empty_list(map(lambda v: node.get(v, {}).get('@id'), group_keys)))
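
The two new helpers above assemble per-term lookup and property values into a single `log_as_table` string. A minimal usage sketch, assuming hestia-earth-models 0.62.2 is installed and the HESTIA lookup files are reachable; the model name, term ID, lookup column and property ID below are illustrative only:

    from hestia_earth.models.utils.blank_node import lookups_logs, properties_logs

    blank_nodes = [{'term': {'@id': 'wheatGrain', 'termType': 'crop'}}]

    # one table row per blank node, one column per requested lookup
    print(lookups_logs('myModel', blank_nodes, {'crop': 'cropGroupingFAO'}))

    # same idea, but pulling term property values instead of lookup columns
    print(properties_logs(blank_nodes, {'crop': ['dryMatter']}))
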
@@ -46,7 +87,9 @@ def group_by_keys(group_keys: list = ['term']):
     return run


-def _module_term_id(term_id: str, module): return getattr(module, 'TERM_ID', term_id).split(',')[0]
+def _module_term_id(term_id: str, module):
+    term_id_str = term_id.split('.')[-1] if '.' in term_id else term_id
+    return getattr(module, 'TERM_ID', term_id_str).split(',')[0]


 def _run_model_required(model: str, term: dict, data: dict):
@@ -342,111 +385,6 @@ def convert_to_carbon(node: dict, model: str, term_id: str, blank_nodes: list, *
     ]) if len(missing_carbon_property) == 0 else None


-class ArrayTreatment(Enum):
-    """
-    Enum representing different treatments for arrays of values.
-    """
-    MEAN = 'mean'
-    MODE = 'mode'
-    SUM = 'sum'
-    FIRST = 'first'
-    LAST = 'last'
-
-
-def _should_run_array_treatment(value):
-    return isinstance(value, Iterable) and len(value) > 0
-
-
-DEFAULT_ARRAY_TREATMENT = ArrayTreatment.MEAN
-ARRAY_TREATMENT_TO_REDUCER = {
-    ArrayTreatment.MEAN: lambda value: mean(value) if _should_run_array_treatment(value) else 0,
-    ArrayTreatment.MODE: lambda value: mode(value) if _should_run_array_treatment(value) else 0,
-    ArrayTreatment.SUM: lambda value: sum(value) if _should_run_array_treatment(value) else 0,
-    ArrayTreatment.FIRST: lambda value: value[0] if _should_run_array_treatment(value) else 0,
-    ArrayTreatment.LAST: lambda value: value[-1] if _should_run_array_treatment(value) else 0
-}
-"""
-A dictionary mapping ArrayTreatment enums to corresponding reducer functions.
-"""
-
-
-def _retrieve_array_treatment(
-    node: dict,
-    is_larger_unit: bool = False,
-    default: ArrayTreatment = ArrayTreatment.FIRST
-) -> ArrayTreatment:
-    """
-    Retrieves the array treatment for a given node.
-
-    Array treatments are used to reduce an array's list of values into
-    a single value. The array treatment is retrieved from a lookup on
-    the node's term.
-
-    Parameters
-    ----------
-    node : dict
-        The dictionary representing the node.
-    is_larger_unit : bool, optional
-        Flag indicating whether to use the larger unit lookup, by default `False`.
-    default : ArrayTreatment, optional
-        Default value to return if the lookup fails, by default `ArrayTreatment.FIRST`.
-
-    Returns
-    -------
-    ArrayTreatment
-        The retrieved array treatment.
-
-    """
-    ARRAY_TREATMENT_LOOKUPS = [
-        'arrayTreatmentLargerUnitOfTime',
-        'arrayTreatment'
-    ]
-    lookup = ARRAY_TREATMENT_LOOKUPS[0] if is_larger_unit else ARRAY_TREATMENT_LOOKUPS[1]
-
-    term = node.get('term', {})
-    lookup_value = get_lookup_value(term, lookup, skip_debug=True)
-
-    return next(
-        (treatment for treatment in ArrayTreatment if treatment.value == lookup_value),
-        default
-    )
-
-
-def get_node_value(
-    node: dict,
-    is_larger_unit: bool = False,
-    array_treatment: Optional[ArrayTreatment] = None,
-    default: Any = 0
-) -> Union[float, bool]:
-    """
-    Get the value from the dictionary representing the node,
-    applying optional array treatment if the value is a list.
-
-    Parameters
-    ----------
-    node : dict
-        The dictionary representing the node.
-    is_larger_unit : bool, optional
-        A flag indicating whether the unit of time is larger, by default `False`.
-    array_treatment : ArrayTreatment, optional
-        An optional override for the treatment to be applied to an array value, if `None` the array treatment in the
-        node's term's lookup is used (which defaults to `FIRST` if no array treatment is specified), by default `None`.
-
-    Returns
-    -------
-    float | bool
-        The extracted value from the node.
-
-    """
-    value = node.get("value", 0)
-
-    reducer = ARRAY_TREATMENT_TO_REDUCER[(
-        array_treatment or _retrieve_array_treatment(node, is_larger_unit=is_larger_unit)
-    )] if isinstance(value, list) and len(value) > 0 else None
-
-    return reducer(value) if reducer else value if isinstance(value, bool) else value or default
-
-
 def _convert_to_set(
     variable: Union[Iterable[Any], Any]
 ) -> set:
@@ -560,7 +498,7 @@ def cumulative_nodes_match(
     """
     values = [
         get_node_value(
-            node, is_larger_unit, array_treatment
+            node, 'value', is_larger_unit, array_treatment
         ) or default_node_value for node in nodes if function(node)
     ]

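
This call site changes because `get_node_value` now comes from `hestia_earth.utils.blank_node` (see the new import above) and takes the field key as an explicit second positional argument. A hedged sketch of the new call shape, assuming hestia-earth.utils >= 0.13.2 and that the utils `ArrayTreatment` keeps the same members as the enum removed above; the example node and expected result are illustrative:

    from hestia_earth.utils.blank_node import ArrayTreatment, get_node_value

    node = {'term': {'@id': 'exampleTerm'}, 'value': [10, 20, 30]}
    # positional order taken from the call site above: node, key, is_larger_unit, array_treatment
    print(get_node_value(node, 'value', False, ArrayTreatment.MEAN))  # expected: 20
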
@@ -1274,3 +1212,99 @@ def get_inputs_from_properties(input: dict, term_types: Union[TermTermType, List
         } for p in (properties or []) if all([p.get('key'), p.get('value')])
     ]) if input_value > 0 else []
     return filter_list_term_type(inputs, term_types)
+
+
+def _get_condensed_nodes(nodes: list) -> tuple[list, bool]:
+    """Only considers nodes which already match on non-date criteria."""
+    CONDENSABLE_UNITS = [Units.BOOLEAN.value, Units.PERCENTAGE_AREA.value]
+    condensed_nodes = []
+    matched_uuids = set()
+    nodes_by_start_date = {_full_date_str(date_str=n["startDate"], is_end=True): n for n in nodes if "startDate" in n}
+    if len(nodes_by_start_date) != len(nodes):
+        return nodes, False
+
+    for node in nodes:
+        search_date = _offset_date(date_str=node.get("endDate", ""))
+        if node["uuid"] in matched_uuids:
+            continue
+        if (search_date in nodes_by_start_date and nodes_by_start_date[search_date]["uuid"] not in matched_uuids
+                and node.get("term", {}).get("units") in CONDENSABLE_UNITS):
+            new_node = node.copy()
+            new_node["endDate"] = nodes_by_start_date[search_date]["endDate"]
+            condensed_nodes.append(new_node)
+            matched_uuids.add(nodes_by_start_date[search_date]["uuid"])
+        elif node["uuid"] not in matched_uuids:
+            condensed_nodes.append(node)
+
+    return condensed_nodes, len(matched_uuids) > 0
+
+
+def condense_nodes(nodes: list) -> list:
+    grouped_nodes = _group_nodes_by_term_and_value(nodes)
+    condensed_nodes = dict()
+    any_changes_made = False
+
+    for key, node_group in grouped_nodes.items():
+        condensed_nodes[key] = node_group
+        while len(condensed_nodes[key]) > 1:
+            condensed_nodes[key], changes_made = _get_condensed_nodes(condensed_nodes[key])
+            if not changes_made:
+                break
+            any_changes_made = True
+
+    if not any_changes_made:
+        return [_omit(values=n, keys=["uuid"]) for n in nodes]
+
+    return sorted(
+        flatten([_omit(values=n, keys=["uuid"]) for nodes in condensed_nodes.values() for n in nodes]),
+        key=lambda x: x["startDate"]
+    )
+
+
+DATE_FORMAT = "%Y-%m-%d"
+
+
+def _variable_length_str_to_date(date_str: str, is_end: bool) -> datetime:
+    """Converts to date, adding start or end of year to YYYY strings as indicated by is_end."""
+    return datetime.strptime(_full_date_str(date_str, is_end=is_end), DATE_FORMAT)
+
+
+def _full_date_str(date_str: str, is_end: bool) -> str:
+    suffix = ""
+    if len(date_str) == 4:
+        # Format YYYY
+        suffix = "-12-31" if is_end else "-01-01"
+    elif len(date_str) == 7:
+        # Format YYYY-MM
+        suffix = f"-{calendar.monthrange(int(date_str[:4]), int(date_str[5:7]))[1]}" if is_end else "-01"
+
+    return date_str + suffix
+
+
+def _with_full_dates(node: dict) -> dict:
+    output_node = node.copy()
+    if "startDate" in output_node:
+        output_node["startDate"] = _full_date_str(output_node["startDate"], is_end=False)
+    if "endDate" in output_node:
+        output_node["endDate"] = _full_date_str(output_node["endDate"], is_end=True)
+
+    return output_node
+
+
+def _offset_date(date_str: str, days: int = 1, is_end: bool = True) -> str:
+    return (
+        _variable_length_str_to_date(date_str=date_str, is_end=is_end) + timedelta(days=days)
+    ).strftime(DATE_FORMAT)
+
+
+def _group_nodes_by_term_and_value(nodes: list) -> dict:
+    grouped_nodes = defaultdict(list)
+
+    for node in nodes:
+        term_id = node.get("term", {}).get("@id", "")
+        value = "-".join([str(v) for v in node.get("value")]) if isinstance(node.get("value"), list) \
+            else node.get("value")
+        node["uuid"] = uuid4()
+        grouped_nodes[(term_id, value)].append(_with_full_dates(node))
+
+    return grouped_nodes
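
`condense_nodes` merges consecutive nodes that share the same term and value into a single node covering the combined date range, but only for `boolean` and `% area` units. A minimal sketch of the expected behaviour, assuming hestia-earth-models 0.62.2 is installed; the term ID is a placeholder:

    from hestia_earth.models.utils.blank_node import condense_nodes

    nodes = [
        {'term': {'@id': 'exampleLandCoverTerm', 'units': '% area'}, 'value': 100,
         'startDate': '2019', 'endDate': '2019'},
        {'term': {'@id': 'exampleLandCoverTerm', 'units': '% area'}, 'value': 100,
         'startDate': '2020', 'endDate': '2020'},
    ]

    # the YYYY dates are padded to full dates and the two adjacent periods merged, so this
    # should print a single node with startDate 2019-01-01 and endDate 2020-12-31
    print(condense_nodes(nodes))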

hestia_earth/models/utils/constant.py

@@ -3,6 +3,7 @@ from hestia_earth.utils.tools import list_sum


 class Units(Enum):
+    BOOLEAN = 'boolean'
     HEAD = 'head'
     NUMBER = 'number'
     KG = 'kg'
@@ -31,6 +32,7 @@
     KG_COLD_CARCASS_WEIGHT = 'kg cold carcass weight'
     KG_COLD_DRESSED_CARCASS_WEIGHT = 'kg cold dressed carcass weight'
     KG_READY_TO_COOK_WEIGHT = 'kg ready-to-cook weight'
+    PERCENTAGE_AREA = '% area'
     TO_C = '-C'
     TO_N = '-N'


hestia_earth/models/utils/lookup.py

@@ -76,6 +76,17 @@ _ALLOW_ALL = 'all'
 def _is_site(site: dict): return site.get('@type', site.get('type')) == SchemaType.SITE.value


+def _get_sites(node: dict):
+    site = node.get('site', node.get('cycle', {}).get('site'))
+    other_sites = node.get('otherSites', node.get('cycle', {}).get('otherSites', []))
+    return non_empty_list([site] + other_sites)
+
+
+def _get_site_types(node: dict):
+    sites = [node] if _is_site(node) else _get_sites(node)
+    return non_empty_list([site.get('siteType') for site in sites])
+
+
 def _model_lookup_values(model: str, term: dict, restriction: str):
     lookup = download_lookup(f"{term.get('termType')}-model-{restriction}.csv")
     values = get_table_value(lookup, 'termid', term.get('@id'), column_name(model))
@@ -83,10 +94,11 @@ def _model_lookup_values(model: str, term: dict, restriction: str):


 def is_model_siteType_allowed(model: str, term: dict, data: dict):
-    site = data if _is_site(data) else data.get('site', data.get('cycle', {}).get('site')) or {}
-    site_type = site.get('siteType')
+    site_types = _get_site_types(data)
     allowed_values = _model_lookup_values(model, term, 'siteTypesAllowed')
-    return True if _ALLOW_ALL in allowed_values or not site_type else site_type in allowed_values
+    return True if _ALLOW_ALL in allowed_values or not site_types else any([
+        (site_type in allowed_values) for site_type in site_types
+    ])


 def _lookup_values(term: dict, column: str):
@@ -96,10 +108,11 @@ def _lookup_values(term: dict, column: str):


 def is_siteType_allowed(data: dict, term: dict):
-    site = data if _is_site(data) else data.get('site', data.get('cycle', {}).get('site')) or {}
-    site_type = site.get('siteType')
+    site_types = _get_site_types(data)
     allowed_values = _lookup_values(term, 'siteTypesAllowed')
-    return True if _ALLOW_ALL in allowed_values or not site_type else site_type in allowed_values
+    return True if _ALLOW_ALL in allowed_values or not site_types else any([
+        (site_type in allowed_values) for site_type in site_types
+    ])


 def is_product_termType_allowed(data: dict, term: dict):
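
Both siteType checks now collect the primary site and any `otherSites` before testing against the lookup. An illustrative re-statement of the new rule (not the library code): the model is allowed as soon as any of the collected site types appears in the allowed list.

    # placeholder values; the real allowed list comes from the siteTypesAllowed lookup
    allowed_values = ['cropland', 'permanent pasture']
    site_types = ['cropland', 'pond']  # primary site plus otherSites

    is_allowed = any(site_type in allowed_values for site_type in site_types)
    print(is_allowed)  # True: 'cropland' matches even though 'pond' does not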

hestia_earth/models/utils/property.py

@@ -87,17 +87,17 @@ def node_has_property(term_id: str):
     return lambda product: find_term_match(product.get('properties', []), term_id, None) is not None


-def node_property_lookup_value(model: str, term: dict, prop_id: str, default=None, **log_args):
+def node_property_lookup_value(model: str, node_term: dict, prop_id: str, default=None, **log_args):
     # as the lookup table might not exist, we are making sure we return `0` in thise case
     try:
-        lookup_name = f"{term.get('termType')}-property.csv"
+        lookup_name = f"{node_term.get('termType')}-property.csv"
         lookup = download_lookup(lookup_name)
-        term_id = term.get('@id')
+        term_id = node_term.get('@id')
         lookup_value = get_table_value(lookup, 'termid', term_id, column_name(prop_id))
         value = extract_grouped_data(lookup_value, 'Avg') if (
             isinstance(lookup_value, str) and 'Avg' in lookup_value
         ) else lookup_value
-        debugMissingLookup(lookup_name, 'termid', term_id, prop_id, value, model=model, term=term_id, **log_args)
+        debugMissingLookup(lookup_name, 'termid', term_id, prop_id, value, model=model, **log_args)
         return safe_parse_float(value, default=None)
     except Exception:
         return default
@@ -111,9 +111,9 @@ def get_node_property_value(model: str, node: dict, prop_id: str, default=None,
     return default if value is None else (value / 100 if units == '%' else value)


-def get_node_property_value_converted(model: str, node: dict, prop_id: str, default=None):
+def get_node_property_value_converted(model: str, node: dict, prop_id: str, default=None, **log_args):
     node_value = list_sum(node.get('value', []))
-    prop_value = get_node_property_value(model, node, prop_id)
+    prop_value = get_node_property_value(model, node, prop_id, **log_args)
     return default if prop_value is None else node_value * prop_value


hestia_earth/models/utils/site.py

@@ -5,6 +5,7 @@ from hestia_earth.utils.tools import non_empty_list, flatten, safe_parse_date

 from hestia_earth.models.log import debugMissingLookup
 from . import cached_value, _load_calculated_node
+from .term import get_land_cover_siteTypes

 CACHE_YEARS_KEY = 'years'
 WATER_TYPES = [
@@ -120,3 +121,9 @@ def region_factor(model: str, region_id: str, term_id: str, termType: TermTermTy
     value = get_table_value(download_lookup(lookup_name), 'termid', region_id, column_name(term_id))
     debugMissingLookup(lookup_name, 'termid', region_id, term_id, value, model=model, term=term_id)
     return value
+
+
+def get_land_cover_term_id(site_type: str):
+    land_cover_terms = get_land_cover_siteTypes()
+    term = next((term for term in land_cover_terms if term["name"].lower() == site_type.lower()), {})
+    return term.get('@id')
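
`get_land_cover_term_id` pairs with the new `get_land_cover_siteTypes` below: it matches a siteType name against the landCover glossary by case-insensitive name and returns that term's `@id`, or `None` when nothing matches. A hedged usage sketch, assuming hestia-earth-models 0.62.2 and access to the HESTIA API; the printed IDs are illustrative:

    from hestia_earth.models.utils.site import get_land_cover_term_id

    print(get_land_cover_term_id('cropland'))         # e.g. the @id of the "Cropland" landCover term
    print(get_land_cover_term_id('not a site type'))  # None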

hestia_earth/models/utils/source.py

@@ -18,7 +18,7 @@ def _find_source(biblio_title: str = None):


 def get_source(node: dict, biblio_title: str = None):
-    source = cached_value(node, CACHE_SOURCES_KEY, {}).get(biblio_title, _find_source(biblio_title))
+    source = cached_value(node, CACHE_SOURCES_KEY, {}).get(biblio_title) or _find_source(biblio_title)
     return {'source': source} if source else {}


hestia_earth/models/utils/term.py

@@ -1,4 +1,4 @@
-from hestia_earth.schema import SchemaType, TermTermType
+from hestia_earth.schema import SchemaType, TermTermType, SiteSiteType
 from hestia_earth.utils.lookup import download_lookup, get_table_value, column_name
 from hestia_earth.utils.api import find_node, search

@@ -625,3 +625,23 @@ def get_electricity_grid_mix_terms():
             ],
         }
     }, limit=LIMIT, fields=['@type', '@id', 'name', 'termType', 'units'])
+
+
+def get_land_cover_siteTypes():
+    """
+    Find all `Land Cover` terms with siteTypes
+
+    Returns
+    -------
+    List of landCover terms with associated siteTypes.
+    """
+    return search({
+        "bool": {
+            "must": [
+                {"match": {"@type": "Term"}},
+                {"match": {"termType": "landCover"}}
+            ],
+            "should": [{"match": {"name": siteType.value}} for siteType in SiteSiteType],
+            "minimum_should_match": 1
+        },
+    })

hestia_earth/models/version.py

@@ -1 +1 @@
-VERSION = '0.62.0'
+VERSION = '0.62.2'

{hestia_earth_models-0.62.0.dist-info → hestia_earth_models-0.62.2.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: hestia-earth-models
-Version: 0.62.0
+Version: 0.62.2
 Summary: Hestia's set of modules for filling gaps in the activity data using external datasets (e.g. populating soil properties with a geospatial dataset using provided coordinates) and internal lookups (e.g. populating machinery use from fuel use). Includes rules for when gaps should be filled versus not (e.g. never gap fill yield, gap fill crop residue if yield provided etc.).
 Home-page: https://gitlab.com/hestia-earth/hestia-engine-models
 Author: Hestia Team
@@ -12,7 +12,7 @@ Classifier: Programming Language :: Python :: 3.6
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: hestia-earth.schema ==29.*
-Requires-Dist: hestia-earth.utils >=0.13.0
+Requires-Dist: hestia-earth.utils >=0.13.2
 Requires-Dist: python-dateutil >=2.8.1
 Requires-Dist: CurrencyConverter ==0.16.8
 Requires-Dist: haversine >=2.7.0