hestia-earth-aggregation 0.21.14__py3-none-any.whl → 0.21.16__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hestia_earth/aggregation/__init__.py +3 -1
- hestia_earth/aggregation/aggregate_cycles.py +4 -1
- hestia_earth/aggregation/config/Cycle/processedFood.json +15 -3
- hestia_earth/aggregation/utils/aggregate_country_nodes.py +15 -33
- hestia_earth/aggregation/utils/aggregate_weighted.py +0 -6
- hestia_earth/aggregation/utils/cycle.py +27 -11
- hestia_earth/aggregation/utils/emission.py +15 -0
- hestia_earth/aggregation/utils/quality_score.py +6 -2
- hestia_earth/aggregation/utils/weights.py +30 -10
- hestia_earth/aggregation/version.py +1 -1
- {hestia_earth_aggregation-0.21.14.dist-info → hestia_earth_aggregation-0.21.16.dist-info}/METADATA +1 -1
- {hestia_earth_aggregation-0.21.14.dist-info → hestia_earth_aggregation-0.21.16.dist-info}/RECORD +15 -15
- {hestia_earth_aggregation-0.21.14.dist-info → hestia_earth_aggregation-0.21.16.dist-info}/WHEEL +0 -0
- {hestia_earth_aggregation-0.21.14.dist-info → hestia_earth_aggregation-0.21.16.dist-info}/licenses/LICENSE +0 -0
- {hestia_earth_aggregation-0.21.14.dist-info → hestia_earth_aggregation-0.21.16.dist-info}/top_level.txt +0 -0
hestia_earth/aggregation/__init__.py

@@ -88,12 +88,14 @@ def aggregate(
        filter_by_country=filter_by_country,
        include_covariance=include_covariance,
    )
-    logger.info("time=%s, unit=ms", current_time_ms() - now)
+    logger.info("Aggregations finished. time=%s, unit=ms", current_time_ms() - now)
+    now = current_time_ms()
    aggregations = (
        [recalculate(agg, product) for agg in aggregations]
        if should_recalculate(product)
        else aggregations
    )
+    logger.info("Recalculations finished. time=%s, unit=ms", current_time_ms() - now)
    aggregations = [
        calculate_score(cycle=agg, countries=countries) for agg in aggregations
    ]
hestia_earth/aggregation/aggregate_cycles.py

@@ -157,7 +157,10 @@ def aggregate_country(
    functional_unit = cycles_aggregated[0].get("functionalUnit")
    include_matrix = aggregate_with_matrix(product)
    cycles_aggregated = non_empty_list(
-        [
+        [
+            format_terms_results(cycle, product, country, start_year, end_year)
+            for cycle in cycles_aggregated
+        ]
    )
    cycles_aggregated = non_empty_list(
        map(
hestia_earth/aggregation/config/Cycle/processedFood.json

@@ -29,17 +29,29 @@
    },
    {
      "key": "emissions",
-      "model": "
+      "model": "worldSteel",
      "value": "cycle",
      "runStrategy": "always",
      "mergeStrategy": "list",
      "mergeArgs": {
        "replaceThreshold": ["value", 0.01]
-      }
+      },
+      "stage": 2
    },
    {
      "key": "emissions",
-      "model": "
+      "model": "bafu2025",
+      "value": "cycle",
+      "runStrategy": "always",
+      "mergeStrategy": "list",
+      "mergeArgs": {
+        "replaceThreshold": ["value", 0.01]
+      },
+      "stage": 2
+    },
+    {
+      "key": "emissions",
+      "model": "ecoinventV3AndEmberClimate",
      "value": "cycle",
      "runStrategy": "always",
      "mergeStrategy": "list",
hestia_earth/aggregation/utils/aggregate_country_nodes.py

@@ -61,7 +61,11 @@ from .site import (
    format_site_description,
    site_parent_region_id,
)
-from .emission import
+from .emission import (
+    get_method_tier,
+    get_method_model,
+    has_value_without_transformation,
+)
from .property import aggregate_properties
from .distribution import generate_blank_node_distribution
from .covariance import add_covariance_cycles, generate_covariance_cycles

@@ -110,6 +114,8 @@ class BlankNodeFormatted(TypedDict, total=False):
    completeness_key: Optional[str]
    start_date: Optional[str]
    end_date: str
+    inputs: List[dict]
+    transformation: List[dict]


# --- Cycle

@@ -243,6 +249,8 @@ def _group_cycle_blank_node(cycle: dict, product: dict, product_value: float):
        "economicValueShare": non_empty_list([_blank_node_evs(blank_nodes)]),
        # needed for background emissions
        "inputs": _map_values(blank_nodes, "inputs"),
+        # needed to exclude emissions with only transformation value
+        "transformation": _map_values(blank_nodes, "transformation"),
        "properties": _map_values(blank_nodes, "properties"),
        "methodTier": _map_values(blank_nodes, "methodTier"),
        "methodModel": _map_values(blank_nodes, "methodModel"),

@@ -293,20 +301,16 @@ def _group_cycle_blank_nodes(
    end_year: int = None,
):
    def grouper(group: dict, list_key: str) -> Dict[str, Dict[str, BlankNodeFormatted]]:
-        now = current_time_ms()
        blank_nodes = _filter_blank_nodes(cycle, list_key, start_year, end_year)
        values = reduce(
            _group_cycle_blank_node(cycle, product, product_value), blank_nodes, {}
        )
        # after combining all values, need to compute the final statistical values
        group[list_key] = {
-            k: v | _compute_blank_node_stats(v)
+            k: v | _compute_blank_node_stats(v)
+            for k, v in values.items()
+            if list_key != "emissions" or has_value_without_transformation(v)
        }
-        logger.debug(
-            "function=_group_cycle_blank_nodes, list_key=%s, time=%s",
-            list_key,
-            current_time_ms() - now,
-        )
        return group

    return grouper

@@ -323,6 +327,7 @@ def _format_cycle(
    Returns the list of formatted Cycles, and the list of Site ids to be downloaded.
    Note: if a Site does not have an `@id`, it means it's nested within the Cycle.
    """
+    now = current_time_ms()
    product_value = _cycle_product_value(cycle, product)
    data: CycleFormatted = (
        (

@@ -355,8 +360,9 @@ def _format_cycle(
    )
    if data:
        logger.debug(
-            "id=%s, yield=%s, organic=%s, irrigated=%s",
+            "id=%s, time=%sms, yield=%s, organic=%s, irrigated=%s",
            cycle.get("@id"),
+            current_time_ms() - now,
            product_value,
            data["organic"],
            data["irrigated"],

@@ -412,7 +418,6 @@ def _combine_cycle_blank_nodes(
    cycles: List[CycleFormatted], completeness: Dict[str, int], cycle_count: int
):
    def combine(group: dict, list_key: str):
-        now = current_time_ms()
        # get all possible keys first, then group each key values into a single blank node
        keys = set(flatten([list(cycle.get(list_key, {}).keys()) for cycle in cycles]))
        group[list_key] = reduce(

@@ -420,11 +425,6 @@ def _combine_cycle_blank_nodes(
            keys,
            {},
        )
-        logger.debug(
-            "function=_combine_cycle_blank_nodes, list_key=%s, time=%s",
-            list_key,
-            current_time_ms() - now,
-        )
        return group

    return combine

@@ -524,14 +524,8 @@ def _group_site_blank_nodes(
    site: SiteJSONLD, start_year: int = None, end_year: int = None
):
    def grouper(group: dict, list_key: str) -> Dict[str, BlankNodeFormatted]:
-        now = current_time_ms()
        blank_nodes = _filter_blank_nodes(site, list_key, start_year, end_year)
        group[list_key] = reduce(_group_site_blank_node(), blank_nodes, {})
-        logger.debug(
-            "function=_group_site_blank_nodes, list_key=%s, time=%s",
-            list_key,
-            current_time_ms() - now,
-        )
        return group

    return grouper

@@ -578,15 +572,9 @@ def _combine_site_blank_node(sites: List[SiteFormatted], list_key: str):

def _combine_site_blank_nodes(sites: List[SiteFormatted]):
    def combine(group: dict, list_key: str):
-        now = current_time_ms()
        # get all possible keys first, then group each key values into a single blank node
        keys = set(flatten([list(site.get(list_key, {}).keys()) for site in sites]))
        group[list_key] = reduce(_combine_site_blank_node(sites, list_key), keys, {})
-        logger.debug(
-            "function=_combine_site_blank_nodes, list_key=%s, time=%s",
-            list_key,
-            current_time_ms() - now,
-        )
        return group

    return combine

@@ -672,15 +660,9 @@ def _aggregate_formatted(
    data: Union[CycleFormatted, SiteFormatted], aggregated_keys: List[str]
):
    def aggregate(key: str):
-        now = current_time_ms()
        values = data.get(key, {}).values()
        logger.debug(f"Aggregating {len(values)} {key}...")
        values = list(map(_aggregate_blank_node, values))
-        logger.debug(
-            "function=_aggregate_formatted, key=%s, time=%s",
-            key,
-            current_time_ms() - now,
-        )
        return values

    return reduce(lambda group, key: group | {key: aggregate(key)}, aggregated_keys, {})
hestia_earth/aggregation/utils/aggregate_weighted.py

@@ -6,11 +6,9 @@ from hestia_earth.utils.tools import (
    list_sum,
    list_average,
    is_boolean,
-    current_time_ms,
)
from hestia_earth.utils.blank_node import get_node_value

-from hestia_earth.aggregation.log import logger
from . import weighted_average, _min, _max, _sd, format_evs, pick
from .emission import get_method_tier, get_method_model
from .property import aggregate_properties

@@ -190,7 +188,6 @@ def _aggregate_nodes(
    aggregate_keys: List[str], data: dict, weights: dict, missing_weights_node_id_func
):
    def aggregate_single(key: str):
-        now = current_time_ms()
        aggregates_map: dict = data.get(key)
        terms = aggregates_map.keys()
        values = non_empty_list(

@@ -199,9 +196,6 @@ def _aggregate_nodes(
                terms,
            )
        )
-        logger.debug(
-            "function=_aggregate_nodes, key=%s, time=%s", key, current_time_ms() - now
-        )
        return values

    return reduce(
hestia_earth/aggregation/utils/cycle.py

@@ -41,15 +41,18 @@ from .practice import new_practice, organic_practice
from .product import new_product
from .weights import format_weights

-_MEAN_DATE_DESCRIPTION = (
-    "Additional notes: the mean endDate of all aggregated Cycles is "
-)
-

def _timestamp():
    return datetime.now().strftime("%Y%m%d")


+def _mean_date_description(start: int, end: int, average: int):
+    return (
+        f"While the aggregation is for the period {start} - {end}, "
+        + f"the average end date of all aggregated Cycles is {average}."
+    )
+
+
def _mean_date(dates: list):
    years = [int(d.split("-")[0]) for d in dates]
    return str(round(mean(years))) if years else ""

@@ -58,13 +61,17 @@ def _mean_date(dates: list):
def _combine_mean_dates(cycles: list):
    # parse the mean date from the description of sub-country nodes
    descriptions = non_empty_list(map(lambda c: c.get("description"), cycles))
-    dates = [v
+    dates = [v[-5:-1] for v in descriptions]
    return _mean_date(dates)


def _aggregated_weights(weights: dict):
-    description = format_weights(weights.values())
-    return (
+    description = format_weights(weights.values(), include_default_combinations=True)
+    return (
+        f"This aggregation uses the following weights on sub-systems: ({description})"
+        if description
+        else ""
+    )


def is_organic(cycle: dict):

@@ -183,7 +190,9 @@ def _format_results(
    return {}


-def format_terms_results(data: dict, product: dict, country: dict):
+def format_terms_results(
+    data: dict, product: dict, country: dict, start_year: int, end_year: int
+):
    inputs = data.get("inputs", [])
    practices = data.get("practices", [])
    products = data.get("products", [])

@@ -193,8 +202,11 @@ def format_terms_results(data: dict, product: dict, country: dict):
    cycle = _format_results(
        cycle=_create_cycle(
            {
-                "description":
-
+                "description": _mean_date_description(
+                    start=start_year,
+                    end=end_year,
+                    average=_mean_date(data.get("endDate")),
+                )
            }
        ),
        product=product,

@@ -259,7 +271,11 @@ def format_country_results(
            non_empty_list(
                [
                    _aggregated_weights(weights),
-
+                    _mean_date_description(
+                        start=cycle.get("startDate"),
+                        end=cycle.get("endDate"),
+                        average=_combine_mean_dates(cycles),
+                    ),
                ]
            )
        ),
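Note on the cycle.py change above: the fixed _MEAN_DATE_DESCRIPTION prefix is replaced by _mean_date_description, and _combine_mean_dates now recovers the mean year by slicing the last four digits before the final full stop (v[-5:-1]). A minimal standalone sketch of the sentence the new helper produces, with illustrative years:

def _mean_date_description(start: int, end: int, average: int):
    # body copied from the hunk above
    return (
        f"While the aggregation is for the period {start} - {end}, "
        + f"the average end date of all aggregated Cycles is {average}."
    )

print(_mean_date_description(start=2010, end=2019, average=2015))
# -> "While the aggregation is for the period 2010 - 2019, the average end date of all aggregated Cycles is 2015."

The v[-5:-1] slice in _combine_mean_dates relies on this description ending with a four-digit year followed by a full stop.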
hestia_earth/aggregation/utils/emission.py

@@ -69,3 +69,18 @@ def get_method_model(emissions: list):
    values = non_empty_list(flatten([e.get("methodModel", []) for e in emissions]))
    values = list({v["@id"]: v for v in values}.values())
    return values[0] if len(values) == 1 else None
+
+
+def has_value_without_transformation(blank_node: dict):
+    values = blank_node.get("value", [])
+    transformations = blank_node.get("transformation", [])
+    return (
+        not transformations
+        or len(transformations) != len(values)
+        or any(
+            [
+                value is not None and transformations[index] is None
+                for index, value in enumerate(values)
+            ]
+        )
+    )
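For reference, the helper added above is used in aggregate_country_nodes.py to drop grouped emissions whose every value comes from a transformation. A short sketch of how it classifies blank nodes; the function body is taken from the hunk, while the example blank nodes are illustrative:

def has_value_without_transformation(blank_node: dict):
    values = blank_node.get("value", [])
    transformations = blank_node.get("transformation", [])
    return (
        not transformations
        or len(transformations) != len(values)
        or any(
            [
                value is not None and transformations[index] is None
                for index, value in enumerate(values)
            ]
        )
    )

# no transformation data at all -> emission is kept
assert has_value_without_transformation({"value": [10.0, 20.0]}) is True
# every value is paired with a transformation value -> emission is dropped when grouping "emissions"
assert has_value_without_transformation({"value": [10.0, 20.0], "transformation": [1.0, 2.0]}) is False
# at least one value has no matching transformation -> emission is kept
assert has_value_without_transformation({"value": [10.0, 20.0], "transformation": [1.0, None]}) is True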
hestia_earth/aggregation/utils/quality_score.py

@@ -175,16 +175,19 @@ def _calculate_score_emissions_system_boundary(cycle: dict, *args):
    # get all emissions in the system boundary
    all_emissions_ids = cycle_emissions_in_system_boundary(cycle)
    # ignore emissions that should be skipped in aggregation
-
+    skipped_emission_ids = [
        id
        for id in all_emissions_ids
-        if should_aggregate_blank_node(
+        if not should_aggregate_blank_node(
            {
                "@type": SchemaType.EMISSION.value,
                "term": {"@id": id, "termType": TermTermType.EMISSION.value},
            }
        )
    ]
+    all_emissions_ids = [
+        id for id in all_emissions_ids if id not in skipped_emission_ids
+    ]

    # get all emissions in the Cycle that are in the system boundary
    emissions_ids = list(

@@ -207,6 +210,7 @@ def _calculate_score_emissions_system_boundary(cycle: dict, *args):
        included_emissions=len(emissions_ids),
        all_included=all_included,
        missing_emissions=";".join(missing_emissions),
+        excluded_emissions=";".join(skipped_emission_ids),
    )

    return all_included
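The quality-score change above inverts the filter (an emission is skipped when should_aggregate_blank_node returns False) and removes the skipped ids from the system-boundary total before the score is computed, reporting them as excluded_emissions. An illustrative sketch of the two-step filter, with a stubbed predicate and made-up term ids:

def should_aggregate_blank_node(blank_node: dict) -> bool:
    # stand-in for the real lookup-based check
    return blank_node["term"]["@id"] != "emissionTermSkippedInAggregation"

all_emissions_ids = ["emissionTermA", "emissionTermSkippedInAggregation"]
skipped_emission_ids = [
    id
    for id in all_emissions_ids
    if not should_aggregate_blank_node(
        {"@type": "Emission", "term": {"@id": id, "termType": "emission"}}
    )
]
all_emissions_ids = [id for id in all_emissions_ids if id not in skipped_emission_ids]
# skipped_emission_ids == ["emissionTermSkippedInAggregation"], all_emissions_ids == ["emissionTermA"]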
hestia_earth/aggregation/utils/weights.py

@@ -16,18 +16,40 @@ from .term import (
)


+def _weight_to_key(weights: list):
+    return "_".join(non_empty_list(weights))
+
+
def _format_weight_key(key: str):
    return ", ".join(key.split("_")).capitalize()


-
+_WEIGHTS_KEYS = [["organic", "conventional"], ["irrigated", "non-irrigated"]]
+_WEIGHTS_COMBINATIONS = [
+    _weight_to_key([organic_key, irrigated_key])
+    for organic_key in _WEIGHTS_KEYS[0]
+    for irrigated_key in _WEIGHTS_KEYS[1]
+]
+
+
+def format_weights(weights: list, include_default_combinations: bool = False):
    total = list_sum(non_empty_list([w.get("weight") for w in weights]))
-
+    included_keys = non_empty_list([weight.get("key") for weight in weights])
+    return "; ".join(
        [
-            f"{_format_weight_key(weight.get('key'))}: {round(weight.get('weight') * 100 / total, 2)}"
+            f"{_format_weight_key(weight.get('key'))}: {round(weight.get('weight') * 100 / total, 2)}%"
            for weight in weights
            if weight.get("key")
        ]
+        + (
+            [
+                f"{_format_weight_key(key)}: 0%"
+                for key in _WEIGHTS_COMBINATIONS
+                if key not in included_keys
+            ]
+            if include_default_combinations and included_keys
+            else []
+        )
    )


@@ -119,13 +141,11 @@ def _country_weights(
    weight = (organic_weight if node.get("organic", False) else 1 - organic_weight) * (
        irrigated_weight if node.get("irrigated", False) else 1 - irrigated_weight
    )
-    key =
-
-
-
-
-    ]
-    )
+    key = _weight_to_key(
+        [
+            "organic" if node.get("organic", False) else "conventional",
+            "irrigated" if node.get("irrigated", False) else "non-irrigated",
+        ]
    )
    return {node_id: {"weight": weight, "completeness": completeness, "key": key}}

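The weights.py change above appends a percent sign to each weight and, when include_default_combinations=True, pads the description with the organic/irrigated combinations that received no weight. A rough standalone sketch of the resulting string, using simplified stand-ins for the list_sum/non_empty_list helpers and illustrative weights:

def _weight_to_key(weights: list):
    return "_".join([w for w in weights if w])

def _format_weight_key(key: str):
    return ", ".join(key.split("_")).capitalize()

_WEIGHTS_KEYS = [["organic", "conventional"], ["irrigated", "non-irrigated"]]
_WEIGHTS_COMBINATIONS = [
    _weight_to_key([organic_key, irrigated_key])
    for organic_key in _WEIGHTS_KEYS[0]
    for irrigated_key in _WEIGHTS_KEYS[1]
]

def format_weights(weights: list, include_default_combinations: bool = False):
    # simplified stand-ins for list_sum / non_empty_list
    total = sum(w.get("weight") for w in weights if w.get("weight") is not None)
    included_keys = [w.get("key") for w in weights if w.get("key")]
    return "; ".join(
        [
            f"{_format_weight_key(w.get('key'))}: {round(w.get('weight') * 100 / total, 2)}%"
            for w in weights
            if w.get("key")
        ]
        + (
            [f"{_format_weight_key(key)}: 0%" for key in _WEIGHTS_COMBINATIONS if key not in included_keys]
            if include_default_combinations and included_keys
            else []
        )
    )

weights = [
    {"key": "organic_irrigated", "weight": 1},
    {"key": "conventional_non-irrigated", "weight": 3},
]
print(format_weights(weights, include_default_combinations=True))
# Organic, irrigated: 25.0%; Conventional, non-irrigated: 75.0%; Organic, non-irrigated: 0%; Conventional, irrigated: 0%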
hestia_earth/aggregation/version.py

@@ -1 +1 @@
-VERSION = "0.21.14"
+VERSION = "0.21.16"
{hestia_earth_aggregation-0.21.14.dist-info → hestia_earth_aggregation-0.21.16.dist-info}/RECORD
RENAMED

@@ -1,19 +1,19 @@
-hestia_earth/aggregation/__init__.py,sha256=
-hestia_earth/aggregation/aggregate_cycles.py,sha256=
+hestia_earth/aggregation/__init__.py,sha256=kSK6RYcK0YnRGx6TknaukhuhbnxRHofLI_8PA0-k8Yg,3339
+hestia_earth/aggregation/aggregate_cycles.py,sha256=MR-SseQRMGFUOKiBJcLVysjdjw5J-QfJ258VxGhasFw,7868
hestia_earth/aggregation/log.py,sha256=0CibXDtKqFfE-R86qziM2uH7ATUZ6I9p_bVpRSn_e_o,2351
hestia_earth/aggregation/recalculate_cycles.py,sha256=xCO_tj5BAC407vZ36EVuxeOBmX2y82EXRrTx5W6g4CY,484
-hestia_earth/aggregation/version.py,sha256=
-hestia_earth/aggregation/config/Cycle/processedFood.json,sha256=
+hestia_earth/aggregation/version.py,sha256=Mlni8p7cuSE5CwxrazB-rArGWlKBFeBsDeVs2L8bT_E,20
+hestia_earth/aggregation/config/Cycle/processedFood.json,sha256=OBYXxleifxg9EJfZsqR1dgCxlEe2lIrCBqMe_Jo5XE4,1387
hestia_earth/aggregation/utils/__init__.py,sha256=nVVLgVFZ4415CgQAqgv4jDamA7gDmzOWk5SY4vshAQs,6583
-hestia_earth/aggregation/utils/aggregate_country_nodes.py,sha256=
-hestia_earth/aggregation/utils/aggregate_weighted.py,sha256=
+hestia_earth/aggregation/utils/aggregate_country_nodes.py,sha256=FkfiOcWIE9a8duowZgOtFiyr4m-UwnawXRiZ-geRrTY,27924
+hestia_earth/aggregation/utils/aggregate_weighted.py,sha256=OzrYhbL7g-X_1x7BLEwkxqLlDOLQ7CdFV8UuQTzQEz4,6817
hestia_earth/aggregation/utils/blank_node.py,sha256=f8JBcd5_SLqW8juzFAfbZysAESaZXj8JX6C7rDkLEoc,16812
hestia_earth/aggregation/utils/combine.py,sha256=EmXUZksdkrtQ6o1gMOG5WMivUhL28NKSM9D4cPohJiE,4904
hestia_earth/aggregation/utils/completeness.py,sha256=eZ759PAdOSshhys-YKKPEpAYxR3sTrEjI--WJTEwmus,4633
hestia_earth/aggregation/utils/covariance.py,sha256=2VaNGPvyD4ub3rR-OBn8VIEIeAz4JOefl6ZlAtXKj4U,5884
-hestia_earth/aggregation/utils/cycle.py,sha256=
+hestia_earth/aggregation/utils/cycle.py,sha256=d65RvgqJiq4sr12SShLKjM50MO95V07I88LU7p__Ixc,16496
hestia_earth/aggregation/utils/distribution.py,sha256=2XQKXVu-1XUy1zEHgThERupaj4RizXKO5F-VY-QQlMo,6935
-hestia_earth/aggregation/utils/emission.py,sha256=
+hestia_earth/aggregation/utils/emission.py,sha256=2roXjGmDuS7a3muPJoEOqg7E-TZyQsnoavhU6b3B-Z8,2983
hestia_earth/aggregation/utils/group.py,sha256=qU9sAuJu0RG80yLs90nsyoOcU57Pb6M1WKwT1TlJEkM,4765
hestia_earth/aggregation/utils/input.py,sha256=pZ1bcdHR3y4S-3b0JZllbEUWdEyVZbltxv-07j7SucI,1092
hestia_earth/aggregation/utils/lookup.py,sha256=osJiXTRns5KeBMz3mRsanmFQTATrwcMVBzB07xrb71M,2046

@@ -22,14 +22,14 @@ hestia_earth/aggregation/utils/measurement.py,sha256=tdQwjo1hPb3iAaboBQiua832q27
hestia_earth/aggregation/utils/practice.py,sha256=MXZsQoeSXxe2vmdjvyS58HZN9jejPZNbVnczTc3HxxU,2252
hestia_earth/aggregation/utils/product.py,sha256=dEUCy0eakyKyD2vu1RRnF4bdAI-jpS-qePNF8ZGhHFE,886
hestia_earth/aggregation/utils/property.py,sha256=n_aHZ4qASv_wcDJ_HrA1T2kFX8R2zUEo91LCPL5xWkw,917
-hestia_earth/aggregation/utils/quality_score.py,sha256=
+hestia_earth/aggregation/utils/quality_score.py,sha256=PPNRW1wEkVqP7QExyu5dcr8R_Z_DWkmT3zmQG9u470Y,8914
hestia_earth/aggregation/utils/queries.py,sha256=kzob6CHL1a60CRJdGICiWS98LKJRyq3uaToyuQyl9RI,17564
hestia_earth/aggregation/utils/site.py,sha256=nc1N4IRiTki69IbCX4G6lagQ5U7IpsOsqiPuHwbl1HM,5455
hestia_earth/aggregation/utils/source.py,sha256=SOiE-jB5WNtbRxreUWQ8c04ywzrwICy1aGyRL3-L0RY,686
hestia_earth/aggregation/utils/term.py,sha256=uVAfBYnxNkcTNfzuwwHur6JGBahNrPwR2pN-__sj9zk,2758
-hestia_earth/aggregation/utils/weights.py,sha256=
-hestia_earth_aggregation-0.21.
-hestia_earth_aggregation-0.21.
-hestia_earth_aggregation-0.21.
-hestia_earth_aggregation-0.21.
-hestia_earth_aggregation-0.21.
+hestia_earth/aggregation/utils/weights.py,sha256=Qk5hCr_K24DzkeE4Sa241pGb6m00iaWHIXFQAJ-cDGc,7518
+hestia_earth_aggregation-0.21.16.dist-info/licenses/LICENSE,sha256=TD25LoiRJsA5CPUNrcyt1PXlGcbUGFMAeZoBcfCrCNE,1154
+hestia_earth_aggregation-0.21.16.dist-info/METADATA,sha256=38XSLUkdQu3ePpK38m0b_ZjiLPjWqANxTFmEk9dshCs,2481
+hestia_earth_aggregation-0.21.16.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+hestia_earth_aggregation-0.21.16.dist-info/top_level.txt,sha256=q0QxKEYx9uLpAD5ZtC7Ypq29smEPfOzEAn7Xv8XHGOQ,13
+hestia_earth_aggregation-0.21.16.dist-info/RECORD,,
{hestia_earth_aggregation-0.21.14.dist-info → hestia_earth_aggregation-0.21.16.dist-info}/WHEEL
RENAMED
File without changes

{hestia_earth_aggregation-0.21.14.dist-info → hestia_earth_aggregation-0.21.16.dist-info}/licenses/LICENSE
RENAMED
File without changes

{hestia_earth_aggregation-0.21.14.dist-info → hestia_earth_aggregation-0.21.16.dist-info}/top_level.txt
RENAMED
File without changes