openepd 7.5.0__py3-none-any.whl → 7.7.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- openepd/__version__.py +1 -1
- openepd/m49/const.py +8 -1
- openepd/m49/utils.py +25 -4
- openepd/model/base.py +64 -6
- openepd/model/lcia.py +133 -25
- openepd/model/org.py +15 -6
- openepd/model/versioning.py +1 -1
- openepd/utils/__init__.py +15 -0
- openepd/utils/mapping/__init__.py +15 -0
- openepd/utils/mapping/common.py +217 -0
- openepd/utils/mapping/geography.py +58 -0
- openepd/utils/markdown.py +52 -0
- {openepd-7.5.0.dist-info → openepd-7.7.0.dist-info}/METADATA +1 -1
- {openepd-7.5.0.dist-info → openepd-7.7.0.dist-info}/RECORD +16 -11
- {openepd-7.5.0.dist-info → openepd-7.7.0.dist-info}/LICENSE +0 -0
- {openepd-7.5.0.dist-info → openepd-7.7.0.dist-info}/WHEEL +0 -0
openepd/__version__.py
CHANGED
openepd/m49/const.py
CHANGED
@@ -695,6 +695,8 @@ REGION_VERBOSE_NAME_TO_M49 = {
     "Polynesia": "061",
 }
 
+REGION_VERBOSE_NAME_TO_M49_LOWER = {k.lower(): v for k, v in REGION_VERBOSE_NAME_TO_M49.items()}
+
 M49_TO_REGION_VERBOSE_NAME = {
     M49_CODE_WORLD: "World",
     M49_CODE_AFRICA: "Africa",
@@ -769,7 +771,7 @@ COUNTRY_VERBOSE_NAME_TO_M49 = {
     "Colombia": "170",
     "Comoros": "174",
     "Congo": "178",
-    "Congo (Democratic Republic of the)": "180",
+    "Congo (Democratic Republic of the Congo)": "180",
     "Costa Rica": "188",
     "Croatia": "191",
     "Cuba": "192",
@@ -905,6 +907,7 @@ COUNTRY_VERBOSE_NAME_TO_M49 = {
     "Trinidad and Tobago": "780",
     "Tunisia": "788",
     "Türkiye": "792",
+    "Turkey": "792",
     "Turkmenistan": "795",
     "Tuvalu": "798",
     "Uganda": "800",
@@ -913,6 +916,8 @@ COUNTRY_VERBOSE_NAME_TO_M49 = {
     "United Kingdom of Great Britain and Northern Ireland": "826",
     "United Republic of Tanzania": "834",
     "United States of America": "840",
+    "United States": "840",
+    "USA": "840",
     "Uruguay": "858",
     "Uzbekistan": "860",
     "Vanuatu": "548",
@@ -923,6 +928,8 @@ COUNTRY_VERBOSE_NAME_TO_M49 = {
     "Zimbabwe": "716",
 }
 
+COUNTRY_VERBOSE_NAME_TO_M49_LOWER = {k.lower(): v for k, v in COUNTRY_VERBOSE_NAME_TO_M49.items()}
+
 M49_TO_COUNTRY_VERBOSE_NAME = {
     "004": "Afghanistan",
     "008": "Albania",
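Usage sketch (not part of the diff): the new `*_LOWER` tables are lowercase-keyed copies of the existing verbose-name mappings, so callers can look names up case-insensitively. A minimal example using only values visible in the hunks above:

```python
from openepd.m49.const import (
    COUNTRY_VERBOSE_NAME_TO_M49_LOWER,
    REGION_VERBOSE_NAME_TO_M49_LOWER,
)

# Case-insensitive name -> M49 code lookups; "Turkey" and "USA" are aliases added in 7.7.0.
assert COUNTRY_VERBOSE_NAME_TO_M49_LOWER["usa"] == "840"
assert COUNTRY_VERBOSE_NAME_TO_M49_LOWER["turkey"] == "792"
assert REGION_VERBOSE_NAME_TO_M49_LOWER["polynesia"] == "061"
```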
openepd/m49/utils.py
CHANGED
@@ -14,6 +14,7 @@
 # limitations under the License.
 #
 __all__ = [
+    "is_iso_code",
     "is_m49_code",
     "iso_to_m49",
     "m49_to_iso",
@@ -25,14 +26,14 @@ __all__ = [
 from collections.abc import Collection
 
 from openepd.m49.const import (
-
+    COUNTRY_VERBOSE_NAME_TO_M49_LOWER,
     ISO3166_ALPHA2_TO_M49,
     M49_AREAS,
     M49_TO_COUNTRY_VERBOSE_NAME,
     M49_TO_ISO3166_ALPHA2,
     M49_TO_REGION_VERBOSE_NAME,
     OPENEPD_SPECIAL_REGIONS,
-
+    REGION_VERBOSE_NAME_TO_M49_LOWER,
 )
 
 
@@ -99,7 +100,9 @@ def region_and_country_names_to_m49(regions: Collection[str]) -> set[str]:
 
     result = set()
     for name in regions:
-        m49_code =
+        m49_code = REGION_VERBOSE_NAME_TO_M49_LOWER.get(name.lower()) or COUNTRY_VERBOSE_NAME_TO_M49_LOWER.get(
+            name.lower()
+        )
         if not m49_code:
             msg = f"Region or country name '{name}' not found in M49 region codes."
             raise ValueError(msg)
@@ -162,7 +165,7 @@ def openepd_to_m49(regions: Collection[str]) -> set[str]:
     return result
 
 
-def m49_to_openepd(regions:
+def m49_to_openepd(regions: Collection[str]) -> set[str]:
     """
     Convert M49 region codes to OpenEPD geography definitions.
 
@@ -203,4 +206,22 @@ def is_m49_code(to_check: str) -> bool:
     :param to_check: any string
     :return: `True` if passed string is M49 code, `False` otherwise
     """
+    if not to_check:
+        return False
+    if len(to_check) != 3 or not to_check.isdigit():
+        return False
     return to_check in M49_AREAS or to_check in M49_TO_ISO3166_ALPHA2
+
+
+def is_iso_code(to_check: str) -> bool:
+    """
+    Check if passed string is ISO3166 alpha2 code.
+
+    :param to_check: any string
+    :return: `True` if passed string is ISO3166 alpha2 code, `False` otherwise
+    """
+    if not to_check:
+        return False
+    if len(to_check) != 2:
+        return False
+    return to_check.upper() in ISO3166_ALPHA2_TO_M49
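Usage sketch (not part of the diff): `is_iso_code()` complements `is_m49_code()`, and both now reject empty or wrongly-sized input before consulting the lookup tables; name resolution goes through the new lowercase tables. Expected behaviour, assuming the `const.py` tables shown above:

```python
from openepd.m49 import utils as m49_utils

assert m49_utils.is_m49_code("840")        # three digits, known M49 code
assert not m49_utils.is_m49_code("84")     # wrong length -> rejected early
assert m49_utils.is_iso_code("us")         # ISO3166 alpha-2, case-insensitive
assert not m49_utils.is_iso_code("usa")    # wrong length -> rejected early

# Region/country name resolution is now case-insensitive and accepts the new aliases.
assert m49_utils.region_and_country_names_to_m49(["usa", "Polynesia"]) == {"840", "061"}
```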
openepd/model/base.py
CHANGED
@@ -16,8 +16,7 @@
 import abc
 from collections.abc import Callable
 from enum import StrEnum
-import
-from typing import Any, ClassVar, Generic, Optional, TypeAlias, TypeVar
+from typing import Any, ClassVar, Generic, Literal, NotRequired, Optional, Self, TypeAlias, TypedDict, TypeVar, Unpack
 
 from cqd import open_xpd_uuid  # type:ignore[import-untyped]
 import pydantic
@@ -63,6 +62,17 @@ def modify_pydantic_schema(schema_dict: dict, cls: type) -> dict:
     return schema_dict
 
 
+class SerializingArgs(TypedDict):
+    include: NotRequired[set[int] | set[str] | dict[int, Any] | dict[str, Any] | None]
+    exclude: NotRequired[set[int] | set[str] | dict[int, Any] | dict[str, Any] | None]
+    by_alias: NotRequired[bool]
+    exclude_unset: NotRequired[bool]
+    exclude_defaults: NotRequired[bool]
+    exclude_none: NotRequired[bool]
+    round_trip: NotRequired[bool]
+    warnings: NotRequired[bool]
+
+
 class BaseOpenEpdSchema(pydantic.BaseModel):
     """Base class for all OpenEPD models."""
 
@@ -73,17 +83,56 @@ class BaseOpenEpdSchema(pydantic.BaseModel):
         use_enum_values=True,
     )
 
-    def to_serializable(
+    def to_serializable(
+        self, mode: Literal["json", "python"] | str = "json", **kwargs: Unpack[SerializingArgs]
+    ) -> dict[str, Any]:
         """
         Return a serializable dict representation of the DTO.
 
         It expects the same arguments as the pydantic.BaseModel.model_dump_json() method.
         """
-
+        kwargs.setdefault("exclude_none", True)
+        kwargs.setdefault("exclude_unset", True)
+        kwargs.setdefault("by_alias", True)
+        return self.model_dump(mode=mode, **kwargs)
+
+    def to_json(self, indent: int = 0, **kwargs: Unpack[SerializingArgs]):
+        """
+        Return a JSON string representation of the DTO.
+
+        It expects the same arguments as the pydantic.BaseModel.model_dump_json() method.
+        """
+        kwargs.setdefault("exclude_none", True)
+        kwargs.setdefault("exclude_unset", True)
+        kwargs.setdefault("by_alias", True)
+        return self.model_dump_json(indent=indent, **kwargs)
+
+    def to_dict(self, **kwargs: Unpack[SerializingArgs]) -> dict[str, Any]:
+        """
+        Return a dictionary representation of the DTO.
+
+        The main difference with `to_serializable` is that it doesn't convert all native python types
+        (e.g. datetime won't be converted into string) into JSON-compatible types.
 
-
+        Method expects the same kwargs as the pydantic.BaseModel.model_dump() method.
+        """
+        return self.to_serializable(mode="python", **kwargs)
+
+    def has_values(self, exclude_fields: set[str] | None = None) -> bool:
         """Return True if the model has any values."""
-
+        if isinstance(self, RootDocument) and exclude_fields is None:
+            exclude_fields = {"doctype", "openepd_version"}
+        return len(self.model_dump(exclude_unset=True, exclude_none=True, exclude=exclude_fields)) > 0
+
+    def revalidate(self, strict: bool | None = None) -> Self:
+        """
+        Re-run validation against current model and return a new validated instance.
+
+        Note: This method returns a validated _COPY_ of the object.
+
+        :param strict: If True, will raise an error if any field is missing or has an invalid value.
+        """
+        return self.model_validate(self.to_dict(), strict=strict)
 
     def set_ext(self, ext: "OpenEpdExtension") -> None:
         """Set the extension field."""
@@ -197,6 +246,15 @@ class RootDocument(abc.ABC, BaseOpenEpdSchema):
         default=OpenEpdVersions.get_current().as_str(),
     )
 
+    def __init__(self, **data: Any) -> None:
+        super().__init__(**data)
+        if not hasattr(self, "_FORMAT_VERSION"):
+            msg = f"RootDocument subclass {self.__class__.__name__} must define _FORMAT_VERSION class variable."
+            raise ValueError(msg)
+        if "openepd_version" not in self.model_fields_set:
+            self.openepd_version = self._FORMAT_VERSION
+        self.doctype = self.__class__.model_fields["doctype"].default  # type: ignore[assignment]
+
     @pydantic.field_validator(OPENEPD_VERSION_FIELD)
     def version_format_validator(cls, value: str) -> str:
         """Validate the correctness of version format."""
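Usage sketch (not part of the diff): the serialization helpers default to `by_alias`, `exclude_unset` and `exclude_none`, and `revalidate()` returns a validated copy. A minimal example with a hypothetical `Widget` model, defined here only for illustration:

```python
import pydantic

from openepd.model.base import BaseOpenEpdSchema


class Widget(BaseOpenEpdSchema):
    """Hypothetical model, not part of openepd; used only to exercise the new helpers."""

    name: str | None = pydantic.Field(default=None, alias="Name")
    mass_kg: float | None = None


w = Widget(Name="test widget")
print(w.to_serializable())   # {'Name': 'test widget'} - unset/None fields dropped, aliases used
print(w.to_json(indent=2))   # same content as a JSON string
print(w.to_dict())           # python-mode dump (native types are preserved)
print(w.has_values())        # True
w2 = w.revalidate()          # validated copy of the object
```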
openepd/model/lcia.py
CHANGED
@@ -13,11 +13,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+from collections.abc import Generator
 from enum import StrEnum
-from typing import Any, ClassVar, Self
+from typing import Any, ClassVar, Self, cast
 
 import pydantic
-from pydantic import
+from pydantic.annotated_handlers import GetJsonSchemaHandler
+from pydantic.json_schema import GenerateJsonSchema
+from pydantic_core import CoreSchema
 
 from openepd.model.base import BaseOpenEpdSchema
 from openepd.model.common import Measurement
@@ -206,7 +209,7 @@ class ScopeSet(BaseOpenEpdSchema):
     def _unit_validator(self) -> Self:
         all_units = set()
 
-        for k in self.model_fields:
+        for k in self.__class__.model_fields:
             v = getattr(self, k, None)
             if isinstance(v, Measurement):
                 all_units.add(v.unit)
@@ -246,22 +249,91 @@ class ScopeSet(BaseOpenEpdSchema):
 class ScopesetByNameBase(BaseOpenEpdSchema, extra="allow"):
     """Base class for the data structures presented as typed name:scopeset mapping ."""
 
-    def get_scopeset_names(self) -> list[str]:
+    def get_scopeset_names(self, exclude_none: bool = False, only_standard: bool = False) -> list[str]:
         """
         Get the names of scopesets which have been set by model (not defaults).
 
+        :param exclude_none: If True, exclude scopesets with None values.
+        :param only_standard: If True, include only standard scopesets (those defined in model_fields_set).
         :return: set of names, for example ['gwp', 'odp']
         """
         result = []
         for f in self.model_fields_set:
             if f in ("ext",):
                 continue
-            field = self.model_fields.get(f)
+            field = self.__class__.model_fields.get(f)
             # field can be explicitly specified, or can be an unknown impact covered by extra='allow'
             result.append(field.alias if field and field.alias else f)
 
+        # add extra fields
+        if not only_standard and self.model_extra:
+            for name in self.model_extra.keys():
+                if name not in result:
+                    result.append(name)
+
+        if exclude_none:
+            # filter out names with None values
+            result = [name for name in result if self.get_scopeset_by_name(name) is not None]
+
+        return result
+
+    def set_scopeset_by_name(self, name: str, scopeset: ScopeSet | None) -> None:
+        """
+        Set scopeset by name.
+
+        :param name: The name of the scopeset.
+        :param scopeset: The scopeset to set.
+        """
+        # check known impacts first
+        for f_name, f in self.__class__.model_fields.items():
+            if f.alias == name:
+                setattr(self, f_name, scopeset)
+                return
+            if f_name == name:
+                setattr(self, f_name, scopeset)
+                return
+        # probably unknown impact, coming from 'extra' fields
+        setattr(self, name, scopeset)
+
+    def __getitem__(self, scopeset_name: str) -> ScopeSet:
+        """Get scopeset by name or raise KeyError."""
+        if not isinstance(scopeset_name, str):
+            raise TypeError(f"Key must be a string, got {type(scopeset_name)}")
+        result = self.get_scopeset_by_name(scopeset_name)
+        if result is None:
+            raise KeyError(f"Scopeset '{scopeset_name}' not found")
         return result
 
+    def __setitem__(self, key: str, value: ScopeSet | None) -> None:
+        """Set scopeset by name."""
+        if not isinstance(key, str):
+            raise TypeError(f"Key must be a string, got {type(key)}")
+        if value is not None and not isinstance(value, ScopeSet):
+            raise TypeError(f"Value must be a ScopeSet, got {type(value)}")
+        self.set_scopeset_by_name(key, value)
+
+    def __contains__(self, item: str) -> bool:
+        """Check if scopeset with the given name exists (was set and is not None)."""
+        if not isinstance(item, str):
+            return False
+        return item in self.get_scopeset_names(exclude_none=True)
+
+    def __iter__(self) -> Generator[tuple[str, ScopeSet], None, None]:
+        """Iterate over existing (set and not None) scopesets and yield tuples of scopeset name and scopeset."""
+        for name in self.get_scopeset_names(exclude_none=True):
+            yield name, cast(ScopeSet, self.get_scopeset_by_name(name))
+
+    def __len__(self) -> int:
+        """Get the number of existing (set and not None) scopesets."""
+        return len(self.get_scopeset_names(exclude_none=True))
+
+    def items(self) -> list[tuple[str, ScopeSet]]:
+        """Get all scopeset names and their values."""
+        return [
+            (name, cast(ScopeSet, self.get_scopeset_by_name(name)))
+            for name in self.get_scopeset_names(exclude_none=True)
+        ]
+
     def get_scopeset_by_name(self, name: str) -> ScopeSet | None:
         """
         Get scopeset by name.
@@ -270,7 +342,7 @@ class ScopesetByNameBase(BaseOpenEpdSchema, extra="allow"):
         :return: A scopeset if found, None otherwise
         """
         # check known impacts first
-        for f_name, f in self.model_fields.items():
+        for f_name, f in self.__class__.model_fields.items():
             if f.alias == name:
                 return getattr(self, f_name)
             if f_name == name:
@@ -557,25 +629,6 @@ class LCIAMethod(StrEnum):
 class Impacts(pydantic.RootModel[dict[LCIAMethod, ImpactSet]]):
     """List of environmental impacts, compiled per one of the standard Impact Assessment methods."""
 
-    @staticmethod
-    def _update_schema_extra(schema, model):
-        schema.update(
-            {
-                "properties": {
-                    str(lm): {
-                        "description": str(lm),
-                        # This is an internal representation of the reference which exists in Pydantic during
-                        # generation process
-                        "allOf": [{"$ref": "#/components/schemas/openepd__model__lcia__ImpactSet-Input__1"}],
-                    }
-                    for lm in LCIAMethod
-                },
-                "additionalProperties": None,
-            }
-        )
-
-    model_config: ClassVar[ConfigDict] = ConfigDict(json_schema_extra=_update_schema_extra)
-
     def set_unknown_lcia(self, impact_set: ImpactSet):
         """Set the impact set as an unknown LCIA method."""
         self.root[LCIAMethod.UNKNOWN] = impact_set
@@ -623,6 +676,61 @@ class Impacts(pydantic.RootModel[dict[LCIAMethod, ImpactSet]]):
         """Return the impacts as a dictionary."""
         return self.root
 
+    @classmethod
+    def __get_pydantic_json_schema__(cls, core_schema: CoreSchema, handler: GetJsonSchemaHandler, *, mode="validation"):
+        # Get the base schema from the handler
+        json_schema: dict[str, Any] = handler(core_schema)
+        # Resolve the reference to get the actual ImpactSet schema
+        json_schema = handler.resolve_ref_schema(json_schema)
+        json_schema_generator: GenerateJsonSchema | None = None
+        if hasattr(handler, "generate_json_schema") and handler.generate_json_schema:
+            json_schema_generator = handler.generate_json_schema
+
+        ref_template = ""
+        if json_schema_generator and json_schema_generator.ref_template:
+            ref_template = json_schema_generator.ref_template
+        elif json_schema.get("propertyNames", {}).get("$ref") is not None:
+            template_path, _ = json_schema.get("propertyNames", {}).get("$ref", "").rsplit("/", maxsplit=2)
+            if template_path:
+                ref_template = f"{template_path}/{{model}}"
+        if not ref_template:
+            ref_template = "#/components/schemas/{model}"
+
+        # Get the ImpactSet reference dynamically
+        impact_set_ref = None
+        if (
+            json_schema_generator
+            and hasattr(json_schema_generator, "definitions")
+            and json_schema_generator.definitions
+        ):
+            # Look for ImpactSet in the definitions
+            for def_name, _ in json_schema_generator.definitions.items():
+                if "ImpactSet" in def_name:
+                    # Use the correct OpenAPI reference format
+                    impact_set_ref = ref_template.format(model=def_name)
+                    break
+
+        # Fallback: use a generic reference if we can't find the specific one (
+        if not impact_set_ref:
+            impact_set_ref = ref_template.format(model="ImpactSet")
+
+        # Update the schema with explicit properties for each LCIA method
+        json_schema.update(
+            {
+                "type": "object",
+                "properties": {
+                    str(lm): {
+                        "description": str(lm),
+                        "allOf": [{"$ref": impact_set_ref}],
+                    }
+                    for lm in LCIAMethod
+                },
+            }
+        )
+        json_schema.pop("additionalProperties")
+
+        return json_schema
+
 
 class ResourceUseSet(ScopesetByNameBase):
     """A set of resource use indicators, such as RPRec, RPRm, etc."""
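Usage sketch (not part of the diff): `ScopesetByNameBase` subclasses such as `ImpactSet` now behave like a mapping of scopeset names. The input shape used below (`gwp`/`A1A2A3`/`mean`/`unit`) is assumed from the wider openEPD models rather than shown in this diff:

```python
from openepd.model.lcia import ImpactSet

# Assumed openEPD scopeset JSON shape, with hypothetical values.
impacts = ImpactSet.model_validate({"gwp": {"A1A2A3": {"mean": 120.5, "unit": "kgCO2e"}}})

assert "gwp" in impacts                  # __contains__: set and not None
gwp = impacts["gwp"]                     # __getitem__: raises KeyError if missing
impacts["odp"] = None                    # __setitem__ delegates to set_scopeset_by_name()
for name, scopeset in impacts.items():   # items()/__iter__ skip unset or None scopesets
    print(name, scopeset)
print(len(impacts))                      # -> 1, only "gwp" is populated
```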
openepd/model/org.py
CHANGED
@@ -150,26 +150,35 @@ class Plant(PlantRef, WithAttachmentsMixin, WithAltIdsMixin):
 
     pluscode: str | None = pydantic.Field(
         default=None,
-        description="(
+        description="(DEPRECATED) Plus code (aka Open Location Code) of plant's location. "
+        "This field is deprecated. If users need a pluscode they can obtain it from `id`.",
         json_schema_extra={
-            "deprecated":
-            "`id` like this: `id.spit('.', maxsplit=1)[0]`",
+            "deprecated": True,
         },
     )
     latitude: float | None = pydantic.Field(
         default=None,
-        description="(
+        description="(DEPRECATED) Latitude of the plant location. Use 'location' fields instead.",
+        json_schema_extra={
+            "deprecated": True,
+        },
     )
     longitude: float | None = pydantic.Field(
         default=None,
-        description="(
+        description="(DEPRECATED) Longitude of the plant location. Use 'location' fields instead.",
+        json_schema_extra={
+            "deprecated": True,
+        },
     )
     owner: Org | None = pydantic.Field(description="Organization that owns the plant", default=None)
     address: str | None = pydantic.Field(
         max_length=200,
         default=None,
-        description="(
+        description="(DEPRECATED) Text address, preferably geocoded. Use 'location' fields instead",
         examples=["1503 Orchard Hill Rd, LaGrange, GA 30240, United States"],
+        json_schema_extra={
+            "deprecated": True,
+        },
     )
     contact_email: pydantic.EmailStr | None = pydantic.Field(
         description="Email contact", examples=["info@interface.com"], default=None
openepd/model/versioning.py
CHANGED
@@ -30,7 +30,7 @@ class WithExtVersionMixin(ABC, pydantic.BaseModel):
         """Set the default value for the ext_version field from _EXT_VERSION class var."""
         super().__init_subclass__()
         if hasattr(cls, "_EXT_VERSION"):
-            cls.model_fields["ext_version"].
+            cls.model_fields["ext_version"].default_factory = lambda: cls._EXT_VERSION
 
     # Note: default is set programmatically in __init_subclass__
     ext_version: str | None = pydantic.Field(description="Extension version", examples=["3.22"], default=None)
openepd/utils/__init__.py
ADDED
@@ -0,0 +1,15 @@
+#
+# Copyright 2025 by C Change Labs Inc. www.c-change-labs.com
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
openepd/utils/mapping/__init__.py
ADDED
@@ -0,0 +1,15 @@
+#
+# Copyright 2025 by C Change Labs Inc. www.c-change-labs.com
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
openepd/utils/mapping/common.py
ADDED
@@ -0,0 +1,217 @@
+#
+# Copyright 2025 by C Change Labs Inc. www.c-change-labs.com
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+__all__ = (
+    "BaseDataMapper",
+    "KeyValueMapper",
+    "ReferenceMapper",
+    "RegexMapper",
+    "SimpleDataMapper",
+)
+
+import abc
+from collections.abc import Mapping
+import re
+from typing import Generic, TypeAlias, TypeVar, assert_never, cast
+
+T = TypeVar("T")
+K = TypeVar("K")
+
+
+class BaseDataMapper(Generic[T, K], abc.ABC):
+    """
+    Base class for all data mappers.
+
+    Data mappers are objects used to map some input values to output values of different types.
+
+    Typical use case is mapping between different aliases of impact names with OpenEpd naming conventions.
+    """
+
+    @abc.abstractmethod
+    def map(self, input_value: T, default_value: K | None, *, raise_if_missing: bool = False) -> K | None:
+        """
+        Map the input value to the output value.
+
+        :param input_value: The input value to map.
+        :param default_value: The default value to return if there is no mapping for the input value.
+        :param raise_if_missing: Whether to raise an exception if there is no mapping for the input value.
+
+        :raise ValueError: If there is no mapping for the input value and raise_if_missing is True.
+        """
+        pass
+
+    def map_or_keep(self, input_value: T) -> K:
+        """
+        Map the input value to the output value or keep the input value if there is no mapping.
+
+        This is a convenience method equivalent to calling `map` with `default_value=input_value`
+        and `raise_if_missing=False`.
+
+        :param input_value: The input value to map.
+        :return: The mapped value or the input value if there is no mapping.
+        """
+        return cast(K, self.map(input_value, default_value=cast(K, input_value), raise_if_missing=False))
+
+    def map_or_raise(self, input_value: T) -> K:
+        """
+        Map the input value to the output value or raise an exception if there is no mapping.
+
+        This is a convenience method equivalent to calling `map` with `default_value=None`
+        and `raise_if_missing=True`.
+
+        :param input_value: The input value to map.
+        :return: The mapped value.
+        :raise ValueError: If there is no mapping for the input value.
+        """
+        return cast(K, self.map(input_value, default_value=None, raise_if_missing=True))
+
+
+class SimpleDataMapper(BaseDataMapper[T, T], Generic[T]):
+    """A data mapper that does not change the type of the input value."""
+
+    DATABASE: Mapping[T, T] = {}
+
+    def map(self, input_value: T, default_value: T | None, *, raise_if_missing: bool = False) -> T | None:
+        """
+        Map the input value to the output value.
+
+        :param input_value: The input value to map.
+        :param default_value: The default value to return if there is no mapping for the input value.
+        :param raise_if_missing: Whether to raise an exception if there is no mapping for the input value.
+
+        :raise ValueError: If there is no mapping for the input value and raise_if_missing is True.
+        """
+        if raise_if_missing and input_value not in self.DATABASE:
+            msg = f"No mapping for input value: {input_value}"
+            raise ValueError(msg)
+
+        return self.DATABASE.get(input_value, default_value)
+
+
+class KeyValueMapper(BaseDataMapper[str, T], Generic[T]):
+    """
+    A data mapper that maps input values to output values using keywords.
+
+    List of values is expected to be a list string object or a list of objects easily castable to string.
+    """
+
+    KV: Mapping[str, list[T]] = {}
+
+    def map(self, input_value: str, default_value: T | None, *, raise_if_missing: bool = False) -> T | None:
+        """
+        Map the input value to the output value using keywords.
+
+        :param input_value: The input value to map.
+        :param default_value: The default value to return if there is no mapping for input value.
+        :param raise_if_missing: Whether to raise an exception if there is no mapping for the input value.
+
+        :raise ValueError: If there is no mapping for the input value and raise_if_missing is True.
+        """
+        for impact_name, keywords in self.KV.items():
+            for keyword in keywords:
+                if str(keyword).strip().lower() in input_value.strip().lower():
+                    return cast(T, impact_name)
+
+        if raise_if_missing:
+            msg = f"No mapping for input value: {input_value}"
+            raise ValueError(msg)
+
+        return default_value
+
+
+class RegexMapper(BaseDataMapper[str, T], Generic[T]):
+    """A data mapper that maps input values to output values using regex."""
+
+    PATTERNS: dict[str, str] = {}
+    _compiled_patterns: dict[str, re.Pattern]
+
+    def __init__(self) -> None:
+        self._compiled_patterns: dict[str, re.Pattern] = {
+            key: re.compile(pattern, re.IGNORECASE) for key, pattern in self.PATTERNS.items()
+        }
+
+    def map(self, input_value: str, default_value: T | None, *, raise_if_missing: bool = False) -> T | None:
+        """
+        Map the input value to the output value using regex.
+
+        :param input_value: The input value to map.
+        :param default_value: The default value to return if there is no mapping for an input value.
+
+        :param raise_if_missing: Whether to raise an exception if there is no mapping for the input value.
+
+        :raise ValueError: If there is no mapping for the input value and raise_if_missing is True.
+        """
+        for impact_name, pattern in self._compiled_patterns.items():
+            if pattern.search(input_value.strip().lower()):
+                return cast(T, impact_name)
+
+        if raise_if_missing:
+            msg = f"No mapping for input value: {input_value}"
+            raise ValueError(msg)
+
+        return default_value
+
+
+_TRmRules: TypeAlias = str | re.Pattern | list[str | re.Pattern]
+
+
+class ReferenceMapper(BaseDataMapper[str, _TRmRules]):
+    """
+    A mapper that maps input values of any form to the expected value format.
+
+    Expected values may be a value or a list of values. Values are expected to be a string object, regular expressions,
+    or objects easily castable to string.
+    """
+
+    def map(self, input_value: str, default_value: str | None, *, raise_if_missing: bool = False) -> str | None:  # type: ignore[override]
+        """
+        Return specified key as a value if any of the values in the list matches the input value.
+
+        :param input_value: value to be checked against the list of specified rules
+        :param default_value: default value to return if no match is found
+        :param raise_if_missing: whether to raise an exception if no match is found
+
+        :return: mapped value if find any match, else default value
+
+        :raise ValueError: if no match is found and raise_if_missing is True
+        """
+        for key, value in self.MAPPING.items():
+            if not self._is_applied(input_value, value):
+                continue
+            return key
+
+        if raise_if_missing:
+            msg = f"No mapping for input value: {input_value}"
+            raise ValueError(msg)
+
+        return default_value
+
+    def _is_applied(self, input_value: str, rules: _TRmRules) -> bool:
+        if isinstance(rules, str | re.Pattern):
+            return self._is_applied_to_item(input_value, rules)
+        elif isinstance(rules, list):
+            return any(self._is_applied_to_item(input_value, rule) for rule in rules)
+        else:
+            assert_never(rules)
+
+    def _is_applied_to_item(self, input_value: str, rule: str | re.Pattern) -> bool:
+        if isinstance(rule, str):
+            return input_value.strip().lower() == rule.strip().lower()
+        elif isinstance(rule, re.Pattern):
+            return bool(rule.search(input_value.strip().lower()))
+        else:
+            assert_never(rule)
+
+    MAPPING: Mapping[str, _TRmRules] = {}
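Usage sketch (not part of the diff): the mappers are meant to be subclassed with a lookup table. A minimal example with a hypothetical keyword table for `KeyValueMapper`; matching is a case-insensitive substring check, as in its `map()` above:

```python
from openepd.utils.mapping.common import KeyValueMapper


class ImpactNameMapper(KeyValueMapper[str]):
    # Hypothetical keyword table: an input containing any keyword maps to the openEPD impact name.
    KV = {
        "gwp": ["global warming", "gwp-total", "climate change"],
        "odp": ["ozone depletion"],
    }


mapper = ImpactNameMapper()
assert mapper.map_or_keep("Climate change - total") == "gwp"
assert mapper.map_or_keep("Acidification") == "Acidification"  # no match -> input kept
try:
    mapper.map_or_raise("Acidification")
except ValueError:
    pass  # no match and raise_if_missing=True
```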
openepd/utils/mapping/geography.py
ADDED
@@ -0,0 +1,58 @@
+#
+# Copyright 2025 by C Change Labs Inc. www.c-change-labs.com
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+__all__ = (
+    "OPENEPD_GEOGRAPHY_MAPPER",
+    "GeographyToOpenEpdMapper",
+)
+
+from openepd.m49 import const as m49_const
+from openepd.m49 import utils as m49_utils
+from openepd.utils.mapping.common import BaseDataMapper
+
+
+class GeographyToOpenEpdMapper(BaseDataMapper[str, set[str]]):
+    """Mapper that converts various geography inputs (e.g. country name, ISO code, M49) into openEPD compliant geography."""
+
+    def map(
+        self, input_value: str, default_value: str | set[str] | None, *, raise_if_missing: bool = False
+    ) -> set[str] | None:
+        """Map input geography value to openEpd geography codes."""
+        result: set[str] = set()
+        try:
+            if input_value:
+                input_value = input_value.strip()
+                if m49_utils.is_m49_code(input_value):
+                    result = m49_utils.m49_to_openepd([input_value])
+                elif m49_utils.is_iso_code(input_value):
+                    result = m49_utils.m49_to_openepd(m49_utils.iso_to_m49([input_value]))
+                elif input_value.upper() in m49_const.OPENEPD_SPECIAL_REGIONS:  # Special regions like "NAFTA" or "EU"
+                    result = {input_value.upper()}
+                else:  # this might be a verbose region or country name
+                    mapped_m49 = m49_utils.region_and_country_names_to_m49([input_value])
+                    result = m49_utils.m49_to_openepd(mapped_m49)
+        except ValueError:
+            result = set()
+        if result:
+            return result
+        if raise_if_missing:
+            raise ValueError(f"Input '{input_value}' could not be mapped to OpenEpd geography codes.")
+        if isinstance(default_value, str):
+            return {default_value}
+        return default_value
+
+
+OPENEPD_GEOGRAPHY_MAPPER = GeographyToOpenEpdMapper()
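Usage sketch (not part of the diff): `OPENEPD_GEOGRAPHY_MAPPER` accepts M49 codes, ISO3166 alpha-2 codes, openEPD special regions, and verbose region/country names, and returns a set of openEPD geography codes. The printed results depend on the M49 tables and are only illustrative:

```python
from openepd.utils.mapping.geography import OPENEPD_GEOGRAPHY_MAPPER

print(OPENEPD_GEOGRAPHY_MAPPER.map_or_keep("840"))            # M49 code
print(OPENEPD_GEOGRAPHY_MAPPER.map_or_keep("DE"))             # ISO3166 alpha-2 code
print(OPENEPD_GEOGRAPHY_MAPPER.map_or_keep("EU"))             # openEPD special region -> {"EU"}
print(OPENEPD_GEOGRAPHY_MAPPER.map_or_keep("United States"))  # verbose name (new alias in 7.7.0)
print(OPENEPD_GEOGRAPHY_MAPPER.map("Atlantis", default_value=None))  # unmappable -> None
```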
openepd/utils/markdown.py
ADDED
@@ -0,0 +1,52 @@
+#
+# Copyright 2025 by C Change Labs Inc. www.c-change-labs.com
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import dataclasses
+from typing import Self
+
+
+class MarkdownSectionBuilder:
+    """
+    A builder for Markdown sections.
+
+    Allows to build a Markdown string from a list of sections (title + content).
+    """
+
+    @dataclasses.dataclass(kw_only=True)
+    class _MdSection:
+        title: str
+        level: int = 1
+        content: str | None = None
+
+    def __init__(self) -> None:
+        self._sections: list[MarkdownSectionBuilder._MdSection] = []
+
+    def add_section(self, title: str, content: str | None = None, level: int = 1) -> Self:
+        """Add a new section to the builder."""
+        self._sections.append(MarkdownSectionBuilder._MdSection(title=title, content=content, level=level))
+        return self
+
+    @property
+    def has_content(self) -> bool:
+        """Return True if there are any sections added to the builder."""
+        return len(self._sections) > 0
+
+    @staticmethod
+    def _build_section(section: _MdSection) -> str:
+        return f"{'#' * section.level} {section.title}\n\n{section.content or ''}"
+
+    def build(self) -> str:
+        """Build the Markdown string."""
+        return "\n\n".join([self._build_section(x) for x in self._sections if x.content is not None])
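Usage sketch (not part of the diff): a minimal example of the builder; `build()` renders only sections whose content is not `None`:

```python
from openepd.utils.markdown import MarkdownSectionBuilder

builder = MarkdownSectionBuilder()
builder.add_section("Product", "A concrete mix.").add_section("Data sources", None, level=2)

if builder.has_content:
    print(builder.build())  # -> "# Product\n\nA concrete mix." (the empty section is skipped)
```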
{openepd-7.5.0.dist-info → openepd-7.7.0.dist-info}/RECORD
CHANGED
@@ -1,5 +1,5 @@
 openepd/__init__.py,sha256=9THJcV3LT7JDBOMz1px-QFf_sdJ0LOqJ5dmA9Dvvtd4,620
-openepd/__version__.py,sha256=
+openepd/__version__.py,sha256=yl4wQF-1qgdO4jZxe2wV3xfKlACh5SDmznbBqByduEM,638
 openepd/api/__init__.py,sha256=9THJcV3LT7JDBOMz1px-QFf_sdJ0LOqJ5dmA9Dvvtd4,620
 openepd/api/average_dataset/__init__.py,sha256=9THJcV3LT7JDBOMz1px-QFf_sdJ0LOqJ5dmA9Dvvtd4,620
 openepd/api/average_dataset/generic_estimate_sync_api.py,sha256=mjTT8eGtfj6Fgp-wcs0cCWA7DJo1KL_iQ75rgKkaY3c,8037
@@ -36,10 +36,10 @@ openepd/bundle/model.py,sha256=4pmOCgK-kdBu7_PLm5QhlrVZRmab_18o0EGvcUjI8FQ,2722
 openepd/bundle/reader.py,sha256=H1mfuFxV2G0q9ld0dJ6SRucTwcTSEi-sFoKJLsBk4IQ,7767
 openepd/bundle/writer.py,sha256=cyFikYTtZCcX0rhdpbuvOaOBG4CS1nE8JjHUdB7Wghs,8708
 openepd/m49/__init__.py,sha256=AApOMp9PJPMXZbPB4piedqKtgHE01mlj_MyF3kf519U,718
-openepd/m49/const.py,sha256=
-openepd/m49/utils.py,sha256=
+openepd/m49/const.py,sha256=lxp2bzwD4d95VHo5ULKFN8ryzjjfKTwpe9_MdUFIoXw,32104
+openepd/m49/utils.py,sha256=0UvdtC9gtvRA5WT_hJDIuQR0RSrnx-S34wwwBRM_tsM,7807
 openepd/model/__init__.py,sha256=9THJcV3LT7JDBOMz1px-QFf_sdJ0LOqJ5dmA9Dvvtd4,620
-openepd/model/base.py,sha256=
+openepd/model/base.py,sha256=4D8BaSoNeY8RZfjkmJOSyfg0B34dKzM76lZbQT9AIXg,13622
 openepd/model/category.py,sha256=iyzzAsiVwW4zJ61oYsm9Sy-sEBA71-aMFXcJP1Y-dPI,1734
 openepd/model/common.py,sha256=D-FmaKk_ay6Of3HdUiloagc2nUiOLp97781BDRZMp8U,14662
 openepd/model/declaration.py,sha256=v-X2JBOK-7ZYWziEUP7GOSrQFK4pI9EMZvp3Z7-mwwM,14669
@@ -48,8 +48,8 @@ openepd/model/factory.py,sha256=UWSGpfCr3GiMTP4rzBkwqxzbXB6GKZ_5Okb1Dqa_4aA,2701
 openepd/model/generic_estimate.py,sha256=_R18Uz-hvxtSBl53D0_OkwVCWvoa2nIDjBdec6vEPDE,4304
 openepd/model/geography.py,sha256=Jx7NIDdk_sIvwyh-7YxnIjAwIHW2HCQK7UtFGM2xKtw,42095
 openepd/model/industry_epd.py,sha256=Cqn01IUNSZqRkyU05TwtOLXDKlg0YnGzqvKL8A__zbI,4061
-openepd/model/lcia.py,sha256=
-openepd/model/org.py,sha256=
+openepd/model/lcia.py,sha256=IfSLZER6kI1tb0tKZW1FeHxFqeumznurELNLtYR6TC8,32756
+openepd/model/org.py,sha256=zYZXTwU5Xah-MAiG0dP8SNRACpb79TmTWF6mUoROk8M,8181
 openepd/model/pcr.py,sha256=cu3EakCAjBCkcb_AaLXB-xEjY0mlG-wJe74zGc5tdS0,5637
 openepd/model/specs/README.md,sha256=UGhSiFJ9hOxT1mZl-5ZrhkOrPKf1W_gcu5CI9hzV7LU,2430
 openepd/model/specs/__init__.py,sha256=RMLxvwD-_N5qaU0U2o5LxMKmP_W0_yssl72uTTC2tJg,3904
@@ -144,9 +144,14 @@ openepd/model/validation/common.py,sha256=SsYgCOoO2sJYm4VzI2tBmukXMrzwvbncEdkLlJ
 openepd/model/validation/enum.py,sha256=0nRnjwmObw8ERQYWRWbovZjm90CMHi1Sc-UeNxCFnsc,1846
 openepd/model/validation/numbers.py,sha256=9THJcV3LT7JDBOMz1px-QFf_sdJ0LOqJ5dmA9Dvvtd4,620
 openepd/model/validation/quantity.py,sha256=M9dz3byTK6Lrys43I0Gq7n2b0aE8WYys-idxi6bKCII,21755
-openepd/model/versioning.py,sha256=
+openepd/model/versioning.py,sha256=QzPeWiRCNvSUmlXANg4Vl4WqsPtlrCr8gSwDNUhZGvM,4577
 openepd/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-openepd
-openepd
-openepd
-openepd
+openepd/utils/__init__.py,sha256=9THJcV3LT7JDBOMz1px-QFf_sdJ0LOqJ5dmA9Dvvtd4,620
+openepd/utils/mapping/__init__.py,sha256=9THJcV3LT7JDBOMz1px-QFf_sdJ0LOqJ5dmA9Dvvtd4,620
+openepd/utils/mapping/common.py,sha256=hxfN-WW2WLwE_agQzf_mhvz6OHq5WWlr24uZ1S81k4Y,8426
+openepd/utils/mapping/geography.py,sha256=1_-dvLk11Hqn-K58yUI5pQ5X5gsnJPFlFT7JK2Rdoeg,2396
+openepd/utils/markdown.py,sha256=RQmudPhb4QU1I4-S-VV2WFbzzq2Po09kbpjjKbwkA9E,1830
+openepd-7.7.0.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+openepd-7.7.0.dist-info/METADATA,sha256=pyqC4h1XAG1lwB2uZQ82w2XAmgelS-7sqwGCVnsxYgg,9810
+openepd-7.7.0.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
+openepd-7.7.0.dist-info/RECORD,,
{openepd-7.5.0.dist-info → openepd-7.7.0.dist-info}/LICENSE
File without changes
{openepd-7.5.0.dist-info → openepd-7.7.0.dist-info}/WHEEL
File without changes