pymetadata 0.4.0__py2.py3-none-any.whl → 0.4.2__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of pymetadata might be problematic.
- pymetadata/__init__.py +2 -1
- pymetadata/cache.py +1 -0
- pymetadata/chebi.py +1 -0
- pymetadata/console.py +1 -0
- pymetadata/core/annotation.py +1 -0
- pymetadata/core/creator.py +1 -0
- pymetadata/core/synonym.py +1 -0
- pymetadata/core/xref.py +1 -0
- pymetadata/examples/omex_example.py +1 -0
- pymetadata/identifiers/registry.py +2 -16
- pymetadata/log.py +1 -0
- pymetadata/metadata/kisao.py +10 -0
- pymetadata/omex.py +1 -1
- pymetadata/omex_v2.py +1 -0
- pymetadata/ontologies/ols.py +6 -9
- pymetadata/ontologies/ontology.py +2 -1
- pymetadata/resources/identifiers_registry.json +1082 -554
- pymetadata/unichem.py +7 -5
- {pymetadata-0.4.0.dist-info → pymetadata-0.4.2.dist-info}/METADATA +30 -39
- pymetadata-0.4.2.dist-info/RECORD +35 -0
- {pymetadata-0.4.0.dist-info → pymetadata-0.4.2.dist-info}/WHEEL +1 -1
- pymetadata-0.4.0.dist-info/RECORD +0 -35
- {pymetadata-0.4.0.dist-info → pymetadata-0.4.2.dist-info}/LICENSE +0 -0
- {pymetadata-0.4.0.dist-info → pymetadata-0.4.2.dist-info}/top_level.txt +0 -0
- {pymetadata-0.4.0.dist-info → pymetadata-0.4.2.dist-info}/zip-safe +0 -0
pymetadata/__init__.py
CHANGED
pymetadata/cache.py
CHANGED
pymetadata/chebi.py
CHANGED
pymetadata/console.py
CHANGED
pymetadata/core/annotation.py
CHANGED
pymetadata/core/creator.py
CHANGED
pymetadata/core/synonym.py
CHANGED
pymetadata/core/xref.py
CHANGED
pymetadata/identifiers/registry.py
CHANGED

@@ -4,6 +4,7 @@ Helper tools to work with identifiers registry.
 https://identifiers.org/
 https://docs.identifiers.org/articles/api.html
 """
+
 from __future__ import annotations
 
 import inspect
@@ -186,18 +187,6 @@ def ols_namespaces() -> Dict[str, Namespace]:
             description="MONDO",
             namespaceEmbeddedInLui=True,
         ),
-        Namespace(
-            id=None,
-            prefix="stato",
-            pattern=r"^STATO:\d+$",
-            name="STATO",
-            description="STATO is the statistical methods ontology. It contains "
-            "concepts and properties related to statistical methods, "
-            "probability distributions and other concepts related to "
-            "statistical analysis, including relationships to study "
-            "designs and plots.",
-            namespaceEmbeddedInLui=True,
-        ),
         Namespace(
             id=None,
             prefix="atol",
@@ -348,7 +337,7 @@ class Registry:
         registry_path: Optional[Path] = None,
     ) -> Dict[str, Namespace]:
         """Update registry from identifiers.org webservice."""
-        logger.
+        logger.info(f"Update registry from '{Registry.URL}'")
         response = requests.get(Registry.URL)
         namespaces = response.json()["payload"]["namespaces"]
 
@@ -373,9 +362,6 @@ class Registry:
             ns_dict[ns.prefix] = ns
 
         if custom_namespaces is not None:
-            logger.warning(
-                f"Adding custom namespaces: {sorted(custom_namespaces.keys())}"
-            )
             for key, ns in custom_namespaces.items():
                 if key in ns_dict:
                     logger.error(
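The hunks above drop the built-in STATO namespace, remove the custom-namespace warning, and restore the truncated logger call in the registry update method. For orientation, a minimal standalone sketch of the webservice fetch this method performs; the endpoint URL is an assumption, since the actual value of Registry.URL does not appear in this diff, and the "prefix" field is assumed from how namespaces are keyed above:

# Hedged sketch (not pymetadata's code): fetch the identifiers.org namespace
# payload the same way the update method above does.
import requests

# Assumed endpoint; the real value of Registry.URL is not shown in this diff.
REGISTRY_URL = "https://registry.api.identifiers.org/resolutionApi/getResolverDataset"

response = requests.get(REGISTRY_URL, timeout=60)
response.raise_for_status()
namespaces = response.json()["payload"]["namespaces"]  # same access path as above
print(f"Fetched {len(namespaces)} namespaces, e.g. '{namespaces[0]['prefix']}'")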
pymetadata/log.py
CHANGED
pymetadata/metadata/kisao.py
CHANGED
@@ -481,6 +481,8 @@ _terms = {
     "KISAO_0000695": "parameters for",
     "KISAO_0000696": "steady state root-finding problem",
     "KISAO_0000697": "SDE solver",
+    "KISAO_0000698": "particle coordinates",
+    "KISAO_0000699": "DAE Solver",
     "KISAO_0000800": "systems property",
     "KISAO_0000801": "concentration control coefficient matrix (unscaled)",
     "KISAO_0000802": "control coefficient (scaled)",
@@ -2455,6 +2457,14 @@ class KISAO(str, Enum):
     KISAO_0000697 = "KISAO_0000697"
     SDE_SOLVER = "KISAO_0000697"
 
+    # particle coordinates
+    KISAO_0000698 = "KISAO_0000698"
+    PARTICLE_COORDINATES = "KISAO_0000698"
+
+    # DAE Solver
+    KISAO_0000699 = "KISAO_0000699"
+    DAE_SOLVER = "KISAO_0000699"
+
     # systems property
     KISAO_0000800 = "KISAO_0000800"
     SYSTEMS_PROPERTY = "KISAO_0000800"
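The two new terms are exposed both as raw identifiers and as named aliases on the KISAO enum. A short usage sketch, assuming pymetadata 0.4.2 is installed; it relies only on names visible in this hunk (the KISAO enum and the module-level _terms dict, which is a private name):

# Hedged example based only on the names shown in the hunk above.
from pymetadata.metadata.kisao import KISAO, _terms  # _terms is a private mapping

term = KISAO.DAE_SOLVER                  # alias of KISAO.KISAO_0000699
print(term.value)                        # "KISAO_0000699"
print(_terms[term.value])                # "DAE Solver"
print(KISAO.PARTICLE_COORDINATES.value)  # "KISAO_0000698"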
pymetadata/omex.py
CHANGED
@@ -33,7 +33,7 @@ logger = log.get_logger(__name__)
 __all__ = ["EntryFormat", "ManifestEntry", "Manifest", "Omex"]
 
 
-IDENTIFIERS_PREFIX = "http://identifiers.org/combine.specifications
+IDENTIFIERS_PREFIX = "http://identifiers.org/combine.specifications/"
 PURL_PREFIX = "https://purl.org/NET/mediatypes/"
 
 
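The only change here is the trailing slash on IDENTIFIERS_PREFIX. A minimal illustration of why it matters; the helper below is not pymetadata's code, just plain concatenation with the constant from the hunk:

# Without the trailing slash, prefix + key would produce
# ".../combine.specificationssbml" instead of a valid identifiers.org URL.
IDENTIFIERS_PREFIX = "http://identifiers.org/combine.specifications/"

def format_url(format_key: str) -> str:
    """Build an identifiers.org URL for a COMBINE format key such as 'sbml'."""
    return IDENTIFIERS_PREFIX + format_key

assert format_url("sbml") == "http://identifiers.org/combine.specifications/sbml"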
pymetadata/omex_v2.py
CHANGED
pymetadata/ontologies/ols.py
CHANGED
@@ -4,6 +4,7 @@ This uses the EMBL-EBI Ontology Lookup Service
 https://www.ebi.ac.uk/ols4
 
 """
+
 import urllib.parse
 from dataclasses import dataclass, field
 from pathlib import Path
@@ -79,13 +80,13 @@ class OLSQuery:
         cache: bool = CACHE_USE,
     ):
         """Initialize OLSQuery."""
-        self.ontologies = {
+        self.ontologies: Dict[str, OLSOntology] = {
             ontology.name: ontology for ontology in ontologies
-        }
+        }
         self.cache_path = cache_path / "ols"
         self.cache = cache
 
-        if not self.cache_path.exists():
+        if cache and not self.cache_path.exists():
             self.cache_path.mkdir(parents=True)
 
     def get_iri(self, ontology: str, term: str) -> str:
@@ -136,11 +137,6 @@ class OLSQuery:
         # double urlencode iri for OLS
         urliri = urllib.parse.quote(iri, safe="")
         urliri = urllib.parse.quote(urliri, safe="")
-        # urliri = iri.replace(":", "%253A")
-        # urliri = urliri.replace("/", "%252F")
-
-        # term_id = term.split(":")[-1]
-        # url = ols_pattern.replace('{$id}', term_id)
         cache_path = self.cache_path / f"{urliri}.json"
         data: Dict[str, Any] = {}
         if self.cache:
@@ -176,7 +172,8 @@ class OLSQuery:
         else:
             data["errors"] = []
             data["warnings"] = []
-
+        if self.cache:
+            write_json_cache(data=data, cache_path=cache_path)  # type: ignore
 
         return data
 
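The net effect of the OLSQuery changes is that the cache directory is only created, and responses only written, when caching is enabled. A minimal self-contained sketch of that guard; this is an illustrative stand-in, not the OLSQuery class itself:

from pathlib import Path

class CachedQuery:
    """Illustrative stand-in for the caching guard added in 0.4.2."""

    def __init__(self, cache_path: Path, cache: bool = True):
        self.cache_path = cache_path / "ols"
        self.cache = cache
        if cache and not self.cache_path.exists():  # create directory only when caching
            self.cache_path.mkdir(parents=True)

    def store(self, name: str, payload: str) -> None:
        if self.cache:  # write to disk only when caching is enabled
            (self.cache_path / f"{name}.json").write_text(payload)

query = CachedQuery(Path("/tmp/pymetadata_demo"), cache=False)
query.store("term", "{}")  # no directory is created, nothing is written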
pymetadata/ontologies/ontology.py
CHANGED

@@ -5,6 +5,7 @@ Special ontologies are provided as enums.
 
 Uses the OWL links provided on OLS4 to download the ontologies.
 """
+
 import gzip
 import importlib
 import re
@@ -290,7 +291,7 @@ def try_ontology_import(ontology_id: str) -> None:
 
 if __name__ == "__main__":
     # download latest versions
-
+    update_ontology_files()
 
     # test loading of OWL files
     # ofile: OntologyFile