pymetadata 0.4.0__py2.py3-none-any.whl → 0.4.2__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pymetadata might be problematic; see the package's registry page for more details.

pymetadata/__init__.py CHANGED
@@ -1,8 +1,9 @@
1
1
  """pymetadata - Python utilities for metadata."""
2
+
2
3
  from pathlib import Path
3
4
 
4
5
  __author__ = "Matthias Koenig"
5
- __version__ = "0.4.0"
6
+ __version__ = "0.4.2"
6
7
 
7
8
 
8
9
  program_name: str = "pymetadata"
pymetadata/cache.py CHANGED
@@ -1,4 +1,5 @@
1
1
  """Caching of information."""
2
+
2
3
  import json
3
4
  from json.encoder import JSONEncoder
4
5
  from pathlib import Path
pymetadata/chebi.py CHANGED
@@ -1,4 +1,5 @@
1
1
  """Module for working with chebi."""
2
+
2
3
  from pathlib import Path
3
4
  from pprint import pprint
4
5
  from typing import Any, Dict
pymetadata/console.py CHANGED
@@ -1,4 +1,5 @@
1
1
  """Rich console for logging."""
2
+
2
3
  from rich import pretty
3
4
  from rich.console import Console
4
5
  from rich.theme import Theme
@@ -2,6 +2,7 @@
2
2
 
3
3
  Core data structure to store annotations.
4
4
  """
5
+
5
6
  import re
6
7
  import urllib
7
8
  from pprint import pprint
@@ -1,4 +1,5 @@
1
1
  """Creator information."""
2
+
2
3
  from typing import Optional
3
4
 
4
5
 
@@ -1,4 +1,5 @@
1
1
  """Synonym information."""
2
+
2
3
  from dataclasses import dataclass
3
4
 
4
5
 
pymetadata/core/xref.py CHANGED
@@ -1,4 +1,5 @@
1
1
  """Module for crossreferences (xref)."""
2
+
2
3
  import re
3
4
  from dataclasses import dataclass
4
5
  from typing import Dict
@@ -1,4 +1,5 @@
1
1
  """Example for reading and writing omex archives."""
2
+
2
3
  import zipfile
3
4
  from pathlib import Path
4
5
 
@@ -4,6 +4,7 @@ Helper tools to work with identifiers registry.
4
4
  https://identifiers.org/
5
5
  https://docs.identifiers.org/articles/api.html
6
6
  """
7
+
7
8
  from __future__ import annotations
8
9
 
9
10
  import inspect
@@ -186,18 +187,6 @@ def ols_namespaces() -> Dict[str, Namespace]:
186
187
  description="MONDO",
187
188
  namespaceEmbeddedInLui=True,
188
189
  ),
189
- Namespace(
190
- id=None,
191
- prefix="stato",
192
- pattern=r"^STATO:\d+$",
193
- name="STATO",
194
- description="STATO is the statistical methods ontology. It contains "
195
- "concepts and properties related to statistical methods, "
196
- "probability distributions and other concepts related to "
197
- "statistical analysis, including relationships to study "
198
- "designs and plots.",
199
- namespaceEmbeddedInLui=True,
200
- ),
201
190
  Namespace(
202
191
  id=None,
203
192
  prefix="atol",
@@ -348,7 +337,7 @@ class Registry:
348
337
  registry_path: Optional[Path] = None,
349
338
  ) -> Dict[str, Namespace]:
350
339
  """Update registry from identifiers.org webservice."""
351
- logger.warning(f"Update registry: '{Registry.URL}' -> '{registry_path}'")
340
+ logger.info(f"Update registry from '{Registry.URL}'")
352
341
  response = requests.get(Registry.URL)
353
342
  namespaces = response.json()["payload"]["namespaces"]
354
343
 
@@ -373,9 +362,6 @@ class Registry:
373
362
  ns_dict[ns.prefix] = ns
374
363
 
375
364
  if custom_namespaces is not None:
376
- logger.warning(
377
- f"Adding custom namespaces: {sorted(custom_namespaces.keys())}"
378
- )
379
365
  for key, ns in custom_namespaces.items():
380
366
  if key in ns_dict:
381
367
  logger.error(
pymetadata/log.py CHANGED
@@ -2,6 +2,7 @@
2
2
 
3
3
  Using rich for output formating.
4
4
  """
5
+
5
6
  import logging
6
7
 
7
8
  from rich.logging import RichHandler
@@ -481,6 +481,8 @@ _terms = {
481
481
  "KISAO_0000695": "parameters for",
482
482
  "KISAO_0000696": "steady state root-finding problem",
483
483
  "KISAO_0000697": "SDE solver",
484
+ "KISAO_0000698": "particle coordinates",
485
+ "KISAO_0000699": "DAE Solver",
484
486
  "KISAO_0000800": "systems property",
485
487
  "KISAO_0000801": "concentration control coefficient matrix (unscaled)",
486
488
  "KISAO_0000802": "control coefficient (scaled)",
@@ -2455,6 +2457,14 @@ class KISAO(str, Enum):
2455
2457
  KISAO_0000697 = "KISAO_0000697"
2456
2458
  SDE_SOLVER = "KISAO_0000697"
2457
2459
 
2460
+ # particle coordinates
2461
+ KISAO_0000698 = "KISAO_0000698"
2462
+ PARTICLE_COORDINATES = "KISAO_0000698"
2463
+
2464
+ # DAE Solver
2465
+ KISAO_0000699 = "KISAO_0000699"
2466
+ DAE_SOLVER = "KISAO_0000699"
2467
+
2458
2468
  # systems property
2459
2469
  KISAO_0000800 = "KISAO_0000800"
2460
2470
  SYSTEMS_PROPERTY = "KISAO_0000800"
pymetadata/omex.py CHANGED
@@ -33,7 +33,7 @@ logger = log.get_logger(__name__)
33
33
  __all__ = ["EntryFormat", "ManifestEntry", "Manifest", "Omex"]
34
34
 
35
35
 
36
- IDENTIFIERS_PREFIX = "http://identifiers.org/combine.specifications:"
36
+ IDENTIFIERS_PREFIX = "http://identifiers.org/combine.specifications/"
37
37
  PURL_PREFIX = "https://purl.org/NET/mediatypes/"
38
38
 
39
39
 
pymetadata/omex_v2.py CHANGED
@@ -2,6 +2,7 @@
2
2
 
3
3
  See https://docs.google.com/document/d/1-UDgY5lQ6tv4mZILZzol-PvCoAYW8yr2Ydn1OxcHMjM/edit#
4
4
  """
5
+
5
6
  from typing import List, Optional
6
7
 
7
8
  from pydantic import BaseModel
@@ -4,6 +4,7 @@ This uses the EMBL-EBI Ontology Lookup Service
4
4
  https://www.ebi.ac.uk/ols4
5
5
 
6
6
  """
7
+
7
8
  import urllib.parse
8
9
  from dataclasses import dataclass, field
9
10
  from pathlib import Path
@@ -79,13 +80,13 @@ class OLSQuery:
79
80
  cache: bool = CACHE_USE,
80
81
  ):
81
82
  """Initialize OLSQuery."""
82
- self.ontologies = {
83
+ self.ontologies: Dict[str, OLSOntology] = {
83
84
  ontology.name: ontology for ontology in ontologies
84
- } # type: Dict[str, OLSOntology]
85
+ }
85
86
  self.cache_path = cache_path / "ols"
86
87
  self.cache = cache
87
88
 
88
- if not self.cache_path.exists():
89
+ if cache and not self.cache_path.exists():
89
90
  self.cache_path.mkdir(parents=True)
90
91
 
91
92
  def get_iri(self, ontology: str, term: str) -> str:
@@ -136,11 +137,6 @@ class OLSQuery:
136
137
  # double urlencode iri for OLS
137
138
  urliri = urllib.parse.quote(iri, safe="")
138
139
  urliri = urllib.parse.quote(urliri, safe="")
139
- # urliri = iri.replace(":", "%253A")
140
- # urliri = urliri.replace("/", "%252F")
141
-
142
- # term_id = term.split(":")[-1]
143
- # url = ols_pattern.replace('{$id}', term_id)
144
140
  cache_path = self.cache_path / f"{urliri}.json"
145
141
  data: Dict[str, Any] = {}
146
142
  if self.cache:
@@ -176,7 +172,8 @@ class OLSQuery:
176
172
  else:
177
173
  data["errors"] = []
178
174
  data["warnings"] = []
179
- write_json_cache(data=data, cache_path=cache_path) # type: ignore
175
+ if self.cache:
176
+ write_json_cache(data=data, cache_path=cache_path) # type: ignore
180
177
 
181
178
  return data
182
179
 
@@ -5,6 +5,7 @@ Special ontologies are provided as enums.
5
5
 
6
6
  Uses the OWL links provided on OLS4 to download the ontologies.
7
7
  """
8
+
8
9
  import gzip
9
10
  import importlib
10
11
  import re
@@ -290,7 +291,7 @@ def try_ontology_import(ontology_id: str) -> None:
290
291
 
291
292
  if __name__ == "__main__":
292
293
  # download latest versions
293
- # update_ontology_files()
294
+ update_ontology_files()
294
295
 
295
296
  # test loading of OWL files
296
297
  # ofile: OntologyFile