pymetadata 0.4.1__py2.py3-none-any.whl → 0.4.2__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of pymetadata might be problematic.

pymetadata/__init__.py CHANGED
@@ -3,7 +3,7 @@
 from pathlib import Path
 
 __author__ = "Matthias Koenig"
-__version__ = "0.4.1"
+__version__ = "0.4.2"
 
 
 program_name: str = "pymetadata"
@@ -187,18 +187,6 @@ def ols_namespaces() -> Dict[str, Namespace]:
             description="MONDO",
             namespaceEmbeddedInLui=True,
         ),
-        Namespace(
-            id=None,
-            prefix="stato",
-            pattern=r"^STATO:\d+$",
-            name="STATO",
-            description="STATO is the statistical methods ontology. It contains "
-            "concepts and properties related to statistical methods, "
-            "probability distributions and other concepts related to "
-            "statistical analysis, including relationships to study "
-            "designs and plots.",
-            namespaceEmbeddedInLui=True,
-        ),
         Namespace(
             id=None,
             prefix="atol",
@@ -349,7 +337,7 @@ class Registry:
         registry_path: Optional[Path] = None,
     ) -> Dict[str, Namespace]:
         """Update registry from identifiers.org webservice."""
-        logger.warning(f"Update registry: '{Registry.URL}' -> '{registry_path}'")
+        logger.info(f"Update registry from '{Registry.URL}'")
         response = requests.get(Registry.URL)
         namespaces = response.json()["payload"]["namespaces"]
 
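For orientation, the update call above amounts to a single request against the identifiers.org resolver dataset; a minimal standalone sketch of that fetch follows (the endpoint URL is an assumption, since the value of `Registry.URL` does not appear in this diff).

```python
# Standalone sketch of the registry fetch performed above; the endpoint URL is
# an assumption because Registry.URL's value is not shown in this diff.
import requests

REGISTRY_URL = "https://registry.api.identifiers.org/resolutionApi/getResolverDataset"

response = requests.get(REGISTRY_URL)
namespaces = response.json()["payload"]["namespaces"]  # same JSON path as in the code above
print(f"Fetched {len(namespaces)} namespaces from identifiers.org")
```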
@@ -374,9 +362,6 @@ class Registry:
             ns_dict[ns.prefix] = ns
 
         if custom_namespaces is not None:
-            logger.warning(
-                f"Adding custom namespaces: {sorted(custom_namespaces.keys())}"
-            )
             for key, ns in custom_namespaces.items():
                 if key in ns_dict:
                     logger.error(
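Note that the STATO namespace removed above can still be supplied through `custom_namespaces`; a sketch of such a mapping, mirroring the removed entry (the import path for `Namespace` is an assumption).

```python
# Sketch: re-adding the dropped STATO definition via custom_namespaces.
# The import path is an assumption; the keyword arguments mirror the removed block.
from pymetadata import Namespace  # adjust to the module that defines Namespace

custom_namespaces = {
    "stato": Namespace(
        id=None,
        prefix="stato",
        pattern=r"^STATO:\d+$",
        name="STATO",
        description="STATO is the statistical methods ontology.",
        namespaceEmbeddedInLui=True,
    ),
}
```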
@@ -481,6 +481,8 @@ _terms = {
     "KISAO_0000695": "parameters for",
     "KISAO_0000696": "steady state root-finding problem",
     "KISAO_0000697": "SDE solver",
+    "KISAO_0000698": "particle coordinates",
+    "KISAO_0000699": "DAE Solver",
     "KISAO_0000800": "systems property",
     "KISAO_0000801": "concentration control coefficient matrix (unscaled)",
     "KISAO_0000802": "control coefficient (scaled)",
@@ -2455,6 +2457,14 @@ class KISAO(str, Enum):
     KISAO_0000697 = "KISAO_0000697"
     SDE_SOLVER = "KISAO_0000697"
 
+    # particle coordinates
+    KISAO_0000698 = "KISAO_0000698"
+    PARTICLE_COORDINATES = "KISAO_0000698"
+
+    # DAE Solver
+    KISAO_0000699 = "KISAO_0000699"
+    DAE_SOLVER = "KISAO_0000699"
+
     # systems property
     KISAO_0000800 = "KISAO_0000800"
     SYSTEMS_PROPERTY = "KISAO_0000800"
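Because the human-readable names share values with the numeric members, they behave as plain enum aliases; a minimal usage sketch (the import path is an assumption, not part of this diff).

```python
# Sketch of the KISAO members added in 0.4.2; only the members themselves come
# from the diff, the import path is an assumption.
from pymetadata.kisao import KISAO  # adjust to the module that defines KISAO

assert KISAO.DAE_SOLVER.value == "KISAO_0000699"
assert KISAO.PARTICLE_COORDINATES is KISAO.KISAO_0000698  # alias of the same enum member
```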
pymetadata/omex.py CHANGED
@@ -33,7 +33,7 @@ logger = log.get_logger(__name__)
 __all__ = ["EntryFormat", "ManifestEntry", "Manifest", "Omex"]
 
 
-IDENTIFIERS_PREFIX = "http://identifiers.org/combine.specifications:"
+IDENTIFIERS_PREFIX = "http://identifiers.org/combine.specifications/"
 PURL_PREFIX = "https://purl.org/NET/mediatypes/"
 
 
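The corrected prefix changes the separator in format URIs built for COMBINE manifests; a short illustration, where "sbml" is only an example format identifier.

```python
# Illustration of the URI change; "sbml" is an example format identifier,
# not taken from this diff.
from pymetadata.omex import IDENTIFIERS_PREFIX

uri = f"{IDENTIFIERS_PREFIX}sbml"
# 0.4.1: http://identifiers.org/combine.specifications:sbml
# 0.4.2: http://identifiers.org/combine.specifications/sbml
print(uri)
```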
@@ -80,13 +80,13 @@ class OLSQuery:
         cache: bool = CACHE_USE,
     ):
         """Initialize OLSQuery."""
-        self.ontologies = {
+        self.ontologies: Dict[str, OLSOntology] = {
             ontology.name: ontology for ontology in ontologies
-        }  # type: Dict[str, OLSOntology]
+        }
         self.cache_path = cache_path / "ols"
         self.cache = cache
 
-        if not self.cache_path.exists():
+        if cache and not self.cache_path.exists():
             self.cache_path.mkdir(parents=True)
 
     def get_iri(self, ontology: str, term: str) -> str:
@@ -137,11 +137,6 @@ class OLSQuery:
         # double urlencode iri for OLS
         urliri = urllib.parse.quote(iri, safe="")
         urliri = urllib.parse.quote(urliri, safe="")
-        # urliri = iri.replace(":", "%253A")
-        # urliri = urliri.replace("/", "%252F")
-
-        # term_id = term.split(":")[-1]
-        # url = ols_pattern.replace('{$id}', term_id)
         cache_path = self.cache_path / f"{urliri}.json"
         data: Dict[str, Any] = {}
         if self.cache:
@@ -177,7 +172,8 @@
         else:
             data["errors"] = []
             data["warnings"] = []
-        write_json_cache(data=data, cache_path=cache_path)  # type: ignore
+        if self.cache:
+            write_json_cache(data=data, cache_path=cache_path)  # type: ignore
 
         return data
 
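Together with the constructor change above, `cache=False` now means no cache directory is created and no term data is written to disk; a minimal sketch of that behaviour, assuming the import path and that an empty `ontologies` iterable is accepted.

```python
# Sketch of the cache=False behaviour in 0.4.2: the "ols" directory is not
# created and results are never written to disk. The import path and the empty
# ontologies iterable are assumptions, not confirmed by this diff.
import tempfile
from pathlib import Path

from pymetadata.omex import OLSQuery  # adjust to the module that defines OLSQuery

tmp_dir = Path(tempfile.mkdtemp())
query = OLSQuery(ontologies=[], cache_path=tmp_dir, cache=False)
assert not (tmp_dir / "ols").exists()  # 0.4.1 created this directory unconditionally
```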
@@ -291,7 +291,7 @@ def try_ontology_import(ontology_id: str) -> None:
 
 if __name__ == "__main__":
     # download latest versions
-    # update_ontology_files()
+    update_ontology_files()
 
     # test loading of OWL files
     # ofile: OntologyFile