OntoLearner 1.4.9__py3-none-any.whl → 1.4.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ontolearner/VERSION CHANGED
@@ -1 +1 @@
- 1.4.9
+ 1.4.10
ontolearner/base/ontology.py CHANGED
@@ -372,7 +372,7 @@ class BaseOntology(ABC):
          # Save updated metrics
          df.to_excel(metrics_file_path, index=False)
 
-     def is_valid_label(label: str) -> Any:
+     def is_valid_label(self, label: str) -> Any:
          invalids = ['root', 'thing']
          if label.lower() in invalids:
              return None
@@ -522,7 +522,7 @@ class BaseOntology(ABC):
              return True
          return False
 
-     def _is_anonymous_id(label: str) -> bool:
+     def _is_anonymous_id(self, label: str) -> bool:
          """Check if a label represents an anonymous class identifier."""
          if not label:
              return True
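
Both ontology.py hunks apply the same fix: is_valid_label and _is_anonymous_id were declared without a self parameter, so calling them through a BaseOntology instance would pass the instance itself as the first positional argument and raise a TypeError. The sketch below illustrates the failure mode; the Before/After class names and the simplified label check are hypothetical, not OntoLearner code.

    # Hypothetical sketch of why the missing `self` broke instance calls in 1.4.9.
    class Before:
        def is_valid_label(label: str):            # 1.4.9 signature: no `self`
            return label.lower() not in ('root', 'thing')

    class After:
        def is_valid_label(self, label: str):      # 1.4.10 signature: proper instance method
            return label.lower() not in ('root', 'thing')

    try:
        Before().is_valid_label("Person")          # instance + "Person" = 2 args for a 1-arg function
    except TypeError as exc:
        print(exc)                                 # "... takes 1 positional argument but 2 were given"

    print(After().is_valid_label("Person"))        # True
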
ontolearner/tools/analyzer.py CHANGED
@@ -14,6 +14,7 @@
 
  import logging
  import time
+ import numpy as np
  from abc import ABC
  from rdflib import RDF, RDFS, OWL
  from collections import defaultdict
@@ -186,6 +187,56 @@ class Analyzer(ABC):
 
          return metrics
 
+     @staticmethod
+     def compute_complexity_score(
+             topology_metrics: TopologyMetrics,
+             dataset_metrics: DatasetMetrics,
+             a: float = 0.4,
+             b: float = 6.0,
+             eps: float = 1e-12
+     ) -> float:
+         """
+         Compute a single normalized complexity score for an ontology.
+ 
+         This function combines structural topology metrics and dataset quality metrics
+         into a weighted aggregate score, then applies a logistic transformation to
+         normalize it to the range [0, 1]. The score reflects overall ontology complexity,
+         considering graph structure, hierarchy, breadth, coverage, and dataset richness.
+ 
+         Args:
+             topology_metrics (TopologyMetrics): Precomputed structural metrics of the ontology graph.
+             dataset_metrics (DatasetMetrics): Precomputed metrics of extracted learning datasets.
+             a (float, optional): Steepness parameter for the logistic normalization function. Default is 0.4.
+             b (float, optional): Centering parameter for the logistic function, should be tuned to match the scale of aggregated metrics. Default is 6.0.
+             eps (float, optional): Small epsilon to prevent numerical issues in logistic computation. Default is 1e-12.
+ 
+         Returns:
+             float: Normalized complexity score in [0, 1], where higher values indicate more complex ontologies.
+ 
+         Notes:
+             - Weights are assigned to different metric categories: graph metrics, coverage metrics, hierarchy metrics,
+               breadth metrics, and dataset metrics (term-types, taxonomic, non-taxonomic relations).
+             - Metrics are log-normalized before weighting to reduce scale differences.
+             - The logistic transformation ensures the final score is bounded and interpretable.
+         """
+         # Define metric categories with their weights
+         metric_categories = {
+             0.3: ["total_nodes", "total_edges", "num_root_nodes", "num_leaf_nodes"],
+             0.25: ["num_classes", "num_properties", "num_individuals"],
+             0.10: ["max_depth", "min_depth", "avg_depth", "depth_variance"],
+             0.20: ["max_breadth", "min_breadth", "avg_breadth", "breadth_variance"],
+             0.15: ["num_term_types", "num_taxonomic_relations", "num_non_taxonomic_relations", "avg_terms"]
+         }
+         weights = {metric: weight for weight, metrics in metric_categories.items() for metric in metrics}
+         metrics = [metric for _, metric_list in metric_categories.items() for metric in metric_list]
+         onto_metrics = {**topology_metrics.__dict__, **dataset_metrics.__dict__}
+         norm_weighted_values = [np.log1p(onto_metrics[m]) * weights[m] for m in metrics if m in onto_metrics]
+         total_weight = sum(weights[m] for m in metrics if m in onto_metrics)
+         weighted_sum = sum(norm_weighted_values) / total_weight if total_weight > 0 else 0.0
+         complexity_score = 1.0 / (1.0 + np.exp(-a * (weighted_sum - b) + eps))
+         return complexity_score
+ 
+ 
      @staticmethod
      def compute_dataset_metrics(ontology: BaseOntology) -> DatasetMetrics:
          """
ontolearner-1.4.10.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: OntoLearner
- Version: 1.4.9
+ Version: 1.4.10
  Summary: OntoLearner: A Modular Python Library for Ontology Learning with LLMs.
  License: MIT
  License-File: LICENSE
ontolearner-1.4.10.dist-info/RECORD CHANGED
@@ -1,10 +1,10 @@
- ontolearner/VERSION,sha256=x-xbkXEIv48hifmVFcVtJDdZj6d_bmXwy3Lp4d5pPVY,6
+ ontolearner/VERSION,sha256=yg62XB5UNHqKQzpcdrkSfe7_qhlmlTw-ksDKQaqtI30,7
  ontolearner/__init__.py,sha256=E4yukFv2PV4uyztTPDWljCySY9AVDcDDzabuvxfabYE,1889
  ontolearner/_learner.py,sha256=2CRQvpsz8akIOdxTs2-KLJ-MssULrjpK-QDD3QXUJXI,5297
  ontolearner/_ontology.py,sha256=W1mp195SImqLKwaj4ueEaBWuLJg2jUdx1JT20Ds3fmQ,6950
  ontolearner/base/__init__.py,sha256=5pf-ltxzGp32xhEcPdbtm11wXJrYJMUeWG-mbcAYD8Q,705
  ontolearner/base/learner.py,sha256=latiGv8p3nyPrxMp7g5B2MSF-JEInRwIlbOn09uh7io,18899
- ontolearner/base/ontology.py,sha256=JbMJ1-WUyHWQiNJL-DeaqcriUimLdqN3_ESROgqOPTQ,24772
+ ontolearner/base/ontology.py,sha256=t7n81Vk8Y5BCK88AYIyNKd7d1LjJnoTlXigyPvrLxR4,24784
  ontolearner/base/text2onto.py,sha256=iUXYZoqnwgebQuQzM-XSGTVRfHLlhjUK_z5XUvhRICc,5388
  ontolearner/data_structure/__init__.py,sha256=1HiKvk8FKjhYeI92RHnJXxyQbUJBi3JFytjQjthsY_s,599
  ontolearner/data_structure/data.py,sha256=jUUDfqsOZcEqIR83SRboiKibPdA_JquI1uOEiQQ_lqY,11273
@@ -65,12 +65,12 @@ ontolearner/text2onto/general.py,sha256=2RUFMbWm7qLq3MJHsyNb3rgYkGcicnkbiH2wdPBs
  ontolearner/text2onto/splitter.py,sha256=7SrFeUM5GZTTvbrve9RRKtBjELlkpnMkyPluO614PYM,10941
  ontolearner/text2onto/synthesizer.py,sha256=tSJgPTFWVKBQi2RqLQfMhX_noXeNLh2Wq2Ezbqyv-OA,5486
  ontolearner/tools/__init__.py,sha256=IB5ycAW5vUDKeq-NAMMbwjSFzwSzC-5j0UobIzO3ZmI,623
- ontolearner/tools/analyzer.py,sha256=8iL9wY1ESh4RumSW-s28EtXjtjPj71IKp0MBK0ograg,9925
+ ontolearner/tools/analyzer.py,sha256=1SooAT7qYqDIrHyvHXnrBRmuPwZhLK1uj26OiKRECc0,12989
  ontolearner/tools/visualizer.py,sha256=cwijl4yYaS1SCLM5wbvRTEcbQj9Bjo4fHzZR6q6o8qo,6267
  ontolearner/utils/__init__.py,sha256=pSEyU3dlPMADBqygqaaid44RdWf0Lo3Fvz-K_rQ7_Bw,733
  ontolearner/utils/io.py,sha256=3DqGK2p7c0onKi0Xxs16WB08uHfHUId3bW0dDKwyS0g,2110
  ontolearner/utils/train_test_split.py,sha256=Zlm42eT6QGWwlySyomCPIiTGmGqeN_h4z4xBY2EAOR8,11530
- ontolearner-1.4.9.dist-info/METADATA,sha256=c_V_1mUkxAhzJz04u1wRYU7xodpZQdiJXBVFzUCIMK8,11444
- ontolearner-1.4.9.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
- ontolearner-1.4.9.dist-info/licenses/LICENSE,sha256=krXMLuMKgzX-UgaufgfJdm9ojIloZot7ZdvJUnNxl4I,1067
- ontolearner-1.4.9.dist-info/RECORD,,
+ ontolearner-1.4.10.dist-info/METADATA,sha256=NlTab0joIb4EJ_xHQg9mQ0oHuZjoA4P-unkUYLbEqyI,11445
+ ontolearner-1.4.10.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
+ ontolearner-1.4.10.dist-info/licenses/LICENSE,sha256=krXMLuMKgzX-UgaufgfJdm9ojIloZot7ZdvJUnNxl4I,1067
+ ontolearner-1.4.10.dist-info/RECORD,,