biocypher 0.5.34__tar.gz → 0.5.36__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of biocypher might be problematic.

Files changed (24)
  1. {biocypher-0.5.34 → biocypher-0.5.36}/PKG-INFO +12 -4
  2. {biocypher-0.5.34 → biocypher-0.5.36}/README.md +11 -3
  3. {biocypher-0.5.34 → biocypher-0.5.36}/biocypher/_get.py +82 -38
  4. {biocypher-0.5.34 → biocypher-0.5.36}/biocypher/_logger.py +1 -1
  5. {biocypher-0.5.34 → biocypher-0.5.36}/biocypher/_metadata.py +1 -1
  6. {biocypher-0.5.34 → biocypher-0.5.36}/biocypher/_misc.py +50 -26
  7. {biocypher-0.5.34 → biocypher-0.5.36}/biocypher/_ontology.py +123 -29
  8. {biocypher-0.5.34 → biocypher-0.5.36}/pyproject.toml +1 -1
  9. {biocypher-0.5.34 → biocypher-0.5.36}/LICENSE +0 -0
  10. {biocypher-0.5.34 → biocypher-0.5.36}/biocypher/__init__.py +0 -0
  11. {biocypher-0.5.34 → biocypher-0.5.36}/biocypher/_config/__init__.py +0 -0
  12. {biocypher-0.5.34 → biocypher-0.5.36}/biocypher/_config/biocypher_config.yaml +0 -0
  13. {biocypher-0.5.34 → biocypher-0.5.36}/biocypher/_config/test_config.yaml +0 -0
  14. {biocypher-0.5.34 → biocypher-0.5.36}/biocypher/_config/test_schema_config.yaml +0 -0
  15. {biocypher-0.5.34 → biocypher-0.5.36}/biocypher/_config/test_schema_config_disconnected.yaml +0 -0
  16. {biocypher-0.5.34 → biocypher-0.5.36}/biocypher/_config/test_schema_config_extended.yaml +0 -0
  17. {biocypher-0.5.34 → biocypher-0.5.36}/biocypher/_connect.py +0 -0
  18. {biocypher-0.5.34 → biocypher-0.5.36}/biocypher/_core.py +0 -0
  19. {biocypher-0.5.34 → biocypher-0.5.36}/biocypher/_create.py +0 -0
  20. {biocypher-0.5.34 → biocypher-0.5.36}/biocypher/_deduplicate.py +0 -0
  21. {biocypher-0.5.34 → biocypher-0.5.36}/biocypher/_mapping.py +0 -0
  22. {biocypher-0.5.34 → biocypher-0.5.36}/biocypher/_pandas.py +0 -0
  23. {biocypher-0.5.34 → biocypher-0.5.36}/biocypher/_translate.py +0 -0
  24. {biocypher-0.5.34 → biocypher-0.5.36}/biocypher/_write.py +0 -0
{biocypher-0.5.34 → biocypher-0.5.36}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: biocypher
-Version: 0.5.34
+Version: 0.5.36
 Summary: A unifying framework for biomedical research knowledge graphs
 Home-page: https://github.com/biocypher/biocypher
 License: MIT
@@ -47,6 +47,7 @@ Description-Content-Type: text/markdown
 
 
 ## ❓ Description
+
 Knowledge graphs (KGs) are an [approach to knowledge
 representation](https://en.wikipedia.org/wiki/Knowledge_graph) that uses graph
 structure to facilitate exploration and analysis of complex data, often
@@ -67,6 +68,7 @@ the docs [here](https://biocypher.org).
 </img>
 
 ## 📖 Documentation
+
 Tutorial and developer docs at https://biocypher.org. For a quickstart into your
 own pipeline, you can refer to our [project
 template](https://github.com/biocypher/project-template), and for an overview of
@@ -75,6 +77,7 @@ features, visit our [GitHub Project
 Board](https://github.com/orgs/biocypher/projects/3/views/2).
 
 ## ⚙️ Installation / Usage
+
 Install the package from PyPI using `pip install biocypher`. More comprehensive
 installation and configuration instructions can be found
 [here](https://biocypher.org/installation.html).
@@ -84,6 +87,7 @@ and the various pipelines we have created. You can find these on the [Components
 Project Board](https://github.com/orgs/biocypher/projects/3/views/2).
 
 ## 🤝 Getting involved
+
 We are very happy about contributions from the community, large and small!
 If you would like to contribute to BioCypher development, please refer to
 our [contribution guidelines](CONTRIBUTING.md). :)
@@ -96,11 +100,15 @@ please join our community at https://biocypher.zulipchat.com!
 > This disclaimer was adapted from the [Pooch](https://github.com/fatiando/pooch) project.
 
 ## ✍️ Citation
-The BioCypher paper has been peer-reviewed in
-[Nature Biotechnology](https://www.nature.com/articles/s41587-023-01848-y).
-Before, it was available as a preprint at https://arxiv.org/abs/2212.13543.
+
+The BioCypher paper has been peer-reviewed in [Nature
+Biotechnology](https://www.nature.com/articles/s41587-023-01848-y). It is
+available as a self-archived version on
+[Zenodo](https://zenodo.org/records/10320714). Before, it was available as a
+preprint at https://arxiv.org/abs/2212.13543.
 
 ## Acknowledgements
+
 This project has received funding from the European Union’s Horizon 2020
 research and innovation programme under grant agreement No 965193 for DECIDER
 and No 116030 for TransQST.
{biocypher-0.5.34 → biocypher-0.5.36}/README.md

@@ -11,6 +11,7 @@
 
 
 ## ❓ Description
+
 Knowledge graphs (KGs) are an [approach to knowledge
 representation](https://en.wikipedia.org/wiki/Knowledge_graph) that uses graph
 structure to facilitate exploration and analysis of complex data, often
@@ -31,6 +32,7 @@ the docs [here](https://biocypher.org).
 </img>
 
 ## 📖 Documentation
+
 Tutorial and developer docs at https://biocypher.org. For a quickstart into your
 own pipeline, you can refer to our [project
 template](https://github.com/biocypher/project-template), and for an overview of
@@ -39,6 +41,7 @@ features, visit our [GitHub Project
 Board](https://github.com/orgs/biocypher/projects/3/views/2).
 
 ## ⚙️ Installation / Usage
+
 Install the package from PyPI using `pip install biocypher`. More comprehensive
 installation and configuration instructions can be found
 [here](https://biocypher.org/installation.html).
@@ -48,6 +51,7 @@ and the various pipelines we have created. You can find these on the [Components
 Project Board](https://github.com/orgs/biocypher/projects/3/views/2).
 
 ## 🤝 Getting involved
+
 We are very happy about contributions from the community, large and small!
 If you would like to contribute to BioCypher development, please refer to
 our [contribution guidelines](CONTRIBUTING.md). :)
@@ -60,11 +64,15 @@ please join our community at https://biocypher.zulipchat.com!
 > This disclaimer was adapted from the [Pooch](https://github.com/fatiando/pooch) project.
 
 ## ✍️ Citation
-The BioCypher paper has been peer-reviewed in
-[Nature Biotechnology](https://www.nature.com/articles/s41587-023-01848-y).
-Before, it was available as a preprint at https://arxiv.org/abs/2212.13543.
+
+The BioCypher paper has been peer-reviewed in [Nature
+Biotechnology](https://www.nature.com/articles/s41587-023-01848-y). It is
+available as a self-archived version on
+[Zenodo](https://zenodo.org/records/10320714). Before, it was available as a
+preprint at https://arxiv.org/abs/2212.13543.
 
 ## Acknowledgements
+
 This project has received funding from the European Union’s Horizon 2020
 research and innovation programme under grant agreement No 965193 for DECIDER
 and No 116030 for TransQST.
{biocypher-0.5.34 → biocypher-0.5.36}/biocypher/_get.py

@@ -15,6 +15,7 @@ BioCypher get module. Used to download and cache data from external sources.
 from __future__ import annotations
 
 from typing import Optional
+import shutil
 
 from ._logger import logger
 
@@ -109,55 +110,98 @@ class Downloader:
         Returns:
             str or list: The path or paths to the downloaded resource(s).
         """
-        # check if resource is cached
-        cache_record = self._get_cache_record(resource)
+        expired = self._is_cache_expired(resource)
 
+        if expired or not cache:
+            self._delete_expired_resource_cache(resource)
+            logger.info(f"Asking for download of {resource.name}.")
+            paths = self._download_resource(cache, resource)
+        else:
+            paths = self.get_cached_version(resource)
+        self._update_cache_record(resource)
+        return paths
+
+    def _is_cache_expired(self, resource: Resource) -> bool:
+        """
+        Check if resource cache is expired.
+
+        Args:
+            resource (Resource): The resource to download.
+
+        Returns:
+            bool: cache is expired or not.
+        """
+        cache_record = self._get_cache_record(resource)
         if cache_record:
-            # check if resource is expired (formatted in days)
-            dl = cache_record.get("date_downloaded")
-            # convert string to datetime
-            dl = datetime.strptime(dl, "%Y-%m-%d %H:%M:%S.%f")
-            lt = timedelta(days=resource.lifetime)
-            expired = dl + lt < datetime.now()
+            download_time = datetime.strptime(
+                cache_record.get("date_downloaded"), "%Y-%m-%d %H:%M:%S.%f"
+            )
+            lifetime = timedelta(days=resource.lifetime)
+            expired = download_time + lifetime < datetime.now()
         else:
             expired = True
+        return expired
 
-        # download resource
-        if expired or not cache:
-            logger.info(f"Asking for download of {resource.name}.")
+    def _delete_expired_resource_cache(self, resource: Resource):
+        resource_cache_path = self.cache_dir + "/" + resource.name
+        if os.path.exists(resource_cache_path) and os.path.isdir(
+            resource_cache_path
+        ):
+            shutil.rmtree(resource_cache_path)
 
-            if resource.is_dir:
-                files = self._get_files(resource)
-                resource.url_s = [resource.url_s + "/" + file for file in files]
-                resource.is_dir = False
-                paths = self._download_or_cache(resource, cache)
-            elif isinstance(resource.url_s, list):
-                paths = []
-                for url in resource.url_s:
-                    fname = url[url.rfind("/") + 1 :]
-                    paths.append(
-                        self._retrieve(
-                            url=url,
-                            fname=fname,
-                            path=os.path.join(self.cache_dir, resource.name),
-                        )
+    def _download_resource(self, cache, resource):
+        """Download a resource.
+
+        Args:
+            cache (bool): Whether to cache the resource or not.
+            resource (Resource): The resource to download.
+
+        Returns:
+            str or list: The path or paths to the downloaded resource(s).
+        """
+        if resource.is_dir:
+            files = self._get_files(resource)
+            resource.url_s = [resource.url_s + "/" + file for file in files]
+            resource.is_dir = False
+            paths = self._download_or_cache(resource, cache)
+        elif isinstance(resource.url_s, list):
+            paths = []
+            for url in resource.url_s:
+                fname = url[url.rfind("/") + 1 :]
+                paths.append(
+                    self._retrieve(
+                        url=url,
+                        fname=fname,
+                        path=os.path.join(self.cache_dir, resource.name),
                    )
-            else:
-                fname = resource.url_s[resource.url_s.rfind("/") + 1 :]
-                paths = self._retrieve(
-                    url=resource.url_s,
-                    fname=fname,
-                    path=os.path.join(self.cache_dir, resource.name),
                )
+        else:
+            fname = resource.url_s[resource.url_s.rfind("/") + 1 :]
+            paths = self._retrieve(
+                url=resource.url_s,
+                fname=fname,
+                path=os.path.join(self.cache_dir, resource.name),
+            )
+        # sometimes a compressed file contains multiple files
+        # TODO ask for a list of files in the archive to be used from the
+        # adapter
+        return paths
 
-        # sometimes a compressed file contains multiple files
-        # TODO ask for a list of files in the archive to be used from the
-        # adapter
+    def get_cached_version(self, resource) -> list[str]:
+        """Get the cached version of a resource.
 
-        # update cache record
-        self._update_cache_record(resource)
+        Args:
+            resource (Resource): The resource to get the cached version of.
 
-        return paths
+        Returns:
+            list[str]: The paths to the cached resource(s).
+        """
+        cached_resource_location = os.path.join(self.cache_dir, resource.name)
+        logger.info(f"Use cached version from {cached_resource_location}.")
+        paths = []
+        for file in os.listdir(cached_resource_location):
+            paths.append(os.path.join(cached_resource_location, file))
+        return paths
 
     def _retrieve(
         self,
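The refactor above splits the former monolithic download logic into `_is_cache_expired`, `_delete_expired_resource_cache`, `_download_resource`, and `get_cached_version`. Below is a minimal standalone sketch of the resulting flow; the `Resource` stand-in and `fetch` helper are illustrative only, not biocypher's actual classes.

```python
# Minimal sketch of the new cache-expiry flow; Resource and fetch are
# illustrative stand-ins, not biocypher's actual classes.
import shutil
from dataclasses import dataclass
from datetime import datetime, timedelta
from pathlib import Path
from typing import Optional


@dataclass
class Resource:
    name: str
    lifetime: int  # cache lifetime in days


def is_cache_expired(date_downloaded: Optional[str], resource: Resource) -> bool:
    """Expired if there is no cache record or it is older than `lifetime` days."""
    if date_downloaded is None:
        return True
    downloaded = datetime.strptime(date_downloaded, "%Y-%m-%d %H:%M:%S.%f")
    return downloaded + timedelta(days=resource.lifetime) < datetime.now()


def fetch(
    resource: Resource,
    cache_dir: Path,
    date_downloaded: Optional[str],
    cache: bool = True,
) -> list:
    """Mirror of the new download flow: re-download on expiry or cache=False, else reuse."""
    resource_dir = cache_dir / resource.name
    if is_cache_expired(date_downloaded, resource) or not cache:
        if resource_dir.is_dir():
            shutil.rmtree(resource_dir)  # drop the stale copy before re-downloading
        print(f"would download {resource.name} into {resource_dir}")
        return []
    # cached and still fresh: return the files already on disk
    return [str(p) for p in resource_dir.iterdir()]


if __name__ == "__main__":
    r = Resource(name="test_resource", lifetime=7)
    print(fetch(r, Path.home() / ".cache" / "biocypher", date_downloaded=None))
```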
{biocypher-0.5.34 → biocypher-0.5.36}/biocypher/_logger.py

@@ -48,7 +48,7 @@ def get_logger(name: str = "biocypher") -> logging.Logger:
     # create logger
     logger = logging.getLogger(name)
     logger.setLevel(logging.DEBUG)
-    logger.propagate = False
+    logger.propagate = True
 
     # formatting
    file_formatter = logging.Formatter(
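The `propagate` flip means records logged on the `biocypher` logger now also travel up to ancestor loggers, so an application that configures handlers on the root logger sees BioCypher's messages. A small standard-library illustration, independent of BioCypher's own handler setup:

```python
import logging

# Application-side configuration: a handler on the root logger.
logging.basicConfig(level=logging.DEBUG, format="%(name)s - %(levelname)s - %(message)s")

lib_logger = logging.getLogger("biocypher")
lib_logger.setLevel(logging.DEBUG)

lib_logger.propagate = False
lib_logger.info("not shown")  # never reaches the root logger's handler

lib_logger.propagate = True
lib_logger.info("shown")  # propagates to the root logger's handler and is printed
```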
{biocypher-0.5.34 → biocypher-0.5.36}/biocypher/_metadata.py

@@ -19,7 +19,7 @@ import importlib.metadata
 
 import toml
 
-_VERSION = "0.5.34"
+_VERSION = "0.5.36"
 
 
 def get_metadata():
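`_VERSION` is the hard-coded version constant bumped with each release alongside the installed distribution's metadata. The body of biocypher's `get_metadata()` is not part of this diff, so the following is only a hedged sketch of the usual lookup pattern with `importlib.metadata`:

```python
import importlib.metadata

_VERSION = "0.5.36"  # hard-coded fallback, mirroring biocypher/_metadata.py


def get_version(package: str = "biocypher") -> str:
    """Prefer the installed distribution's metadata, fall back to the constant."""
    try:
        return importlib.metadata.version(package)
    except importlib.metadata.PackageNotFoundError:
        return _VERSION


print(get_version())
```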
{biocypher-0.5.34 → biocypher-0.5.36}/biocypher/_misc.py

@@ -76,56 +76,80 @@ def ensure_iterable(value: Any) -> Iterable:
     return value if isinstance(value, LIST_LIKE) else (value,)
 
 
-def create_tree_visualisation(inheritance_tree: Union[dict, nx.Graph]) -> str:
+def create_tree_visualisation(inheritance_graph: Union[dict, nx.Graph]) -> Tree:
     """
     Creates a visualisation of the inheritance tree using treelib.
     """
+    inheritance_tree = _get_inheritance_tree(inheritance_graph)
+    classes, root = _find_root_node(inheritance_tree)
+
+    tree = Tree()
+    tree.create_node(root, root)
+    while classes:
+        for child in classes:
+            parent = inheritance_tree[child]
+            if parent in tree.nodes.keys() or parent == root:
+                tree.create_node(child, child, parent=parent)
+
+        for node in tree.nodes.keys():
+            if node in classes:
+                classes.remove(node)
+
+    return tree
+
+
+def _get_inheritance_tree(inheritance_graph: Union[dict, nx.Graph]) -> dict:
+    """Transforms an inheritance_graph into an inheritance_tree.
+
+    Args:
+        inheritance_graph: A dict or nx.Graph representing the inheritance graph.
+
+    Returns:
+        A dict representing the inheritance tree.
+    """
+    if isinstance(inheritance_graph, nx.Graph):
+        inheritance_tree = nx.to_dict_of_lists(inheritance_graph)
+
+        multiple_parents_present = _multiple_inheritance_present(
+            inheritance_tree
+        )
+        if multiple_parents_present:
+            logger.warning(
+                "The ontology contains multiple inheritance (one child node has multiple parent nodes). This is not visualized in the following hierarchy tree (the child node is only added once). If you want to browse all relationships of the parsed ontology write a graphml file to disk and view this file."
+            )
 
-    if isinstance(inheritance_tree, nx.Graph):
-        inheritance_tree = nx.to_dict_of_lists(inheritance_tree)
         # unlist values
         inheritance_tree = {k: v[0] for k, v in inheritance_tree.items() if v}
+        return inheritance_tree
+    elif not _multiple_inheritance_present(inheritance_graph):
+        return inheritance_graph
+
 
-    # find root node
+def _multiple_inheritance_present(inheritance_tree: dict) -> bool:
+    """Checks if multiple inheritance is present in the inheritance_tree."""
+    return any(len(value) > 1 for value in inheritance_tree.values())
+
+
+def _find_root_node(inheritance_tree: dict) -> tuple[set, str]:
     classes = set(inheritance_tree.keys())
     parents = set(inheritance_tree.values())
     root = list(parents - classes)
-
     if len(root) > 1:
         if "entity" in root:
-            root = "entity"  # default: good standard? TODO
-
+            root = "entity"  # TODO: default: good standard?
         else:
             raise ValueError(
                 "Inheritance tree cannot have more than one root node. "
                 f"Found {len(root)}: {root}."
             )
-
     else:
         root = root[0]
-
     if not root:
         # find key whose value is None
         root = list(inheritance_tree.keys())[
             list(inheritance_tree.values()).index(None)
         ]
-
-    tree = Tree()
-
-    tree.create_node(root, root)
-
-    while classes:
-        for child in classes:
-            parent = inheritance_tree[child]
-
-            if parent in tree.nodes.keys() or parent == root:
-                tree.create_node(child, child, parent=parent)
-
-        for node in tree.nodes.keys():
-            if node in classes:
-                classes.remove(node)
-
-    return tree
+    return classes, root
 
 
 # string conversion, adapted from Biolink Model Toolkit
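After the refactor, `create_tree_visualisation` resolves the inheritance mapping, finds the root as the only parent that never appears as a child, and attaches children level by level with treelib. The sketch below illustrates that approach on a toy hierarchy; it is a standalone helper written for illustration, not the biocypher function itself.

```python
from treelib import Tree


def build_tree(child_to_parent: dict) -> Tree:
    """Build a treelib Tree from a child -> parent mapping (single inheritance only)."""
    classes = set(child_to_parent)
    parents = set(child_to_parent.values())
    (root,) = parents - classes  # exactly one parent that is never a child

    tree = Tree()
    tree.create_node(root, root)
    remaining = set(classes)
    while remaining:
        for child in list(remaining):
            parent = child_to_parent[child]
            if parent in tree.nodes:  # attach only once the parent is in the tree
                tree.create_node(child, child, parent=parent)
                remaining.remove(child)
    return tree


# Hypothetical example hierarchy, purely for demonstration.
hierarchy = {
    "protein": "polypeptide",
    "polypeptide": "biological entity",
    "complex": "biological entity",
    "biological entity": "entity",
}
build_tree(hierarchy).show()
```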
{biocypher-0.5.34 → biocypher-0.5.36}/biocypher/_ontology.py

@@ -93,7 +93,7 @@ class OntologyAdapter:
         self._reverse_labels = reverse_labels
         self._remove_prefixes = remove_prefixes
 
-        # Load the ontology into an rdflib Graph according to the file extension
+        # Load the ontology into a rdflib Graph according to the file extension
         self._rdf_graph = self._load_rdf_graph(ontology_file)
 
         self._nx_graph = self._rdf_to_nx(
@@ -107,56 +107,77 @@ class OntologyAdapter:
         G = nx.DiGraph()
 
         # Define a recursive function to add subclasses to the graph
-        def add_subclasses(node):
-            # Only add nodes that have a label
-            if (node, rdflib.RDFS.label, None) not in g:
+        def add_subclasses(parent_node):
+            if not has_label(parent_node, g):
                 return
 
-            nx_id, nx_label = _get_nx_id_and_label(node)
-
-            if nx_id not in G:
-                G.add_node(nx_id)
-                G.nodes[nx_id]["label"] = nx_label
-
-            # Recursively add all subclasses of the node to the graph
-            for s, _, o in g.triples((None, rdflib.RDFS.subClassOf, node)):
-                # Only add nodes that have a label
-                if (s, rdflib.RDFS.label, None) not in g:
-                    continue
-
-                s_id, s_label = _get_nx_id_and_label(s)
-                G.add_node(s_id)
-                G.nodes[s_id]["label"] = s_label
+            nx_parent_node_id, nx_parent_node_label = _get_nx_id_and_label(
+                parent_node
+            )
 
-                G.add_edge(s_id, nx_id)
-                add_subclasses(s)
-                add_parents(s)
+            if nx_parent_node_id not in G:
+                add_node(nx_parent_node_id, nx_parent_node_label)
+
+            child_nodes = get_child_nodes(parent_node, g)
+
+            if child_nodes:
+                for child_node in child_nodes:
+                    if not has_label(child_node, g):
+                        continue
+                    (
+                        nx_child_node_id,
+                        nx_child_node_label,
+                    ) = _get_nx_id_and_label(child_node)
+                    add_node(nx_child_node_id, nx_child_node_label)
+                    G.add_edge(nx_child_node_id, nx_parent_node_id)
+                for child_node in child_nodes:
+                    add_subclasses(child_node)
+                    add_parents(child_node)
 
         def add_parents(node):
-            # Only add nodes that have a label
-            if (node, rdflib.RDFS.label, None) not in g:
+            if not has_label(node, g):
                 return
 
             nx_id, nx_label = _get_nx_id_and_label(node)
 
             # Recursively add all parents of the node to the graph
             for s, _, o in g.triples((node, rdflib.RDFS.subClassOf, None)):
-                # Only add nodes that have a label
-                if (o, rdflib.RDFS.label, None) not in g:
+                if not has_label(o, g):
                     continue
 
                 o_id, o_label = _get_nx_id_and_label(o)
 
-                # Skip nodes already in the graph
+                # Skip if node already in the graph
                 if o_id in G:
                     continue
 
-                G.add_node(o_id)
-                G.nodes[o_id]["label"] = o_label
+                add_node(o_id, o_label)
 
                 G.add_edge(nx_id, o_id)
                 add_parents(o)
 
+        def has_label(node: rdflib.URIRef, g: rdflib.Graph) -> bool:
+            """Does the node have a label in g?
+
+            Args:
+                node (rdflib.URIRef): The node to check
+                g (rdflib.Graph): The graph to check in
+
+            Returns:
+                bool: True if the node has a label, False otherwise
+            """
+            return (node, rdflib.RDFS.label, None) in g
+
+        def add_node(nx_node_id: str, nx_node_label: str):
+            """Add a node to the graph.
+
+            Args:
+                nx_node_id (str): The ID of the node
+                nx_node_label (str): The label of the node
+            """
+            G.add_node(nx_node_id)
+            G.nodes[nx_node_id]["label"] = nx_node_label
+
         def _get_nx_id_and_label(node):
             node_id_str = self._remove_prefix(str(node))
             node_label_str = str(g.value(node, rdflib.RDFS.label)).replace(
@@ -168,6 +189,79 @@ class OntologyAdapter:
             nx_label = node_id_str if switch_id_and_label else node_label_str
             return nx_id, nx_label
 
+        def get_child_nodes(
+            parent_node: rdflib.URIRef, g: rdflib.Graph
+        ) -> list:
+            """Get the child nodes of a node in the ontology.
+            Accounts for the case of multiple parents defined in intersectionOf.
+
+            Args:
+                parent_node (rdflib.URIRef): The parent node to get the children of
+                g (rdflib.Graph): The graph to get the children from
+
+            Returns:
+                list: A list of the child nodes
+            """
+            child_nodes = []
+            for s, p, o in g.triples((None, rdflib.RDFS.subClassOf, None)):
+                if (o, rdflib.RDF.type, rdflib.OWL.Class) in g and (
+                    o,
+                    rdflib.OWL.intersectionOf,
+                    None,
+                ) in g:
+                    # Check if node has multiple parent nodes defined in intersectionOf (one of them = parent_node)
+                    parent_nodes = get_nodes_in_intersectionof(o)
+                    if parent_node in parent_nodes:
+                        child_nodes.append(s)
+                        for node in parent_nodes:
+                            add_parents(node)
+                elif o == parent_node:
+                    # only one parent node
+                    child_nodes.append(s)
+            return child_nodes
+
+        def get_nodes_in_intersectionof(o: rdflib.URIRef) -> list:
+            """Get the nodes in an intersectionOf node.
+
+            Args:
+                o (rdflib.URIRef): The intersectionOf node
+
+            Returns:
+                list: A list of the nodes in the intersectionOf node
+            """
+            anonymous_intersection_nodes = []
+            for _, _, anonymous_object in g.triples(
+                (o, rdflib.OWL.intersectionOf, None)
+            ):
+                anonymous_intersection_nodes.append(anonymous_object)
+            anonymous_intersection_node = anonymous_intersection_nodes[0]
+            nodes_in_intersection = retrieve_rdf_linked_list(
+                anonymous_intersection_node
+            )
+            return nodes_in_intersection
+
+        def retrieve_rdf_linked_list(subject: rdflib.URIRef) -> list:
+            """Recursively retrieves a linked list from RDF.
+            Example RDF list with the items [item1, item2]:
+            list_node - first -> item1
+            list_node - rest -> list_node2
+            list_node2 - first -> item2
+            list_node2 - rest -> nil
+
+            Args:
+                subject (rdflib.URIRef): One list_node of the RDF list
+
+            Returns:
+                list: The items of the RDF list
+            """
+            rdf_list = []
+            for s, p, o in g.triples((subject, rdflib.RDF.first, None)):
+                rdf_list.append(o)
+            for s, p, o in g.triples((subject, rdflib.RDF.rest, None)):
+                if o != rdflib.RDF.nil:
+                    rdf_list.extend(retrieve_rdf_linked_list(o))
+            return rdf_list
+
         # Add all subclasses of the root node to the graph
         add_subclasses(root)
 
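The new `get_child_nodes` and `retrieve_rdf_linked_list` helpers handle OWL classes whose parents are wrapped in an `owl:intersectionOf` RDF collection, i.e. a linked list built from `rdf:first`/`rdf:rest`. Below is a self-contained rdflib sketch of that traversal, using made-up example IRIs rather than a real ontology:

```python
import rdflib
from rdflib import OWL, RDF, RDFS

EX = rdflib.Namespace("https://example.org/")
g = rdflib.Graph()

# child rdfs:subClassOf [ a owl:Class ; owl:intersectionOf (parent_a parent_b) ]
restriction = rdflib.BNode()
head, tail = rdflib.BNode(), rdflib.BNode()
g.add((EX.child, RDFS.subClassOf, restriction))
g.add((restriction, RDF.type, OWL.Class))
g.add((restriction, OWL.intersectionOf, head))
g.add((head, RDF.first, EX.parent_a))
g.add((head, RDF.rest, tail))
g.add((tail, RDF.first, EX.parent_b))
g.add((tail, RDF.rest, RDF.nil))


def walk_rdf_list(node) -> list:
    """Collect the members of an rdf:first/rdf:rest linked list (cf. retrieve_rdf_linked_list)."""
    items = []
    for _, _, first in g.triples((node, RDF.first, None)):
        items.append(first)
    for _, _, rest in g.triples((node, RDF.rest, None)):
        if rest != RDF.nil:
            items.extend(walk_rdf_list(rest))
    return items


for _, _, list_head in g.triples((restriction, OWL.intersectionOf, None)):
    print(walk_rdf_list(list_head))  # the two parent IRIs, in list order
```

rdflib also ships `rdflib.collection.Collection` for reading such lists; the manual walk here simply mirrors the helper introduced in this release.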
{biocypher-0.5.34 → biocypher-0.5.36}/pyproject.toml

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "biocypher"
-version = "0.5.34"
+version = "0.5.36"
 description = "A unifying framework for biomedical research knowledge graphs"
 authors = [
     "Sebastian Lobentanzer <sebastian.lobentanzer@gmail.com>",
Files 9–24 in the list above contain no content changes between the two versions.