biocypher 0.5.42__py3-none-any.whl → 0.5.44__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of biocypher might be problematic.
- biocypher/_config/biocypher_config.yaml +8 -0
- biocypher/_core.py +3 -3
- biocypher/_metadata.py +1 -1
- biocypher/{_connect.py → output/connect/_neo4j_driver.py} +5 -5
- biocypher/{_pandas.py → output/in_memory/_pandas.py} +2 -1
- biocypher/output/write/__init__.py +0 -0
- biocypher/{write → output/write}/_batch_writer.py +16 -23
- biocypher/{write/_write.py → output/write/_get_writer.py} +15 -12
- biocypher/output/write/_writer.py +200 -0
- biocypher/output/write/graph/__init__.py +0 -0
- biocypher/{write → output/write}/graph/_arangodb.py +1 -1
- biocypher/{write → output/write}/graph/_neo4j.py +9 -11
- biocypher/output/write/graph/_networkx.py +76 -0
- biocypher/{write → output/write}/graph/_rdf.py +3 -4
- biocypher/output/write/relational/__init__.py +0 -0
- biocypher/output/write/relational/_csv.py +76 -0
- biocypher/{write → output/write}/relational/_postgresql.py +2 -2
- biocypher/{write → output/write}/relational/_sqlite.py +1 -1
- {biocypher-0.5.42.dist-info → biocypher-0.5.44.dist-info}/METADATA +1 -1
- biocypher-0.5.44.dist-info/RECORD +39 -0
- biocypher-0.5.42.dist-info/RECORD +0 -33
- /biocypher/{write → output}/__init__.py +0 -0
- /biocypher/{write/graph → output/connect}/__init__.py +0 -0
- /biocypher/{write/relational → output/in_memory}/__init__.py +0 -0
- {biocypher-0.5.42.dist-info → biocypher-0.5.44.dist-info}/LICENSE +0 -0
- {biocypher-0.5.42.dist-info → biocypher-0.5.44.dist-info}/WHEEL +0 -0

biocypher/_config/biocypher_config.yaml
CHANGED

@@ -131,3 +131,11 @@ sqlite:
   delimiter: '\t'
   # import_call_bin_prefix: '' # path to "sqlite3"
   # import_call_file_prefix: '/path/to/files'
+
+csv:
+  ### CSV/Pandas configuration ###
+  delimiter: ","
+
+networkx:
+  ### NetworkX configuration ###
+  some_config: some_value # placeholder for technical reasons TODO
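
The two new sections are read through the same accessor that get_writer() uses further down in this diff (dbms_config = _config(dbms)). A minimal sketch of that lookup, assuming a default installation with no user overrides (values in the comments are illustrative):

from biocypher._config import config as _config

# Section names double as writer selectors; the defaults come from the file above.
print(_config("csv"))       # e.g. {'delimiter': ','}
print(_config("networkx"))  # e.g. {'some_config': 'some_value'}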
biocypher/_core.py
CHANGED

@@ -26,17 +26,17 @@ from ._logger import logger

 logger.debug(f"Loading module {__name__}.")

-from biocypher.write._write import DBMS_TO_CLASS, get_writer
 from ._get import Downloader
 from ._config import config as _config
 from ._config import update_from_file as _file_update
 from ._create import BioCypherEdge, BioCypherNode, BioCypherRelAsNode
-from ._pandas import Pandas
-from ._connect import get_driver
 from ._mapping import OntologyMapping
 from ._ontology import Ontology
 from ._translate import Translator
 from ._deduplicate import Deduplicator
+from .output.in_memory._pandas import Pandas
+from .output.write._get_writer import DBMS_TO_CLASS, get_writer
+from .output.connect._neo4j_driver import get_driver

 __all__ = ["BioCypher"]

biocypher/_metadata.py
CHANGED

biocypher/{_connect.py → output/connect/_neo4j_driver.py}
RENAMED

@@ -13,7 +13,7 @@ BioCypher 'online' mode. Handles connection and manipulation of a running DBMS.
 """
 import subprocess

-from ._logger import logger
+from biocypher._logger import logger

 logger.debug(f"Loading module {__name__}.")

@@ -22,10 +22,10 @@ import itertools

 import neo4j_utils

-from . import _misc
-from ._config import config as _config
-from ._create import BioCypherEdge, BioCypherNode
-from ._translate import Translator
+from biocypher import _misc
+from biocypher._config import config as _config
+from biocypher._create import BioCypherEdge, BioCypherNode
+from biocypher._translate import Translator

 __all__ = ["_Neo4jDriver"]


biocypher/{_pandas.py → output/in_memory/_pandas.py}
RENAMED

@@ -1,6 +1,6 @@
 import pandas as pd

-from ._create import BioCypherEdge, BioCypherNode, BioCypherRelAsNode
+from biocypher._create import BioCypherEdge, BioCypherNode, BioCypherRelAsNode


 class Pandas:
@@ -87,3 +87,4 @@ class Pandas:
             self.dfs[_type] = pd.concat(
                 [self.dfs[_type], df], ignore_index=True
             )
+        return self.dfs[_type]

biocypher/{write → output/write}/_batch_writer.py
RENAMED

@@ -6,16 +6,18 @@ import os
 import re
 import glob

-from rdflib import Graph
 from more_itertools import peekable

 from biocypher._create import BioCypherEdge, BioCypherNode, BioCypherRelAsNode
 from biocypher._logger import logger
 from biocypher._translate import Translator
 from biocypher._deduplicate import Deduplicator
+from biocypher.output.write._writer import _Writer


-class _BatchWriter(ABC):
+class _BatchWriter(_Writer, ABC):
+    """Abstract batch writer class"""
+
     @abstractmethod
     def _get_default_import_call_bin_prefix(self):
         """
@@ -41,7 +43,7 @@ class _BatchWriter(ABC):
             str: The database-specific string representation of an array
         """
         raise NotImplementedError(
-            "Database writer must override '
+            "Database writer must override '_write_array_string'"
         )

     @abstractmethod
@@ -206,6 +208,12 @@ class _BatchWriter(ABC):
             rdf_namespaces:
                 The namespaces for RDF.
         """
+        super().__init__(
+            translator=translator,
+            deduplicator=deduplicator,
+            output_directory=output_directory,
+            strict_mode=strict_mode,
+        )
         self.db_name = db_name
         self.db_user = db_user
         self.db_password = db_password
@@ -239,32 +247,15 @@ class _BatchWriter(ABC):
         self.import_call_nodes = set()
         self.import_call_edges = set()

-        self._outdir = output_directory
+        self.outdir = output_directory

         self._import_call_file_prefix = import_call_file_prefix

-        if os.path.exists(self.outdir):
-            logger.warning(
-                f"Output directory `{self.outdir}` already exists. "
-                "If this is not planned, file consistency may be compromised."
-            )
-        else:
-            logger.info(f"Creating output directory `{self.outdir}`.")
-            os.makedirs(self.outdir)
-
         self.parts = {} # dict to store the paths of part files for each label

         # TODO not memory efficient, but should be fine for most cases; is
         # there a more elegant solution?

-    @property
-    def outdir(self):
-        """
-        Property for output directory path.
-        """
-
-        return self._outdir
-
     @property
     def import_call_file_prefix(self):
         """
@@ -272,7 +263,7 @@ class _BatchWriter(ABC):
         """

         if self._import_call_file_prefix is None:
-            return self._outdir
+            return self.outdir
         else:
             return self._import_call_file_prefix

@@ -1005,7 +996,9 @@ class _BatchWriter(ABC):
         """

         file_path = os.path.join(self.outdir, self._get_import_script_name())
-        logger.info(
+        logger.info(
+            f"Writing {self.db_name + ' ' if self.db_name else ''}import call to `{file_path}`."
+        )

         with open(file_path, "w", encoding="utf-8") as f:
             f.write(self._construct_import_call())

biocypher/{write/_write.py → output/write/_get_writer.py}
RENAMED

@@ -14,11 +14,13 @@ suitable for import into a DBMS.
 """

 from biocypher._logger import logger
-from biocypher.write.graph._rdf import _RDFWriter
-from biocypher.write.graph._neo4j import _Neo4jBatchWriter
-from biocypher.write.graph._arangodb import _ArangoDBBatchWriter
-from biocypher.write.
-from biocypher.write.relational.
+from biocypher.output.write.graph._rdf import _RDFWriter
+from biocypher.output.write.graph._neo4j import _Neo4jBatchWriter
+from biocypher.output.write.graph._arangodb import _ArangoDBBatchWriter
+from biocypher.output.write.graph._networkx import _NetworkXWriter
+from biocypher.output.write.relational._csv import _PandasCSVWriter
+from biocypher.output.write.relational._sqlite import _SQLiteBatchWriter
+from biocypher.output.write.relational._postgresql import _PostgreSQLBatchWriter

 logger.debug(f"Loading module {__name__}.")

@@ -46,6 +48,12 @@ DBMS_TO_CLASS = {
     "sqlite3": _SQLiteBatchWriter,
     "rdf": _RDFWriter,
     "RDF": _RDFWriter,
+    "csv": _PandasCSVWriter,
+    "CSV": _PandasCSVWriter,
+    "pandas": _PandasCSVWriter,
+    "Pandas": _PandasCSVWriter,
+    "networkx": _NetworkXWriter,
+    "NetworkX": _NetworkXWriter,
 }


@@ -61,19 +69,14 @@ def get_writer(
     file.

     Args:
-
         dbms: the database management system; for options, see DBMS_TO_CLASS.
-
         translator: the Translator object.
-
-        output_directory: the directory to write the output files to.
-
+        deduplicator: the Deduplicator object.
+        output_directory: the directory to output.write the output files to.
         strict_mode: whether to use strict mode.

     Returns:
-
         instance: an instance of the selected writer class.
-
     """

     dbms_config = _config(dbms)
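
The registry above is what get_writer() consults to pick a writer class. A quick illustration of the new entries, assuming biocypher 0.5.44 is installed (expected output shown as comments):

from biocypher.output.write._get_writer import DBMS_TO_CLASS

# Both capitalisations of the new output targets resolve to the same class.
print(DBMS_TO_CLASS["csv"].__name__)       # _PandasCSVWriter
print(DBMS_TO_CLASS["pandas"].__name__)    # _PandasCSVWriter
print(DBMS_TO_CLASS["networkx"].__name__)  # _NetworkXWriter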

biocypher/output/write/_writer.py
ADDED

@@ -0,0 +1,200 @@
+from abc import ABC, abstractmethod
+from typing import Union, Optional
+from collections.abc import Iterable
+import os
+
+from biocypher._create import BioCypherEdge, BioCypherNode, BioCypherRelAsNode
+from biocypher._logger import logger
+from biocypher._translate import Translator
+from biocypher._deduplicate import Deduplicator
+
+__all__ = ["_Writer"]
+
+
+class _Writer(ABC):
+    """Abstract class for writing node and edge representations to disk.
+    Specifics of the different writers (e.g. neo4j, postgresql, csv, etc.)
+    are implemented in the child classes. Any concrete writer needs to
+    implement at least:
+    - _write_node_data
+    - _write_edge_data
+    - _construct_import_call
+    - _get_import_script_name
+
+    Args:
+        translator (Translator): Instance of :py:class:`Translator` to enable translation of
+            nodes and manipulation of properties.
+        deduplicator (Deduplicator): Instance of :py:class:`Deduplicator` to enable deduplication
+            of nodes and edges.
+        output_directory (str, optional): Path for exporting CSV files. Defaults to None.
+        strict_mode (bool, optional): Whether to enforce source, version, and license properties. Defaults to False.
+        strict_mode (bool, optional): Whether to enforce source, version, and license properties. Defaults to False.
+
+    Raises:
+        NotImplementedError: Writer implementation must override '_write_node_data'
+        NotImplementedError: Writer implementation must override '_write_edge_data'
+        NotImplementedError: Writer implementation must override '_construct_import_call'
+        NotImplementedError: Writer implementation must override '_get_import_script_name'
+    """
+
+    def __init__(
+        self,
+        translator: Translator,
+        deduplicator: Deduplicator,
+        output_directory: Optional[str] = None,
+        strict_mode: bool = False,
+        *args,
+        **kwargs,
+    ):
+        """Abstract class for writing node and edge representations to disk.
+
+        Args:
+            translator (Translator): Instance of :py:class:`Translator` to enable translation of
+                nodes and manipulation of properties.
+            deduplicator (Deduplicator): Instance of :py:class:`Deduplicator` to enable deduplication
+                of nodes and edges.
+            output_directory (str, optional): Path for exporting CSV files. Defaults to None.
+            strict_mode (bool, optional): Whether to enforce source, version, and license properties. Defaults to False.
+            strict_mode (bool, optional): Whether to enforce source, version, and license properties. Defaults to False.
+        """
+        self.translator = translator
+        self.deduplicator = deduplicator
+        self.strict_mode = strict_mode
+        self.output_directory = output_directory
+
+        if os.path.exists(self.output_directory):
+            if kwargs.get("write_to_file", True):
+                logger.warning(
+                    f"Output directory `{self.output_directory}` already exists. "
+                    "If this is not planned, file consistency may be compromised."
+                )
+        else:
+            logger.info(f"Creating output directory `{self.output_directory}`.")
+            os.makedirs(self.output_directory)
+
+    @abstractmethod
+    def _write_node_data(
+        self,
+        nodes: Iterable[
+            Union[BioCypherNode, BioCypherEdge, BioCypherRelAsNode]
+        ],
+    ) -> bool:
+        """Implement how to output.write nodes to disk.
+
+        Args:
+            nodes (Iterable): An iterable of BioCypherNode / BioCypherEdge / BioCypherRelAsNode objects.
+
+        Returns:
+            bool: The return value. True for success, False otherwise.
+        """
+        raise NotImplementedError(
+            "Writer implementation must override 'write_nodes'"
+        )
+
+    @abstractmethod
+    def _write_edge_data(
+        self,
+        edges: Iterable[
+            Union[BioCypherNode, BioCypherEdge, BioCypherRelAsNode]
+        ],
+    ) -> bool:
+        """Implement how to output.write edges to disk.
+
+        Args:
+            edges (Iterable): An iterable of BioCypherNode / BioCypherEdge / BioCypherRelAsNode objects.
+
+        Returns:
+            bool: The return value. True for success, False otherwise.
+        """
+        raise NotImplementedError(
+            "Writer implementation must override 'write_edges'"
+        )
+
+    @abstractmethod
+    def _construct_import_call(self) -> str:
+        """
+        Function to construct the import call detailing folder and
+        individual node and edge headers and data files, as well as
+        delimiters and database name. Built after all data has been
+        processed to ensure that nodes are called before any edges.
+
+        Returns:
+            str: command for importing the output files into a DBMS.
+        """
+        raise NotImplementedError(
+            "Writer implementation must override '_construct_import_call'"
+        )
+
+    @abstractmethod
+    def _get_import_script_name(self) -> str:
+        """Returns the name of the import script.
+
+        Returns:
+            str: The name of the import script (ending in .sh)
+        """
+        raise NotImplementedError(
+            "Writer implementation must override '_get_import_script_name'"
+        )
+
+    def write_nodes(
+        self, nodes, batch_size: int = int(1e6), force: bool = False
+    ):
+        """Wrapper for writing nodes.
+
+        Args:
+            nodes (BioCypherNode): a list or generator of nodes in
+                :py:class:`BioCypherNode` format
+            batch_size (int): The batch size for writing nodes.
+            force (bool): Whether to force writing nodes even if their type is
+                not present in the schema.
+
+        Returns:
+            bool: The return value. True for success, False otherwise.
+        """
+        passed = self._write_node_data(nodes)
+        if not passed:
+            logger.error("Error while writing node data.")
+            return False
+        return True
+
+    def write_edges(
+        self, edges, batch_size: int = int(1e6), force: bool = False
+    ):
+        """Wrapper for writing edges.
+
+        Args:
+            nodes (BioCypherNode): a list or generator of nodes in
+                :py:class:`BioCypherNode` format
+            batch_size (int): The batch size for writing nodes.
+            force (bool): Whether to force writing nodes even if their type is
+                not present in the schema.
+
+        Returns:
+            bool: The return value. True for success, False otherwise.
+        """
+        passed = self._write_edge_data(edges)
+        if not passed:
+            logger.error("Error while writing edge data.")
+            return False
+        return True
+
+    def write_import_call(self):
+        """
+        Function to output.write the import call detailing folder and
+        individual node and edge headers and data files, as well as
+        delimiters and database name, to the export folder as txt.
+
+        Returns:
+            str: The path of the file holding the import call.
+        """
+        file_path = os.path.join(
+            self.output_directory, self._get_import_script_name()
+        )
+        logger.info(
+            f"Writing {self.__class__.__name__} import call to `{file_path}`."
+        )
+
+        with open(file_path, "w", encoding="utf-8") as f:
+            f.write(self._construct_import_call())
+
+        return file_path
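
_Writer is the new extension point that _BatchWriter and the concrete writers below plug into. The following is a minimal sketch of a custom subclass, written only against the abstract API shown above; it is not part of the package, and instantiating it requires a Translator, a Deduplicator and an output_directory, exactly as in _Writer.__init__:

from biocypher.output.write._writer import _Writer


class _ListWriter(_Writer):
    """Toy writer that collects entities in memory instead of writing files."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.entities = []

    def _write_node_data(self, nodes) -> bool:
        # Collect instead of writing; the write_nodes() wrapper reports success.
        self.entities.extend(nodes)
        return True

    def _write_edge_data(self, edges) -> bool:
        self.entities.extend(edges)
        return True

    def _construct_import_call(self) -> str:
        return "# nothing to import; entities are kept in memory\n"

    def _get_import_script_name(self) -> str:
        return "import_list.py"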

biocypher/{write → output/write}/graph/_neo4j.py
RENAMED

@@ -1,9 +1,7 @@
 import os
-import re
-import subprocess

 from biocypher._logger import logger
-from biocypher.write._batch_writer import parse_label, _BatchWriter
+from biocypher.output.write._batch_writer import parse_label, _BatchWriter


 class _Neo4jBatchWriter(_BatchWriter):
@@ -49,7 +47,7 @@ class _Neo4jBatchWriter(_BatchWriter):

     def _write_array_string(self, string_list):
         """
-        Abstract method to write the string representation of an array into a .csv file
+        Abstract method to output.write the string representation of an array into a .csv file
         as required by the neo4j admin-import.

         Args:
@@ -303,11 +301,13 @@ class _Neo4jBatchWriter(_BatchWriter):
         Returns:
             str: The import call.
         """
-        import_call =
-
-
-
-
+        import_call = f"{self.import_call_bin_prefix}neo4j-admin {import_cmd} "
+
+        import_call += f"{database_cmd}{self.db_name} "
+
+        import_call += f'--delimiter="{self.escaped_delim}" '
+
+        import_call += f'--array-delimiter="{self.escaped_adelim}" '

         if self.quote == "'":
             import_call += f'--quote="{self.quote}" '
@@ -329,6 +329,4 @@ class _Neo4jBatchWriter(_BatchWriter):
         for header_path, parts_path in self.import_call_edges:
             import_call += f'--relationships="{header_path},{parts_path}" '

-        # Database needs to be at the end starting with Neo4j 5.0+.
-        import_call += f"{database_cmd}{self.db_name} "
         return import_call

biocypher/output/write/graph/_networkx.py
ADDED

@@ -0,0 +1,76 @@
+import pickle
+
+import networkx as nx
+
+from biocypher._logger import logger
+from biocypher.output.write._writer import _Writer
+from biocypher.output.write.relational._csv import _PandasCSVWriter
+
+
+class _NetworkXWriter(_Writer):
+    """
+    Class for writing node and edges to a networkx DiGraph.
+    """
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.csv_writer = _PandasCSVWriter(*args, write_to_file=False, **kwargs)
+        self.G = nx.DiGraph()
+
+    def _construct_import_call(self) -> str:
+        """Function to construct the Python code to load all node and edge csv files again into Pandas dfs.
+
+        Returns:
+            str: Python code to load the csv files into Pandas dfs.
+        """
+        logger.info(
+            f"Writing networkx {self.G} to pickle file networkx_graph.pkl."
+        )
+        with open(f"{self.output_directory}/networkx_graph.pkl", "wb") as f:
+            pickle.dump(self.G, f)
+
+        import_call = "import pickle\n"
+        import_call += "with open('./networkx_graph.pkl', 'rb') as f:\n\tG_loaded = pickle.load(f)"
+        return import_call
+
+    def _get_import_script_name(self) -> str:
+        """Function to return the name of the import script."""
+        return "import_networkx.py"
+
+    def _write_node_data(self, nodes) -> bool:
+        passed = self.csv_writer._write_entities_to_file(nodes)
+        self.add_to_networkx()
+        return passed
+
+    def _write_edge_data(self, edges) -> bool:
+        passed = self.csv_writer._write_entities_to_file(edges)
+        self.add_to_networkx()
+        return passed
+
+    def add_to_networkx(self) -> bool:
+        all_dfs = self.csv_writer.stored_dfs
+        node_dfs = [
+            df
+            for df in all_dfs.values()
+            if df.columns.str.contains("node_id").any()
+        ]
+        edge_dfs = [
+            df
+            for df in all_dfs.values()
+            if df.columns.str.contains("source_id").any()
+            and df.columns.str.contains("target_id").any()
+        ]
+        for df in node_dfs:
+            nodes = df.set_index("node_id").to_dict(orient="index")
+            self.G.add_nodes_from(nodes.items())
+        for df in edge_dfs:
+            edges = df.set_index(["source_id", "target_id"]).to_dict(
+                orient="index"
+            )
+            self.G.add_edges_from(
+                (
+                    (source, target, attrs)
+                    for (source, target), attrs in edges.items()
+                )
+            )
+        return True
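
Loading the pickled graph afterwards mirrors the generated import_networkx.py above; a short sketch with a hypothetical output directory:

import pickle

# "biocypher-out" stands in for whatever output_directory the writer was given.
with open("biocypher-out/networkx_graph.pkl", "rb") as f:
    G_loaded = pickle.load(f)

print(G_loaded.number_of_nodes(), G_loaded.number_of_edges())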

biocypher/{write → output/write}/graph/_rdf.py
RENAMED

@@ -24,11 +24,10 @@ from rdflib.namespace import (

 from biocypher._create import BioCypherEdge, BioCypherNode
 from biocypher._logger import logger
-from biocypher.write._batch_writer import _BatchWriter
+from biocypher.output.write._batch_writer import _BatchWriter


 class _RDFWriter(_BatchWriter):
-
     """
     Class to write BioCypher's property graph into an RDF format using
     rdflib and all the extensions it supports (RDF/XML, N3, NTriples,
@@ -125,7 +124,7 @@ class _RDFWriter(_BatchWriter):

         # create file name
         file_name = os.path.join(
-            self._outdir, f"{label_pascal}.{self.extension}"
+            self.outdir, f"{label_pascal}.{self.extension}"
         )

         # write data in graph
@@ -287,7 +286,7 @@ class _RDFWriter(_BatchWriter):

         # create file name
         file_name = os.path.join(
-            self._outdir, f"{label_pascal}.{self.extension}"
+            self.outdir, f"{label_pascal}.{self.extension}"
         )

         # write data in graph

biocypher/output/write/relational/_csv.py
ADDED

@@ -0,0 +1,76 @@
+from more_itertools import peekable
+
+from biocypher._logger import logger
+from biocypher.output.write._writer import _Writer
+from biocypher.output.in_memory._pandas import Pandas
+
+
+class _PandasCSVWriter(_Writer):
+    """
+    Class for writing node and edge representations to a CSV file.
+    """
+
+    def __init__(self, *args, write_to_file: bool = True, **kwargs):
+        kwargs["write_to_file"] = write_to_file
+        super().__init__(*args, **kwargs)
+        self.in_memory_dfs = {}
+        self.stored_dfs = {}
+        self.pandas_in_memory = Pandas(
+            translator=self.translator,
+            deduplicator=self.deduplicator,
+        )
+        self.delimiter = kwargs.get("delimiter")
+        if not self.delimiter:
+            self.delimiter = ","
+        self.write_to_file = write_to_file
+
+    def _construct_import_call(self) -> str:
+        """Function to construct the Python code to load all node and edge csv files again into Pandas dfs.
+
+        Returns:
+            str: Python code to load the csv files into Pandas dfs.
+        """
+        import_call = "import pandas as pd\n\n"
+        for df_name in self.stored_dfs.keys():
+            import_call += f"{df_name} = pd.read_csv('./{df_name}.csv', header=0, index_col=0)\n"
+        return import_call
+
+    def _get_import_script_name(self) -> str:
+        """Function to return the name of the import script."""
+        return "import_pandas_csv.py"
+
+    def _write_node_data(self, nodes) -> bool:
+        passed = self._write_entities_to_file(nodes)
+        return passed
+
+    def _write_edge_data(self, edges) -> bool:
+        passed = self._write_entities_to_file(edges)
+        return passed
+
+    def _write_entities_to_file(self, entities: iter) -> bool:
+        """Function to output.write the entities to a CSV file.
+
+        Args:
+            entities (iterable): An iterable of BioCypherNode / BioCypherEdge / BioCypherRelAsNode objects.
+        """
+        entities = peekable(entities)
+        entity_list = self.pandas_in_memory._separate_entity_types(entities)
+        for entity_type, entities in entity_list.items():
+            self.in_memory_dfs[
+                entity_type
+            ] = self.pandas_in_memory._add_entity_df(entity_type, entities)
+        for entity_type in self.in_memory_dfs.keys():
+            entity_df = self.in_memory_dfs[entity_type]
+            if " " in entity_type or "." in entity_type:
+                entity_type = entity_type.replace(" ", "_").replace(".", "_")
+            if self.write_to_file:
+                logger.info(
+                    f"Writing {entity_df.shape[0]} entries to {entity_type}.csv."
+                )
+                entity_df.to_csv(
+                    f"{self.output_directory}/{entity_type}.csv",
+                    sep=self.delimiter,
+                )
+            self.stored_dfs[entity_type] = entity_df
+        self.in_memory_dfs = {}
+        return True
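
The generated import_pandas_csv.py simply reverses the to_csv() calls above. A sketch for one hypothetical node type written with the default ',' delimiter:

import pandas as pd

# "protein" is a made-up entity label; real file names follow the entity types
# written by _PandasCSVWriter into its output directory.
protein = pd.read_csv("./protein.csv", header=0, index_col=0)
print(protein.head())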

biocypher/{write → output/write}/relational/_postgresql.py
RENAMED

@@ -2,7 +2,7 @@ import os
 import glob

 from biocypher._logger import logger
-from biocypher.write._batch_writer import _BatchWriter
+from biocypher.output.write._batch_writer import _BatchWriter


 class _PostgreSQLBatchWriter(_BatchWriter):
@@ -59,7 +59,7 @@ class _PostgreSQLBatchWriter(_BatchWriter):

     def _write_array_string(self, string_list) -> str:
         """
-        Abstract method to write the string representation of an array into a .csv file
+        Abstract method to output.write the string representation of an array into a .csv file
         as required by the postgresql COPY command, with '{','}' brackets and ',' separation.

         Args:

biocypher-0.5.44.dist-info/RECORD
ADDED

@@ -0,0 +1,39 @@
+biocypher/__init__.py,sha256=ejNY53vH_pE3ZbIN8G_ZBYxxPG9aERovRLD0XhDvt4k,942
+biocypher/_config/__init__.py,sha256=fFHRFYxE2MtDAQWL6upe--MJ1vw3Z8CwIPhF2gW8cRU,3698
+biocypher/_config/biocypher_config.yaml,sha256=pusj0IjJM3uWRcm0N7U7mb1IX257HCV2reZV3YKFCk0,3037
+biocypher/_config/test_config.yaml,sha256=Np8jeS5_EP6HHOvMKb7B_Tkyqd5YaYlYz_DVsXypt-A,119
+biocypher/_config/test_schema_config.yaml,sha256=D1600WgEj3iTXrumVU9LIivJHJO36iaxfkOgyam9zVU,3129
+biocypher/_config/test_schema_config_disconnected.yaml,sha256=Qm8FLxEn2spHcyj_5F859KjcDvKSxNhxDvi4b4LLkvQ,68
+biocypher/_config/test_schema_config_extended.yaml,sha256=wn3A76142hhjnImhMF6RODbCFESTJ2TtPvcFdIFsAT0,3309
+biocypher/_core.py,sha256=m4o4Szv2xY2gl3PnNAA9m7Gg5Sgd8iR9THv3RDyZlQ8,22618
+biocypher/_create.py,sha256=vpUchUdEpWupZi1LgFLxAWMtqoBwnWbP7PwEDUCBS4A,10202
+biocypher/_deduplicate.py,sha256=BBvfpXzu6L5YDY5FdtXxnf8YlsbJpbCE8RdUoKsm0n0,4949
+biocypher/_get.py,sha256=3Kpky3blfNf1JwxKWLsZxTU2aTP_C4sUe8OpiyYj63I,10810
+biocypher/_logger.py,sha256=NGXe3hZA79WSujfOgpcxHBf8N2QAfrmvM1LFDpsGK2U,3185
+biocypher/_mapping.py,sha256=ERSNH2Bg19145KytxbFE4BInPaiP-LWW7osOBot29Eo,9304
+biocypher/_metadata.py,sha256=HTVcugUBYLbM1r3SEl-J9hiNGlPqic1zqXtMlLyTwH8,1658
+biocypher/_misc.py,sha256=18EG2Bei3RnyWXDWc3qtZaT3gybvXI8opi0HvSaF7Lg,6066
+biocypher/_ontology.py,sha256=G5k-bnzvPZUqhLPxtoOPFa4OSQ4JpufgozVakLTjwLg,31789
+biocypher/_translate.py,sha256=JafvhtVaFSpruRfYh9BzjVbvDF1Mhg7LLKMDZHWkRjg,16496
+biocypher/output/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+biocypher/output/connect/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+biocypher/output/connect/_neo4j_driver.py,sha256=jzF5sDhs_WnYEfXiSjQ1P3wNgoadl4Cg80EUYYOk0Ro,13497
+biocypher/output/in_memory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+biocypher/output/in_memory/_pandas.py,sha256=lsYQKjfxUy0O-ae4-YpsCJX-l85bxyc60WOj8gKfMfU,3080
+biocypher/output/write/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+biocypher/output/write/_batch_writer.py,sha256=3pdS8ZLN4sBwATXaXFaSrfPQmejjFjo0avHkPQavFSU,36959
+biocypher/output/write/_get_writer.py,sha256=AeQcHQTrz68ZvtxsZl4W0ymc8cOxe3Qfq5PJRY7kq_I,3736
+biocypher/output/write/_writer.py,sha256=v4-c8yME1UCJeqy8Lfmv7KtY7_B4QkWgADt5xkFNJFQ,7453
+biocypher/output/write/graph/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+biocypher/output/write/graph/_arangodb.py,sha256=aUa_CNZyunFaPrJHc9RtVHRo0Fca9xJ-ZmRz4PxPO8c,8078
+biocypher/output/write/graph/_neo4j.py,sha256=ZINbfrv7gBAgHMT9uE4HCznosNRspT3SmPV83XXfhCw,11849
+biocypher/output/write/graph/_networkx.py,sha256=EW2we3FlqQ8KfLv4l_2wE27KBUlhXJyD5ORvowSjlaA,2545
+biocypher/output/write/graph/_rdf.py,sha256=BtunVo0iaCVM9I2tWOYwGpB9itbngHBjP0RhwgcJUiM,17977
+biocypher/output/write/relational/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+biocypher/output/write/relational/_csv.py,sha256=eyAtmwfCNYnuVbkpd0rUoo9KgG2KPgopZVA3X97tRLU,2919
+biocypher/output/write/relational/_postgresql.py,sha256=6sABZaELzmV7a2aUy2iRksf28WFsc3EA9mdQ2mShPeM,11959
+biocypher/output/write/relational/_sqlite.py,sha256=ozElhca1YCYq8R-VFh-LDsnPBaXVJm2cvEboBK2LVVY,2073
+biocypher-0.5.44.dist-info/LICENSE,sha256=SjUaQkq671iQUZOxEUpC4jvJxXOlfSiHTTueyz9kXJM,1065
+biocypher-0.5.44.dist-info/METADATA,sha256=_a0l4S_OPALMmrpEhbKWuwtPcSzWRursqJKMEAkrZaw,10642
+biocypher-0.5.44.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
+biocypher-0.5.44.dist-info/RECORD,,

biocypher-0.5.42.dist-info/RECORD
DELETED

@@ -1,33 +0,0 @@
-biocypher/__init__.py,sha256=ejNY53vH_pE3ZbIN8G_ZBYxxPG9aERovRLD0XhDvt4k,942
-biocypher/_config/__init__.py,sha256=fFHRFYxE2MtDAQWL6upe--MJ1vw3Z8CwIPhF2gW8cRU,3698
-biocypher/_config/biocypher_config.yaml,sha256=VE_UH6POExAsuPpqWsahsT8-9k5jglMkuBfuszH1tiU,2868
-biocypher/_config/test_config.yaml,sha256=Np8jeS5_EP6HHOvMKb7B_Tkyqd5YaYlYz_DVsXypt-A,119
-biocypher/_config/test_schema_config.yaml,sha256=D1600WgEj3iTXrumVU9LIivJHJO36iaxfkOgyam9zVU,3129
-biocypher/_config/test_schema_config_disconnected.yaml,sha256=Qm8FLxEn2spHcyj_5F859KjcDvKSxNhxDvi4b4LLkvQ,68
-biocypher/_config/test_schema_config_extended.yaml,sha256=wn3A76142hhjnImhMF6RODbCFESTJ2TtPvcFdIFsAT0,3309
-biocypher/_connect.py,sha256=7hk3J03hzZOPE48ISaoB6IgRun8GaUmDtIRnnD7vKiU,13453
-biocypher/_core.py,sha256=5rZKYie_vSjTYduH8oH-GxLMZuNqLAe3ZYAQ5nUp8Nc,22578
-biocypher/_create.py,sha256=vpUchUdEpWupZi1LgFLxAWMtqoBwnWbP7PwEDUCBS4A,10202
-biocypher/_deduplicate.py,sha256=BBvfpXzu6L5YDY5FdtXxnf8YlsbJpbCE8RdUoKsm0n0,4949
-biocypher/_get.py,sha256=3Kpky3blfNf1JwxKWLsZxTU2aTP_C4sUe8OpiyYj63I,10810
-biocypher/_logger.py,sha256=NGXe3hZA79WSujfOgpcxHBf8N2QAfrmvM1LFDpsGK2U,3185
-biocypher/_mapping.py,sha256=ERSNH2Bg19145KytxbFE4BInPaiP-LWW7osOBot29Eo,9304
-biocypher/_metadata.py,sha256=hTN9aStXCS7IzABrE7BmT5GZ-8YUt8gP9PG_P5Ix1Vw,1658
-biocypher/_misc.py,sha256=18EG2Bei3RnyWXDWc3qtZaT3gybvXI8opi0HvSaF7Lg,6066
-biocypher/_ontology.py,sha256=G5k-bnzvPZUqhLPxtoOPFa4OSQ4JpufgozVakLTjwLg,31789
-biocypher/_pandas.py,sha256=GVCFM68J7yBjh40MpkNVgD8qT1RFMrrIjMOtD3iKsf4,3040
-biocypher/_translate.py,sha256=JafvhtVaFSpruRfYh9BzjVbvDF1Mhg7LLKMDZHWkRjg,16496
-biocypher/write/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-biocypher/write/_batch_writer.py,sha256=x_fe2yndASNAvO-GaeVhjUVxnSNdDZ6-FB1mj572Jvw,37129
-biocypher/write/_write.py,sha256=4UYw-y3CevwcdVBq6ou1rTJXuXrcde7oraWeO8YXcK4,3330
-biocypher/write/graph/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-biocypher/write/graph/_arangodb.py,sha256=du5pivCR7xKs8VyxeegxYsSBIcsXGrfSbM_AffFapwg,8071
-biocypher/write/graph/_neo4j.py,sha256=qSj1PryD4UmveS7ACs1R3eo2pegi53pVI7d7P0ihOKI,11930
-biocypher/write/graph/_rdf.py,sha256=9_u9usWhU7EKKDd1PgXyV99opS5IAeef2lhDNEN6fOw,17973
-biocypher/write/relational/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-biocypher/write/relational/_postgresql.py,sha256=NdI-ULP8valsqlkObOg50od-3-amVj5RzGnZ_7NW2ww,11945
-biocypher/write/relational/_sqlite.py,sha256=KLQpxQXF1B8qqTtKUFfjWdwHjd1Fhn9syK931Z0dsq0,2066
-biocypher-0.5.42.dist-info/LICENSE,sha256=SjUaQkq671iQUZOxEUpC4jvJxXOlfSiHTTueyz9kXJM,1065
-biocypher-0.5.42.dist-info/METADATA,sha256=3lT_thshGguJMnCeer-4JaJQfrsuKeWAd6oaYWhXPyk,10642
-biocypher-0.5.42.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
-biocypher-0.5.42.dist-info/RECORD,,