aldepyde 0.0.0a33__py3-none-any.whl → 0.0.0a35__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of aldepyde might be problematic. Click here for more details.
- aldepyde/databases/SCOPe_Astral.py +71 -0
- aldepyde/databases/UniRef.py +24 -23
- aldepyde/databases/_database.py +33 -2
- {aldepyde-0.0.0a33.dist-info → aldepyde-0.0.0a35.dist-info}/METADATA +1 -1
- {aldepyde-0.0.0a33.dist-info → aldepyde-0.0.0a35.dist-info}/RECORD +8 -7
- {aldepyde-0.0.0a33.dist-info → aldepyde-0.0.0a35.dist-info}/WHEEL +0 -0
- {aldepyde-0.0.0a33.dist-info → aldepyde-0.0.0a35.dist-info}/licenses/LICENSE +0 -0
- {aldepyde-0.0.0a33.dist-info → aldepyde-0.0.0a35.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
from aldepyde.databases._database import local_database
import operator
from contextlib import nullcontext
import re


class scop_parser(local_database):
    """Parser for SCOPe/ASTRAL flat files.

    Reads from ``self.fp``, the binary file object opened by the
    ``local_database`` context manager.
    """

    # How the three per-entry filter results (class / id / description)
    # are combined in partition_astral, keyed by its `mode` argument.
    op = {
        "and": lambda a, b, c: a and b and c,
        "or": lambda a, b, c: a or b or c,
    }

    def fetch(self, url):
        # TODO: remote fetch not yet implemented.
        pass

    def fetch_code(self, codes):
        # TODO: fetch-by-code not yet implemented.
        pass

    def parse(self, text):
        # TODO: single-entry parsing not yet implemented.
        pass

    def extract_all_scop(self):
        # TODO: SCOP (non-ASTRAL) extraction not yet implemented.
        pass

    def partition_scope(self):
        # TODO: SCOP partitioning not yet implemented.
        pass

    def extract_all_astral(self):
        """Yield one FASTA entry (header line + sequence lines) at a time, as bytes.

        Assumes ``self.fp`` is an open binary file positioned at the start
        (set by ``local_database.__enter__``) — TODO confirm against callers.
        """
        entry = b""
        # Iterate the file lazily instead of readlines(): avoids loading the
        # whole (potentially very large) ASTRAL file into memory at once.
        for line in self.fp:
            if line.startswith(b">") and entry:
                yield entry
                entry = b""
            entry += line
        # Guard: the original yielded an empty b"" entry for an empty file.
        if entry:
            yield entry

    # TODO allow a list of search parameters. Big challenge to make efficient, but could be cute
    def partition_astral(self, destination: str | None = None, append=False,
                         class_name: str | bytes = b'', contains_id: str | bytes = b'',
                         contains_desc: str | bytes = b'', mode="and") -> dict:
        """Filter ASTRAL FASTA entries by class / id / description substrings.

        Parameters
        ----------
        destination : str | None
            Optional path; raw matching entries are also written there.
        append : bool
            Append to ``destination`` instead of truncating it.
        class_name, contains_id, contains_desc : str | bytes
            Case-insensitive substring filters (empty matches everything).
        mode : str
            ``"and"`` or ``"or"`` — how the three filters are combined.

        Returns
        -------
        dict mapping FASTA id -> {"class", "description", "sequence"} (bytes).

        Raises
        ------
        ValueError if ``mode`` is neither "and" nor "or".
        """
        mode = mode.lower()
        # Everything is a byte string in order to play nicely with future parent methods
        if isinstance(class_name, str):
            class_name = class_name.encode('utf-8')
        if isinstance(contains_desc, str):
            contains_desc = contains_desc.encode('utf-8')
        if isinstance(contains_id, str):
            contains_id = contains_id.encode('utf-8')
        if mode != "and" and mode != "or":
            raise ValueError("mode must be \"and\" or \"or\".")
        logic = scop_parser.op[mode]
        # Header shape: b">d1abca_ a.1.1.1 ...". The dots in the SCOP class id
        # must be escaped — the original bare '.' matched ANY character.
        regex = re.compile(rb">[a-zA-Z0-9_.]* *[a-l](\.[0-9]+)?(\.[0-9]+)?(\.[0-9]+)?")
        # Single open: mode differs only in append vs truncate.
        file_context = (open(destination, 'ab' if append else 'wb')
                        if destination is not None else nullcontext())
        with file_context as fp:
            ret_dict = dict()
            for entry in self.extract_all_astral():
                match = regex.search(entry)
                if match is None:
                    # Malformed header: skip instead of crashing on .group()
                    # (the original raised AttributeError here).
                    continue
                identifiers = match.group().split()
                if len(identifiers) < 2:
                    # Header matched but lacks a separate class token; skip.
                    continue
                entry_id = identifiers[0]
                cls = identifiers[1]
                unmatched_spl = regex.sub(b'', entry).split(b'\n')
                desc = unmatched_spl[0]
                sequence = unmatched_spl[1:]
                if logic(class_name.lower() in cls.lower(),
                         contains_id.lower() in entry_id.lower(),
                         contains_desc.lower() in desc.lower()):
                    ret_dict[entry_id] = {  # Yes, I know '>' isn't part of the FASTA identifier. This keeps things more consistent
                        "class": cls,
                        "description": desc,
                        "sequence": b"".join(sequence),
                    }
                    # nullcontext() binds fp to None when destination is None;
                    # the original unconditional fp.write() crashed in that case.
                    if fp is not None:
                        fp.write(entry)
        return ret_dict
|
aldepyde/databases/UniRef.py
CHANGED
|
@@ -1,42 +1,45 @@
|
|
|
1
1
|
import zlib
|
|
2
|
-
|
|
3
|
-
from aldepyde.databases.RemoteFileHandler import RemoteFileHandler
|
|
4
|
-
from aldepyde.databases._database import _database
|
|
2
|
+
from aldepyde.databases._database import streamable_database
|
|
5
3
|
from aldepyde.utils import ProgressBar
|
|
6
|
-
import os
|
|
7
|
-
import gzip
|
|
8
4
|
|
|
9
|
-
class uniref_parser(streamable_database):
    """Utilities for downloading and streaming UniRef database files.

    NOTE(review): the abstract interface of ``streamable_database`` is not
    implemented yet (see TODO below); only the static streaming helpers
    defined on this class are functional.
    """

    def __init__(self):
        super().__init__()

    # TODO single entry parsing
    # TODO store metadata upon request
    # TODO implement abstract methods
|
    @staticmethod
    def stream_uniref_gz(filepath, chunk_size=8192, use_progress_bar=False, stitch=False):
        """Yield decompressed byte chunks from a gzipped UniRef file.

        Parameters
        ----------
        filepath : source passed to ``streamable_database.open_stream``.
        chunk_size : int
            Number of compressed bytes read per iteration.
        use_progress_bar : bool
            Show a ProgressBar ticked once per compressed chunk.
        stitch : bool
            When True, delegate to ``stitch_streamed_sequences`` so callers
            receive whole sequences instead of raw decompressed chunks.
        """
        raw_stream, size = streamable_database.open_stream(filepath)
        # One bar tick per compressed chunk read from the raw stream.
        pbar = ProgressBar(size//chunk_size) if use_progress_bar else None
        # 16 + MAX_WBITS tells zlib to expect a gzip header/trailer.
        decompressor = zlib.decompressobj(16 + zlib.MAX_WBITS)
        try:
            if not stitch:
                while True:
                    comp_chunk = raw_stream.read(chunk_size)
                    if not comp_chunk:
                        break
                    if pbar is not None:
                        pbar.update()
                    decomp_chunk = decompressor.decompress(comp_chunk)
                    # decompress() may buffer and return b'' — only yield real data.
                    if decomp_chunk:
                        yield decomp_chunk
                # Flush any bytes still held in the decompressor after EOF.
                final = decompressor.flush()
                if final:
                    yield final
            else:
                # Really hacky solution for now
                # TODO Clean this up
                # NOTE(review): this branch never reads raw_stream opened above;
                # the recursive call opens its own stream — confirm and dedupe.
                yield from uniref_parser.stitch_streamed_sequences(
                    uniref_parser.stream_uniref_gz(filepath=filepath, chunk_size=chunk_size, use_progress_bar=use_progress_bar, stitch=False))
        finally:
            raw_stream.close()
|
|
36
39
|
|
|
37
40
|
@staticmethod
|
|
38
41
|
def download_file(url, destination, chunk_size=8192, use_progress_bar=False):
|
|
39
|
-
raw_stream, size =
|
|
42
|
+
raw_stream, size = streamable_database.open_stream(url)
|
|
40
43
|
pbar = ProgressBar(size // chunk_size) if use_progress_bar else None
|
|
41
44
|
with open(destination, 'wb') as fp:
|
|
42
45
|
while True:
|
|
@@ -48,8 +51,6 @@ class uniref_parser(_database):
|
|
|
48
51
|
fp.write(chunk)
|
|
49
52
|
|
|
50
53
|
|
|
51
|
-
|
|
52
|
-
|
|
53
54
|
@staticmethod
|
|
54
55
|
def stitch_streamed_sequences(stream, as_str=True):
|
|
55
56
|
buffer = b''
|
aldepyde/databases/_database.py
CHANGED
|
@@ -5,7 +5,7 @@ import os
|
|
|
5
5
|
from typing import Tuple, BinaryIO
|
|
6
6
|
from io import TextIOWrapper
|
|
7
7
|
|
|
8
|
-
class streamable_database(ABC):
    """Abstract base class for databases accessed as byte streams.

    NOTE(review): stream helpers (e.g. ``open_stream``, used by subclasses
    such as ``uniref_parser``) appear to be defined further down this class,
    outside this hunk — confirm.
    """

    def __init__(self):
        # No state yet; subclasses call super().__init__() for forward compat.
        pass
|
|
@@ -38,4 +38,35 @@ class _database(ABC):
|
|
|
38
38
|
# Yes, I know the first conditionals do the same thing
|
|
39
39
|
|
|
40
40
|
    def __call__(self):
        # Placeholder: calling a database instance is a no-op for now.
        pass
|
|
42
|
+
|
|
43
|
+
class local_database(ABC):
    """Context-manager base class for databases backed by a local file.

    Entering the context opens the file in binary mode and records its size;
    exiting closes it. With ``as_fp=True`` the ``with`` statement yields the
    raw file object instead of the database instance.
    """

    def __init__(self, filepath=None, as_fp=False):
        # fp is only non-None while inside a `with` block.
        self.fp = None
        # When True, __enter__ returns the raw file pointer instead of self.
        self.as_fp = as_fp
        # Size in bytes of the opened file; set by __enter__.
        self.size = None
        self.load_path(filepath)

    def load_path(self, filepath):
        """Point this database at a (possibly new) file path."""
        self.filepath = filepath

    def get_pointer(self):
        """Return the open file object, or None outside a ``with`` block."""
        return self.fp

    def __enter__(self):
        self.fp, self.size = local_database.open_stream(self.filepath)
        return self.fp if self.as_fp else self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self.fp is not None:
            self.fp.close()
            self.fp = None
        # Explicitly do not suppress exceptions raised inside the block.
        return False

    @staticmethod
    def open_stream(source: str) -> Tuple[BinaryIO, int]:
        """Open *source* for binary reading.

        Returns (file object, size in bytes). The original annotation
        advertised ``| None``, but this function never returns None —
        os.path.getsize / open raise OSError on failure instead.
        """
        size = os.path.getsize(source)
        return open(source, 'rb'), size
|
|
@@ -24,9 +24,10 @@ aldepyde/data/RemoteFileHandler.py,sha256=aPASdoYgt0xnRrSYiwMcefQAizLIbB93SKVrlf
|
|
|
24
24
|
aldepyde/data/__init__.py,sha256=_yKL38EKhXkRM8PSafeee0A6VXGncAoUXVdUyiJ5n3s,48
|
|
25
25
|
aldepyde/databases/PDB.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
26
26
|
aldepyde/databases/RemoteFileHandler.py,sha256=HHx69o6IpI60lf_7hY1ZwI8k0OVyfofG4x1bFeF1uL4,1231
|
|
27
|
-
aldepyde/databases/
|
|
27
|
+
aldepyde/databases/SCOPe_Astral.py,sha256=zj34CbMSTmTa-KBowND-S9rxNGGFnr84OuxC_fumM6E,2821
|
|
28
|
+
aldepyde/databases/UniRef.py,sha256=lAT5XVO9Rk4_9XDKs5Nr1aGBbkaQ6m-vM0yufym43aY,6004
|
|
28
29
|
aldepyde/databases/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
29
|
-
aldepyde/databases/_database.py,sha256=
|
|
30
|
+
aldepyde/databases/_database.py,sha256=fvm4jaP9UKHDeS-0wwWyp7i4uo9PjCGDbwJgdkD8MLo,1822
|
|
30
31
|
aldepyde/fetcher/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
31
32
|
aldepyde/fetcher/test.py,sha256=Q0O3TrMnyd_V3QZvMaDtkFGXIvtQLg57RofKjxtG8Y8,23
|
|
32
33
|
aldepyde/json/CHG.json,sha256=igz1QSwoyXieOWMRwPnQjUJX29N5bk5OBVkhpFjyCzo,614
|
|
@@ -36,8 +37,8 @@ aldepyde/rand/RandomProtein.py,sha256=sNXx4jop9Fplz2oz4g6pEArF69j31_PvtiZuRpLz51
|
|
|
36
37
|
aldepyde/rand/__init__.py,sha256=Q30wrG_XHrmdgaXaLWSlce_ZGT_ZpOT3CYLDj6OgEy0,182
|
|
37
38
|
aldepyde/stats/ProteinStats.py,sha256=t_gqhld2wKweszPZvtHhrORadFc28glFx5OgJs23TsM,2569
|
|
38
39
|
aldepyde/stats/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
39
|
-
aldepyde-0.0.
|
|
40
|
-
aldepyde-0.0.
|
|
41
|
-
aldepyde-0.0.
|
|
42
|
-
aldepyde-0.0.
|
|
43
|
-
aldepyde-0.0.
|
|
40
|
+
aldepyde-0.0.0a35.dist-info/licenses/LICENSE,sha256=VbOVaNlEaWa9cnYi8gOnenCBAVk9s_P3J_z-n_F-638,1091
|
|
41
|
+
aldepyde-0.0.0a35.dist-info/METADATA,sha256=NdHCpQYVP7NbBRxOmUXvu5ZlsD94jVS_z6ABY_Ejees,2554
|
|
42
|
+
aldepyde-0.0.0a35.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
|
|
43
|
+
aldepyde-0.0.0a35.dist-info/top_level.txt,sha256=xv0YJ1izG4AP9ZlielN_0z9QGQQdwtHFM3-TmJivBOM,9
|
|
44
|
+
aldepyde-0.0.0a35.dist-info/RECORD,,
|
|
File without changes
|
|
File without changes
|
|
File without changes
|