aldepyde 0.0.0a32__tar.gz → 0.0.0a33__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of aldepyde might be problematic.
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/PKG-INFO +1 -1
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/__init__.py +0 -19
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/biomolecule/__init__.py +3 -0
- aldepyde-0.0.0a33/aldepyde/databases/UniRef.py +113 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/databases/_database.py +3 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/rand/RandomProtein.py +2 -0
- aldepyde-0.0.0a33/aldepyde/rand/__init__.py +6 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde.egg-info/PKG-INFO +1 -1
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/pyproject.toml +1 -1
- aldepyde-0.0.0a32/aldepyde/databases/UniRef.py +0 -75
- aldepyde-0.0.0a32/aldepyde/rand/__init__.py +0 -3
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/LICENSE +0 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/README.md +0 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/Parsers/_mmcif_parser.py +0 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/Parsers/_pdb_parser.py +0 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/_config.py +0 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/biomolecule/Residue.py +0 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/biomolecule/_Atom.py +0 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/biomolecule/_AtomFactory.py +0 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/biomolecule/_amino_acid.py +0 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/biomolecule/_dna.py +0 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/biomolecule/_pdb.py +0 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/biomolecule/_rna.py +0 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/biomolecule/utils.py +0 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/cache/__init__.py +0 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/cache/_cache.py +0 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/cache/cachemanager.py +0 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/cache/downloader.py +0 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/cache/utils.py +0 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/configurable.py +0 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/data/RemoteFileHandler.py +0 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/data/__init__.py +0 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/data.py +0 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/databases/PDB.py +0 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/databases/RemoteFileHandler.py +0 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/databases/__init__.py +0 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/env.py +0 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/fetcher/__init__.py +0 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/fetcher/test.py +0 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/json/CHG.json +0 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/json/Swiss_Prot.json +0 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/json/chemistry.json +0 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/stats/ProteinStats.py +0 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/stats/__init__.py +0 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/utils.py +0 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde.egg-info/SOURCES.txt +0 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde.egg-info/dependency_links.txt +0 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde.egg-info/top_level.txt +0 -0
- {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/setup.cfg +0 -0
{aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/__init__.py

@@ -17,25 +17,6 @@ def get_cache() -> CacheManager:
     global _cache_manager
     return _cache_manager
 
-# def get_cache() -> _cache_handler:
-#     global _cache
-#     # if _cache.null:
-#     #     return create_cache()
-#     return _cache
-
-# def SaveConfig(path: str="config.json", indent: str = "") -> None:
-#     global _config
-#     get_config().Save(path=path, indent=indent)
-#
-#
-# def LoadConfig(s: dict | str, ignore_missing=False) -> None:
-#     global _config
-#     get_config().Load(s, ignore_missing=ignore_missing)
-
-
-# from . import rand
-# from . import biomolecule
-# from . import fetcher
 
 from importlib import import_module
 
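For orientation only (not part of the diff): the surviving get_cache() helper in aldepyde/__init__.py returns the module-level CacheManager, so a caller would do something like the short sketch below; the variable name is illustrative.

import aldepyde

# get_cache() hands back the package-wide CacheManager instance kept in _cache_manager.
cache = aldepyde.get_cache()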
aldepyde-0.0.0a33/aldepyde/databases/UniRef.py (new file)

@@ -0,0 +1,113 @@
+import zlib
+
+from aldepyde.databases.RemoteFileHandler import RemoteFileHandler
+from aldepyde.databases._database import _database
+from aldepyde.utils import ProgressBar
+import os
+import gzip
+
+class uniref_parser(_database):
+    def __init__(self):
+        super().__init__()
+
+    # TODO single entry parsing
+    # TODO store metadata upon request
+
+    @staticmethod
+    def stream_uniref_gz(filepath, chunk_size=8192, use_progress_bar=False):
+        raw_stream, size = _database.open_stream(filepath)
+        pbar = ProgressBar(size//chunk_size) if use_progress_bar else None
+        decompressor = zlib.decompressobj(16 + zlib.MAX_WBITS)
+        try:
+            while True:
+                comp_chunk = raw_stream.read(chunk_size)
+                if not comp_chunk:
+                    break
+                if pbar is not None:
+                    pbar.update()
+                decomp_chunk = decompressor.decompress(comp_chunk)
+                if decomp_chunk:
+                    yield decomp_chunk
+            final = decompressor.flush()
+            if final:
+                yield final
+        finally:
+            raw_stream.close()
+
+    @staticmethod
+    def download_file(url, destination, chunk_size=8192, use_progress_bar=False):
+        raw_stream, size = _database.open_stream(url)
+        pbar = ProgressBar(size // chunk_size) if use_progress_bar else None
+        with open(destination, 'wb') as fp:
+            while True:
+                chunk = raw_stream.read(chunk_size)
+                if not chunk:
+                    break
+                if pbar is not None:
+                    pbar.update()
+                fp.write(chunk)
+
+
+
+
+    @staticmethod
+    def stitch_streamed_sequences(stream, as_str=True):
+        buffer = b''
+        for chunk in stream:
+            buffer += chunk
+            while buffer.count(b'>') >= 2:
+                sequences = [b">" + seq for seq in buffer.split(b">") if seq != b""]
+                buffer = buffer[buffer.rfind(b">"):]
+                ret_l = [b"".join(sequence.split(b'\n')[1:]).replace(b"\n", b"") for sequence in sequences[:-1]]
+                for s in ret_l:
+                    yield s if not as_str else s.decode()
+        yield uniref_parser._final_sequence(buffer) if not as_str else uniref_parser._final_sequence(buffer).decode()
+
+    @staticmethod
+    def _final_sequence(buffer):
+        lines = buffer.split(b'\n')
+        return b"".join(lines[1:])
+
+    @staticmethod
+    def stream_uniref50(chunk_size=8192, use_progress_bar=False, stitch=False):
+        if not stitch:
+            yield from uniref_parser.stream_uniref_gz('https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref50/uniref50.fasta.gz',
+                                                      chunk_size=chunk_size, use_progress_bar=use_progress_bar)
+        else:
+            yield from uniref_parser.stitch_streamed_sequences(uniref_parser.stream_uniref_gz(
+                'https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref50/uniref50.fasta.gz',
+                chunk_size=chunk_size, use_progress_bar=use_progress_bar))
+
+    @staticmethod
+    def stream_uniref90(chunk_size=8192, use_progress_bar=False, stitch=False):
+        if not stitch:
+            yield from uniref_parser.stream_uniref_gz('https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref90/uniref90.fasta.gz',
+                                                      chunk_size=chunk_size, use_progress_bar=use_progress_bar)
+        else:
+            yield from uniref_parser.stitch_streamed_sequences(uniref_parser.stream_uniref_gz(
+                'https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref90/uniref90.fasta.gz',
+                chunk_size=chunk_size, use_progress_bar=use_progress_bar))
+
+    @staticmethod
+    def stream_uniref100(chunk_size=8192, use_progress_bar=False, stitch=False):
+        if not stitch:
+            yield from uniref_parser.stream_uniref_gz('https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref100/uniref100.fasta.gz',
+                                                      chunk_size=chunk_size, use_progress_bar=use_progress_bar)
+        else:
+            yield from uniref_parser.stitch_streamed_sequences(uniref_parser.stream_uniref_gz(
+                'https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref100/uniref100.fasta.gz',
+                chunk_size=chunk_size, use_progress_bar=use_progress_bar))
+
+    @staticmethod
+    def download_uniref50(destination='uniref50.fasta.gz', chunk_size=8192, use_progress_bar=False):
+        uniref_parser.download_file('https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref50/uniref50.fasta.gz', destination=destination,
+                                    chunk_size=chunk_size, use_progress_bar=use_progress_bar)
+
+    @staticmethod
+    def download_uniref90(destination='uniref90.fasta.gz', chunk_size=8192, use_progress_bar=False):
+        uniref_parser.download_file('https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref90/uniref90.fasta.gz', destination=destination,
+                                    chunk_size=chunk_size, use_progress_bar=use_progress_bar)
+    @staticmethod
+    def download_uniref100(destination='uniref100.fasta.gz', chunk_size=8192, use_progress_bar=False):
+        uniref_parser.download_file('https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref100/uniref100.fasta.gz', destination=destination,
+                                    chunk_size=chunk_size, use_progress_bar=use_progress_bar)
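The new module streams a remote gzip in fixed-size chunks, decompresses them incrementally with zlib, and (optionally) stitches the decompressed FASTA text back into one sequence string per record. The sketch below exercises that stitching path against a tiny local file instead of the multi-gigabyte UniProt downloads. It is not part of the diff: it assumes the a33 package is installed so that aldepyde.databases.UniRef imports as laid out above, the file name and sequence data are made up, and local_gz_chunks is a hypothetical stand-in for stream_uniref_gz that avoids _database.open_stream.

import gzip
import zlib

from aldepyde.databases.UniRef import uniref_parser

# Write a tiny gzipped FASTA file to stand in for a UniRef release (illustrative data).
with gzip.open("mini_uniref.fasta.gz", "wb") as fh:
    fh.write(b">UniRef50_A\nMKVLLA\nGGS\n>UniRef50_B\nPPTT\n")

def local_gz_chunks(path, chunk_size=64):
    # Same chunked gzip-decompression pattern as stream_uniref_gz, but reading a
    # local file instead of going through _database.open_stream.
    decompressor = zlib.decompressobj(16 + zlib.MAX_WBITS)  # expect a gzip header/trailer
    with open(path, "rb") as raw:
        while True:
            comp = raw.read(chunk_size)
            if not comp:
                break
            decomp = decompressor.decompress(comp)
            if decomp:
                yield decomp
        tail = decompressor.flush()
        if tail:
            yield tail

# Stitch the streamed chunks back into one string per FASTA record.
for seq in uniref_parser.stitch_streamed_sequences(local_gz_chunks("mini_uniref.fasta.gz")):
    print(seq)  # prints "MKVLLAGGS", then "PPTT"

Against the real databases, the stitch=True path of stream_uniref50/90/100 wraps these same two steps around the UniProt FTP URLs.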
{aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/rand/RandomProtein.py

@@ -11,6 +11,8 @@ class InvalidDistribution(Exception):
 class ImpossibleSetting(Exception):
     pass
 
+
+# TODO This whole thing needs to be cleaned up to better align with more modern python
 class RandomProtein:
     # Hardcode data for now
     def __init__(self, His_Is_Charged=True, Cys_Is_Polar=True, Charged_Is_Polar=True, Distribution="Swiss"):
aldepyde-0.0.0a32/aldepyde/databases/UniRef.py (deleted)

@@ -1,75 +0,0 @@
-import zlib
-
-from aldepyde.databases.RemoteFileHandler import RemoteFileHandler
-from aldepyde.databases._database import _database
-from aldepyde.utils import ProgressBar
-import os
-import gzip
-
-class uniref_parser(_database):
-    def __init__(self):
-        pass
-
-    #TODO Fix the total calculation
-    @staticmethod
-    def stream_uniref_gz(filepath, chunk_size=8192, use_progress_bar=False):
-        if use_progress_bar is not None:
-            raw_stream, size = _database.open_stream(filepath)
-            pbar = ProgressBar(size//chunk_size) if use_progress_bar else None
-            decompressor = zlib.decompressobj(16 + zlib.MAX_WBITS)
-            try:
-                while True:
-                    comp_chunk = raw_stream.read(chunk_size)
-                    if not comp_chunk:
-                        break
-                    if pbar is not None:
-                        pbar.update()
-                    decomp_chunk = decompressor.decompress(comp_chunk)
-                    if decomp_chunk:
-                        yield decomp_chunk
-                final = decompressor.flush()
-                if final:
-                    yield final
-            finally:
-                raw_stream.close()
-
-    @staticmethod
-    def stitch_streamed_sequences(stream, as_str=True):
-        buffer = b''
-        for chunk in stream:
-            buffer += chunk
-            while buffer.count(b'>') >= 2:
-                sequences = [b">" + seq for seq in buffer.split(b">") if seq != b""]
-                buffer = buffer[buffer.rfind(b">"):]
-                ret_l = [b"".join(sequence.split(b'\n')[1:]).replace(b"\n", b"") for sequence in sequences[:-1]]
-                for s in ret_l:
-                    yield s if not as_str else s.decode()
-        yield uniref_parser._final_sequence(buffer) if not as_str else uniref_parser._final_sequence(buffer).decode()
-
-    @staticmethod
-    def _final_sequence(buffer):
-        lines = buffer.split(b'\n')
-        return b"".join(lines[1:])
-
-    @staticmethod
-    def stream_uniref50(chunk_size=8192, use_progress_bar=False):
-        yield from uniref_parser.stream_uniref_gz('https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref50/uniref50.fasta.gz',
-                                                  chunk_size=chunk_size, use_progress_bar=use_progress_bar)
-        # yield from RemoteFileHandler.stream_url(
-        #     'https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref50/uniref50.fasta.gz', chunk_size=chunk_size)
-
-    @staticmethod
-    def stream_uniref90(chunk_size=8192, use_progress_bar=False):
-        yield from uniref_parser.stream_uniref_gz('https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref50/uniref50.fasta.gz',
-                                                  chunk_size=chunk_size, use_progress_bar=use_progress_bar)
-
-        # yield from RemoteFileHandler.stream_url(
-        #     'https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref90/uniref90.fasta.gz', chunk_size=chunk_size)
-
-    @staticmethod
-    def stream_uniref100(chunk_size=8192, use_progress_bar=False):
-        yield from uniref_parser.stream_uniref_gz('https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref50/uniref50.fasta.gz',
-                                                  chunk_size=chunk_size, use_progress_bar=use_progress_bar)
-
-        # yield from RemoteFileHandler.stream_url(
-        #     'https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref100/uniref100.fasta.gz', chunk_size=chunk_size)
The remaining 38 files listed above are unchanged between aldepyde-0.0.0a32 and aldepyde-0.0.0a33.