aldepyde 0.0.0a32__py3-none-any.whl → 0.0.0a35__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of aldepyde might be problematic.

aldepyde/__init__.py CHANGED
@@ -17,25 +17,6 @@ def get_cache() -> CacheManager:
     global _cache_manager
     return _cache_manager
 
-# def get_cache() -> _cache_handler:
-#     global _cache
-#     # if _cache.null:
-#     #     return create_cache()
-#     return _cache
-
-# def SaveConfig(path: str="config.json", indent: str = "") -> None:
-#     global _config
-#     get_config().Save(path=path, indent=indent)
-#
-#
-# def LoadConfig(s: dict | str, ignore_missing=False) -> None:
-#     global _config
-#     get_config().Load(s, ignore_missing=ignore_missing)
-
-
-# from . import rand
-# from . import biomolecule
-# from . import fetcher
 
 from importlib import import_module
 
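For orientation, the accessor left in place by this hunk can presumably be used directly from the package root (a minimal sketch; it assumes get_cache is re-exported by aldepyde/__init__.py as the context lines suggest, and that CacheManager is the type named in the hunk header):

import aldepyde

# Fetch the module-level cache manager singleton (behaviour inferred from the hunk above).
cache = aldepyde.get_cache()
print(type(cache).__name__)  # expected: CacheManager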
aldepyde/biomolecule/__init__.py CHANGED
@@ -13,3 +13,6 @@ __all__ = list(set(_amino_acid.__all__.copy()) |
                set(_pdb.__all__.copy()) |
                set(_rna.__all__.copy()))
 
+import sys
+
+sys.stderr.write("Note that the `biomolecule` submodule is not yet fully tested and may be unstable")
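The new notice is written straight to stderr at import time, so it can be captured or silenced with the standard library if it gets in the way (a minimal sketch; the only assumption is that aldepyde.biomolecule imports cleanly in your environment):

import contextlib
import io

buf = io.StringIO()
with contextlib.redirect_stderr(buf):
    import aldepyde.biomolecule  # the note is only emitted the first time the module is imported
print(repr(buf.getvalue()))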
aldepyde/databases/SCOPe_Astral.py ADDED
@@ -0,0 +1,71 @@
+from aldepyde.databases._database import local_database
+import operator
+from contextlib import nullcontext
+import re
+
+class scop_parser(local_database):
+    op = {
+        "and": lambda a,b,c: a and b and c,
+        "or": lambda a,b,c: a or b or c
+    }
+
+    def fetch(self, url):
+        pass
+
+    def fetch_code(self, codes):
+        pass
+
+    def parse(self, text):
+        pass
+
+    def extract_all_scop(self):
+        pass
+
+    def partition_scope(self):
+        pass
+
+    def extract_all_astral(self):
+        lines = self.fp.readlines()
+        entry = b""
+        for line in lines:
+            if line.startswith(b">") and len(entry) > 0:
+                yield entry
+                entry = b""
+            entry += line
+        yield entry
+
+    # TODO allow a list of search parameters. Big challenge to make efficient, but could be cute
+    def partition_astral(self, destination:None|str=None, append=False, class_name:str=b'',contains_id:str=b'' , contains_desc:str=b'', mode="and") -> dict:
+        mode = mode.lower()
+        # Everything is a byte string in order to play nicely with future parent methods
+        if isinstance(class_name, str):
+            class_name = class_name.encode('utf-8')
+        if isinstance(contains_desc, str):
+            contains_desc = contains_desc.encode('utf-8')
+        if isinstance(contains_id, str):
+            contains_id = contains_id.encode('utf-8')
+        if mode != "and" and mode != "or":
+            raise ValueError("mode must be \"and\" or \"or\".")
+        logic = scop_parser.op[mode]
+        regex = re.compile(b">[a-zA-Z0-9_.]* *[a-l](.[0-9]+)?(.[0-9]+)?(.[0-9]+)?")
+        if append:
+            file_context = open(destination, 'ab') if destination is not None else nullcontext()
+        else:
+            file_context = open(destination, 'wb') if destination is not None else nullcontext()
+        with file_context as fp:
+            ret_dict = dict()
+            for line in self.extract_all_astral():
+                identifiers = regex.search(line).group().split()
+                id = identifiers[0]
+                cls = identifiers[1]
+                unmatched_spl = regex.sub(b'', line).split(b'\n')
+                desc = unmatched_spl[0]
+                sequence = unmatched_spl[1:]
+                if logic(class_name.lower() in cls.lower(), contains_id.lower() in id.lower(), contains_desc.lower() in desc.lower()):
+                    ret_dict[id] = { # Yes, I know '>' isn't part of the FASTA identifier. This keeps things more consistant
+                        "class" : cls,
+                        "description" : desc,
+                        "sequence" : b"".join(sequence)
+                    }
+                    fp.write(line)
+        return ret_dict
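Read together with the local_database base class added in _database.py further down, the new parser looks meant to be used as a context manager over a local ASTRAL FASTA file. A hedged sketch follows; the input and output filenames are placeholders, and the keyword names come from the partition_astral signature above:

from aldepyde.databases.SCOPe_Astral import scop_parser

# Hypothetical local copy of an ASTRAL domain-sequence FASTA file.
astral_path = "astral-scopedom-seqres-gd-all.fa"

with scop_parser(astral_path) as parser:
    # Keep class "a" (all-alpha) entries whose description mentions "globin",
    # and mirror the matching records into a separate FASTA file.
    hits = parser.partition_astral(destination="class_a_globins.fa",
                                   class_name="a",
                                   contains_desc="globin",
                                   mode="and")

for identifier, record in hits.items():
    # Keys and values are byte strings, per the parser's internal representation.
    print(identifier, record["class"], len(record["sequence"]))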
aldepyde/databases/UniRef.py CHANGED
@@ -1,23 +1,22 @@
 import zlib
-
-from aldepyde.databases.RemoteFileHandler import RemoteFileHandler
-from aldepyde.databases._database import _database
+from aldepyde.databases._database import streamable_database
 from aldepyde.utils import ProgressBar
-import os
-import gzip
 
-class uniref_parser(_database):
+class uniref_parser(streamable_database):
     def __init__(self):
-        pass
+        super().__init__()
+
+    # TODO single entry parsing
+    # TODO store metadata upon request
+    # TODO implement abstract methods
 
-    #TODO Fix the total calculation
     @staticmethod
-    def stream_uniref_gz(filepath, chunk_size=8192, use_progress_bar=False):
-        if use_progress_bar is not None:
-            raw_stream, size = _database.open_stream(filepath)
-            pbar = ProgressBar(size//chunk_size) if use_progress_bar else None
-            decompressor = zlib.decompressobj(16 + zlib.MAX_WBITS)
-            try:
+    def stream_uniref_gz(filepath, chunk_size=8192, use_progress_bar=False, stitch=False):
+        raw_stream, size = streamable_database.open_stream(filepath)
+        pbar = ProgressBar(size//chunk_size) if use_progress_bar else None
+        decompressor = zlib.decompressobj(16 + zlib.MAX_WBITS)
+        try:
+            if not stitch:
                 while True:
                     comp_chunk = raw_stream.read(chunk_size)
                     if not comp_chunk:
@@ -30,8 +29,27 @@ class uniref_parser(_database):
                 final = decompressor.flush()
                 if final:
                     yield final
-            finally:
-                raw_stream.close()
+            else:
+                # Really hacky solution for now
+                # TODO Clean this up
+                yield from uniref_parser.stitch_streamed_sequences(
+                    uniref_parser.stream_uniref_gz(filepath=filepath, chunk_size=chunk_size, use_progress_bar=use_progress_bar, stitch=False))
+        finally:
+            raw_stream.close()
+
+    @staticmethod
+    def download_file(url, destination, chunk_size=8192, use_progress_bar=False):
+        raw_stream, size = streamable_database.open_stream(url)
+        pbar = ProgressBar(size // chunk_size) if use_progress_bar else None
+        with open(destination, 'wb') as fp:
+            while True:
+                chunk = raw_stream.read(chunk_size)
+                if not chunk:
+                    break
+                if pbar is not None:
+                    pbar.update()
+                fp.write(chunk)
+
 
     @staticmethod
     def stitch_streamed_sequences(stream, as_str=True):
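The new download_file helper added just above is a generic chunked copy from a source URL (or local path) to disk, reusing streamable_database.open_stream for the read side. A usage sketch, with a placeholder destination path:

from aldepyde.databases.UniRef import uniref_parser

# Stream a UniRef archive to disk in 8 KiB chunks; the destination filename is illustrative.
uniref_parser.download_file(
    "https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref50/uniref50.fasta.gz",
    destination="uniref50.fasta.gz",
    use_progress_bar=True,
)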
@@ -52,24 +70,45 @@ class uniref_parser(_database):
         return b"".join(lines[1:])
 
     @staticmethod
-    def stream_uniref50(chunk_size=8192, use_progress_bar=False):
-        yield from uniref_parser.stream_uniref_gz('https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref50/uniref50.fasta.gz',
+    def stream_uniref50(chunk_size=8192, use_progress_bar=False, stitch=False):
+        if not stitch:
+            yield from uniref_parser.stream_uniref_gz('https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref50/uniref50.fasta.gz',
                                                chunk_size=chunk_size, use_progress_bar=use_progress_bar)
-        # yield from RemoteFileHandler.stream_url(
-        #     'https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref50/uniref50.fasta.gz', chunk_size=chunk_size)
+        else:
+            yield from uniref_parser.stitch_streamed_sequences(uniref_parser.stream_uniref_gz(
+                'https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref50/uniref50.fasta.gz',
+                chunk_size=chunk_size, use_progress_bar=use_progress_bar))
 
     @staticmethod
-    def stream_uniref90(chunk_size=8192, use_progress_bar=False):
-        yield from uniref_parser.stream_uniref_gz('https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref50/uniref50.fasta.gz',
+    def stream_uniref90(chunk_size=8192, use_progress_bar=False, stitch=False):
+        if not stitch:
+            yield from uniref_parser.stream_uniref_gz('https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref90/uniref90.fasta.gz',
                                                chunk_size=chunk_size, use_progress_bar=use_progress_bar)
-
-        # yield from RemoteFileHandler.stream_url(
-        #     'https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref90/uniref90.fasta.gz', chunk_size=chunk_size)
+        else:
+            yield from uniref_parser.stitch_streamed_sequences(uniref_parser.stream_uniref_gz(
+                'https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref90/uniref90.fasta.gz',
+                chunk_size=chunk_size, use_progress_bar=use_progress_bar))
 
     @staticmethod
-    def stream_uniref100(chunk_size=8192, use_progress_bar=False):
-        yield from uniref_parser.stream_uniref_gz('https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref50/uniref50.fasta.gz',
+    def stream_uniref100(chunk_size=8192, use_progress_bar=False, stitch=False):
+        if not stitch:
+            yield from uniref_parser.stream_uniref_gz('https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref100/uniref100.fasta.gz',
                                                chunk_size=chunk_size, use_progress_bar=use_progress_bar)
+        else:
+            yield from uniref_parser.stitch_streamed_sequences(uniref_parser.stream_uniref_gz(
+                'https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref100/uniref100.fasta.gz',
+                chunk_size=chunk_size, use_progress_bar=use_progress_bar))
+
+    @staticmethod
+    def download_uniref50(destination='uniref50.fasta.gz', chunk_size=8192, use_progress_bar=False):
+        uniref_parser.download_file('https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref50/uniref50.fasta.gz', destination=destination,
+                                    chunk_size=chunk_size, use_progress_bar=use_progress_bar)
 
-        # yield from RemoteFileHandler.stream_url(
-        #     'https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref100/uniref100.fasta.gz', chunk_size=chunk_size)
+    @staticmethod
+    def download_uniref90(destination='uniref90.fasta.gz', chunk_size=8192, use_progress_bar=False):
+        uniref_parser.download_file('https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref90/uniref90.fasta.gz', destination=destination,
+                                    chunk_size=chunk_size, use_progress_bar=use_progress_bar)
+    @staticmethod
+    def download_uniref100(destination='uniref100.fasta.gz', chunk_size=8192, use_progress_bar=False):
+        uniref_parser.download_file('https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref100/uniref100.fasta.gz', destination=destination,
+                                    chunk_size=chunk_size, use_progress_bar=use_progress_bar)
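Taken together, the reworked streaming API appears intended to be consumed like this (a hedged sketch: stitch=True routes the gzip chunks through stitch_streamed_sequences, which appears to yield whole entries rather than raw chunks, and islice is used here only to avoid pulling the entire multi-gigabyte file):

from itertools import islice
from aldepyde.databases.UniRef import uniref_parser

# Look at the first few stitched UniRef50 entries without downloading the whole archive.
for entry in islice(uniref_parser.stream_uniref50(stitch=True), 3):
    print(entry[:60])

# Or keep a local copy for offline work via the new convenience wrapper.
uniref_parser.download_uniref50(destination="uniref50.fasta.gz", use_progress_bar=True)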
aldepyde/databases/_database.py CHANGED
@@ -5,7 +5,10 @@ import os
 from typing import Tuple, BinaryIO
 from io import TextIOWrapper
 
-class _database(ABC):
+class streamable_database(ABC):
+
+    def __init__(self):
+        pass
 
     @abstractmethod
     def fetch(self, url):
@@ -35,4 +38,35 @@ class _database(ABC):
         # Yes, I know the first conditionals do the same thing
 
     def __call__(self):
-        pass
+        pass
+
+class local_database(ABC):
+
+    def __init__(self, filepath=None, as_fp=False):
+        self.fp = None
+        self.as_fp = as_fp
+        self.size = None
+        self.load_path(filepath)
+
+    def load_path(self, filepath):
+        self.filepath = filepath
+
+    def get_pointer(self):
+        return self.fp
+
+    def __enter__(self):
+        self.fp, self.size = local_database.open_stream(self.filepath)
+        if self.as_fp:
+            return self.fp
+        else:
+            return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        if self.fp is not None:
+            self.fp.close()
+            self.fp = None
+
+    @staticmethod
+    def open_stream(source:str) -> Tuple[BinaryIO, int] | None:
+        size = os.path.getsize(source)
+        return open(source, 'rb'), size
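local_database is what gives scop_parser its with-statement behaviour earlier in this diff. A minimal sketch of the contract follows; the toy subclass and example.tsv path are hypothetical, and it assumes local_database declares no abstract methods beyond what this hunk shows:

from aldepyde.databases._database import local_database

class toy_database(local_database):
    # Hypothetical subclass used only to illustrate the context-manager contract.
    pass

# as_fp=True: __enter__ hands back the raw binary file object.
with toy_database("example.tsv", as_fp=True) as fp:
    header = fp.readline()

# Default: __enter__ returns the wrapper itself, with .size and .get_pointer() available.
with toy_database("example.tsv") as db:
    print(db.size, db.get_pointer().read(16))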
aldepyde/rand/RandomProtein.py CHANGED
@@ -11,6 +11,8 @@ class InvalidDistribution(Exception):
 class ImpossibleSetting(Exception):
     pass
 
+
+# TODO This whole thing needs to be cleaned up to better align with more modern python
 class RandomProtein:
     # Hardcode data for now
     def __init__(self, His_Is_Charged=True, Cys_Is_Polar=True, Charged_Is_Polar=True, Distribution="Swiss"):
aldepyde/rand/__init__.py CHANGED
@@ -1,3 +1,6 @@
 from aldepyde.rand.RandomProtein import *
 
-__all__ = ['RandomProtein']
+__all__ = ['RandomProtein']
+
+import sys
+sys.stderr.write("Note that the `rand` submodule is not yet fully tested and may be unstable")
{aldepyde-0.0.0a32.dist-info → aldepyde-0.0.0a35.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: aldepyde
-Version: 0.0.0a32
+Version: 0.0.0a35
 Summary: A package of chemistry and biochemical tools
 Author-email: Nate McMurray <nate.mcmurray13@gmail.com>
 License: MIT License
{aldepyde-0.0.0a32.dist-info → aldepyde-0.0.0a35.dist-info}/RECORD RENAMED
@@ -1,4 +1,4 @@
-aldepyde/__init__.py,sha256=MY5qU07PyoSEySBCRiQcjiYhME8F4EjjTeTtBOf71zE,1378
+aldepyde/__init__.py,sha256=vAL59PwNON2aqFJWnN62vwuJ2Q-1F0qwbGQb3ek4dnw,862
 aldepyde/_config.py,sha256=Jne1TH8w_brEpUD3b-4XY2T8nXtWi8mTcBV5_YqMlX0,4789
 aldepyde/configurable.py,sha256=OJ7vLA-UIAmsNVw_A_j2nCERUi91kRtFGZS9Brr_7s0,214
 aldepyde/data.py,sha256=4dhArC3yt8u7sArrZ5lvWEtwFC7rhM4B02LofbQVV64,5780
@@ -9,7 +9,7 @@ aldepyde/Parsers/_pdb_parser.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSu
 aldepyde/biomolecule/Residue.py,sha256=CgTEIT7F4ljgxSscxzGU1AuThIRnX7i1mJ1DleUrcN0,172
 aldepyde/biomolecule/_Atom.py,sha256=XkZ-qu7U2EHbRJ7erLKV-qUc6NpizCiWbxqGZw0FHpQ,3314
 aldepyde/biomolecule/_AtomFactory.py,sha256=CA4PCESBe55ttr4riBX0F8nFToqbAAT4VSgP06_WER4,2642
-aldepyde/biomolecule/__init__.py,sha256=FlVxzp2MSgD6hGZUo-1mY6M4AarSxqFdNw2mpLRiu5Y,482
+aldepyde/biomolecule/__init__.py,sha256=mBr_PozzTu70-rgMWXFFJQB5ZINjQBFOo5F7Lt-JdQg,597
 aldepyde/biomolecule/_amino_acid.py,sha256=Ovgx12oygCDQuZjSSRVhdMY7nNPhJWFgcLIB537x-QM,116
 aldepyde/biomolecule/_dna.py,sha256=VWanPQRkD_GHMLjudSsZkDzIHVMWqcPB87b4APpGZC0,102
 aldepyde/biomolecule/_pdb.py,sha256=fh0hZaueFV8EuKfGBTpmCyI40fR7l6HYUFiGAS1kGto,17244
@@ -24,20 +24,21 @@ aldepyde/data/RemoteFileHandler.py,sha256=aPASdoYgt0xnRrSYiwMcefQAizLIbB93SKVrlf
 aldepyde/data/__init__.py,sha256=_yKL38EKhXkRM8PSafeee0A6VXGncAoUXVdUyiJ5n3s,48
 aldepyde/databases/PDB.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 aldepyde/databases/RemoteFileHandler.py,sha256=HHx69o6IpI60lf_7hY1ZwI8k0OVyfofG4x1bFeF1uL4,1231
-aldepyde/databases/UniRef.py,sha256=AMRUXe8hy6il7EDWcBeQAM2g51_t3q0y-UoXhIPJpdU,3567
+aldepyde/databases/SCOPe_Astral.py,sha256=zj34CbMSTmTa-KBowND-S9rxNGGFnr84OuxC_fumM6E,2821
+aldepyde/databases/UniRef.py,sha256=lAT5XVO9Rk4_9XDKs5Nr1aGBbkaQ6m-vM0yufym43aY,6004
 aldepyde/databases/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-aldepyde/databases/_database.py,sha256=ZRwGWyuhXHe9i0WYzStqRL7-SYevQkR5Jik7J360a2A,949
+aldepyde/databases/_database.py,sha256=fvm4jaP9UKHDeS-0wwWyp7i4uo9PjCGDbwJgdkD8MLo,1822
 aldepyde/fetcher/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 aldepyde/fetcher/test.py,sha256=Q0O3TrMnyd_V3QZvMaDtkFGXIvtQLg57RofKjxtG8Y8,23
 aldepyde/json/CHG.json,sha256=igz1QSwoyXieOWMRwPnQjUJX29N5bk5OBVkhpFjyCzo,614
 aldepyde/json/Swiss_Prot.json,sha256=mJiUiYnvDLa_59tlb_hcIsaYGFrwvJAorwhHc2O9H1U,612
 aldepyde/json/chemistry.json,sha256=pCWYNRv5xVEMuhYi1nn1RutgsoFSMo_TRu1dk_TYyds,124436
-aldepyde/rand/RandomProtein.py,sha256=CaFYJh8WiT5hvvWONXNkJAEVLefu7qPfnLi7WR5YL1g,16056
-aldepyde/rand/__init__.py,sha256=1EztGz5e6NRmKuAfp66trQDDa5ov-_ZkxrD9Zp8fScA,72
+aldepyde/rand/RandomProtein.py,sha256=sNXx4jop9Fplz2oz4g6pEArF69j31_PvtiZuRpLz51I,16146
+aldepyde/rand/__init__.py,sha256=Q30wrG_XHrmdgaXaLWSlce_ZGT_ZpOT3CYLDj6OgEy0,182
 aldepyde/stats/ProteinStats.py,sha256=t_gqhld2wKweszPZvtHhrORadFc28glFx5OgJs23TsM,2569
 aldepyde/stats/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-aldepyde-0.0.0a32.dist-info/licenses/LICENSE,sha256=VbOVaNlEaWa9cnYi8gOnenCBAVk9s_P3J_z-n_F-638,1091
-aldepyde-0.0.0a32.dist-info/METADATA,sha256=VpqfGzulHQ-omiPapAvVCVMIRGeTxPKUroF8FJkKBe8,2554
-aldepyde-0.0.0a32.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-aldepyde-0.0.0a32.dist-info/top_level.txt,sha256=xv0YJ1izG4AP9ZlielN_0z9QGQQdwtHFM3-TmJivBOM,9
-aldepyde-0.0.0a32.dist-info/RECORD,,
+aldepyde-0.0.0a35.dist-info/licenses/LICENSE,sha256=VbOVaNlEaWa9cnYi8gOnenCBAVk9s_P3J_z-n_F-638,1091
+aldepyde-0.0.0a35.dist-info/METADATA,sha256=NdHCpQYVP7NbBRxOmUXvu5ZlsD94jVS_z6ABY_Ejees,2554
+aldepyde-0.0.0a35.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+aldepyde-0.0.0a35.dist-info/top_level.txt,sha256=xv0YJ1izG4AP9ZlielN_0z9QGQQdwtHFM3-TmJivBOM,9
+aldepyde-0.0.0a35.dist-info/RECORD,,