aldepyde-0.0.0a32.tar.gz → aldepyde-0.0.0a33.tar.gz

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release: this version of aldepyde might be problematic.

Files changed (49)
  1. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/PKG-INFO +1 -1
  2. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/__init__.py +0 -19
  3. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/biomolecule/__init__.py +3 -0
  4. aldepyde-0.0.0a33/aldepyde/databases/UniRef.py +113 -0
  5. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/databases/_database.py +3 -0
  6. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/rand/RandomProtein.py +2 -0
  7. aldepyde-0.0.0a33/aldepyde/rand/__init__.py +6 -0
  8. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde.egg-info/PKG-INFO +1 -1
  9. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/pyproject.toml +1 -1
  10. aldepyde-0.0.0a32/aldepyde/databases/UniRef.py +0 -75
  11. aldepyde-0.0.0a32/aldepyde/rand/__init__.py +0 -3
  12. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/LICENSE +0 -0
  13. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/README.md +0 -0
  14. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/Parsers/_mmcif_parser.py +0 -0
  15. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/Parsers/_pdb_parser.py +0 -0
  16. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/_config.py +0 -0
  17. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/biomolecule/Residue.py +0 -0
  18. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/biomolecule/_Atom.py +0 -0
  19. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/biomolecule/_AtomFactory.py +0 -0
  20. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/biomolecule/_amino_acid.py +0 -0
  21. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/biomolecule/_dna.py +0 -0
  22. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/biomolecule/_pdb.py +0 -0
  23. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/biomolecule/_rna.py +0 -0
  24. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/biomolecule/utils.py +0 -0
  25. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/cache/__init__.py +0 -0
  26. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/cache/_cache.py +0 -0
  27. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/cache/cachemanager.py +0 -0
  28. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/cache/downloader.py +0 -0
  29. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/cache/utils.py +0 -0
  30. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/configurable.py +0 -0
  31. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/data/RemoteFileHandler.py +0 -0
  32. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/data/__init__.py +0 -0
  33. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/data.py +0 -0
  34. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/databases/PDB.py +0 -0
  35. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/databases/RemoteFileHandler.py +0 -0
  36. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/databases/__init__.py +0 -0
  37. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/env.py +0 -0
  38. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/fetcher/__init__.py +0 -0
  39. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/fetcher/test.py +0 -0
  40. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/json/CHG.json +0 -0
  41. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/json/Swiss_Prot.json +0 -0
  42. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/json/chemistry.json +0 -0
  43. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/stats/ProteinStats.py +0 -0
  44. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/stats/__init__.py +0 -0
  45. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/utils.py +0 -0
  46. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde.egg-info/SOURCES.txt +0 -0
  47. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde.egg-info/dependency_links.txt +0 -0
  48. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde.egg-info/top_level.txt +0 -0
  49. {aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/setup.cfg +0 -0

{aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: aldepyde
- Version: 0.0.0a32
+ Version: 0.0.0a33
  Summary: A package of chemistry and biochemical tools
  Author-email: Nate McMurray <nate.mcmurray13@gmail.com>
  License: MIT License

{aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/__init__.py
@@ -17,25 +17,6 @@ def get_cache() -> CacheManager:
      global _cache_manager
      return _cache_manager

- # def get_cache() -> _cache_handler:
- # global _cache
- # # if _cache.null:
- # # return create_cache()
- # return _cache
-
- # def SaveConfig(path: str="config.json", indent: str = "") -> None:
- # global _config
- # get_config().Save(path=path, indent=indent)
- #
- #
- # def LoadConfig(s: dict | str, ignore_missing=False) -> None:
- # global _config
- # get_config().Load(s, ignore_missing=ignore_missing)
-
-
- # from . import rand
- # from . import biomolecule
- # from . import fetcher

  from importlib import import_module


{aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/biomolecule/__init__.py
@@ -13,3 +13,6 @@ __all__ = list(set(_amino_acid.__all__.copy()) |
      set(_pdb.__all__.copy()) |
      set(_rna.__all__.copy()))

+ import sys
+
+ sys.stderr.write("Note that the `biomolecule` submodule is not yet fully tested and may be unstable")

aldepyde-0.0.0a33/aldepyde/databases/UniRef.py (new file)
@@ -0,0 +1,113 @@
+ import zlib
+
+ from aldepyde.databases.RemoteFileHandler import RemoteFileHandler
+ from aldepyde.databases._database import _database
+ from aldepyde.utils import ProgressBar
+ import os
+ import gzip
+
+ class uniref_parser(_database):
+     def __init__(self):
+         super().__init__()
+
+     # TODO single entry parsing
+     # TODO store metadata upon request
+
+     @staticmethod
+     def stream_uniref_gz(filepath, chunk_size=8192, use_progress_bar=False):
+         raw_stream, size = _database.open_stream(filepath)
+         pbar = ProgressBar(size//chunk_size) if use_progress_bar else None
+         decompressor = zlib.decompressobj(16 + zlib.MAX_WBITS)
+         try:
+             while True:
+                 comp_chunk = raw_stream.read(chunk_size)
+                 if not comp_chunk:
+                     break
+                 if pbar is not None:
+                     pbar.update()
+                 decomp_chunk = decompressor.decompress(comp_chunk)
+                 if decomp_chunk:
+                     yield decomp_chunk
+             final = decompressor.flush()
+             if final:
+                 yield final
+         finally:
+             raw_stream.close()
+
+     @staticmethod
+     def download_file(url, destination, chunk_size=8192, use_progress_bar=False):
+         raw_stream, size = _database.open_stream(url)
+         pbar = ProgressBar(size // chunk_size) if use_progress_bar else None
+         with open(destination, 'wb') as fp:
+             while True:
+                 chunk = raw_stream.read(chunk_size)
+                 if not chunk:
+                     break
+                 if pbar is not None:
+                     pbar.update()
+                 fp.write(chunk)
+
+
+
+
+     @staticmethod
+     def stitch_streamed_sequences(stream, as_str=True):
+         buffer = b''
+         for chunk in stream:
+             buffer += chunk
+             while buffer.count(b'>') >= 2:
+                 sequences = [b">" + seq for seq in buffer.split(b">") if seq != b""]
+                 buffer = buffer[buffer.rfind(b">"):]
+                 ret_l = [b"".join(sequence.split(b'\n')[1:]).replace(b"\n", b"") for sequence in sequences[:-1]]
+                 for s in ret_l:
+                     yield s if not as_str else s.decode()
+         yield uniref_parser._final_sequence(buffer) if not as_str else uniref_parser._final_sequence(buffer).decode()
+
+     @staticmethod
+     def _final_sequence(buffer):
+         lines = buffer.split(b'\n')
+         return b"".join(lines[1:])
+
+     @staticmethod
+     def stream_uniref50(chunk_size=8192, use_progress_bar=False, stitch=False):
+         if not stitch:
+             yield from uniref_parser.stream_uniref_gz('https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref50/uniref50.fasta.gz',
+                                                       chunk_size=chunk_size, use_progress_bar=use_progress_bar)
+         else:
+             yield from uniref_parser.stitch_streamed_sequences(uniref_parser.stream_uniref_gz(
+                 'https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref50/uniref50.fasta.gz',
+                 chunk_size=chunk_size, use_progress_bar=use_progress_bar))
+
+     @staticmethod
+     def stream_uniref90(chunk_size=8192, use_progress_bar=False, stitch=False):
+         if not stitch:
+             yield from uniref_parser.stream_uniref_gz('https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref90/uniref90.fasta.gz',
+                                                       chunk_size=chunk_size, use_progress_bar=use_progress_bar)
+         else:
+             yield from uniref_parser.stitch_streamed_sequences(uniref_parser.stream_uniref_gz(
+                 'https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref90/uniref90.fasta.gz',
+                 chunk_size=chunk_size, use_progress_bar=use_progress_bar))
+
+     @staticmethod
+     def stream_uniref100(chunk_size=8192, use_progress_bar=False, stitch=False):
+         if not stitch:
+             yield from uniref_parser.stream_uniref_gz('https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref100/uniref100.fasta.gz',
+                                                       chunk_size=chunk_size, use_progress_bar=use_progress_bar)
+         else:
+             yield from uniref_parser.stitch_streamed_sequences(uniref_parser.stream_uniref_gz(
+                 'https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref100/uniref100.fasta.gz',
+                 chunk_size=chunk_size, use_progress_bar=use_progress_bar))
+
+     @staticmethod
+     def download_uniref50(destination='uniref50.fasta.gz', chunk_size=8192, use_progress_bar=False):
+         uniref_parser.download_file('https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref50/uniref50.fasta.gz', destination=destination,
+                                     chunk_size=chunk_size, use_progress_bar=use_progress_bar)
+
+     @staticmethod
+     def download_uniref90(destination='uniref90.fasta.gz', chunk_size=8192, use_progress_bar=False):
+         uniref_parser.download_file('https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref90/uniref90.fasta.gz', destination=destination,
+                                     chunk_size=chunk_size, use_progress_bar=use_progress_bar)
+     @staticmethod
+     def download_uniref100(destination='uniref100.fasta.gz', chunk_size=8192, use_progress_bar=False):
+         uniref_parser.download_file('https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref100/uniref100.fasta.gz', destination=destination,
+                                     chunk_size=chunk_size, use_progress_bar=use_progress_bar)
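
The rewritten UniRef.py adds a stitch option to the stream_uniref50/90/100 generators, which routes the decompressed chunks through stitch_streamed_sequences so callers receive whole FASTA sequences (header lines stripped, decoded to str by default) instead of raw byte chunks. A minimal usage sketch, assuming the hard-coded UniProt FTP URL is reachable and that _database.open_stream resolves it as in this release:

from aldepyde.databases.UniRef import uniref_parser

# Stream stitched protein sequences from UniRef50 and stop after a few,
# since the full compressed archive is very large.
for i, seq in enumerate(uniref_parser.stream_uniref50(stitch=True)):
    print(f"sequence {i}: {len(seq)} residues")
    if i >= 4:
        break

The same pattern applies to stream_uniref90 and stream_uniref100, while download_uniref50/90/100 write the compressed archive to disk instead of streaming it.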

{aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/databases/_database.py
@@ -7,6 +7,9 @@ from io import TextIOWrapper

  class _database(ABC):

+     def __init__(self):
+         pass
+
      @abstractmethod
      def fetch(self, url):
          pass

{aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde/rand/RandomProtein.py
@@ -11,6 +11,8 @@ class InvalidDistribution(Exception):
  class ImpossibleSetting(Exception):
      pass

+
+ # TODO This whole thing needs to be cleaned up to better align with more modern python
  class RandomProtein:
      # Hardcode data for now
      def __init__(self, His_Is_Charged=True, Cys_Is_Polar=True, Charged_Is_Polar=True, Distribution="Swiss"):

aldepyde-0.0.0a33/aldepyde/rand/__init__.py (new file)
@@ -0,0 +1,6 @@
+ from aldepyde.rand.RandomProtein import *
+
+ __all__ = ['RandomProtein']
+
+ import sys
+ sys.stderr.write("Note that the `rand` submodule is not yet fully tested and may be unstable")
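
Both new __init__.py files write their stability note directly to stderr at import time. If that output is unwanted in a script, it can be captured around the first import using only the standard library; a small illustrative sketch, not part of the package:

import contextlib
import io

# Capture the note emitted when aldepyde.rand is first imported;
# later imports are served from the module cache and emit nothing.
with contextlib.redirect_stderr(io.StringIO()) as captured:
    import aldepyde.rand

print(captured.getvalue())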

{aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/aldepyde.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: aldepyde
- Version: 0.0.0a32
+ Version: 0.0.0a33
  Summary: A package of chemistry and biochemical tools
  Author-email: Nate McMurray <nate.mcmurray13@gmail.com>
  License: MIT License

{aldepyde-0.0.0a32 → aldepyde-0.0.0a33}/pyproject.toml
@@ -5,7 +5,7 @@ build-backend = "setuptools.build_meta"
  "aldepyde" = ["json/*.json"]
  [project]
  name = "aldepyde"
- version = "0.0.0a32"
+ version = "0.0.0a33"
  authors = [
      { name="Nate McMurray", email="nate.mcmurray13@gmail.com" },
  ]

aldepyde-0.0.0a32/aldepyde/databases/UniRef.py (removed in 0.0.0a33)
@@ -1,75 +0,0 @@
- import zlib
-
- from aldepyde.databases.RemoteFileHandler import RemoteFileHandler
- from aldepyde.databases._database import _database
- from aldepyde.utils import ProgressBar
- import os
- import gzip
-
- class uniref_parser(_database):
-     def __init__(self):
-         pass
-
-     #TODO Fix the total calculation
-     @staticmethod
-     def stream_uniref_gz(filepath, chunk_size=8192, use_progress_bar=False):
-         if use_progress_bar is not None:
-             raw_stream, size = _database.open_stream(filepath)
-             pbar = ProgressBar(size//chunk_size) if use_progress_bar else None
-             decompressor = zlib.decompressobj(16 + zlib.MAX_WBITS)
-             try:
-                 while True:
-                     comp_chunk = raw_stream.read(chunk_size)
-                     if not comp_chunk:
-                         break
-                     if pbar is not None:
-                         pbar.update()
-                     decomp_chunk = decompressor.decompress(comp_chunk)
-                     if decomp_chunk:
-                         yield decomp_chunk
-                 final = decompressor.flush()
-                 if final:
-                     yield final
-             finally:
-                 raw_stream.close()
-
-     @staticmethod
-     def stitch_streamed_sequences(stream, as_str=True):
-         buffer = b''
-         for chunk in stream:
-             buffer += chunk
-             while buffer.count(b'>') >= 2:
-                 sequences = [b">" + seq for seq in buffer.split(b">") if seq != b""]
-                 buffer = buffer[buffer.rfind(b">"):]
-                 ret_l = [b"".join(sequence.split(b'\n')[1:]).replace(b"\n", b"") for sequence in sequences[:-1]]
-                 for s in ret_l:
-                     yield s if not as_str else s.decode()
-         yield uniref_parser._final_sequence(buffer) if not as_str else uniref_parser._final_sequence(buffer).decode()
-
-     @staticmethod
-     def _final_sequence(buffer):
-         lines = buffer.split(b'\n')
-         return b"".join(lines[1:])
-
-     @staticmethod
-     def stream_uniref50(chunk_size=8192, use_progress_bar=False):
-         yield from uniref_parser.stream_uniref_gz('https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref50/uniref50.fasta.gz',
-                                                   chunk_size=chunk_size, use_progress_bar=use_progress_bar)
-         # yield from RemoteFileHandler.stream_url(
-         # 'https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref50/uniref50.fasta.gz', chunk_size=chunk_size)
-
-     @staticmethod
-     def stream_uniref90(chunk_size=8192, use_progress_bar=False):
-         yield from uniref_parser.stream_uniref_gz('https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref50/uniref50.fasta.gz',
-                                                   chunk_size=chunk_size, use_progress_bar=use_progress_bar)
-
-         # yield from RemoteFileHandler.stream_url(
-         # 'https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref90/uniref90.fasta.gz', chunk_size=chunk_size)
-
-     @staticmethod
-     def stream_uniref100(chunk_size=8192, use_progress_bar=False):
-         yield from uniref_parser.stream_uniref_gz('https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref50/uniref50.fasta.gz',
-                                                   chunk_size=chunk_size, use_progress_bar=use_progress_bar)
-
-         # yield from RemoteFileHandler.stream_url(
-         # 'https://ftp.uniprot.org/pub/databases/uniprot/uniref/uniref100/uniref100.fasta.gz', chunk_size=chunk_size)

aldepyde-0.0.0a32/aldepyde/rand/__init__.py (removed in 0.0.0a33)
@@ -1,3 +0,0 @@
- from aldepyde.rand.RandomProtein import *
-
- __all__ = ['RandomProtein']