aldepyde 0.0.0a38__tar.gz → 0.0.0a43__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {aldepyde-0.0.0a38 → aldepyde-0.0.0a43}/PKG-INFO +1 -1
- {aldepyde-0.0.0a38 → aldepyde-0.0.0a43}/aldepyde/__init__.py +5 -4
- {aldepyde-0.0.0a38 → aldepyde-0.0.0a43}/aldepyde/_config.py +3 -3
- aldepyde-0.0.0a43/aldepyde/biomolecule/structure.py +60 -0
- {aldepyde-0.0.0a38/aldepyde/biomolecule → aldepyde-0.0.0a43/aldepyde/biomolecule_old}/_AtomFactory.py +1 -1
- {aldepyde-0.0.0a38/aldepyde/biomolecule → aldepyde-0.0.0a43/aldepyde/biomolecule_old}/__init__.py +1 -1
- aldepyde-0.0.0a43/aldepyde/biomolecule_old/_amino_acid.py +6 -0
- aldepyde-0.0.0a43/aldepyde/biomolecule_old/_dna.py +6 -0
- {aldepyde-0.0.0a38/aldepyde/biomolecule → aldepyde-0.0.0a43/aldepyde/biomolecule_old}/_pdb.py +36 -36
- aldepyde-0.0.0a43/aldepyde/biomolecule_old/_rna.py +6 -0
- {aldepyde-0.0.0a38 → aldepyde-0.0.0a43}/aldepyde/cache/_cache.py +4 -4
- {aldepyde-0.0.0a38 → aldepyde-0.0.0a43}/aldepyde/cache/cachemanager.py +36 -13
- aldepyde-0.0.0a43/aldepyde/data/chemistry_old.json +4622 -0
- aldepyde-0.0.0a43/aldepyde/data/distribution.py +46 -0
- aldepyde-0.0.0a43/aldepyde/data/distributions/arabidopsis.json +26 -0
- aldepyde-0.0.0a43/aldepyde/data/distributions/bacillus.json +26 -0
- aldepyde-0.0.0a43/aldepyde/data/distributions/drosophila.json +27 -0
- aldepyde-0.0.0a43/aldepyde/data/distributions/ecoli.json +27 -0
- aldepyde-0.0.0a43/aldepyde/data/distributions/human.json +27 -0
- aldepyde-0.0.0a43/aldepyde/data/distributions/mouse.json +29 -0
- aldepyde-0.0.0a43/aldepyde/data/distributions/standards.py +2 -0
- aldepyde-0.0.0a43/aldepyde/data/distributions/swiss.json +25 -0
- aldepyde-0.0.0a43/aldepyde/data/distributions/yeast.json +25 -0
- aldepyde-0.0.0a43/aldepyde/data/new_js.json +4622 -0
- aldepyde-0.0.0a43/aldepyde/data/protein_distribution.py +8 -0
- {aldepyde-0.0.0a38 → aldepyde-0.0.0a43}/aldepyde/data.py +5 -5
- {aldepyde-0.0.0a38 → aldepyde-0.0.0a43}/aldepyde/databases/RemoteFileHandler.py +7 -0
- {aldepyde-0.0.0a38 → aldepyde-0.0.0a43}/aldepyde/databases/SCOPe_Astral.py +2 -0
- aldepyde-0.0.0a43/aldepyde/databases/UniRef.py +282 -0
- {aldepyde-0.0.0a38 → aldepyde-0.0.0a43}/aldepyde/databases/_database.py +21 -7
- aldepyde-0.0.0a43/aldepyde/generators/Sequence.py +400 -0
- aldepyde-0.0.0a43/aldepyde/generators/__init__.py +11 -0
- {aldepyde-0.0.0a38/aldepyde/rand_utils → aldepyde-0.0.0a43/aldepyde/generators/deprecated}/RandomProtein.py +3 -3
- aldepyde-0.0.0a43/aldepyde/generators/deprecated/__init__.py +0 -0
- aldepyde-0.0.0a38/aldepyde/rand_utils/PolymerClassifier.py → aldepyde-0.0.0a43/aldepyde/generators/polymer_classifier.py +2 -1
- aldepyde-0.0.0a38/aldepyde/rand_utils/ProteinClassifier.py → aldepyde-0.0.0a43/aldepyde/generators/protein_classifier.py +3 -1
- aldepyde-0.0.0a38/aldepyde/rand_utils/ProteinGenerator.py → aldepyde-0.0.0a43/aldepyde/generators/protein_generator.py +3 -4
- aldepyde-0.0.0a43/aldepyde/generators/residue_generator.py +51 -0
- aldepyde-0.0.0a43/aldepyde/pdb/__init__.py +12 -0
- aldepyde-0.0.0a43/aldepyde/pdb/fetch.py +18 -0
- aldepyde-0.0.0a43/aldepyde/pdb/load.py +13 -0
- aldepyde-0.0.0a43/aldepyde/pdb/parse.py +12 -0
- aldepyde-0.0.0a43/aldepyde/pdb/parse_mmcif.py +208 -0
- aldepyde-0.0.0a43/aldepyde/pdb/parse_pdb.py +102 -0
- aldepyde-0.0.0a43/aldepyde/pdb/pdb_client.py +36 -0
- aldepyde-0.0.0a43/aldepyde/pdb/sniff.py +16 -0
- {aldepyde-0.0.0a38/aldepyde/data → aldepyde-0.0.0a43/aldepyde/remode_data}/RemoteFileHandler.py +6 -1
- {aldepyde-0.0.0a38 → aldepyde-0.0.0a43}/aldepyde/stats/ProteinStats.py +2 -2
- aldepyde-0.0.0a43/aldepyde/stats/__init__.py +0 -0
- {aldepyde-0.0.0a38 → aldepyde-0.0.0a43}/aldepyde.egg-info/PKG-INFO +1 -1
- aldepyde-0.0.0a43/aldepyde.egg-info/SOURCES.txt +73 -0
- aldepyde-0.0.0a43/aldepyde.egg-info/top_level.txt +2 -0
- {aldepyde-0.0.0a38 → aldepyde-0.0.0a43}/pyproject.toml +5 -2
- aldepyde-0.0.0a38/aldepyde/biomolecule/_amino_acid.py +0 -6
- aldepyde-0.0.0a38/aldepyde/biomolecule/_dna.py +0 -6
- aldepyde-0.0.0a38/aldepyde/biomolecule/_rna.py +0 -6
- aldepyde-0.0.0a38/aldepyde/databases/UniRef.py +0 -114
- aldepyde-0.0.0a38/aldepyde/fetcher/test.py +0 -2
- aldepyde-0.0.0a38/aldepyde/json/CHG.json +0 -25
- aldepyde-0.0.0a38/aldepyde/json/Swiss_Prot.json +0 -25
- aldepyde-0.0.0a38/aldepyde/rand_utils/ResidueGenerator.py +0 -9
- aldepyde-0.0.0a38/aldepyde/rand_utils/Sequence.py +0 -2
- aldepyde-0.0.0a38/aldepyde/rand_utils/__init__.py +0 -7
- aldepyde-0.0.0a38/aldepyde.egg-info/SOURCES.txt +0 -52
- aldepyde-0.0.0a38/aldepyde.egg-info/top_level.txt +0 -1
- {aldepyde-0.0.0a38 → aldepyde-0.0.0a43}/LICENSE +0 -0
- {aldepyde-0.0.0a38 → aldepyde-0.0.0a43}/README.md +0 -0
- {aldepyde-0.0.0a38 → aldepyde-0.0.0a43}/aldepyde/Parsers/_mmcif_parser.py +0 -0
- {aldepyde-0.0.0a38 → aldepyde-0.0.0a43}/aldepyde/Parsers/_pdb_parser.py +0 -0
- {aldepyde-0.0.0a38/aldepyde/databases → aldepyde-0.0.0a43/aldepyde/biomolecule}/__init__.py +0 -0
- {aldepyde-0.0.0a38/aldepyde/biomolecule → aldepyde-0.0.0a43/aldepyde/biomolecule_old}/Residue.py +0 -0
- {aldepyde-0.0.0a38/aldepyde/biomolecule → aldepyde-0.0.0a43/aldepyde/biomolecule_old}/_Atom.py +0 -0
- {aldepyde-0.0.0a38/aldepyde/biomolecule → aldepyde-0.0.0a43/aldepyde/biomolecule_old}/utils.py +0 -0
- {aldepyde-0.0.0a38 → aldepyde-0.0.0a43}/aldepyde/cache/__init__.py +0 -0
- {aldepyde-0.0.0a38 → aldepyde-0.0.0a43}/aldepyde/cache/downloader.py +0 -0
- {aldepyde-0.0.0a38 → aldepyde-0.0.0a43}/aldepyde/cache/utils.py +0 -0
- {aldepyde-0.0.0a38 → aldepyde-0.0.0a43}/aldepyde/configurable.py +0 -0
- {aldepyde-0.0.0a38/aldepyde/fetcher → aldepyde-0.0.0a43/aldepyde/data}/__init__.py +0 -0
- {aldepyde-0.0.0a38/aldepyde/json → aldepyde-0.0.0a43/aldepyde/data}/chemistry.json +0 -0
- {aldepyde-0.0.0a38/aldepyde/stats → aldepyde-0.0.0a43/aldepyde/databases}/__init__.py +0 -0
- {aldepyde-0.0.0a38 → aldepyde-0.0.0a43}/aldepyde/env.py +0 -0
- /aldepyde-0.0.0a38/aldepyde/databases/PDB.py → /aldepyde-0.0.0a43/aldepyde/fetcher/__init__.py +0 -0
- /aldepyde-0.0.0a38/aldepyde/rand_utils/PolymerConstraints.py → /aldepyde-0.0.0a43/aldepyde/generators/polymer_constraints.py +0 -0
- {aldepyde-0.0.0a38/aldepyde/data → aldepyde-0.0.0a43/aldepyde/remode_data}/__init__.py +0 -0
- {aldepyde-0.0.0a38 → aldepyde-0.0.0a43}/aldepyde/utils.py +0 -0
- {aldepyde-0.0.0a38 → aldepyde-0.0.0a43}/aldepyde.egg-info/dependency_links.txt +0 -0
- {aldepyde-0.0.0a38 → aldepyde-0.0.0a43}/setup.cfg +0 -0

{aldepyde-0.0.0a38 → aldepyde-0.0.0a43}/aldepyde/__init__.py

@@ -7,10 +7,11 @@ from aldepyde.cache.cachemanager import CacheManager


 _cache_manager = CacheManager(initialize=False)
-def …
+def use_cache(max_memory="2gib", path=None) -> CacheManager:
     global _cache_manager
-    _cache_manager = …
-
+    _cache_manager = CacheManager(path=path, initialize=True)
+    _cache_manager.enable()
+    _cache_manager.set_max_memory(max_memory)
     return _cache_manager

 def get_cache() -> CacheManager:
@@ -20,7 +21,7 @@ get_cache() -> CacheManager:

 from importlib import import_module

-__all__ = ["…
+__all__ = ["generators", "biomolecule_old", "fetcher"]

 def __getattr__(name):
     if name in __all__:
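In a38 this block was only a stub; a43 wires the module-level cache together: `use_cache()` re-creates the shared `CacheManager`, enables it, and applies a memory cap, while `get_cache()` hands back the current manager. A minimal usage sketch based only on the signatures visible in this hunk; the cache directory path below is an illustrative assumption, not a documented default:

```python
import aldepyde

# Rebuild and enable the module-level cache with a 2 GiB cap.
# ("/tmp/aldepyde_cache" is an example value, not a package default.)
cache = aldepyde.use_cache(max_memory="2gib", path="/tmp/aldepyde_cache")

# Elsewhere in the code, fetch the same module-level manager.
manager = aldepyde.get_cache()
```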
{aldepyde-0.0.0a38 → aldepyde-0.0.0a43}/aldepyde/_config.py

@@ -83,10 +83,10 @@ class _configuration():
     ####################
     def Load(self, s: dict | str, ignore_missing=False) -> None:
         if isinstance(s, str):
-            if os.path.exists(s): # Try and read as a …
+            if os.path.exists(s): # Try and read as a data file
                 with open(s, "r") as jp:
                     s = json.load(jp)
-            else: # Try and read as a …
+            else: # Try and read as a data string
                 s = json.loads(s)
         extra_settings = "".join([f"\t-{k}\n" for k in s.keys() if k not in self._setters.keys()])
         if len(extra_settings) != 0:
@@ -107,7 +107,7 @@ class _configuration():
             return json.dumps(self._settings, indent=indent)
         return self._settings

-    def Save(self, path: str="config.…
+    def Save(self, path: str="config.data", indent: str=""):
         with open(path, "w") as jp:
             json.dump(self._settings, jp, indent=indent)
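From these hunks, `Load()` accepts a dict, a JSON string, or a path to a JSON file, and `Save()` now defaults to writing `config.data`. A round-trip sketch; how the `_configuration` instance is exposed to callers is not shown in this diff, so `cfg` and the setting key below are hypothetical:

```python
# cfg: a hypothetical _configuration instance; "verbose" is a hypothetical setting key.
cfg.Load('{"verbose": true}')          # string that is not a path -> parsed with json.loads
cfg.Save("config.data", indent="  ")   # written out with json.dump to the given path
cfg.Load("config.data")                # existing path -> opened and read with json.load
```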
aldepyde-0.0.0a43/aldepyde/biomolecule/structure.py

@@ -0,0 +1,60 @@
+from typing import List, Tuple, Optional, Dict
+from dataclasses import dataclass, field
+
+
+@dataclass
+class Atom():
+    serial: str
+    name: str
+    coord: Tuple[float, float, float]
+    occupancy: float
+    b_factor: float
+    is_het: bool
+    element: Optional[str]
+    charge: Optional[str]
+    altloc: Optional[str]
+
+
+@dataclass
+class Residue():
+    name: str
+    id: str
+    ins_code: Optional[str]
+    atoms: List[Atom] = field(default_factory=list)
+
+    def add_atom(self, atom: Atom) -> None:
+        self.atoms.append(atom)
+
+@dataclass
+class Chain():
+    id: str
+    residues: Dict[str, Residue] = field(default_factory=dict)
+
+    def add_residue(self, residue: Residue):
+        self.residues[residue.id] = residue
+        # self.residues.append(residue)
+
+    def get_residue(self, id):
+        return self.residues[id]
+
+@dataclass
+class Model:
+    id: str
+    chains: Dict[str, Chain] = field(default_factory=dict)
+
+    def add_chain(self, chain: Chain) -> None:
+        self.chains[chain.id] = chain
+        # self.chains.append(chain)
+
+    def get_chain(self, id) -> Chain:
+        return self.chains[id]
+
+@dataclass
+class Structure:
+    id: str
+    # title: str = ""
+    models: Dict[str, Model] = field(default_factory=dict)
+
+    def add_model(self, model: Model) -> None:
+        self.models[model.id] = model
+        # self.models.append(model)
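The new structure.py introduces a plain dataclass hierarchy (Structure → Model → Chain → Residue → Atom), with each container keyed by the child's `id`. A construction sketch using only the fields and methods added above, assuming the module imports as `aldepyde.biomolecule.structure` per its file path; the ids and coordinates are made-up example values:

```python
from aldepyde.biomolecule.structure import Atom, Residue, Chain, Model, Structure

ca = Atom(serial="1", name="CA", coord=(11.1, 22.2, 33.3),
          occupancy=1.0, b_factor=15.2, is_het=False,
          element="C", charge=None, altloc=None)

res = Residue(name="GLY", id="1", ins_code=None)
res.add_atom(ca)

chain = Chain(id="A")
chain.add_residue(res)          # stored as chain.residues["1"]

model = Model(id="1")
model.add_chain(chain)          # stored as model.chains["A"]

structure = Structure(id="1ABC")
structure.add_model(model)      # stored as structure.models["1"]

# Lookups walk the same ids back down the hierarchy.
atom = structure.models["1"].get_chain("A").get_residue("1").atoms[0]
```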
{aldepyde-0.0.0a38/aldepyde/biomolecule → aldepyde-0.0.0a43/aldepyde/biomolecule_old}/__init__.py
RENAMED

@@ -15,4 +15,4 @@ __all__ = list(set(_amino_acid.__all__.copy()) |

 import sys

-sys.stderr.write("Note that the `…
+sys.stderr.write("Note that the `biomolecule_old` submodule is not yet fully tested and may be unstable")
{aldepyde-0.0.0a38/aldepyde/biomolecule → aldepyde-0.0.0a43/aldepyde/biomolecule_old}/_pdb.py
RENAMED

@@ -1,6 +1,6 @@
 # Submodule for reading, writing, and fetching PDB/mmcif files
 from aldepyde.configurable import Configurable
-from aldepyde.…
+from aldepyde.remode_data import RemoteFileHandler
 from ._AtomFactory import *
 from ._Atom import Atom
 from aldepyde import cache
@@ -232,7 +232,7 @@ class PDB(Configurable):
 # url = r'https://files.rcsb.org/download/' + prot.strip() + '.pdb'
 # try:
 # with urllib.request.urlopen(url) as f:
-# self.…
+# self.biomolecule_old.SetFetch(f.read().decode('utf-8'))
 # self._Parse(hold_pdb)
 # return True
 # except urllib.error.URLError:
@@ -256,11 +256,11 @@ class PDB(Configurable):

 # class PDB:
 # def __init__(self):
-# self.…
+# self.biomolecule_old = biomolecule_old()
 # # print(apalib.j_data.GetJson())
 #
 # def Current(self):
-# return self.…
+# return self.biomolecule_old
 #
 # def FetchFASTA(self, prot):
 # import urllib.request
@@ -287,7 +287,7 @@ class PDB(Configurable):
 # url = r'https://files.rcsb.org/download/' + prot.strip() + '.pdb'
 # try:
 # with urllib.request.urlopen(url) as f:
-# self.…
+# self.biomolecule_old.SetFetch(f.read().decode('utf-8'))
 # self._Parse(hold_pdb)
 # return True
 # except urllib.error.URLError:
@@ -298,15 +298,15 @@ class PDB(Configurable):
 #
 # def Read(self, path, hold_pdb=False):
 # with open(path, 'r') as fp:
-# self.…
+# self.biomolecule_old.SetFetch(fp.read())
 # self._Parse(hold_pdb)
 #
 # # Wrapper for the ParsePDB file to allow functionality with a fetched protein
 # def _Parse(self, hold_pdb=False):
 # try:
-# if self.…
+# if self.biomolecule_old.GetFetch() is None:
 # raise apaExcept.NoFetchError
-# return self._ParsePDB(self.…
+# return self._ParsePDB(self.biomolecule_old.GetFetch(), hold_pdb)
 # # return self._ParsePDB(self.container.GetFetch().splitlines())
 # except apaExcept.NoFetchError as e:
 # sys.stderr.write(e.message)
@@ -314,10 +314,10 @@ class PDB(Configurable):
 #
 # #PDB standard described here: https://www.wwpdb.org/documentation/file-format-content/format33/v3.3.html
 # def _ParsePDB(self, raw_pdb, hold_pdb=False):
-# self.…
+# self.biomolecule_old.ClearAll()
 # remark350 = ""
 # if hold_pdb:
-# self.…
+# self.biomolecule_old.SetFetch(raw_pdb)
 # for line in raw_pdb.splitlines():
 # # print(line)
 # if line[0:6] == 'ATOM ' or line[0:6] == 'HETATM':
@@ -326,8 +326,8 @@ class PDB(Configurable):
 # remark350 += line + "\n"
 #
 # symmetry_groups = self._ParseRemark350(remark350)
-# self.…
-# self.…
+# self.biomolecule_old._AddSymmetry(symmetry_groups)
+# self.biomolecule_old._PostParseEvaluations()
 # def _ParseRemark350(self, remark350):
 # lines = remark350.splitlines()
 # lines.append("END")
@@ -352,22 +352,22 @@ class PDB(Configurable):
 # symLines.append([BIOMT, int(id), float(x), float(y), float(z) ,float(m)])
 # elif symFlag:
 # symFlag = False
-# …
-# …
+# biomolecule_old = {}
+# biomolecule_old['chains'] = chains
 # for sl in symLines:
-# if sl[1] not in …
-# …
-# …
-# biomolecules.append(…
+# if sl[1] not in biomolecule_old.keys():
+# biomolecule_old[sl[1]] = []
+# biomolecule_old[sl[1]].append([sl[0]] + sl[2:])
+# biomolecules.append(biomolecule_old)
 # #I hate PDB file format
 # for i in range(len(biomolecules)):
-# …
-# for key in …
-# …
-# for i in range(len(…
+# biomolecule_old = biomolecules[i]
+# for key in biomolecule_old.keys():
+# biomolecule_old[key].sort(key=lambda x:x[0])
+# for i in range(len(biomolecule_old[key])):
 # if key == "chains":
 # continue
-# …
+# biomolecule_old[key][i].pop(0)
 # return biomolecules
 #
 # def _ExtractAtomAndResidue(self, line):
@@ -392,22 +392,22 @@ class PDB(Configurable):
 # resType = "HETATM"
 # else:
 # resType = self.DetermineResType(resName)
-# residue = self.…
+# residue = self.biomolecule_old.AddResidue(resType, resSeq, resName, chainID)
 # residue.InsertAtom(atom)
 #
 # def DetermineResType(self, res_code):
-# if …
+# if remode_data.ValidateRNA(res_code):
 # return 'RNA'
-# elif …
+# elif remode_data.ValidateDNA(res_code):
 # return 'DNA'
-# elif …
+# elif remode_data.ValidateAA(res_code):
 # return "AA"
 # else:
 # return "HETATM"
 #
 # #Remove all of the waters from the current fetch. Probably make this more general for any HETATM group. Make a wrapper?
 # def RemoveWater(self):
-# h_chains = self.…
+# h_chains = self.biomolecule_old.GetHETATMChains()
 # for chain in h_chains.keys():
 # h_chains[chain] = {key: value for (key, value) in h_chains[chain].items() if value.GetResName().upper() != 'HOH'}
 #
@@ -420,7 +420,7 @@ class PDB(Configurable):
 #
 # def AddChain(self, collection, name=None):
 # if name is None:
-# chains = list(self.…
+# chains = list(self.biomolecule_old.Chains.keys())
 # for i in range(26):
 # if chr(ord('z') - i) not in chains:
 # name = chr(ord('z') - i)
@@ -433,23 +433,23 @@ class PDB(Configurable):
 # residue.SetChainID(name)
 # for atom in residue.GetAtoms():
 # atom.SetChainID(name)
-# self.…
-# self.…
+# self.biomolecule_old.AddChain(name)
+# self.biomolecule_old.Chains[name] = collection
 #
 # def WritePDB(self, fp):
-# s = sorted(self.…
+# s = sorted(self.biomolecule_old.DumpResidues(), key=lambda x: x.seqNum)
 # with open(fp, "w") as f:
 # for res in s:
 # f.write(res.WriteForPDB())
 #
 # #Write contents to FASTA
 # def ToFASTA(self):
-# ls = self.…
+# ls = self.biomolecule_old.AsList(ordered=True)
 # retStr = ""
 # for r in ls:
-# if …
-# name = …
-# retStr += …
+# if remode_data.ValidateAA(r.resName):
+# name = remode_data.Map("Amino Acids", r.resName)
+# retStr += remode_data.GetJson()["Amino Acids"][name]["1code"]
 # elif r.resName.upper() != "HOH":
 # retStr += "X"
 # return retStr
{aldepyde-0.0.0a38 → aldepyde-0.0.0a43}/aldepyde/cache/_cache.py

@@ -5,7 +5,7 @@
 # import re # We're getting spicy with this
 # import gzip
 # from io import BytesIO, TextIOWrapper
-# import …
+# import data
 # from dataclasses import dataclass
 # from aldepyde.env import ENV
 #
@@ -72,21 +72,21 @@
 #
 # def load(self, path):
 # with open(path, "r") as fp:
-# settings = …
+# settings = data.load(fp)
 # self._enabled = settings['enabled']
 # self._max_memory = settings['max_memory']
 #
 # def _save_cache(self):
 # marker = os.path.join(self._path, self.cache_marker)
 # # with open(marker, "r") as fp:
-# # settings = …
+# # settings = data.load(fp)
 # settings = {}
 # settings["version"] = self.version
 # settings["enabled"] = self._enabled
 # settings["path"] = self._path
 # settings["max_memory"] = self._max_memory
 # with open(marker, "w") as fp:
-# fp.write(…
+# fp.write(data.dumps(settings, indent=2))
 #
 # def designate_cache(self, path) -> None:
 # os.makedirs(self._path, exist_ok=True)
{aldepyde-0.0.0a38 → aldepyde-0.0.0a43}/aldepyde/cache/cachemanager.py

@@ -2,6 +2,8 @@ import os
 import sys
 import json
 from dataclasses import dataclass
+from io import BytesIO
+
 from aldepyde.env import ENV
 from .utils import _parse_memory, _convert_memory_bytes, _convert_memory_bits

@@ -11,9 +13,7 @@ def requires_enabled(func):
         if hasattr(cls, "enabled") and cls.enabled and cls._initialized:
             return func(cls, *args, **kwargs)
         else:
-
-            pass
-
+            return None
     return wrapper


@@ -23,6 +23,7 @@ class CacheManager():
             self._initialized = False
             return
         self._initialized = True
+        self.enable()
         self._cache_marker = ".aldepyde_cache"
         self.fingerprint = "adpy."
         if path is None:
@@ -30,7 +31,11 @@ class CacheManager():
         else:
             self._path = path
         if os.path.exists(self.marker_location()):
-
+            try:
+                self.load_manager()
+            except json.decoder.JSONDecodeError:
+                self.load_defaults()
+                self.save_settings()
         else:
             self.load_defaults()
             self.save_settings()
@@ -58,7 +63,7 @@ class CacheManager():
         return f"{self.__dict__}"

     @requires_enabled
-    def marker_location(self):
+    def marker_location(self) -> str:
         return os.path.join(self._path, self._cache_marker)

     # Saves settings to a location. If a path is specified, the results are saved to that location
@@ -70,8 +75,8 @@ class CacheManager():
         elif os.path.isdir(path):
             path = os.path.join(path, self._cache_marker)
         with open(path, 'w') as fp:
-            for v in vars(self):
-
+            # for v in vars(self):
+            #     print(v)
             fp.write(json.dumps(vars(self), indent=2))

     @requires_enabled
@@ -95,6 +100,9 @@ class CacheManager():
     def _enabled_and_initialized(self):
         return self.enabled and self._initialized

+    def is_enabled(self):
+        return self.enabled
+
     @requires_enabled
     def load_defaults(self):
         self._cache_marker = ".aldepyde_cache"
@@ -103,9 +111,17 @@ class CacheManager():
         self.enabled = True

     @requires_enabled
-    def _inside(self, f):
+    def _inside(self, f) -> str:
         return os.path.join(self._path, f)

+    @requires_enabled
+    def peek(self, name):
+        files = []
+        for file in self.list_cache():
+            if name.lower() in file.lower():
+                files.append(file)
+        return files
+
     # Requires a filename, not a path. The path will be self._path
     @requires_enabled
     def _is_safe_to_delete(self, filename):
@@ -132,16 +148,23 @@ class CacheManager():
             os.remove(self._inside(file))
             print(f"Deleting {file}")

-    @requires_enabled
-    def get(self, file) -> str | None:
-
-
-
+    # @requires_enabled
+    # def get(self, file) -> str | None:
+    #     if not self._enabled_and_initialized():
+    #         return None
+    #     return self._inside(file) if self.exists(file) else None

     # TODO Create a with cache_manager.open(...) setup here
     # def open(self, filename):
     #     pass

+    @requires_enabled
+    def retrieve(self, filename:str) -> str|None:
+        if self.exists(filename):
+            return self._inside(filename)
+        else:
+            return None
+
     @requires_enabled
     def exists(self, filename):
         return os.path.isfile(self._inside(filename))