rcsb-embedding-model 0.0.1 (tar.gz)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- rcsb_embedding_model-0.0.1/.gitignore +3 -0
- rcsb_embedding_model-0.0.1/LICENSE.md +28 -0
- rcsb_embedding_model-0.0.1/PKG-INFO +115 -0
- rcsb_embedding_model-0.0.1/README.md +99 -0
- rcsb_embedding_model-0.0.1/assets/embedding-model-architecture.png +0 -0
- rcsb_embedding_model-0.0.1/examples/esm_embeddings.py +77 -0
- rcsb_embedding_model-0.0.1/pyproject.toml +29 -0
- rcsb_embedding_model-0.0.1/src/rcsb_embedding_model/__init__.py +4 -0
- rcsb_embedding_model-0.0.1/src/rcsb_embedding_model/model/layers.py +28 -0
- rcsb_embedding_model-0.0.1/src/rcsb_embedding_model/model/residue_embedding_aggregator.py +53 -0
- rcsb_embedding_model-0.0.1/src/rcsb_embedding_model/rcsb_structure_embedding.py +148 -0
- rcsb_embedding_model-0.0.1/tests/resources/1acb.cif +5068 -0
- rcsb_embedding_model-0.0.1/tests/test_model.py +32 -0

rcsb_embedding_model-0.0.1/LICENSE.md
@@ -0,0 +1,28 @@
BSD 3-Clause License

Copyright (c) 2024, RCSB Protein Data Bank, UC San Diego

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

1. Redistributions of source code must retain the above copyright notice, this
   list of conditions and the following disclaimer.

2. Redistributions in binary form must reproduce the above copyright notice,
   this list of conditions and the following disclaimer in the documentation
   and/or other materials provided with the distribution.

3. Neither the name of the copyright holder nor the names of its
   contributors may be used to endorse or promote products derived from
   this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

rcsb_embedding_model-0.0.1/PKG-INFO
@@ -0,0 +1,115 @@
Metadata-Version: 2.4
Name: rcsb-embedding-model
Version: 0.0.1
Summary: Protein Embedding Model for Structure Search
Project-URL: Homepage, https://github.com/rcsb/rcsb-embedding-model
Project-URL: Issues, https://github.com/rcsb/rcsb-embedding-model/issues
Author-email: Joan Segura <joan.segura@rcsb.org>
License-Expression: BSD-3-Clause
License-File: LICENSE.md
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python :: 3
Requires-Python: >=3.10
Requires-Dist: esm>=3.2.0
Requires-Dist: torch>=2.6.0
Description-Content-Type: text/markdown

# RCSB Embedding Model: A Deep Learning Approach for 3D Structure Embeddings

## Overview
RCSB Embedding Model is a PyTorch-based neural network that transforms macromolecular 3D structures into vector embeddings.

Preprint: [Multi-scale structural similarity embedding search across entire proteomes](https://www.biorxiv.org/content/10.1101/2025.02.28.640875v1).

A web-based implementation using this model for structure similarity search is available at [rcsb-embedding-search](http://embedding-search.rcsb.org).

If you are interested in training the model with a new dataset, visit the [rcsb-embedding-search repository](https://github.com/bioinsilico/rcsb-embedding-search), which provides scripts and documentation for training.

---

## Embedding Model
The embedding model is trained to predict structural similarity by approximating TM-scores using cosine distances between embeddings. It consists of two main components:

- **Protein Language Model (PLM)**: Computes residue-level embeddings from a given 3D structure.
- **Residue Embedding Aggregator**: A transformer-based neural network that aggregates these residue-level embeddings into a single vector.



### **Protein Language Model (PLM)**
Residue-wise embeddings of protein structures are computed using the [ESM3](https://www.evolutionaryscale.ai/) generative protein language model.

### **Residue Embedding Aggregator**
The aggregation component consists of six transformer encoder layers, each with a 3,072-neuron feedforward layer and ReLU activations. After processing through these layers, a summation pooling operation is applied, followed by 12 fully connected residual layers that refine the embeddings into a single 1,536-dimensional vector.

---

## How to Use the Model
This repository provides the tools to compute embeddings for 3D macromolecular structure data.

### **Installation**
`pip install rcsb-embedding-model`

### **Requirements**
Ensure you have the following dependencies installed:
- `python >= 3.10`
- `esm`
- `torch`

### **Generating Residue Embeddings**
ESM3 residue-level embeddings for a 3D structure can be calculated as follows:

```python
from rcsb_embedding_model import RcsbStructureEmbedding

mmcif_file = "<path_to_file>/<name>.cif"
model = RcsbStructureEmbedding()
res_embedding = model.residue_embedding(
    structure_src=mmcif_file,
    format="mmcif",
    chain_id='A'
)
```

### **Generating Protein Structure Embeddings**
A protein 3D structure embedding can be calculated as follows:

```python
from rcsb_embedding_model import RcsbStructureEmbedding

mmcif_file = "<path_to_file>/<name>.cif"
model = RcsbStructureEmbedding()
res_embedding = model.residue_embedding(
    structure_src=mmcif_file,
    format="mmcif",
    chain_id='A'
)
structure_embedding = model.aggregator_embedding(
    res_embedding
)
```

### **Pretrained Model**
You can download a pretrained Residue Embedding Aggregator model from [Hugging Face](https://huggingface.co/jseguramora/rcsb-embedding-model/resolve/main/rcsb-embedding-model.pt).

---

## Questions & Issues
For any questions or comments, please open an issue on this repository.

---

## License
This software is released under the BSD 3-Clause License. See the full license text below.

### BSD 3-Clause License

Copyright (c) 2024, RCSB Protein Data Bank, UC San Diego

Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:

1. Redistributions of source code must retain the above copyright notice, this list of conditions, and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions, and the following disclaimer in the documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

rcsb_embedding_model-0.0.1/README.md
@@ -0,0 +1,99 @@
# RCSB Embedding Model: A Deep Learning Approach for 3D Structure Embeddings

## Overview
RCSB Embedding Model is a PyTorch-based neural network that transforms macromolecular 3D structures into vector embeddings.

Preprint: [Multi-scale structural similarity embedding search across entire proteomes](https://www.biorxiv.org/content/10.1101/2025.02.28.640875v1).

A web-based implementation using this model for structure similarity search is available at [rcsb-embedding-search](http://embedding-search.rcsb.org).

If you are interested in training the model with a new dataset, visit the [rcsb-embedding-search repository](https://github.com/bioinsilico/rcsb-embedding-search), which provides scripts and documentation for training.

---

## Embedding Model
The embedding model is trained to predict structural similarity by approximating TM-scores using cosine distances between embeddings. It consists of two main components:

- **Protein Language Model (PLM)**: Computes residue-level embeddings from a given 3D structure.
- **Residue Embedding Aggregator**: A transformer-based neural network that aggregates these residue-level embeddings into a single vector.



### **Protein Language Model (PLM)**
Residue-wise embeddings of protein structures are computed using the [ESM3](https://www.evolutionaryscale.ai/) generative protein language model.

### **Residue Embedding Aggregator**
The aggregation component consists of six transformer encoder layers, each with a 3,072-neuron feedforward layer and ReLU activations. After processing through these layers, a summation pooling operation is applied, followed by 12 fully connected residual layers that refine the embeddings into a single 1,536-dimensional vector.

---

## How to Use the Model
This repository provides the tools to compute embeddings for 3D macromolecular structure data.

### **Installation**
`pip install rcsb-embedding-model`

### **Requirements**
Ensure you have the following dependencies installed:
- `python >= 3.10`
- `esm`
- `torch`

### **Generating Residue Embeddings**
ESM3 residue-level embeddings for a 3D structure can be calculated as follows:

```python
from rcsb_embedding_model import RcsbStructureEmbedding

mmcif_file = "<path_to_file>/<name>.cif"
model = RcsbStructureEmbedding()
res_embedding = model.residue_embedding(
    structure_src=mmcif_file,
    format="mmcif",
    chain_id='A'
)
```

### **Generating Protein Structure Embeddings**
A protein 3D structure embedding can be calculated as follows:

```python
from rcsb_embedding_model import RcsbStructureEmbedding

mmcif_file = "<path_to_file>/<name>.cif"
model = RcsbStructureEmbedding()
res_embedding = model.residue_embedding(
    structure_src=mmcif_file,
    format="mmcif",
    chain_id='A'
)
structure_embedding = model.aggregator_embedding(
    res_embedding
)
```
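
The two steps can also be run in one call: `RcsbStructureEmbedding.structure_embedding` (defined in `rcsb_structure_embedding.py`) computes the residue embeddings and aggregates them internally:

```python
structure_embedding = model.structure_embedding(
    structure_src=mmcif_file,
    format="mmcif",
    chain_id='A'
)
```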

### **Pretrained Model**
You can download a pretrained Residue Embedding Aggregator model from [Hugging Face](https://huggingface.co/jseguramora/rcsb-embedding-model/resolve/main/rcsb-embedding-model.pt).
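
The package fetches and caches these weights automatically when the aggregator is first used (see `load_aggregator_embedding` in `rcsb_structure_embedding.py`). If you prefer to load a downloaded checkpoint yourself, a minimal sketch mirroring the package's internal loader looks like this; the local path is a placeholder:

```python
import torch
from rcsb_embedding_model.model.residue_embedding_aggregator import ResidueEmbeddingAggregator

weights_path = "<path_to_file>/rcsb-embedding-model.pt"  # downloaded checkpoint
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

# Instantiate the aggregator and load the pretrained weights onto the chosen device.
aggregator = ResidueEmbeddingAggregator()
aggregator.load_state_dict(torch.load(weights_path, weights_only=True, map_location=device))
aggregator.to(device)
aggregator.eval()
```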

---

## Questions & Issues
For any questions or comments, please open an issue on this repository.

---

## License
This software is released under the BSD 3-Clause License. See the full license text below.

### BSD 3-Clause License

Copyright (c) 2024, RCSB Protein Data Bank, UC San Diego

Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:

1. Redistributions of source code must retain the above copyright notice, this list of conditions, and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions, and the following disclaimer in the documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

Binary file: rcsb_embedding_model-0.0.1/assets/embedding-model-architecture.png (image content not shown)

rcsb_embedding_model-0.0.1/examples/esm_embeddings.py
@@ -0,0 +1,77 @@
import argparse

import torch
from biotite.structure import chain_iter, get_residues, filter_amino_acids
from biotite.structure.io.pdb import PDBFile
from biotite.structure.io.pdbx import CIFFile, get_structure, BinaryCIFFile
from esm.models.esm3 import ESM3, ESM3_OPEN_SMALL, ESM3InferenceClient
from esm.sdk.api import ESMProtein, SamplingConfig
from esm.utils.structure.protein_chain import ProteinChain


def get_structure_from_file(file_name, format="pdb", chain_id=None):
    try:
        if format == "pdb":
            structure = PDBFile.read(file_name).get_structure(
                model=1
            )
        elif format == "mmcif":
            cif_file = CIFFile.read(file_name)
            structure = get_structure(
                cif_file,
                model=1,
                use_author_fields=False
            )
        elif format == "binarycif":
            cif_file = BinaryCIFFile.read(file_name)
            structure = get_structure(
                cif_file,
                model=1,
                use_author_fields=False
            )

        if chain_id:
            structure = structure[structure.chain_id == chain_id]
        return structure
    except:
        return None


esm3_model = ESM3.from_pretrained(ESM3_OPEN_SMALL)


def compute_embeddings(structure):
    embedding_ch = []
    for atom_ch in chain_iter(structure):
        atom_res = atom_ch[filter_amino_acids(atom_ch)]
        if len(atom_res) == 0 or len(get_residues(atom_res)[0]) < 10:
            continue
        protein_chain = ProteinChain.from_atomarray(atom_ch)
        protein = ESMProtein.from_protein_chain(protein_chain)
        protein_tensor = esm3_model.encode(protein)
        embedding_ch.append(esm3_model.forward_and_sample(
            protein_tensor, SamplingConfig(return_per_residue_embeddings=True)
        ).per_residue_embedding)
    return torch.cat(
        embedding_ch,
        dim=0
    )


if __name__ == "__main__":

    parser = argparse.ArgumentParser()
    parser.add_argument('--file', type=str, required=True)
    parser.add_argument('--file_format', type=str)
    parser.add_argument('--chain', type=str)
    args = parser.parse_args()

    structure = get_structure_from_file(
        args.file,
        "pdb" if not args.file_format else args.file_format,
        chain_id=args.chain
    )

    embeddings = compute_embeddings(structure)

    print(embeddings.shape)
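
For reference, this example script can be run from a source checkout against the test structure bundled with the package, e.g. `python examples/esm_embeddings.py --file tests/resources/1acb.cif --file_format mmcif --chain A` (the chain id is illustrative); it prints the shape of the concatenated per-residue ESM3 embedding tensor.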

rcsb_embedding_model-0.0.1/pyproject.toml
@@ -0,0 +1,29 @@
[project]
name = "rcsb-embedding-model"
version = "0.0.1"
authors = [
    { name="Joan Segura", email="joan.segura@rcsb.org" },
]
description = "Protein Embedding Model for Structure Search"
readme = "README.md"
requires-python = ">=3.10"
classifiers = [
    "Programming Language :: Python :: 3",
    "Operating System :: OS Independent",
]
license = "BSD-3-Clause"
license-files = ["LICEN[CS]E*"]
dependencies=[
    "esm >= 3.2.0",
    "torch >= 2.6.0"
]
[project.urls]
Homepage = "https://github.com/rcsb/rcsb-embedding-model"
Issues = "https://github.com/rcsb/rcsb-embedding-model/issues"

[build-system]
requires = [
    "hatchling >= 1.14.1"
]

build-backend = "hatchling.build"

rcsb_embedding_model-0.0.1/src/rcsb_embedding_model/model/layers.py
@@ -0,0 +1,28 @@

import torch.nn as nn


class ResBlock(nn.Module):
    def __init__(self, in_dim, out_dim, dropout=0.1):
        super().__init__()
        if in_dim != out_dim:
            self.residual = nn.Linear(in_dim, out_dim)
        else:
            self.residual = nn.Identity()

        self.block = nn.Sequential(
            nn.LayerNorm(in_dim),
            nn.Dropout(p=dropout),
            nn.Linear(in_dim, out_dim),
            nn.ReLU(),
            nn.LayerNorm(out_dim),
            nn.Dropout(p=dropout),
            nn.Linear(out_dim, out_dim),
        )
        self.activate = nn.ReLU()

    def forward(self, x):
        residual = self.residual(x)
        x = self.block(x)
        x = self.activate(x + residual)
        return x

rcsb_embedding_model-0.0.1/src/rcsb_embedding_model/model/residue_embedding_aggregator.py
@@ -0,0 +1,53 @@

import torch.nn as nn
from collections import OrderedDict

from rcsb_embedding_model.model.layers import ResBlock


class ResidueEmbeddingAggregator(nn.Module):
    dropout = 0.1

    def __init__(
            self,
            input_features=1536,
            dim_feedforward=3072,
            hidden_layer=1536,
            nhead=12,
            num_layers=6,
            res_block_layers=12
    ):
        super().__init__()
        encoder_layer = nn.TransformerEncoderLayer(
            d_model=input_features,
            nhead=nhead,
            dim_feedforward=dim_feedforward,
            dropout=self.dropout,
            batch_first=True
        )
        self.transformer = nn.TransformerEncoder(encoder_layer, num_layers=num_layers)
        if res_block_layers == 0:
            self.embedding = nn.Sequential(OrderedDict([
                ('norm', nn.LayerNorm(input_features)),
                ('dropout', nn.Dropout(p=self.dropout)),
                ('linear', nn.Linear(input_features, hidden_layer)),
                ('activation', nn.ReLU())
            ]))
        else:
            res_block = OrderedDict([(
                f'block{i}',
                ResBlock(input_features, hidden_layer, self.dropout)
            ) for i in range(res_block_layers)])
            res_block.update([
                ('dropout', nn.Dropout(p=self.dropout)),
                ('linear', nn.Linear(input_features, hidden_layer)),
                ('activation', nn.ReLU())
            ])
            self.embedding = nn.Sequential(res_block)

    def forward(self, x, x_mask=None):
        if x.dim() == 2:
            return self.embedding(self.transformer(x, src_key_padding_mask=x_mask).sum(dim=0))
        if x.dim() == 3:
            return self.embedding(self.transformer(x, src_key_padding_mask=x_mask).sum(dim=1))
        raise RuntimeError("Tensor dimension error. Allowed shapes (batch, sequence, residue_embeddings) or (sequence, residue_embeddings)")
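
For orientation, the sketch below (illustrative only, using random inputs and randomly initialized weights) exercises the two input shapes the aggregator's `forward` accepts: an unbatched `(sequence, 1536)` tensor and a batched `(batch, sequence, 1536)` tensor, both reduced to 1,536-dimensional structure embeddings.

```python
import torch
from rcsb_embedding_model.model.residue_embedding_aggregator import ResidueEmbeddingAggregator

aggregator = ResidueEmbeddingAggregator()
aggregator.eval()  # randomly initialized here; real use loads the pretrained checkpoint

with torch.no_grad():
    # Unbatched: 120 residue embeddings -> one 1536-dim structure embedding.
    single = aggregator(torch.randn(120, 1536))
    # Batched: 4 structures x 150 residues -> 4 structure embeddings.
    batched = aggregator(torch.randn(4, 150, 1536))

print(single.shape)   # torch.Size([1536])
print(batched.shape)  # torch.Size([4, 1536])
```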

rcsb_embedding_model-0.0.1/src/rcsb_embedding_model/rcsb_structure_embedding.py
@@ -0,0 +1,148 @@
import torch
from biotite.structure import get_residues, chain_iter, filter_amino_acids
from biotite.structure.io.pdb import PDBFile, get_structure as get_pdb_structure, get_assembly as get_pdb_assembly
from biotite.structure.io.pdbx import CIFFile, get_structure, get_assembly, BinaryCIFFile
from esm.models.esm3 import ESM3
from esm.sdk.api import ESMProtein, SamplingConfig
from esm.utils.constants.models import ESM3_OPEN_SMALL
from esm.utils.structure.protein_chain import ProteinChain
from huggingface_hub import hf_hub_download

from rcsb_embedding_model.model.residue_embedding_aggregator import ResidueEmbeddingAggregator


class RcsbStructureEmbedding:

    MIN_RES = 10
    REPO_ID = "rcsb/rcsb-embedding-model"
    FILE_NAME = "rcsb-embedding-model.pt"
    VERSION = "2d71cf6"

    def __init__(self):
        self.__residue_embedding = None
        self.__aggregator_embedding = None

    def load_models(self, device=None):
        self.load_residue_embedding(device)
        self.load_aggregator_embedding(device)

    def load_residue_embedding(self, device=None):
        if not device:
            device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
        self.__residue_embedding = ESM3.from_pretrained(
            ESM3_OPEN_SMALL,
            device
        )

    def load_aggregator_embedding(self, device=None):
        if not device:
            device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
        self.__aggregator_embedding = _load_model(
            _download_model(
                RcsbStructureEmbedding.REPO_ID,
                RcsbStructureEmbedding.FILE_NAME,
                RcsbStructureEmbedding.VERSION
            ),
            device
        )

    def structure_embedding(self, structure_src, format="pdb", chain_id=None, assembly_id=None):
        res_embedding = self.residue_embedding(structure_src, format, chain_id, assembly_id)
        return self.aggregator_embedding(res_embedding)

    def residue_embedding(self, structure_src, format="pdb", chain_id=None, assembly_id=None):
        self.__check_residue_embedding()
        structure = _get_structure_from_src(structure_src, format, chain_id, assembly_id)
        embedding_ch = []
        for atom_ch in chain_iter(structure):
            atom_res = atom_ch[filter_amino_acids(atom_ch)]
            if len(atom_res) == 0 or len(get_residues(atom_res)[0]) < RcsbStructureEmbedding.MIN_RES:
                continue
            protein_chain = ProteinChain.from_atomarray(atom_ch)
            protein = ESMProtein.from_protein_chain(protein_chain)
            protein_tensor = self.__residue_embedding.encode(protein)
            embedding_ch.append(self.__residue_embedding.forward_and_sample(
                protein_tensor, SamplingConfig(return_per_residue_embeddings=True)
            ).per_residue_embedding)
        return torch.cat(
            embedding_ch,
            dim=0
        )

    def aggregator_embedding(self, residue_embedding):
        self.__check_aggregator_embedding()
        return self.__aggregator_embedding(residue_embedding)

    def __check_residue_embedding(self):
        if self.__residue_embedding is None:
            self.load_residue_embedding()

    def __check_aggregator_embedding(self):
        if self.__aggregator_embedding is None:
            self.load_aggregator_embedding()


def _get_structure_from_src(structure_src, format="pdb", chain_id=None, assembly_id=None):
    if format == "pdb":
        pdb_file = PDBFile.read(structure_src)
        structure = _get_pdb_structure(pdb_file, assembly_id)
    elif format == "mmcif":
        cif_file = CIFFile.read(structure_src)
        structure = _get_structure(cif_file, assembly_id)
    elif format == "binarycif":
        cif_file = BinaryCIFFile.read(structure_src)
        structure = _get_structure(cif_file, assembly_id)
    else:
        raise RuntimeError(f"Unknown file format {format}")

    if chain_id is not None:
        structure = structure[structure.chain_id == chain_id]

    return structure


def _get_pdb_structure(pdb_file, assembly_id=None):
    return get_pdb_structure(
        pdb_file,
        model=1
    ) if assembly_id is None else get_pdb_assembly(
        pdb_file,
        assembly_id=assembly_id,
        model=1
    )


def _get_structure(cif_file, assembly_id=None):
    return get_structure(
        cif_file,
        model=1,
        use_author_fields=False
    ) if assembly_id is None else get_assembly(
        cif_file,
        assembly_id=assembly_id,
        model=1,
        use_author_fields=False
    )


def _download_model(
        repo_id,
        filename,
        revision
):
    return hf_hub_download(
        repo_id=repo_id,
        filename=filename,
        revision=revision
    )


def _load_model(model_path, device=None):
    if not device:
        device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    weights = torch.load(model_path, weights_only=True, map_location=device)
    aggregator_model = ResidueEmbeddingAggregator()
    aggregator_model.load_state_dict(weights)
    aggregator_model.to(device)
    aggregator_model.eval()
    return aggregator_model
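
Putting the pieces together, a minimal end-to-end sketch (illustrative; it assumes the ESM3 weights and the aggregator checkpoint can be downloaded on first use, reuses the test structure bundled with the package, and the chain ids are placeholders) embeds two chains of the same file and compares them:

```python
import torch
from rcsb_embedding_model import RcsbStructureEmbedding

model = RcsbStructureEmbedding()
model.load_models()  # optional; both models are otherwise loaded lazily on first use

# Embed two chains of the bundled test structure.
emb_a = model.structure_embedding("tests/resources/1acb.cif", format="mmcif", chain_id="A")
emb_b = model.structure_embedding("tests/resources/1acb.cif", format="mmcif", chain_id="B")

# Cosine similarity between the two 1,536-dimensional structure embeddings.
similarity = torch.nn.functional.cosine_similarity(emb_a, emb_b, dim=0).item()
print(emb_a.shape, similarity)
```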