bayesianflow-for-chem 1.2.4__py3-none-any.whl → 1.2.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of bayesianflow-for-chem might be problematic.
- bayesianflow_for_chem/__init__.py +1 -1
- bayesianflow_for_chem/tool.py +28 -12
- {bayesianflow_for_chem-1.2.4.dist-info → bayesianflow_for_chem-1.2.6.dist-info}/METADATA +1 -1
- {bayesianflow_for_chem-1.2.4.dist-info → bayesianflow_for_chem-1.2.6.dist-info}/RECORD +7 -7
- {bayesianflow_for_chem-1.2.4.dist-info → bayesianflow_for_chem-1.2.6.dist-info}/LICENSE +0 -0
- {bayesianflow_for_chem-1.2.4.dist-info → bayesianflow_for_chem-1.2.6.dist-info}/WHEEL +0 -0
- {bayesianflow_for_chem-1.2.4.dist-info → bayesianflow_for_chem-1.2.6.dist-info}/top_level.txt +0 -0
bayesianflow_for_chem/tool.py
CHANGED
@@ -492,15 +492,16 @@ def inpaint(
 
 
 def quantise_model(model: ChemBFN) -> nn.Module:
     """
-    Dynamic quantisation of the trained model.
+    Dynamic quantisation of the trained model to `torch.qint8` data type.
 
     :param model: trained ChemBFN model
     :type model: bayesianflow_for_chem.model.ChemBFN
     :return: quantised model
     :rtype: torch.nn.Module
     """
-    from torch.ao.nn.quantized.modules.utils import _quantize_weight
     from torch.ao.nn.quantized import dynamic
+    from torch.ao.nn.quantized.modules.utils import _quantize_weight
+    from torch.ao.quantization.qconfig import default_dynamic_qconfig
 
     class QuantisedLinear(dynamic.Linear):
         # Modified from https://github.com/pytorch/pytorch/blob/main/torch/ao/nn/quantized/dynamic/modules/linear.py
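The hoisted `default_dynamic_qconfig` import replaces a deferred in-function import (removed in a later hunk below). For context, a minimal sketch of the stock dynamic-quantisation flow that `quantise_model` customises; the toy `nn.Linear` model here is my own illustration, not part of the package:

import torch
from torch import nn

# Stock dynamic quantisation: weights are converted to qint8 ahead of time,
# activations are quantised on the fly at inference. quantise_model() adapts
# this conversion so LoRA-enabled linear layers survive it.
float_model = nn.Sequential(nn.Linear(16, 32), nn.ReLU(), nn.Linear(32, 4))
quantised = torch.ao.quantization.quantize_dynamic(
    float_model, {nn.Linear}, dtype=torch.qint8
)
print(quantised)  # nn.Linear layers now appear as DynamicQuantizedLinear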
@@ -521,6 +522,9 @@ def quantise_model(model: ChemBFN) -> nn.Module:
             self.scaling: Optional[float] = None
             self.lora_dropout: Optional[float] = None
 
+        def _get_name(self) -> str:
+            return "DynamicQuantizedLoRALinear"
+
         def enable_lora(
             self, r: int = 8, lora_alpha: int = 1, lora_dropout: float = 0.0
         ) -> None:
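`_get_name` is the (private) hook that `nn.Module.__repr__` uses for a layer's label, so the override above makes quantised LoRA layers identifiable in printed model summaries. A quick illustration with a hypothetical subclass of my own:

from torch import nn

# nn.Module.__repr__ builds the header from _get_name(), so overriding it
# relabels the layer when the model is printed.
class Tagged(nn.Linear):
    def _get_name(self) -> str:
        return "DynamicQuantizedLoRALinear"

print(Tagged(4, 2))  # DynamicQuantizedLoRALinear(in_features=4, out_features=2, bias=True)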
@@ -540,7 +544,6 @@ def quantise_model(model: ChemBFN) -> nn.Module:
             self._packed_params.requires_grad_(False)
 
         def forward(self, x: Tensor) -> Tensor:
-            # Note that we can handle self.bias == None case.
             if self._packed_params.dtype == torch.qint8:
                 if self.version is None or self.version < 4:
                     Y = torch.ops.quantized.linear_dynamic(
@@ -575,11 +578,6 @@ def quantise_model(model: ChemBFN) -> nn.Module:
             if mod.qconfig is not None and mod.qconfig.weight is not None:
                 weight_observer = mod.qconfig.weight()
             else:
-                # We have the circular import issues if we import the qconfig in the beginning of this file:
-                # https://github.com/pytorch/pytorch/pull/24231. The current workaround is to postpone the
-                # import until we need it.
-                from torch.ao.quantization.qconfig import default_dynamic_qconfig
-
                 weight_observer = default_dynamic_qconfig.weight()
             dtype = weight_observer.dtype
             assert dtype in [torch.qint8, torch.float16], (
@@ -599,10 +597,28 @@ def quantise_model(model: ChemBFN) -> nn.Module:
             qlinear.set_weight_bias(qweight, mod.bias)
             if mod.lora_enabled:
                 qlinear.lora_enabled = True
-                qlinear.lora_A = mod.lora_A
-                qlinear.lora_B = mod.lora_B
-                qlinear.scaling = mod.scaling
-                qlinear.lora_dropout = mod.lora_dropout
+                qlinear.lora_A = nn.Parameter(mod.lora_A.clone().detach_())
+                qlinear.lora_B = nn.Parameter(mod.lora_B.clone().detach_())
+                qlinear.scaling = deepcopy(mod.scaling)
+                qlinear.lora_dropout = deepcopy(mod.lora_dropout)
+            return qlinear
+
+        @classmethod
+        def from_reference(cls, ref_qlinear: Self) -> Self:
+            qlinear = cls(
+                ref_qlinear.in_features,
+                ref_qlinear.out_features,
+                dtype=ref_qlinear.weight_dtype,
+            )
+            qweight = ref_qlinear.get_quantized_weight()
+            bias = ref_qlinear.bias
+            qlinear.set_weight_bias(qweight, bias)
+            if ref_qlinear.lora_enabled:
+                qlinear.lora_enabled = True
+                qlinear.lora_A = nn.Parameter(ref_qlinear.lora_A.clone().detach_())
+                qlinear.lora_B = nn.Parameter(ref_qlinear.lora_B.clone().detach_())
+                qlinear.scaling = deepcopy(ref_qlinear.scaling)
+                qlinear.lora_dropout = deepcopy(ref_qlinear.lora_dropout)
             return qlinear
 
         mapping = deepcopy(quantization.DEFAULT_DYNAMIC_QUANT_MODULE_MAPPINGS)
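The `from_float` change swaps aliasing for genuine copies: the quantised module now owns detached clones of the LoRA tensors instead of sharing storage with the float model. A toy sketch of the difference, using tensors of my own rather than the package's API:

import torch
from torch import nn

src = nn.Parameter(torch.randn(4, 2))

alias = src                                 # old behaviour: same storage
copy = nn.Parameter(src.clone().detach_())  # new behaviour: independent copy

with torch.no_grad():
    src.add_(1.0)                           # mutate the original in place

print(alias.data_ptr() == src.data_ptr())   # True: alias tracks every change
print(torch.equal(copy, src))               # False: the clone is unaffected

The new `from_reference` classmethod applies the same copy discipline when PyTorch converts a reference-quantised module, so the LoRA state survives that conversion path as well.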
{bayesianflow_for_chem-1.2.4.dist-info → bayesianflow_for_chem-1.2.6.dist-info}/RECORD
RENAMED
@@ -1,12 +1,12 @@
-bayesianflow_for_chem/__init__.py,sha256
+bayesianflow_for_chem/__init__.py,sha256=sdyCK-Zd32-FNOcjuSB02ABx8vn53phorQeVqyWMWk4,293
 bayesianflow_for_chem/data.py,sha256=9tpRba40lxwrB6aPSJMkxUglEVC3VEQC9wWxhDuz3Q8,7760
 bayesianflow_for_chem/model.py,sha256=HvEvW_xRbkv4eSv5lhd72BJMZkg-ZACEi1DAW3p5Q1Y,35918
 bayesianflow_for_chem/scorer.py,sha256=mV1vX8aBGFra2BE7N8WHihVIo3dXmUdPQIGfSaiuNdk,4084
-bayesianflow_for_chem/tool.py,sha256=
+bayesianflow_for_chem/tool.py,sha256=VuEqbT7Qraa4vnKMHbToyAYIiRoQI7gEPLKEBCWGmVg,23706
 bayesianflow_for_chem/train.py,sha256=kj6icGqymUUYopDtpre1oE_wpvpeNilbpzgffBsd1tk,9589
 bayesianflow_for_chem/vocab.txt,sha256=HgtAZmpWYk4y8PqEVC4vqut1vE75DfRKE_10s2UW0rU,790
-bayesianflow_for_chem-1.2.
-bayesianflow_for_chem-1.2.
-bayesianflow_for_chem-1.2.
-bayesianflow_for_chem-1.2.
-bayesianflow_for_chem-1.2.
+bayesianflow_for_chem-1.2.6.dist-info/LICENSE,sha256=hIahDEOTzuHCU5J2nd07LWwkLW7Hko4UFO__ffsvB-8,34523
+bayesianflow_for_chem-1.2.6.dist-info/METADATA,sha256=Akoh5dQW_0jeYuGC4ZKKYHS1WJn0xRwGDr7ut-Q-5sc,5890
+bayesianflow_for_chem-1.2.6.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+bayesianflow_for_chem-1.2.6.dist-info/top_level.txt,sha256=KHsanI3BMCt8D9Qpze2ycrF6nMa3PyojgO6eS1c8kco,22
+bayesianflow_for_chem-1.2.6.dist-info/RECORD,,
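The RECORD hashes follow the wheel format (PEP 376/427): `sha256=` plus an unpadded URL-safe base64 digest of the file, then its size in bytes. A minimal sketch for reproducing an entry; the path used in the comment is just the example from this diff:

import base64
import hashlib
from pathlib import Path

def record_entry(path: str) -> str:
    # Build a RECORD-style line: path,sha256=<urlsafe-b64 digest, no padding>,<size>
    data = Path(path).read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=")
    return f"{path},sha256={digest.decode()},{len(data)}"

# Run against the 1.2.6 wheel contents, record_entry("bayesianflow_for_chem/tool.py")
# should reproduce the tool.py line above, ending in ...,23706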
{bayesianflow_for_chem-1.2.4.dist-info → bayesianflow_for_chem-1.2.6.dist-info}/LICENSE
RENAMED
File without changes

{bayesianflow_for_chem-1.2.4.dist-info → bayesianflow_for_chem-1.2.6.dist-info}/WHEEL
RENAMED
File without changes

{bayesianflow_for_chem-1.2.4.dist-info → bayesianflow_for_chem-1.2.6.dist-info}/top_level.txt
RENAMED
File without changes