bayesianflow-for-chem 1.2.4-py3-none-any.whl → 1.2.5-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of bayesianflow-for-chem might be problematic.

bayesianflow_for_chem/__init__.py

@@ -7,5 +7,5 @@ from . import data, tool, train, scorer
 from .model import ChemBFN, MLP
 
 __all__ = ["data", "tool", "train", "scorer", "ChemBFN", "MLP"]
-__version__ = "1.2.4"
+__version__ = "1.2.5"
 __author__ = "Nianze A. Tao (Omozawa Sueno)"

bayesianflow_for_chem/tool.py

@@ -521,6 +521,9 @@ def quantise_model(model: ChemBFN) -> nn.Module:
             self.scaling: Optional[float] = None
             self.lora_dropout: Optional[float] = None
 
+        def _get_name(self) -> str:
+            return "DynamicQuantizedLoRALinear"
+
         def enable_lora(
             self, r: int = 8, lora_alpha: int = 1, lora_dropout: float = 0.0
         ) -> None:
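
The added _get_name override only changes how the layer is labelled when the model is printed: nn.Module.__repr__ calls _get_name() to pick the class label. A minimal sketch of that behaviour; the Demo class below is hypothetical and not part of the package:

import torch.nn as nn

class Demo(nn.Linear):
    # Hypothetical stand-in: overriding _get_name() only swaps the label
    # printed by nn.Module.__repr__, which is what the new method does for
    # the dynamically quantized LoRA layer.
    def _get_name(self) -> str:
        return "DynamicQuantizedLoRALinear"

print(Demo(4, 4))
# DynamicQuantizedLoRALinear(in_features=4, out_features=4, bias=True)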

@@ -540,23 +543,7 @@ def quantise_model(model: ChemBFN) -> nn.Module:
             self._packed_params.requires_grad_(False)
 
         def forward(self, x: Tensor) -> Tensor:
-            # Note that we can handle self.bias == None case.
-            if self._packed_params.dtype == torch.qint8:
-                if self.version is None or self.version < 4:
-                    Y = torch.ops.quantized.linear_dynamic(
-                        x, self._packed_params._packed_params
-                    )
-                else:
-                    Y = torch.ops.quantized.linear_dynamic(
-                        x, self._packed_params._packed_params, reduce_range=True
-                    )
-            elif self._packed_params.dtype == torch.float16:
-                Y = torch.ops.quantized.linear_dynamic_fp16(
-                    x, self._packed_params._packed_params
-                )
-            else:
-                raise RuntimeError("Unsupported dtype on dynamic quantized linear!")
-            result = Y.to(x.dtype)
+            result = dynamic.Linear.forward(self, x)
             if self.lora_enabled and isinstance(self.lora_dropout, float):
                 result += (
                     nn.functional.dropout(x, self.lora_dropout, self.training)
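
The rewritten forward() drops the hand-copied dispatch over the packed-parameter dtype and delegates the quantized matmul to the parent class, dynamic.Linear.forward, before adding the LoRA correction. Below is a minimal sketch of the same pattern with a plain nn.Linear standing in for the quantized base layer; the class name LoRALinearSketch and the exact form of the low-rank update are assumptions, since the diff is cut off before the full LoRA expression:

import torch
import torch.nn as nn

class LoRALinearSketch(nn.Module):
    def __init__(self, in_features: int, out_features: int, r: int = 8,
                 lora_alpha: int = 1, lora_dropout: float = 0.0) -> None:
        super().__init__()
        # Frozen base layer; in the package this role is played by the
        # dynamically quantized linear layer.
        self.base = nn.Linear(in_features, out_features).requires_grad_(False)
        # Standard LoRA factors: B starts at zero so the update is initially a no-op.
        self.lora_A = nn.Parameter(torch.randn(r, in_features) * 0.02)
        self.lora_B = nn.Parameter(torch.zeros(out_features, r))
        self.scaling = lora_alpha / r
        self.lora_dropout = lora_dropout

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        result = self.base(x)  # analogous to dynamic.Linear.forward(self, x)
        delta = nn.functional.dropout(x, self.lora_dropout, self.training)
        delta = delta @ self.lora_A.t() @ self.lora_B.t()  # low-rank correction
        return result + delta * self.scaling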

@@ -599,10 +586,28 @@ def quantise_model(model: ChemBFN) -> nn.Module:
             qlinear.set_weight_bias(qweight, mod.bias)
             if mod.lora_enabled:
                 qlinear.lora_enabled = True
-                qlinear.lora_A = mod.lora_A
-                qlinear.lora_B = mod.lora_B
-                qlinear.scaling = mod.scaling
-                qlinear.lora_dropout = mod.lora_dropout
+                qlinear.lora_A = nn.Parameter(mod.lora_A.clone().detach_())
+                qlinear.lora_B = nn.Parameter(mod.lora_B.clone().detach_())
+                qlinear.scaling = deepcopy(mod.scaling)
+                qlinear.lora_dropout = deepcopy(mod.lora_dropout)
+            return qlinear
+
+        @classmethod
+        def from_reference(cls, ref_qlinear: Self) -> Self:
+            qlinear = cls(
+                ref_qlinear.in_features,
+                ref_qlinear.out_features,
+                dtype=ref_qlinear.weight_dtype,
+            )
+            qweight = ref_qlinear.get_quantized_weight()
+            bias = ref_qlinear.bias
+            qlinear.set_weight_bias(qweight, bias)
+            if ref_qlinear.lora_enabled:
+                qlinear.lora_enabled = True
+                qlinear.lora_A = nn.Parameter(ref_qlinear.lora_A.clone().detach_())
+                qlinear.lora_B = nn.Parameter(ref_qlinear.lora_B.clone().detach_())
+                qlinear.scaling = deepcopy(ref_qlinear.scaling)
+                qlinear.lora_dropout = deepcopy(ref_qlinear.lora_dropout)
             return qlinear
 
     mapping = deepcopy(quantization.DEFAULT_DYNAMIC_QUANT_MODULE_MAPPINGS)
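
Two things change here: the existing conversion path now gives the quantized layer its own copies of the LoRA tensors (clone().detach_() wrapped in nn.Parameter, plus deepcopy of the scalar settings) instead of aliasing the float module's attributes, and a from_reference constructor is added so the layer can also be built from a reference-quantized module. For context, here is a rough sketch of how a customised mapping such as the one built from quantization.DEFAULT_DYNAMIC_QUANT_MODULE_MAPPINGS at the end of quantise_model is typically handed to PyTorch's dynamic quantization; the toy model and the final mapping entry are illustrative assumptions, not the package's actual wiring:

from copy import deepcopy

import torch
from torch.ao import quantization
from torch.ao.nn.quantized import dynamic

# Copy the default dynamic-quantization mapping and point nn.Linear at the
# replacement class (the package would register its LoRA-aware subclass here).
mapping = deepcopy(quantization.get_default_dynamic_quant_module_mappings())
mapping[torch.nn.Linear] = dynamic.Linear

# Hypothetical toy model, quantized with the customised mapping.
model = torch.nn.Sequential(torch.nn.Linear(16, 16), torch.nn.ReLU())
qmodel = quantization.quantize_dynamic(
    model, {torch.nn.Linear}, dtype=torch.qint8, mapping=mapping
)
print(qmodel)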

bayesianflow_for_chem-1.2.4.dist-info/METADATA → bayesianflow_for_chem-1.2.5.dist-info/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: bayesianflow_for_chem
-Version: 1.2.4
+Version: 1.2.5
 Summary: Bayesian flow network framework for Chemistry
 Home-page: https://augus1999.github.io/bayesian-flow-network-for-chemistry/
 Author: Nianze A. Tao

bayesianflow_for_chem-1.2.4.dist-info/RECORD → bayesianflow_for_chem-1.2.5.dist-info/RECORD

@@ -1,12 +1,12 @@
-bayesianflow_for_chem/__init__.py,sha256=-_0xD4lo_Vn2GrlXG-y13MCTwDfj391kzgTnyLplkNk,293
+bayesianflow_for_chem/__init__.py,sha256=GMGe5nU963qFL6vJ9OZSfqfSyEImC_P2zyUS0cyP3Mg,293
 bayesianflow_for_chem/data.py,sha256=9tpRba40lxwrB6aPSJMkxUglEVC3VEQC9wWxhDuz3Q8,7760
 bayesianflow_for_chem/model.py,sha256=HvEvW_xRbkv4eSv5lhd72BJMZkg-ZACEi1DAW3p5Q1Y,35918
 bayesianflow_for_chem/scorer.py,sha256=mV1vX8aBGFra2BE7N8WHihVIo3dXmUdPQIGfSaiuNdk,4084
-bayesianflow_for_chem/tool.py,sha256=d-g47Ctn6qb_j1bWCWV99ytUxJ23zJ32SJacQ_WXONk,23028
+bayesianflow_for_chem/tool.py,sha256=tJjb8q3_orNkj2BYJwz5VxqeaOv55dvqO93_uigLJIk,23221
 bayesianflow_for_chem/train.py,sha256=kj6icGqymUUYopDtpre1oE_wpvpeNilbpzgffBsd1tk,9589
 bayesianflow_for_chem/vocab.txt,sha256=HgtAZmpWYk4y8PqEVC4vqut1vE75DfRKE_10s2UW0rU,790
-bayesianflow_for_chem-1.2.4.dist-info/LICENSE,sha256=hIahDEOTzuHCU5J2nd07LWwkLW7Hko4UFO__ffsvB-8,34523
-bayesianflow_for_chem-1.2.4.dist-info/METADATA,sha256=78FGoGjMsdwBavH4rSDtQ_psRYLSUdcg6cdR7KRmgVQ,5890
-bayesianflow_for_chem-1.2.4.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
-bayesianflow_for_chem-1.2.4.dist-info/top_level.txt,sha256=KHsanI3BMCt8D9Qpze2ycrF6nMa3PyojgO6eS1c8kco,22
-bayesianflow_for_chem-1.2.4.dist-info/RECORD,,
+bayesianflow_for_chem-1.2.5.dist-info/LICENSE,sha256=hIahDEOTzuHCU5J2nd07LWwkLW7Hko4UFO__ffsvB-8,34523
+bayesianflow_for_chem-1.2.5.dist-info/METADATA,sha256=hwEEDW6ipmHpjRjQDKxWk5zqI9jwjsl-yxBpvYn93HQ,5890
+bayesianflow_for_chem-1.2.5.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+bayesianflow_for_chem-1.2.5.dist-info/top_level.txt,sha256=KHsanI3BMCt8D9Qpze2ycrF6nMa3PyojgO6eS1c8kco,22
+bayesianflow_for_chem-1.2.5.dist-info/RECORD,,