mmgp 3.5.0__tar.gz → 3.5.1__tar.gz

This diff shows the content changes between publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.

Potentially problematic release.


This version of mmgp might be problematic.

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: mmgp
-Version: 3.5.0
+Version: 3.5.1
 Summary: Memory Management for the GPU Poor
 Author-email: deepbeepmeep <deepbeepmeep@yahoo.com>
 Requires-Python: >=3.10
@@ -15,7 +15,7 @@ Dynamic: license-file
 
 
 <p align="center">
-<H2>Memory Management 3.5.0 for the GPU Poor by DeepBeepMeep</H2>
+<H2>Memory Management 3.5.1 for the GPU Poor by DeepBeepMeep</H2>
 </p>
 
 
@@ -1,6 +1,6 @@
 
 <p align="center">
-<H2>Memory Management 3.5.0 for the GPU Poor by DeepBeepMeep</H2>
+<H2>Memory Management 3.5.1 for the GPU Poor by DeepBeepMeep</H2>
 </p>
 
 
@@ -1,6 +1,6 @@
 [project]
 name = "mmgp"
-version = "3.5.0"
+version = "3.5.1"
 authors = [
   { name = "deepbeepmeep", email = "deepbeepmeep@yahoo.com" },
 ]
@@ -1,4 +1,4 @@
-# ------------------ Memory Management 3.5.0 for the GPU Poor by DeepBeepMeep (mmgp) ------------------
+# ------------------ Memory Management 3.5.1 for the GPU Poor by DeepBeepMeep (mmgp) ------------------
 #
 # This module contains multiple optimisations so that models such as Flux (and derived), Mochi, CogView, HunyuanVideo, ... can run smoothly on a 24 GB GPU limited card.
 # This is a replacement for the accelerate library that should in theory manage offloading, but doesn't work properly with models that are loaded / unloaded several
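The comment above describes the core technique: keep each model's weights in CPU RAM and move submodules to VRAM only while they are actually computing, so pipelines larger than a 24 GB card can still run. Below is a minimal sketch of that just-in-time offloading idea using plain PyTorch forward hooks; the function name `naive_offload` is hypothetical and this is only an illustration of the concept, not mmgp's actual implementation (which also covers quantization, LoRA handling and asynchronous transfers).

```python
import torch

def naive_offload(model: torch.nn.Module, device: str = "cuda") -> torch.nn.Module:
    """Keep weights on CPU; move each leaf module to the GPU only for its forward pass.

    Sketch only: not mmgp's implementation.
    """
    model.to("cpu")

    def pre_hook(module, args):
        module.to(device)  # load this submodule's weights just in time
        return tuple(a.to(device) if torch.is_tensor(a) else a for a in args)

    def post_hook(module, args, output):
        module.to("cpu")   # release VRAM as soon as the submodule is done
        return output

    for m in model.modules():
        if next(m.children(), None) is None:  # hook leaf modules only
            m.register_forward_pre_hook(pre_hook)
            m.register_forward_hook(post_hook)
    return model
```

A real offloading layer additionally pins CPU memory and overlaps transfers with compute; the sketch only shows the load/compute/release cycle.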
@@ -658,7 +658,7 @@ def _welcome():
     if welcome_displayed:
         return
     welcome_displayed = True
-    print(f"{BOLD}{HEADER}************ Memory Management for the GPU Poor (mmgp 3.5.0) by DeepBeepMeep ************{ENDC}{UNBOLD}")
+    print(f"{BOLD}{HEADER}************ Memory Management for the GPU Poor (mmgp 3.5.1) by DeepBeepMeep ************{ENDC}{UNBOLD}")
 
 def change_dtype(model, new_dtype, exclude_buffers = False):
     for submodule_name, submodule in model.named_modules():
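The hunk ends at the signature of `change_dtype`, which walks `model.named_modules()` and converts weights to a new dtype; its body is not shown in this diff. The following is a hedged sketch of what such a per-module dtype pass generally looks like in plain PyTorch (the name `change_dtype_sketch` and the buffer handling are assumptions, not mmgp's code):

```python
import torch

def change_dtype_sketch(model: torch.nn.Module, new_dtype: torch.dtype,
                        exclude_buffers: bool = False) -> torch.nn.Module:
    # Convert floating-point parameters (and optionally buffers) of every submodule in place.
    for _, submodule in model.named_modules():
        for _, param in submodule.named_parameters(recurse=False):
            if param.is_floating_point():
                param.data = param.data.to(new_dtype)
        if not exclude_buffers:
            for name, buf in submodule.named_buffers(recurse=False):
                if buf.is_floating_point():
                    setattr(submodule, name, buf.to(new_dtype))
    return model
```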
@@ -2014,6 +2014,8 @@ class offload:
             weight = weight.clone()
         for active_adapter in active_adapters:
             data = loras_data.get(active_adapter + '_GPU', None)
+            if data == None:
+                continue
             lora_A_weight, lora_B_weight, diff_b, alpha = data
             scaling = self._get_lora_scaling(loras_scaling, model, active_adapter) * alpha
             if lora_A_weight != None:
@@ -2028,7 +2030,6 @@
                 original_bias = False
                 bias.add_(diff_b, alpha=scaling)
                 # base_weight += scaling * lora_B_weight @ lora_A_weight
-            break
         if training:
             pass
             # result = torch.nn.functional.linear(dropout(x), base_weight, bias=submodule.bias)
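This is the substantive change in 3.5.1: the LoRA merge loop no longer assumes every active adapter has resident weights (it skips adapters whose `*_GPU` entry is missing) and no longer stops after the first adapter, so multiple active adapters can now stack. Below is a minimal standalone sketch of that corrected loop shape; `apply_loras`, `loras_data` and `scalings` are hypothetical stand-ins for the surrounding offload state, not the project's actual method:

```python
import torch

def apply_loras(weight: torch.Tensor, active_adapters, loras_data, scalings) -> torch.Tensor:
    """Merge every resident LoRA adapter into a copy of the base weight (sketch only)."""
    weight = weight.clone()
    for adapter in active_adapters:
        data = loras_data.get(adapter + "_GPU")
        if data is None:            # 3.5.1 guard: adapter weights not loaded, skip it
            continue
        lora_A, lora_B, _diff_b, alpha = data
        scaling = scalings.get(adapter, 1.0) * alpha
        if lora_A is not None:
            weight += scaling * (lora_B @ lora_A)   # W <- W + s * B @ A
        # no early `break`: all active adapters accumulate
    return weight

# Tiny usage example with made-up shapes: "detail" has no resident weights and is skipped.
W = torch.zeros(8, 8)
A, B = torch.randn(2, 8), torch.randn(8, 2)
merged = apply_loras(W, ["style", "detail"], {"style_GPU": (A, B, None, 1.0)}, {"style": 0.5})
```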
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: mmgp
-Version: 3.5.0
+Version: 3.5.1
 Summary: Memory Management for the GPU Poor
 Author-email: deepbeepmeep <deepbeepmeep@yahoo.com>
 Requires-Python: >=3.10
@@ -15,7 +15,7 @@ Dynamic: license-file
 
 
 <p align="center">
-<H2>Memory Management 3.5.0 for the GPU Poor by DeepBeepMeep</H2>
+<H2>Memory Management 3.5.1 for the GPU Poor by DeepBeepMeep</H2>
 </p>
 
 
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes