hcpdiff 2.2.tar.gz → 2.2.1.tar.gz

This diff shows the content of package versions publicly released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the registry.
Files changed (121)
  1. {hcpdiff-2.2 → hcpdiff-2.2.1}/PKG-INFO +8 -4
  2. {hcpdiff-2.2 → hcpdiff-2.2.1}/README.md +6 -2
  3. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/easy/cfg/sd15_train.py +2 -2
  4. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/easy/cfg/sdxl_train.py +2 -2
  5. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/models/text_emb_ex.py +4 -0
  6. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/trainer_ac.py +0 -7
  7. hcpdiff-2.2.1/hcpdiff/trainer_deepspeed.py +47 -0
  8. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff.egg-info/PKG-INFO +8 -4
  9. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff.egg-info/SOURCES.txt +1 -1
  10. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff.egg-info/entry_points.txt +1 -0
  11. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff.egg-info/requires.txt +1 -1
  12. {hcpdiff-2.2 → hcpdiff-2.2.1}/setup.py +2 -1
  13. hcpdiff-2.2/hcpdiff/train_deepspeed.py +0 -69
  14. {hcpdiff-2.2 → hcpdiff-2.2.1}/LICENSE +0 -0
  15. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/__init__.py +0 -0
  16. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/ckpt_manager/__init__.py +0 -0
  17. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/ckpt_manager/ckpt.py +0 -0
  18. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/ckpt_manager/format/__init__.py +0 -0
  19. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/ckpt_manager/format/diffusers.py +0 -0
  20. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/ckpt_manager/format/emb.py +0 -0
  21. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/ckpt_manager/format/lora_webui.py +0 -0
  22. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/ckpt_manager/format/sd_single.py +0 -0
  23. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/ckpt_manager/loader.py +0 -0
  24. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/data/__init__.py +0 -0
  25. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/data/cache/__init__.py +0 -0
  26. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/data/cache/vae.py +0 -0
  27. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/data/dataset.py +0 -0
  28. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/data/handler/__init__.py +0 -0
  29. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/data/handler/controlnet.py +0 -0
  30. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/data/handler/diffusion.py +0 -0
  31. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/data/handler/text.py +0 -0
  32. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/data/source/__init__.py +0 -0
  33. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/data/source/folder_class.py +0 -0
  34. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/data/source/text.py +0 -0
  35. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/data/source/text2img.py +0 -0
  36. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/data/source/text2img_cond.py +0 -0
  37. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/diffusion/__init__.py +0 -0
  38. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/diffusion/noise/__init__.py +0 -0
  39. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/diffusion/noise/pyramid_noise.py +0 -0
  40. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/diffusion/noise/zero_terminal.py +0 -0
  41. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/diffusion/sampler/__init__.py +0 -0
  42. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/diffusion/sampler/base.py +0 -0
  43. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/diffusion/sampler/ddpm.py +0 -0
  44. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/diffusion/sampler/diffusers.py +0 -0
  45. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/diffusion/sampler/edm.py +0 -0
  46. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/diffusion/sampler/sigma_scheduler/__init__.py +0 -0
  47. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/diffusion/sampler/sigma_scheduler/base.py +0 -0
  48. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/diffusion/sampler/sigma_scheduler/ddpm.py +0 -0
  49. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/diffusion/sampler/sigma_scheduler/edm.py +0 -0
  50. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/easy/__init__.py +0 -0
  51. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/easy/cfg/__init__.py +0 -0
  52. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/easy/cfg/t2i.py +0 -0
  53. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/easy/model/__init__.py +0 -0
  54. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/easy/model/cnet.py +0 -0
  55. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/easy/model/loader.py +0 -0
  56. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/easy/sampler.py +0 -0
  57. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/evaluate/__init__.py +0 -0
  58. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/evaluate/previewer.py +0 -0
  59. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/loss/__init__.py +0 -0
  60. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/loss/base.py +0 -0
  61. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/loss/gw.py +0 -0
  62. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/loss/ssim.py +0 -0
  63. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/loss/vlb.py +0 -0
  64. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/loss/weighting.py +0 -0
  65. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/models/__init__.py +0 -0
  66. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/models/cfg_context.py +0 -0
  67. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/models/compose/__init__.py +0 -0
  68. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/models/compose/compose_hook.py +0 -0
  69. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/models/compose/compose_textencoder.py +0 -0
  70. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/models/compose/compose_tokenizer.py +0 -0
  71. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/models/compose/sdxl_composer.py +0 -0
  72. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/models/container.py +0 -0
  73. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/models/controlnet.py +0 -0
  74. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/models/lora_base.py +0 -0
  75. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/models/lora_base_patch.py +0 -0
  76. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/models/lora_layers.py +0 -0
  77. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/models/lora_layers_patch.py +0 -0
  78. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/models/textencoder_ex.py +0 -0
  79. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/models/tokenizer_ex.py +0 -0
  80. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/models/wrapper/__init__.py +0 -0
  81. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/models/wrapper/pixart.py +0 -0
  82. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/models/wrapper/sd.py +0 -0
  83. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/models/wrapper/utils.py +0 -0
  84. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/parser/__init__.py +0 -0
  85. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/parser/embpt.py +0 -0
  86. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/tools/__init__.py +0 -0
  87. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/tools/convert_caption_txt2json.py +0 -0
  88. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/tools/convert_old_lora.py +0 -0
  89. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/tools/create_embedding.py +0 -0
  90. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/tools/dataset_generator.py +0 -0
  91. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/tools/diffusers2sd.py +0 -0
  92. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/tools/download_hf_model.py +0 -0
  93. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/tools/embedding_convert.py +0 -0
  94. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/tools/gen_from_ptlist.py +0 -0
  95. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/tools/init_proj.py +0 -0
  96. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/tools/lora_convert.py +0 -0
  97. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/tools/save_model.py +0 -0
  98. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/tools/sd2diffusers.py +0 -0
  99. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/train_colo.py +0 -0
  100. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/trainer_ac_single.py +0 -0
  101. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/utils/__init__.py +0 -0
  102. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/utils/colo_utils.py +0 -0
  103. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/utils/inpaint_pipe.py +0 -0
  104. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/utils/net_utils.py +0 -0
  105. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/utils/pipe_hook.py +0 -0
  106. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/utils/utils.py +0 -0
  107. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/workflow/__init__.py +0 -0
  108. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/workflow/daam/__init__.py +0 -0
  109. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/workflow/daam/act.py +0 -0
  110. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/workflow/daam/hook.py +0 -0
  111. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/workflow/diffusion.py +0 -0
  112. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/workflow/fast.py +0 -0
  113. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/workflow/flow.py +0 -0
  114. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/workflow/io.py +0 -0
  115. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/workflow/model.py +0 -0
  116. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/workflow/text.py +0 -0
  117. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/workflow/utils.py +0 -0
  118. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/workflow/vae.py +0 -0
  119. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff.egg-info/dependency_links.txt +0 -0
  120. {hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff.egg-info/top_level.txt +0 -0
  121. {hcpdiff-2.2 → hcpdiff-2.2.1}/setup.cfg +0 -0
{hcpdiff-2.2 → hcpdiff-2.2.1}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: hcpdiff
- Version: 2.2
+ Version: 2.2.1
  Summary: A universal Diffusion toolbox
  Home-page: https://github.com/IrisRainbowNeko/HCP-Diffusion
  Author: Ziyi Dong
@@ -17,7 +17,7 @@ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
  Requires-Python: >=3.8
  Description-Content-Type: text/markdown
  License-File: LICENSE
- Requires-Dist: rainbowneko
+ Requires-Dist: rainbowneko==1.6
  Requires-Dist: diffusers
  Requires-Dist: matplotlib
  Requires-Dist: pyarrow
@@ -262,9 +262,13 @@ hcp_run --cfg cfgs/workflow/text2img_cli.py \
      seed=42
  ```

- ### Tutorials
+ ### 📚 Tutorials

- 🚧 In Development
+ + 🧠 [Model Training Guide](https://hcpdiff.readthedocs.io/en/latest/user_guides/train.html)
+ + 🔧 [LoRA Training Tutorial](https://hcpdiff.readthedocs.io/enlatest/tutorial/lora.html)
+ + 🎨 [Image Generation Guide](https://hcpdiff.readthedocs.io/en/latest/user_guides/workflow.html)
+ + ⚙️ [Configuration File Explanation](https://hcpdiff.readthedocs.io/en/latest/user_guides/cfg.html)
+ + 🧩 [Model Format Explanation](https://hcpdiff.readthedocs.io/en/latest/user_guides/model_format.html)

  ---

{hcpdiff-2.2 → hcpdiff-2.2.1}/README.md
@@ -225,9 +225,13 @@ hcp_run --cfg cfgs/workflow/text2img_cli.py \
      seed=42
  ```

- ### Tutorials
+ ### 📚 Tutorials

- 🚧 In Development
+ + 🧠 [Model Training Guide](https://hcpdiff.readthedocs.io/en/latest/user_guides/train.html)
+ + 🔧 [LoRA Training Tutorial](https://hcpdiff.readthedocs.io/enlatest/tutorial/lora.html)
+ + 🎨 [Image Generation Guide](https://hcpdiff.readthedocs.io/en/latest/user_guides/workflow.html)
+ + ⚙️ [Configuration File Explanation](https://hcpdiff.readthedocs.io/en/latest/user_guides/cfg.html)
+ + 🧩 [Model Format Explanation](https://hcpdiff.readthedocs.io/en/latest/user_guides/model_format.html)

  ---

{hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/easy/cfg/sd15_train.py
@@ -47,7 +47,7 @@ def SD15_finetuning(base_model: str, train_steps: int, dataset, save_step: int =

  optimizer=optimizer,

- scheduler=ConstantLR(
+ lr_scheduler=ConstantLR(
      _partial_=True,
      warmup_steps=warmup_steps,
  ),
@@ -132,7 +132,7 @@ def SD15_lora_train(base_model: str, train_steps: int, dataset, save_step: int =

  optimizer=optimizer,

- scheduler=ConstantLR(
+ lr_scheduler=ConstantLR(
      _partial_=True,
      warmup_steps=warmup_steps,
  ),
{hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/easy/cfg/sdxl_train.py
@@ -44,7 +44,7 @@ def SDXL_finetuning(base_model: str, train_steps: int, dataset, save_step: int =

  optimizer=optimizer,

- scheduler=ConstantLR(
+ lr_scheduler=ConstantLR(
      _partial_=True,
      warmup_steps=warmup_steps,
  ),
@@ -128,7 +128,7 @@ def SDXL_lora_train(base_model: str, train_steps: int, dataset, save_step: int =

  optimizer=optimizer,

- scheduler=ConstantLR(
+ lr_scheduler=ConstantLR(
      _partial_=True,
      warmup_steps=warmup_steps,
  ),
{hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/models/text_emb_ex.py
@@ -126,6 +126,10 @@ class EmbeddingPTInterpHook(SinglePluginBlock):
      BOS = repeat(inputs_embeds[0,0,:], 'e -> r 1 e', r=self.N_repeats)
      EOS = repeat(inputs_embeds[0,-1,:], 'e -> r 1 e', r=self.N_repeats)

+     # make DDP happy
+     if len(self.emb_train) > 0:
+         BOS = BOS + sum(emb.mean()*0 for emb in self.emb_train if emb.requires_grad)
+
      replaced_embeds = []
      for item, rep_idxs, ids_raw in zip(inputs_embeds, rep_idxs_B, self.input_ids):
          # insert pt to embeddings
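Note on the hunk above: the `emb.mean()*0` term is the usual workaround for DDP's requirement that every trainable parameter take part in each backward pass; tying the trainable prompt-tuning embeddings into the BOS embedding with a zero-valued term gives them (zero) gradients even when they go otherwise unused, without changing the output. A minimal, self-contained sketch of the pattern (the toy module and parameter names are illustrative, not hcpdiff code):

    import torch
    import torch.nn as nn

    class ToyModel(nn.Module):
        def __init__(self):
            super().__init__()
            self.used = nn.Parameter(torch.randn(4))
            # Stand-ins for trainable embeddings that may not be used on a given step.
            self.maybe_unused = nn.ParameterList([nn.Parameter(torch.randn(4)) for _ in range(2)])

        def forward(self, x):
            out = x * self.used
            # Tie every trainable parameter into the graph with a zero-valued term,
            # so DDP never sees a parameter that received no gradient.
            out = out + sum(p.mean()*0 for p in self.maybe_unused if p.requires_grad)
            return out.sum()

    loss = ToyModel()(torch.ones(4))
    loss.backward()  # every parameter now has a (possibly zero) .grad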
{hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff/trainer_ac.py
@@ -42,13 +42,6 @@ class HCPTrainer(Trainer):
      def pt_trainable(self):
          return self.cfgs.emb_pt is not None

-     def get_loss(self, ds_name, model_pred, inputs):
-         loss = super().get_loss(ds_name, model_pred, inputs)
-         # make DDP happy
-         if len(self.train_pts)>0:
-             loss = loss+0*sum([emb.mean() for emb in self.train_pts.values()])
-         return loss
-
      def save_model(self, from_raw=False):
          NekoSaver.save_all(
              self.model_raw,
hcpdiff-2.2.1/hcpdiff/trainer_deepspeed.py
@@ -0,0 +1,47 @@
+ import argparse
+ import warnings
+
+ import torch
+ from rainbowneko.ckpt_manager import NekoPluginSaver
+ from rainbowneko.train.trainer import TrainerDeepspeed
+ from rainbowneko.utils import xformers_available
+
+ from hcpdiff.trainer_ac import HCPTrainer, load_config_with_cli
+
+ class HCPTrainerDeepspeed(TrainerDeepspeed, HCPTrainer):
+     def config_model(self):
+         if self.cfgs.model.enable_xformers:
+             if xformers_available:
+                 self.model_wrapper.enable_xformers()
+             else:
+                 warnings.warn("xformers is not available. Make sure it is installed correctly")
+
+         if self.model_wrapper.vae is not None:
+             self.vae_dtype = self.weight_dtype_map.get(self.cfgs.model.get('vae_dtype', None), torch.float32)
+             self.model_wrapper.set_dtype(self.weight_dtype, self.vae_dtype)
+
+         if self.cfgs.model.gradient_checkpointing:
+             self.model_wrapper.enable_gradient_checkpointing()
+
+         if self.is_local_main_process:
+             for saver in self.ckpt_saver.values():
+                 if isinstance(saver, NekoPluginSaver):
+                     saver.plugin_from_raw = True
+
+ def hcp_train():
+     import subprocess
+     parser = argparse.ArgumentParser(description='HCP-Diffusion Launcher')
+     parser.add_argument('--launch_cfg', type=str, default='cfgs/launcher/deepspeed.yaml')
+     args, train_args = parser.parse_known_args()
+
+     subprocess.run(["accelerate", "launch", '--config_file', args.launch_cfg, "-m",
+                     "hcpdiff.trainer_deepspeed"]+train_args, check=True)
+
+ if __name__ == '__main__':
+     parser = argparse.ArgumentParser(description='HCP Diffusion Trainer for DeepSpeed')
+     parser.add_argument("--cfg", type=str, default=None, required=True)
+     args, cfg_args = parser.parse_known_args()
+
+     parser, conf = load_config_with_cli(args.cfg, args_list=cfg_args) # skip --cfg
+     trainer = HCPTrainerDeepspeed(parser, conf)
+     trainer.train()
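With the `hcp_train_ds` entry point registered further below (see the entry_points.txt and setup.py hunks), this launcher would be invoked roughly as `hcp_train_ds --cfg cfgs/train/your_config.py`; that training config path is illustrative. `hcp_train()` consumes only `--launch_cfg` and forwards everything else to `accelerate launch -m hcpdiff.trainer_deepspeed`. A small sketch of that two-stage `parse_known_args` split (the example arguments are hypothetical):

    import argparse

    # The launcher keeps --launch_cfg for itself and passes the rest
    # (e.g. --cfg plus any config overrides) through to the trainer module.
    parser = argparse.ArgumentParser(description='launcher sketch')
    parser.add_argument('--launch_cfg', type=str, default='cfgs/launcher/deepspeed.yaml')
    args, passthrough = parser.parse_known_args(
        ['--launch_cfg', 'cfgs/launcher/deepspeed.yaml', '--cfg', 'cfgs/train/demo.py'])

    print(args.launch_cfg)  # consumed by the launcher
    print(passthrough)      # ['--cfg', 'cfgs/train/demo.py'] -> forwarded to the trainer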
{hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: hcpdiff
- Version: 2.2
+ Version: 2.2.1
  Summary: A universal Diffusion toolbox
  Home-page: https://github.com/IrisRainbowNeko/HCP-Diffusion
  Author: Ziyi Dong
@@ -17,7 +17,7 @@ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
  Requires-Python: >=3.8
  Description-Content-Type: text/markdown
  License-File: LICENSE
- Requires-Dist: rainbowneko
+ Requires-Dist: rainbowneko==1.6
  Requires-Dist: diffusers
  Requires-Dist: matplotlib
  Requires-Dist: pyarrow
@@ -262,9 +262,13 @@ hcp_run --cfg cfgs/workflow/text2img_cli.py \
      seed=42
  ```

- ### Tutorials
+ ### 📚 Tutorials

- 🚧 In Development
+ + 🧠 [Model Training Guide](https://hcpdiff.readthedocs.io/en/latest/user_guides/train.html)
+ + 🔧 [LoRA Training Tutorial](https://hcpdiff.readthedocs.io/enlatest/tutorial/lora.html)
+ + 🎨 [Image Generation Guide](https://hcpdiff.readthedocs.io/en/latest/user_guides/workflow.html)
+ + ⚙️ [Configuration File Explanation](https://hcpdiff.readthedocs.io/en/latest/user_guides/cfg.html)
+ + 🧩 [Model Format Explanation](https://hcpdiff.readthedocs.io/en/latest/user_guides/model_format.html)

  ---

{hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff.egg-info/SOURCES.txt
@@ -3,9 +3,9 @@ README.md
  setup.py
  hcpdiff/__init__.py
  hcpdiff/train_colo.py
- hcpdiff/train_deepspeed.py
  hcpdiff/trainer_ac.py
  hcpdiff/trainer_ac_single.py
+ hcpdiff/trainer_deepspeed.py
  hcpdiff.egg-info/PKG-INFO
  hcpdiff.egg-info/SOURCES.txt
  hcpdiff.egg-info/dependency_links.txt
{hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff.egg-info/entry_points.txt
@@ -2,4 +2,5 @@
  hcp_run = rainbowneko.infer.infer_workflow:run_workflow
  hcp_train = hcpdiff.trainer_ac:hcp_train
  hcp_train_1gpu = hcpdiff.trainer_ac_single:hcp_train
+ hcp_train_ds = hcpdiff.trainer_deepspeed:hcp_train
  hcpinit = hcpdiff.tools.init_proj:main
{hcpdiff-2.2 → hcpdiff-2.2.1}/hcpdiff.egg-info/requires.txt
@@ -1,4 +1,4 @@
- rainbowneko
+ rainbowneko==1.6
  diffusers
  matplotlib
  pyarrow
{hcpdiff-2.2 → hcpdiff-2.2.1}/setup.py
@@ -12,7 +12,7 @@ with open('requirements.txt', encoding='utf8') as f:
  setuptools.setup(
      name="hcpdiff",
      py_modules=["hcpdiff"],
-     version="2.2",
+     version="2.2.1",
      author="Ziyi Dong",
      author_email="rainbow-neko@outlook.com",
      description="A universal Diffusion toolbox",
@@ -38,6 +38,7 @@ setuptools.setup(
      'hcpinit = hcpdiff.tools.init_proj:main',
      'hcp_train = hcpdiff.trainer_ac:hcp_train',
      'hcp_train_1gpu = hcpdiff.trainer_ac_single:hcp_train',
+     'hcp_train_ds = hcpdiff.trainer_deepspeed:hcp_train',
      'hcp_run = rainbowneko.infer.infer_workflow:run_workflow',
      ]
  },
hcpdiff-2.2/hcpdiff/train_deepspeed.py
@@ -1,69 +0,0 @@
- import argparse
- import os
- import sys
- import warnings
- from functools import partial
-
- import torch
-
- from hcpdiff.ckpt_manager import CkptManagerPKL, CkptManagerSafe
- from hcpdiff.train_ac_old import Trainer, load_config_with_cli
- from hcpdiff.utils.net_utils import get_scheduler
-
- class TrainerDeepSpeed(Trainer):
-
-     def build_ckpt_manager(self):
-         self.ckpt_manager = self.ckpt_manager_map[self.cfgs.ckpt_type](plugin_from_raw=True)
-         if self.is_local_main_process:
-             self.ckpt_manager.set_save_dir(os.path.join(self.exp_dir, 'ckpts'), emb_dir=self.cfgs.tokenizer_pt.emb_dir)
-
-     @property
-     def unet_raw(self):
-         return self.accelerator.unwrap_model(self.TE_unet).unet if self.train_TE else self.accelerator.unwrap_model(self.TE_unet.unet)
-
-     @property
-     def TE_raw(self):
-         return self.accelerator.unwrap_model(self.TE_unet).TE if self.train_TE else self.TE_unet.TE
-
-     def get_loss(self, model_pred, target, timesteps, att_mask):
-         if att_mask is None:
-             att_mask = 1.0
-         if getattr(self.criterion, 'need_timesteps', False):
-             loss = (self.criterion(model_pred.float(), target.float(), timesteps)*att_mask).mean()
-         else:
-             loss = (self.criterion(model_pred.float(), target.float())*att_mask).mean()
-         return loss
-
-     def build_optimizer_scheduler(self):
-         # set optimizer
-         parameters, parameters_pt = self.get_param_group_train()
-
-         if len(parameters_pt)>0: # do prompt-tuning
-             cfg_opt_pt = self.cfgs.train.optimizer_pt
-             # if self.cfgs.train.scale_lr_pt:
-             #     self.scale_lr(parameters_pt)
-             assert isinstance(cfg_opt_pt, partial), f'optimizer.type is not supported anymore, please use class path like "torch.optim.AdamW".'
-             weight_decay = cfg_opt_pt.keywords.get('weight_decay', None)
-             if weight_decay is not None:
-                 for param in parameters_pt:
-                     param['weight_decay'] = weight_decay
-
-             parameters += parameters_pt
-             warnings.warn('deepspeed dose not support multi optimizer and lr_scheduler. optimizer_pt and scheduler_pt will not work.')
-
-         if len(parameters)>0:
-             cfg_opt = self.cfgs.train.optimizer
-             if self.cfgs.train.scale_lr:
-                 self.scale_lr(parameters)
-             assert isinstance(cfg_opt, partial), f'optimizer.type is not supported anymore, please use class path like "torch.optim.AdamW".'
-             self.optimizer = cfg_opt(params=parameters)
-             self.lr_scheduler = get_scheduler(self.cfgs.train.scheduler, self.optimizer)
-
- if __name__ == '__main__':
-     parser = argparse.ArgumentParser(description='Stable Diffusion Training')
-     parser.add_argument('--cfg', type=str, default='cfg/train/demo.yaml')
-     args, cfg_args = parser.parse_known_args()
-
-     conf = load_config_with_cli(args.cfg, args_list=cfg_args) # skip --cfg
-     trainer = TrainerDeepSpeed(conf)
-     trainer.train()