comfyui-workflow-templates 0.1.94-py3-none-any.whl → 0.1.96-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of comfyui-workflow-templates might be problematic.

Files changed (168)
  1. comfyui_workflow_templates/templates/2_pass_pose_worship.json +551 -139
  2. comfyui_workflow_templates/templates/3d_hunyuan3d-v2.1.json +8 -8
  3. comfyui_workflow_templates/templates/3d_hunyuan3d_image_to_model.json +187 -295
  4. comfyui_workflow_templates/templates/3d_hunyuan3d_multiview_to_model.json +158 -160
  5. comfyui_workflow_templates/templates/3d_hunyuan3d_multiview_to_model_turbo.json +54 -56
  6. comfyui_workflow_templates/templates/ByteDance-Seedance_00003_.json +210 -0
  7. comfyui_workflow_templates/templates/api_bfl_flux_1_kontext_max_image.json +124 -89
  8. comfyui_workflow_templates/templates/api_bfl_flux_1_kontext_multiple_images_input.json +138 -99
  9. comfyui_workflow_templates/templates/api_bfl_flux_1_kontext_pro_image.json +191 -156
  10. comfyui_workflow_templates/templates/api_bfl_flux_pro_t2i.json +81 -79
  11. comfyui_workflow_templates/templates/api_bytedance_flf2v.json +90 -90
  12. comfyui_workflow_templates/templates/api_bytedance_image_to_video.json +2 -2
  13. comfyui_workflow_templates/templates/api_bytedance_seedream4.json +227 -222
  14. comfyui_workflow_templates/templates/api_bytedance_text_to_video.json +39 -40
  15. comfyui_workflow_templates/templates/api_google_gemini.json +6 -7
  16. comfyui_workflow_templates/templates/api_google_gemini_image.json +113 -47
  17. comfyui_workflow_templates/templates/api_hailuo_minimax_i2v.json +59 -40
  18. comfyui_workflow_templates/templates/api_hailuo_minimax_t2v.json +28 -28
  19. comfyui_workflow_templates/templates/api_hailuo_minimax_video.json +37 -31
  20. comfyui_workflow_templates/templates/api_ideogram_v3_t2i.json +59 -43
  21. comfyui_workflow_templates/templates/api_kling_effects.json +58 -31
  22. comfyui_workflow_templates/templates/api_kling_flf.json +74 -43
  23. comfyui_workflow_templates/templates/api_kling_i2v.json +58 -32
  24. comfyui_workflow_templates/templates/api_luma_i2v.json +115 -118
  25. comfyui_workflow_templates/templates/api_luma_photon_i2i.json +102 -54
  26. comfyui_workflow_templates/templates/api_luma_photon_style_ref.json +320 -163
  27. comfyui_workflow_templates/templates/api_luma_t2v.json +59 -50
  28. comfyui_workflow_templates/templates/api_moonvalley_image_to_video.json +40 -41
  29. comfyui_workflow_templates/templates/api_moonvalley_video_to_video_motion_transfer.json +10 -11
  30. comfyui_workflow_templates/templates/api_moonvalley_video_to_video_pose_control.json +36 -37
  31. comfyui_workflow_templates/templates/api_openai_dall_e_2_inpaint.json +47 -11
  32. comfyui_workflow_templates/templates/api_openai_dall_e_2_t2i.json +8 -29
  33. comfyui_workflow_templates/templates/api_openai_image_1_i2i.json +47 -39
  34. comfyui_workflow_templates/templates/api_openai_image_1_inpaint.json +11 -32
  35. comfyui_workflow_templates/templates/api_openai_image_1_multi_inputs.json +63 -43
  36. comfyui_workflow_templates/templates/api_openai_image_1_t2i.json +46 -67
  37. comfyui_workflow_templates/templates/api_openai_sora_video.json +202 -162
  38. comfyui_workflow_templates/templates/api_pika_i2v.json +31 -31
  39. comfyui_workflow_templates/templates/api_pika_scene.json +16 -7
  40. comfyui_workflow_templates/templates/api_pixverse_i2v.json +72 -77
  41. comfyui_workflow_templates/templates/api_pixverse_t2v.json +20 -16
  42. comfyui_workflow_templates/templates/api_pixverse_template_i2v.json +40 -36
  43. comfyui_workflow_templates/templates/api_recraft_image_gen_with_color_control.json +170 -75
  44. comfyui_workflow_templates/templates/api_recraft_image_gen_with_style_control.json +212 -199
  45. comfyui_workflow_templates/templates/api_recraft_vector_gen.json +78 -69
  46. comfyui_workflow_templates/templates/api_rodin_gen2.json +214 -208
  47. comfyui_workflow_templates/templates/api_rodin_image_to_model.json +479 -439
  48. comfyui_workflow_templates/templates/api_rodin_multiview_to_model.json +191 -135
  49. comfyui_workflow_templates/templates/api_runway_first_last_frame.json +45 -45
  50. comfyui_workflow_templates/templates/api_runway_gen3a_turbo_image_to_video.json +31 -32
  51. comfyui_workflow_templates/templates/api_runway_gen4_turo_image_to_video.json +65 -66
  52. comfyui_workflow_templates/templates/api_runway_reference_to_image.json +109 -71
  53. comfyui_workflow_templates/templates/api_runway_text_to_image.json +55 -55
  54. comfyui_workflow_templates/templates/api_stability_ai_audio_inpaint.json +58 -82
  55. comfyui_workflow_templates/templates/api_stability_ai_audio_to_audio.json +51 -54
  56. comfyui_workflow_templates/templates/api_stability_ai_i2i.json +66 -41
  57. comfyui_workflow_templates/templates/api_stability_ai_sd3.5_i2i.json +74 -26
  58. comfyui_workflow_templates/templates/api_stability_ai_sd3.5_t2i.json +70 -70
  59. comfyui_workflow_templates/templates/api_stability_ai_stable_image_ultra_t2i.json +33 -31
  60. comfyui_workflow_templates/templates/api_tripo_image_to_model.json +90 -92
  61. comfyui_workflow_templates/templates/api_tripo_multiview_to_model.json +283 -283
  62. comfyui_workflow_templates/templates/api_tripo_text_to_model.json +102 -102
  63. comfyui_workflow_templates/templates/api_veo2_i2v.json +32 -29
  64. comfyui_workflow_templates/templates/api_veo3.json +78 -72
  65. comfyui_workflow_templates/templates/api_vidu_image_to_video.json +37 -9
  66. comfyui_workflow_templates/templates/api_vidu_reference_to_video.json +86 -19
  67. comfyui_workflow_templates/templates/api_vidu_start_end_to_video.json +14 -6
  68. comfyui_workflow_templates/templates/api_vidu_text_to_video.json +2 -2
  69. comfyui_workflow_templates/templates/api_wan_image_to_video.json +52 -53
  70. comfyui_workflow_templates/templates/api_wan_text_to_image .json +140 -0
  71. comfyui_workflow_templates/templates/api_wan_text_to_video.json +38 -45
  72. comfyui_workflow_templates/templates/audio_ace_step_1_m2m_editing.json +174 -351
  73. comfyui_workflow_templates/templates/audio_ace_step_1_t2a_instrumentals.json +214 -405
  74. comfyui_workflow_templates/templates/audio_ace_step_1_t2a_song.json +179 -390
  75. comfyui_workflow_templates/templates/audio_stable_audio_example.json +156 -118
  76. comfyui_workflow_templates/templates/controlnet_example.json +110 -122
  77. comfyui_workflow_templates/templates/default.json +329 -139
  78. comfyui_workflow_templates/templates/depth_controlnet.json +463 -206
  79. comfyui_workflow_templates/templates/depth_t2i_adapter.json +1522 -236
  80. comfyui_workflow_templates/templates/esrgan_example.json +24 -30
  81. comfyui_workflow_templates/templates/flux1_dev_uso_reference_image_gen.json +227 -193
  82. comfyui_workflow_templates/templates/flux1_krea_dev.json +3 -3
  83. comfyui_workflow_templates/templates/flux_canny_model_example.json +576 -268
  84. comfyui_workflow_templates/templates/flux_depth_lora_example.json +1234 -213
  85. comfyui_workflow_templates/templates/flux_fill_inpaint_example.json +553 -250
  86. comfyui_workflow_templates/templates/flux_fill_outpaint_example.json +556 -228
  87. comfyui_workflow_templates/templates/flux_kontext_dev_basic.json +149 -234
  88. comfyui_workflow_templates/templates/flux_redux_model_example-1.webp +0 -0
  89. comfyui_workflow_templates/templates/flux_redux_model_example.json +600 -560
  90. comfyui_workflow_templates/templates/flux_schnell_full_text_to_image.json +21 -29
  91. comfyui_workflow_templates/templates/hidream_e1_1.json +180 -210
  92. comfyui_workflow_templates/templates/hidream_e1_full.json +34 -40
  93. comfyui_workflow_templates/templates/hidream_i1_dev.json +15 -15
  94. comfyui_workflow_templates/templates/hidream_i1_fast.json +15 -15
  95. comfyui_workflow_templates/templates/hidream_i1_full.json +17 -16
  96. comfyui_workflow_templates/templates/hiresfix_esrgan_workflow.json +31 -37
  97. comfyui_workflow_templates/templates/hiresfix_latent_workflow.json +84 -88
  98. comfyui_workflow_templates/templates/image2image-1.webp +0 -0
  99. comfyui_workflow_templates/templates/image2image-2.webp +0 -0
  100. comfyui_workflow_templates/templates/image2image.json +198 -196
  101. comfyui_workflow_templates/templates/image_chroma1_radiance_text_to_image.json +60 -60
  102. comfyui_workflow_templates/templates/image_flux.1_fill_dev_OneReward.json +178 -162
  103. comfyui_workflow_templates/templates/image_lotus_depth_v1_1.json +26 -32
  104. comfyui_workflow_templates/templates/image_netayume_lumina_t2i-1.webp +0 -0
  105. comfyui_workflow_templates/templates/image_netayume_lumina_t2i.json +597 -0
  106. comfyui_workflow_templates/templates/image_omnigen2_image_edit.json +322 -323
  107. comfyui_workflow_templates/templates/image_omnigen2_t2i.json +26 -33
  108. comfyui_workflow_templates/templates/image_qwen_image.json +40 -40
  109. comfyui_workflow_templates/templates/image_qwen_image_controlnet_patch.json +47 -46
  110. comfyui_workflow_templates/templates/image_qwen_image_edit.json +216 -216
  111. comfyui_workflow_templates/templates/image_qwen_image_edit_2509.json +361 -361
  112. comfyui_workflow_templates/templates/image_qwen_image_instantx_controlnet.json +75 -73
  113. comfyui_workflow_templates/templates/image_qwen_image_instantx_inpainting_controlnet.json +186 -181
  114. comfyui_workflow_templates/templates/image_qwen_image_union_control_lora.json +191 -190
  115. comfyui_workflow_templates/templates/image_to_video.json +64 -64
  116. comfyui_workflow_templates/templates/image_to_video_wan.json +163 -140
  117. comfyui_workflow_templates/templates/index.es.json +26 -25
  118. comfyui_workflow_templates/templates/index.fr.json +26 -25
  119. comfyui_workflow_templates/templates/index.ja.json +26 -25
  120. comfyui_workflow_templates/templates/index.json +12 -24
  121. comfyui_workflow_templates/templates/index.ko.json +26 -25
  122. comfyui_workflow_templates/templates/index.ru.json +26 -25
  123. comfyui_workflow_templates/templates/index.zh-TW.json +26 -25
  124. comfyui_workflow_templates/templates/index.zh.json +26 -25
  125. comfyui_workflow_templates/templates/inpaint_example.json +87 -86
  126. comfyui_workflow_templates/templates/inpaint_model_outpainting.json +24 -21
  127. comfyui_workflow_templates/templates/latent_upscale_different_prompt_model.json +179 -185
  128. comfyui_workflow_templates/templates/ltxv_image_to_video-1.webp +0 -0
  129. comfyui_workflow_templates/templates/ltxv_image_to_video.json +367 -337
  130. comfyui_workflow_templates/templates/mixing_controlnets.json +422 -373
  131. comfyui_workflow_templates/templates/sd3.5_large_blur.json +14 -14
  132. comfyui_workflow_templates/templates/sd3.5_large_canny_controlnet_example.json +615 -258
  133. comfyui_workflow_templates/templates/sd3.5_large_depth.json +1317 -210
  134. comfyui_workflow_templates/templates/sdxl_revision_text_prompts.json +619 -256
  135. comfyui_workflow_templates/templates/sdxlturbo_example.json +308 -162
  136. comfyui_workflow_templates/templates/video_humo.json +194 -194
  137. comfyui_workflow_templates/templates/video_wan2.1_fun_camera_v1.1_1.3B.json +100 -73
  138. comfyui_workflow_templates/templates/video_wan2.1_fun_camera_v1.1_14B.json +102 -153
  139. comfyui_workflow_templates/templates/video_wan2_2_14B_animate.json +48 -46
  140. comfyui_workflow_templates/templates/video_wan2_2_14B_flf2v.json +151 -151
  141. comfyui_workflow_templates/templates/video_wan2_2_14B_fun_camera.json +118 -114
  142. comfyui_workflow_templates/templates/video_wan2_2_14B_fun_control.json +394 -372
  143. comfyui_workflow_templates/templates/video_wan2_2_14B_fun_inpaint.json +125 -125
  144. comfyui_workflow_templates/templates/video_wan2_2_14B_i2v.json +60 -60
  145. comfyui_workflow_templates/templates/video_wan2_2_14B_s2v.json +244 -240
  146. comfyui_workflow_templates/templates/video_wan2_2_14B_t2v (2).json +1954 -0
  147. comfyui_workflow_templates/templates/video_wan2_2_5B_fun_control.json +242 -222
  148. comfyui_workflow_templates/templates/video_wan2_2_5B_fun_inpaint.json +6 -6
  149. comfyui_workflow_templates/templates/video_wan2_2_5B_ti2v.json +49 -49
  150. comfyui_workflow_templates/templates/video_wan_ati.json +51 -51
  151. comfyui_workflow_templates/templates/video_wan_vace_14B_ref2v.json +138 -152
  152. comfyui_workflow_templates/templates/video_wan_vace_14B_t2v.json +2 -2
  153. comfyui_workflow_templates/templates/video_wan_vace_14B_v2v.json +135 -153
  154. comfyui_workflow_templates/templates/video_wan_vace_flf2v.json +178 -194
  155. comfyui_workflow_templates/templates/video_wan_vace_inpainting.json +228 -236
  156. comfyui_workflow_templates/templates/video_wan_vace_outpainting.json +257 -340
  157. comfyui_workflow_templates/templates/wan2.1_flf2v_720_f16.json +65 -73
  158. comfyui_workflow_templates/templates/wan2.1_fun_control.json +202 -133
  159. comfyui_workflow_templates/templates/wan2.1_fun_inp.json +78 -52
  160. {comfyui_workflow_templates-0.1.94.dist-info → comfyui_workflow_templates-0.1.96.dist-info}/METADATA +1 -1
  161. {comfyui_workflow_templates-0.1.94.dist-info → comfyui_workflow_templates-0.1.96.dist-info}/RECORD +164 -162
  162. comfyui_workflow_templates/templates/sdxl_revision_zero_positive-1.webp +0 -0
  163. comfyui_workflow_templates/templates/sdxl_revision_zero_positive.json +0 -496
  164. comfyui_workflow_templates/templates/stable_zero123_example-1.webp +0 -0
  165. comfyui_workflow_templates/templates/stable_zero123_example.json +0 -273
  166. {comfyui_workflow_templates-0.1.94.dist-info → comfyui_workflow_templates-0.1.96.dist-info}/WHEEL +0 -0
  167. {comfyui_workflow_templates-0.1.94.dist-info → comfyui_workflow_templates-0.1.96.dist-info}/licenses/LICENSE +0 -0
  168. {comfyui_workflow_templates-0.1.94.dist-info → comfyui_workflow_templates-0.1.96.dist-info}/top_level.txt +0 -0
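The per-file `+N -M` figures above are counts of lines added and removed in each template between the two wheels. As a rough illustration (not part of the release itself), similar counts can be reproduced locally with the Python standard library; the wheel filenames below are assumed to have been downloaded into the working directory beforehand.

```python
# Sketch: per-file added/removed line counts between two
# comfyui-workflow-templates wheels, using only the standard library.
import difflib
import zipfile

# Assumed local filenames; adjust to wherever the wheels were downloaded.
OLD = "comfyui_workflow_templates-0.1.94-py3-none-any.whl"
NEW = "comfyui_workflow_templates-0.1.96-py3-none-any.whl"

def read_lines(whl, name):
    # Read one archived file from the wheel as a list of text lines.
    with zipfile.ZipFile(whl) as z, z.open(name) as f:
        return f.read().decode("utf-8").splitlines()

def main():
    with zipfile.ZipFile(OLD) as zo, zipfile.ZipFile(NEW) as zn:
        shared = sorted(set(zo.namelist()) & set(zn.namelist()))

    for name in shared:
        if not name.endswith(".json"):
            continue  # skip binary .webp previews and dist-info metadata
        diff = list(difflib.unified_diff(
            read_lines(OLD, name), read_lines(NEW, name), lineterm=""))
        added = sum(1 for l in diff if l.startswith("+") and not l.startswith("+++"))
        removed = sum(1 for l in diff if l.startswith("-") and not l.startswith("---"))
        if added or removed:
            print(f"{name} +{added} -{removed}")

if __name__ == "__main__":
    main()
```

Note that `difflib.unified_diff` follows the same +/- line convention as the listing, though its exact counts may differ slightly from the registry's own differ.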
@@ -8,11 +8,11 @@
  "id": 28,
  "type": "SamplerCustomAdvanced",
  "pos": [
- 626.147705078125,
- 105.61650085449219
+ 630,
+ 100
  ],
  "size": [
- 202.53378295898438,
+ 202.5337890625,
  106
  ],
  "flags": {},
@@ -71,8 +71,8 @@
  "id": 20,
  "type": "KSamplerSelect",
  "pos": [
- 286.1477355957031,
- 435.6164855957031
+ 290,
+ 430
  ],
  "size": [
  315,
@@ -104,8 +104,8 @@
  "id": 21,
  "type": "RandomNoise",
  "pos": [
- 286.1477355957031,
- 115.61649322509766
+ 290,
+ 110
  ],
  "size": [
  310,
@@ -138,12 +138,12 @@
  "id": 27,
  "type": "DualCFGGuider",
  "pos": [
- 286.1477355957031,
- 245.61639404296875
+ 290,
+ 240
  ],
  "size": [
  310,
- 142
+ 166
  ],
  "flags": {},
  "order": 10,
@@ -187,15 +187,16 @@
  },
  "widgets_values": [
  5,
- 2
+ 2,
+ "regular"
  ]
  },
  {
  "id": 23,
  "type": "BasicScheduler",
  "pos": [
- 286.1477355957031,
- 535.6163940429688
+ 290,
+ 530
  ],
  "size": [
  210,
@@ -269,9 +270,7 @@
  },
  "widgets_values": [
  "ae.safetensors"
- ],
- "color": "#322",
- "bgcolor": "#533"
+ ]
  },
  {
  "id": 10,
@@ -314,9 +313,7 @@
  "qwen_2.5_vl_fp16.safetensors",
  "omnigen2",
  "default"
- ],
- "color": "#322",
- "bgcolor": "#533"
+ ]
  },
  {
  "id": 12,
@@ -358,16 +355,14 @@
  "widgets_values": [
  "omnigen2_fp16.safetensors",
  "default"
- ],
- "color": "#322",
- "bgcolor": "#533"
+ ]
  },
  {
  "id": 8,
  "type": "VAEDecode",
  "pos": [
- 606.147705078125,
- 585.6165771484375
+ 610,
+ 580
  ],
  "size": [
  210,
@@ -438,9 +433,7 @@
  1024,
  1024,
  1
- ],
- "color": "#322",
- "bgcolor": "#533"
+ ]
  },
  {
  "id": 9,
@@ -576,7 +569,7 @@
  "title": "Model links",
  "properties": {},
  "widgets_values": [
- "## Docs\n[English](http://docs.comfy.org/tutorials/image/omnigen/omnigen2) | [中文](http://docs.comfy.org/zh-CN/tutorials/image/omnigen/omnigen2)\n\n## Model links\n\n**diffusion model**\n\n- [omnigen2_fp16.safetensors](https://huggingface.co/Comfy-Org/Omnigen2_ComfyUI_repackaged/resolve/main/split_files/diffusion_models/omnigen2_fp16.safetensors)\n\n**vae**\n\n- [ae.safetensors](https://huggingface.co/Comfy-Org/Lumina_Image_2.0_Repackaged/blob/main/split_files/vae/ae.safetensors)\n\n**text encoder**\n\n- [qwen_2.5_vl_fp16.safetensors](https://huggingface.co/Comfy-Org/Omnigen2_ComfyUI_repackaged/resolve/main/split_files/text_encoders/qwen_2.5_vl_fp16.safetensors)\n\nFile save location\n```\n📂 ComfyUI/\n├── 📂 models/\n│ ├── 📂 diffusion_models/\n│ │ └── omnigen2_fp16.safetensors\n│ ├── 📂 vae/\n│ │ └── ae.safetensor\n│ └── 📂 text_encoders/\n│ └── qwen_2.5_vl_fp16.safetensors\n```\n"
+ "[Docs](http://docs.comfy.org/tutorials/image/omnigen/omnigen2)\n## Model links\n\n**diffusion model**\n\n- [omnigen2_fp16.safetensors](https://huggingface.co/Comfy-Org/Omnigen2_ComfyUI_repackaged/resolve/main/split_files/diffusion_models/omnigen2_fp16.safetensors)\n\n**vae**\n\n- [ae.safetensors](https://huggingface.co/Comfy-Org/Lumina_Image_2.0_Repackaged/blob/main/split_files/vae/ae.safetensors)\n\n**text encoder**\n\n- [qwen_2.5_vl_fp16.safetensors](https://huggingface.co/Comfy-Org/Omnigen2_ComfyUI_repackaged/resolve/main/split_files/text_encoders/qwen_2.5_vl_fp16.safetensors)\n\nFile save location\n```\n📂 ComfyUI/\n├── 📂 models/\n│ ├── 📂 diffusion_models/\n│ │ └── omnigen2_fp16.safetensors\n│ ├── 📂 vae/\n│ │ └── ae.safetensor\n│ └── 📂 text_encoders/\n│ └── qwen_2.5_vl_fp16.safetensors\n```\n"
  ],
  "color": "#432",
  "bgcolor": "#653"
@@ -736,7 +729,7 @@
  "title": "Custom Sampling",
  "bounding": [
  280,
- 30,
+ 20,
  561.83544921875,
  630.7272338867188
  ],
@@ -761,13 +754,13 @@
  "config": {},
  "extra": {
  "ds": {
- "scale": 0.6830134553650705,
+ "scale": 0.683013455365071,
  "offset": [
- 1505.9433195285862,
- 143.4838026435537
+ 260.5088244392041,
+ 362.7687967524388
  ]
  },
- "frontendVersion": "1.23.2",
+ "frontendVersion": "1.28.6",
  "groupNodes": {}
  },
  "version": 0.4
@@ -30,9 +30,9 @@
  }
  ],
  "properties": {
+ "Node name for S&R": "VAELoader",
  "cnr_id": "comfy-core",
  "ver": "0.3.48",
- "Node name for S&R": "VAELoader",
  "models": [
  {
  "name": "qwen_image_vae.safetensors",
@@ -80,9 +80,9 @@
  }
  ],
  "properties": {
+ "Node name for S&R": "CLIPLoader",
  "cnr_id": "comfy-core",
  "ver": "0.3.48",
- "Node name for S&R": "CLIPLoader",
  "models": [
  {
  "name": "qwen_2.5_vl_7b_fp8_scaled.safetensors",
@@ -130,9 +130,9 @@
  }
  ],
  "properties": {
+ "Node name for S&R": "EmptySD3LatentImage",
  "cnr_id": "comfy-core",
  "ver": "0.3.48",
- "Node name for S&R": "EmptySD3LatentImage",
  "enableTabs": false,
  "tabWidth": 65,
  "tabXOffset": 10,
@@ -181,9 +181,9 @@
  ],
  "title": "CLIP Text Encode (Positive Prompt)",
  "properties": {
+ "Node name for S&R": "CLIPTextEncode",
  "cnr_id": "comfy-core",
  "ver": "0.3.48",
- "Node name for S&R": "CLIPTextEncode",
  "enableTabs": false,
  "tabWidth": 65,
  "tabXOffset": 10,
@@ -232,9 +232,9 @@
  ],
  "title": "CLIP Text Encode (Negative Prompt)",
  "properties": {
+ "Node name for S&R": "CLIPTextEncode",
  "cnr_id": "comfy-core",
  "ver": "0.3.48",
- "Node name for S&R": "CLIPTextEncode",
  "enableTabs": false,
  "tabWidth": 65,
  "tabXOffset": 10,
@@ -273,9 +273,9 @@
  ],
  "outputs": [],
  "properties": {
+ "Node name for S&R": "SaveImage",
  "cnr_id": "comfy-core",
  "ver": "0.3.48",
- "Node name for S&R": "SaveImage",
  "enableTabs": false,
  "tabWidth": 65,
  "tabXOffset": 10,
@@ -320,9 +320,9 @@
  }
  ],
  "properties": {
+ "Node name for S&R": "ModelSamplingAuraFlow",
  "cnr_id": "comfy-core",
  "ver": "0.3.48",
- "Node name for S&R": "ModelSamplingAuraFlow",
  "enableTabs": false,
  "tabWidth": 65,
  "tabXOffset": 10,
@@ -367,9 +367,9 @@
  }
  ],
  "properties": {
+ "Node name for S&R": "LoraLoaderModelOnly",
  "cnr_id": "comfy-core",
  "ver": "0.3.49",
- "Node name for S&R": "LoraLoaderModelOnly",
  "models": [
  {
  "name": "Qwen-Image-Lightning-8steps-V1.0.safetensors",
@@ -471,9 +471,9 @@
  }
  ],
  "properties": {
+ "Node name for S&R": "VAEDecode",
  "cnr_id": "comfy-core",
  "ver": "0.3.48",
- "Node name for S&R": "VAEDecode",
  "enableTabs": false,
  "tabWidth": 65,
  "tabXOffset": 10,
@@ -532,9 +532,9 @@
  }
  ],
  "properties": {
+ "Node name for S&R": "KSampler",
  "cnr_id": "comfy-core",
  "ver": "0.3.48",
- "Node name for S&R": "KSampler",
  "enableTabs": false,
  "tabWidth": 65,
  "tabXOffset": 10,
@@ -580,9 +580,9 @@
  }
  ],
  "properties": {
+ "Node name for S&R": "UNETLoader",
  "cnr_id": "comfy-core",
  "ver": "0.3.48",
- "Node name for S&R": "UNETLoader",
  "models": [
  {
  "name": "qwen_image_fp8_e4m3fn.safetensors",
@@ -605,75 +605,75 @@
  ]
  },
  {
- "id": 67,
- "type": "MarkdownNote",
+ "id": 70,
+ "type": "Note",
  "pos": [
- -540,
- 10
+ 850,
+ 910
  ],
  "size": [
- 540,
- 630
+ 310,
+ 120
  ],
  "flags": {},
  "order": 6,
  "mode": 0,
  "inputs": [],
  "outputs": [],
- "title": "Model links",
- "properties": {
- "widget_ue_connectable": {}
- },
+ "title": "For fp8 without 8steps LoRA",
+ "properties": {},
  "widgets_values": [
- "[Tutorial](https://docs.comfy.org/tutorials/image/qwen/qwen-image) | [教程](https://docs.comfy.org/zh-CN/tutorials/image/qwen/qwen-image)\n\n\n## Model links\n\nYou can find all the models on [Huggingface](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/tree/main) or [Modelscope](https://modelscope.cn/models/Comfy-Org/Qwen-Image_ComfyUI/files)\n\n**Diffusion model**\n\n- [qwen_image_fp8_e4m3fn.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_fp8_e4m3fn.safetensors)\n\nQwen_image_distill\n\n- [qwen_image_distill_full_fp8_e4m3fn.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/non_official/diffusion_models/qwen_image_distill_full_fp8_e4m3fn.safetensors)\n- [qwen_image_distill_full_bf16.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/non_official/diffusion_models/qwen_image_distill_full_bf16.safetensors)\n\n**LoRA**\n\n- [Qwen-Image-Lightning-8steps-V1.0.safetensors](https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-8steps-V1.0.safetensors)\n\n**Text encoder**\n\n- [qwen_2.5_vl_7b_fp8_scaled.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b_fp8_scaled.safetensors)\n\n**VAE**\n\n- [qwen_image_vae.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/vae/qwen_image_vae.safetensors)\n\nModel Storage Location\n\n```\n📂 ComfyUI/\n├── 📂 models/\n│ ├── 📂 diffusion_models/\n│ │ ├── qwen_image_fp8_e4m3fn.safetensors\n│ │ └── qwen_image_distill_full_fp8_e4m3fn.safetensors\n│ ├── 📂 loras/\n│ │ └── Qwen-Image-Lightning-8steps-V1.0.safetensors\n│ ├── 📂 vae/\n│ │ └── qwen_image_vae.safetensors\n│ └── 📂 text_encoders/\n│ └── qwen_2.5_vl_7b_fp8_scaled.safetensors\n```\n"
+ "Set cfg to 1.0 for a speed boost at the cost of consistency. Samplers like res_multistep work pretty well at cfg 1.0\n\nThe official number of steps is 50 but I think that's too much. Even just 10 steps seems to work."
  ],
  "color": "#432",
  "bgcolor": "#653"
  },
  {
- "id": 70,
- "type": "Note",
+ "id": 74,
+ "type": "MarkdownNote",
  "pos": [
  850,
- 910
+ 660
  ],
  "size": [
  310,
- 120
+ 210
  ],
  "flags": {},
  "order": 7,
  "mode": 0,
  "inputs": [],
  "outputs": [],
- "title": "For fp8 without 8steps LoRA",
+ "title": "KSampler settings",
  "properties": {},
  "widgets_values": [
- "Set cfg to 1.0 for a speed boost at the cost of consistency. Samplers like res_multistep work pretty well at cfg 1.0\n\nThe official number of steps is 50 but I think that's too much. Even just 10 steps seems to work."
+ "You can test and find the best setting by yourself. The following table is for reference.\n\n| model | steps | cfg |\n|---------------------|---------------|---------------|\n| fp8_e4m3fn(Qwen team's suggestion) | 40 | 2.5 \n| fp8_e4m3fn | 20 | 2.5 |\n| fp8_e4m3fn + 8steps LoRA | 8 | 1.0 |\n| distill fp8_e4m3fn | 10 | 1.0 |"
  ],
  "color": "#432",
  "bgcolor": "#653"
  },
  {
- "id": 74,
+ "id": 67,
  "type": "MarkdownNote",
  "pos": [
- 850,
- 660
+ -540,
+ 10
  ],
  "size": [
- 310,
- 210
+ 540,
+ 630
  ],
  "flags": {},
  "order": 8,
  "mode": 0,
  "inputs": [],
  "outputs": [],
- "title": "KSampler settings",
- "properties": {},
+ "title": "Model links",
+ "properties": {
+ "widget_ue_connectable": {}
+ },
  "widgets_values": [
- "You can test and find the best setting by yourself. The following table is for reference.\n\n| model | steps | cfg |\n|---------------------|---------------|---------------|\n| fp8_e4m3fn(Qwen team's suggestion) | 40 | 2.5 \n| fp8_e4m3fn | 20 | 2.5 |\n| fp8_e4m3fn + 8steps LoRA | 8 | 1.0 |\n| distill fp8_e4m3fn | 10 | 1.0 |"
+ "[Tutorial](https://docs.comfy.org/tutorials/image/qwen/qwen-image) \n\n## Model links\n\nYou can find all the models on [Huggingface](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/tree/main) or [Modelscope](https://modelscope.cn/models/Comfy-Org/Qwen-Image_ComfyUI/files)\n\n**Diffusion model**\n\n- [qwen_image_fp8_e4m3fn.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_fp8_e4m3fn.safetensors)\n\nQwen_image_distill\n\n- [qwen_image_distill_full_fp8_e4m3fn.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/non_official/diffusion_models/qwen_image_distill_full_fp8_e4m3fn.safetensors)\n- [qwen_image_distill_full_bf16.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/non_official/diffusion_models/qwen_image_distill_full_bf16.safetensors)\n\n**LoRA**\n\n- [Qwen-Image-Lightning-8steps-V1.0.safetensors](https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-8steps-V1.0.safetensors)\n\n**Text encoder**\n\n- [qwen_2.5_vl_7b_fp8_scaled.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b_fp8_scaled.safetensors)\n\n**VAE**\n\n- [qwen_image_vae.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/vae/qwen_image_vae.safetensors)\n\nModel Storage Location\n\n```\n📂 ComfyUI/\n├── 📂 models/\n│ ├── 📂 diffusion_models/\n│ │ ├── qwen_image_fp8_e4m3fn.safetensors\n│ │ └── qwen_image_distill_full_fp8_e4m3fn.safetensors\n│ ├── 📂 loras/\n│ │ └── Qwen-Image-Lightning-8steps-V1.0.safetensors\n│ ├── 📂 vae/\n│ │ └── qwen_image_vae.safetensors\n│ └── 📂 text_encoders/\n│ └── qwen_2.5_vl_7b_fp8_scaled.safetensors\n```\n"
  ],
  "color": "#432",
  "bgcolor": "#653"
@@ -826,13 +826,13 @@
  "config": {},
  "extra": {
  "ds": {
- "scale": 0.2480466293395618,
+ "scale": 0.33015006365095706,
  "offset": [
- 4246.689662080242,
- 988.9239858025617
+ 1294.3888372784004,
+ 848.3556072418501
  ]
  },
- "frontendVersion": "1.26.6",
+ "frontendVersion": "1.28.6",
  "ue_links": [],
  "links_added_by_ue": [],
  "VHS_latentpreview": false,
@@ -447,45 +447,6 @@
  "default"
  ]
  },
- {
- "id": 71,
- "type": "LoadImage",
- "pos": [
- -70,
- 635
- ],
- "size": [
- 274.080078125,
- 314.00006103515625
- ],
- "flags": {},
- "order": 6,
- "mode": 0,
- "inputs": [],
- "outputs": [
- {
- "name": "IMAGE",
- "type": "IMAGE",
- "links": [
- 140
- ]
- },
- {
- "name": "MASK",
- "type": "MASK",
- "links": null
- }
- ],
- "properties": {
- "cnr_id": "comfy-core",
- "ver": "0.3.51",
- "Node name for S&R": "LoadImage"
- },
- "widgets_values": [
- "ComfyUI_00944_.png",
- "image"
- ]
- },
  {
  "id": 73,
  "type": "PreviewImage",
@@ -582,7 +543,8 @@
  "outputs": [],
  "properties": {
  "cnr_id": "comfy-core",
- "ver": "0.3.51"
+ "ver": "0.3.51",
+ "Node name for S&R": "SaveImage"
  },
  "widgets_values": [
  "ComfyUI"
@@ -794,7 +756,7 @@
  630
  ],
  "flags": {},
- "order": 7,
+ "order": 6,
  "mode": 0,
  "inputs": [],
  "outputs": [],
@@ -803,10 +765,49 @@
  "widget_ue_connectable": {}
  },
  "widgets_values": [
- "[Tutorial](https://docs.comfy.org/tutorials/image/qwen/qwen-image) | [教程](https://docs.comfy.org/zh-CN/tutorials/image/qwen/qwen-image)\n\n\n## Model links\n\nYou can find all the models on [Huggingface](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/tree/main) or [Modelscope](https://modelscope.cn/models/Comfy-Org/Qwen-Image_ComfyUI/files)\n\n**Diffusion model**\n\n- [qwen_image_fp8_e4m3fn.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_fp8_e4m3fn.safetensors)\n\n**Model patches**\n\n- [qwen_image_canny_diffsynth_controlnet.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image-DiffSynth-ControlNets/resolve/main/split_files/model_patches/qwen_image_canny_diffsynth_controlnet.safetensors)\n- [qwen_image_depth_diffsynth_controlnet.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image-DiffSynth-ControlNets/resolve/main/split_files/model_patches/qwen_image_depth_diffsynth_controlnet.safetensors)\n- [qwen_image_inpaint_diffsynth_controlnet.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image-DiffSynth-ControlNets/resolve/main/split_files/model_patches/qwen_image_inpaint_diffsynth_controlnet.safetensors)\n\n**LoRA**\n\n- [Qwen-Image-Lightning-4steps-V1.0.safetensors](https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-4steps-V1.0.safetensors)\n\n**Text encoder**\n\n- [qwen_2.5_vl_7b_fp8_scaled.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b_fp8_scaled.safetensors)\n\n**VAE**\n\n- [qwen_image_vae.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/vae/qwen_image_vae.safetensors)\n\n\nModel Storage Location\n\n```\n📂 ComfyUI/\n├── 📂 models/\n│ ├── 📂 diffusion_models/\n│ │ ├── qwen_image_fp8_e4m3fn.safetensors\n│ │ └── qwen_image_distill_full_fp8_e4m3fn.safetensors\n│ ├── 📂 loras/\n│ │ └── Qwen-Image-Lightning-8steps-V1.0.safetensors\n│ ├── 📂 model_patches/ # create one if you can't find it\n│ │ ├── qwen_image_depth_diffsynth_controlnet.safetensors\n│ │ ├── qwen_image_canny_diffsynth_controlnet.safetensors\n│ │ └── qwen_image_inpaint_diffsynth_controlnet.safetensors\n│ ├── 📂 vae/\n│ │ └── qwen_image_vae.safetensors\n│ └── 📂 text_encoders/\n│ └── qwen_2.5_vl_7b_fp8_scaled.safetensors\n```\n"
+ "[Tutorial](https://docs.comfy.org/tutorials/image/qwen/qwen-image) \n\n\n## Model links\n\nYou can find all the models on [Huggingface](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/tree/main) or [Modelscope](https://modelscope.cn/models/Comfy-Org/Qwen-Image_ComfyUI/files)\n\n**Diffusion model**\n\n- [qwen_image_fp8_e4m3fn.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_fp8_e4m3fn.safetensors)\n\n**Model patches**\n\n- [qwen_image_canny_diffsynth_controlnet.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image-DiffSynth-ControlNets/resolve/main/split_files/model_patches/qwen_image_canny_diffsynth_controlnet.safetensors)\n- [qwen_image_depth_diffsynth_controlnet.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image-DiffSynth-ControlNets/resolve/main/split_files/model_patches/qwen_image_depth_diffsynth_controlnet.safetensors)\n- [qwen_image_inpaint_diffsynth_controlnet.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image-DiffSynth-ControlNets/resolve/main/split_files/model_patches/qwen_image_inpaint_diffsynth_controlnet.safetensors)\n\n**LoRA**\n\n- [Qwen-Image-Lightning-4steps-V1.0.safetensors](https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-4steps-V1.0.safetensors)\n\n**Text encoder**\n\n- [qwen_2.5_vl_7b_fp8_scaled.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b_fp8_scaled.safetensors)\n\n**VAE**\n\n- [qwen_image_vae.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/vae/qwen_image_vae.safetensors)\n\n\nModel Storage Location\n\n```\n📂 ComfyUI/\n├── 📂 models/\n│ ├── 📂 diffusion_models/\n│ │ ├── qwen_image_fp8_e4m3fn.safetensors\n│ │ └── qwen_image_distill_full_fp8_e4m3fn.safetensors\n│ ├── 📂 loras/\n│ │ └── Qwen-Image-Lightning-8steps-V1.0.safetensors\n│ ├── 📂 model_patches/ # create one if you can't find it\n│ │ ├── qwen_image_depth_diffsynth_controlnet.safetensors\n│ │ ├── qwen_image_canny_diffsynth_controlnet.safetensors\n│ │ └── qwen_image_inpaint_diffsynth_controlnet.safetensors\n│ ├── 📂 vae/\n│ │ └── qwen_image_vae.safetensors\n│ └── 📂 text_encoders/\n│ └── qwen_2.5_vl_7b_fp8_scaled.safetensors\n```\n"
  ],
  "color": "#432",
  "bgcolor": "#653"
+ },
+ {
+ "id": 71,
+ "type": "LoadImage",
+ "pos": [
+ -70,
+ 635
+ ],
+ "size": [
+ 274.080078125,
+ 314.00006103515625
+ ],
+ "flags": {},
+ "order": 7,
+ "mode": 0,
+ "inputs": [],
+ "outputs": [
+ {
+ "name": "IMAGE",
+ "type": "IMAGE",
+ "links": [
+ 140
+ ]
+ },
+ {
+ "name": "MASK",
+ "type": "MASK",
+ "links": null
+ }
+ ],
+ "properties": {
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "Node name for S&R": "LoadImage"
+ },
+ "widgets_values": [
+ "image_qwen_image_controlnet_patch_input_image.png",
+ "image"
+ ]
  }
  ],
  "links": [
@@ -1041,13 +1042,13 @@
  "config": {},
  "extra": {
  "ds": {
- "scale": 0.48559562289012265,
+ "scale": 0.5970401764784028,
  "offset": [
- 1846.044139609573,
- 391.24067543168553
+ 1006.4422007985179,
+ 72.61872930398539
  ]
  },
- "frontendVersion": "1.26.6"
+ "frontendVersion": "1.27.10"
  },
  "version": 0.4
  }