comfyui-workflow-templates-media-image 0.3.0 (py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (110)
  1. comfyui_workflow_templates_media_image/__init__.py +6 -0
  2. comfyui_workflow_templates_media_image/templates/01_qwen_t2i_subgraphed-1.webp +0 -0
  3. comfyui_workflow_templates_media_image/templates/01_qwen_t2i_subgraphed.json +1287 -0
  4. comfyui_workflow_templates_media_image/templates/02_qwen_Image_edit_subgraphed-1.webp +0 -0
  5. comfyui_workflow_templates_media_image/templates/02_qwen_Image_edit_subgraphed.json +1780 -0
  6. comfyui_workflow_templates_media_image/templates/3d_hunyuan3d_image_to_model-1.webp +0 -0
  7. comfyui_workflow_templates_media_image/templates/3d_hunyuan3d_image_to_model.json +678 -0
  8. comfyui_workflow_templates_media_image/templates/controlnet_example-1.webp +0 -0
  9. comfyui_workflow_templates_media_image/templates/controlnet_example-2.webp +0 -0
  10. comfyui_workflow_templates_media_image/templates/controlnet_example.json +848 -0
  11. comfyui_workflow_templates_media_image/templates/depth_controlnet-1.webp +0 -0
  12. comfyui_workflow_templates_media_image/templates/depth_controlnet-2.webp +0 -0
  13. comfyui_workflow_templates_media_image/templates/depth_controlnet.json +678 -0
  14. comfyui_workflow_templates_media_image/templates/depth_t2i_adapter-1.webp +0 -0
  15. comfyui_workflow_templates_media_image/templates/depth_t2i_adapter-2.webp +0 -0
  16. comfyui_workflow_templates_media_image/templates/depth_t2i_adapter.json +1708 -0
  17. comfyui_workflow_templates_media_image/templates/flux1_dev_uso_reference_image_gen-1.webp +0 -0
  18. comfyui_workflow_templates_media_image/templates/flux1_dev_uso_reference_image_gen-2.webp +0 -0
  19. comfyui_workflow_templates_media_image/templates/flux1_dev_uso_reference_image_gen.json +3572 -0
  20. comfyui_workflow_templates_media_image/templates/flux1_krea_dev-1.webp +0 -0
  21. comfyui_workflow_templates_media_image/templates/flux1_krea_dev.json +543 -0
  22. comfyui_workflow_templates_media_image/templates/flux_canny_model_example-1.webp +0 -0
  23. comfyui_workflow_templates_media_image/templates/flux_canny_model_example-2.webp +0 -0
  24. comfyui_workflow_templates_media_image/templates/flux_canny_model_example.json +786 -0
  25. comfyui_workflow_templates_media_image/templates/flux_depth_lora_example-1.webp +0 -0
  26. comfyui_workflow_templates_media_image/templates/flux_depth_lora_example-2.webp +0 -0
  27. comfyui_workflow_templates_media_image/templates/flux_depth_lora_example.json +1824 -0
  28. comfyui_workflow_templates_media_image/templates/flux_dev_checkpoint_example-1.webp +0 -0
  29. comfyui_workflow_templates_media_image/templates/flux_dev_checkpoint_example.json +332 -0
  30. comfyui_workflow_templates_media_image/templates/flux_dev_full_text_to_image-1.webp +0 -0
  31. comfyui_workflow_templates_media_image/templates/flux_dev_full_text_to_image.json +552 -0
  32. comfyui_workflow_templates_media_image/templates/flux_fill_inpaint_example-1.webp +0 -0
  33. comfyui_workflow_templates_media_image/templates/flux_fill_inpaint_example-2.webp +0 -0
  34. comfyui_workflow_templates_media_image/templates/flux_fill_inpaint_example.json +765 -0
  35. comfyui_workflow_templates_media_image/templates/flux_fill_outpaint_example-1.webp +0 -0
  36. comfyui_workflow_templates_media_image/templates/flux_fill_outpaint_example-2.webp +0 -0
  37. comfyui_workflow_templates_media_image/templates/flux_fill_outpaint_example.json +823 -0
  38. comfyui_workflow_templates_media_image/templates/flux_kontext_dev_basic-1.webp +0 -0
  39. comfyui_workflow_templates_media_image/templates/flux_kontext_dev_basic-2.webp +0 -0
  40. comfyui_workflow_templates_media_image/templates/flux_kontext_dev_basic.json +1053 -0
  41. comfyui_workflow_templates_media_image/templates/flux_redux_model_example-1.webp +0 -0
  42. comfyui_workflow_templates_media_image/templates/flux_redux_model_example.json +1494 -0
  43. comfyui_workflow_templates_media_image/templates/flux_schnell-1.webp +0 -0
  44. comfyui_workflow_templates_media_image/templates/flux_schnell.json +302 -0
  45. comfyui_workflow_templates_media_image/templates/flux_schnell_full_text_to_image-1.webp +0 -0
  46. comfyui_workflow_templates_media_image/templates/flux_schnell_full_text_to_image.json +544 -0
  47. comfyui_workflow_templates_media_image/templates/image2image-1.webp +0 -0
  48. comfyui_workflow_templates_media_image/templates/image2image-2.webp +0 -0
  49. comfyui_workflow_templates_media_image/templates/image2image.json +633 -0
  50. comfyui_workflow_templates_media_image/templates/image_chroma1_radiance_text_to_image-1.webp +0 -0
  51. comfyui_workflow_templates_media_image/templates/image_chroma1_radiance_text_to_image.json +1126 -0
  52. comfyui_workflow_templates_media_image/templates/image_chroma_text_to_image-1.webp +0 -0
  53. comfyui_workflow_templates_media_image/templates/image_chroma_text_to_image.json +1025 -0
  54. comfyui_workflow_templates_media_image/templates/image_flux.1_fill_dev_OneReward-1.webp +0 -0
  55. comfyui_workflow_templates_media_image/templates/image_flux.1_fill_dev_OneReward-2.webp +0 -0
  56. comfyui_workflow_templates_media_image/templates/image_flux.1_fill_dev_OneReward.json +2596 -0
  57. comfyui_workflow_templates_media_image/templates/image_lotus_depth_v1_1-1.webp +0 -0
  58. comfyui_workflow_templates_media_image/templates/image_lotus_depth_v1_1-2.webp +0 -0
  59. comfyui_workflow_templates_media_image/templates/image_lotus_depth_v1_1.json +796 -0
  60. comfyui_workflow_templates_media_image/templates/image_netayume_lumina_t2i-1.webp +0 -0
  61. comfyui_workflow_templates_media_image/templates/image_netayume_lumina_t2i.json +597 -0
  62. comfyui_workflow_templates_media_image/templates/image_omnigen2_image_edit-1.webp +0 -0
  63. comfyui_workflow_templates_media_image/templates/image_omnigen2_image_edit-2.webp +0 -0
  64. comfyui_workflow_templates_media_image/templates/image_omnigen2_image_edit.json +1496 -0
  65. comfyui_workflow_templates_media_image/templates/image_omnigen2_t2i-1.webp +0 -0
  66. comfyui_workflow_templates_media_image/templates/image_omnigen2_t2i.json +767 -0
  67. comfyui_workflow_templates_media_image/templates/image_qwen_image-1.webp +0 -0
  68. comfyui_workflow_templates_media_image/templates/image_qwen_image.json +844 -0
  69. comfyui_workflow_templates_media_image/templates/image_qwen_image_controlnet_patch-1.webp +0 -0
  70. comfyui_workflow_templates_media_image/templates/image_qwen_image_controlnet_patch-2.webp +0 -0
  71. comfyui_workflow_templates_media_image/templates/image_qwen_image_controlnet_patch.json +1054 -0
  72. comfyui_workflow_templates_media_image/templates/image_qwen_image_edit-1.webp +0 -0
  73. comfyui_workflow_templates_media_image/templates/image_qwen_image_edit-2.webp +0 -0
  74. comfyui_workflow_templates_media_image/templates/image_qwen_image_edit.json +1056 -0
  75. comfyui_workflow_templates_media_image/templates/image_qwen_image_edit_2509-1.webp +0 -0
  76. comfyui_workflow_templates_media_image/templates/image_qwen_image_edit_2509-2.webp +0 -0
  77. comfyui_workflow_templates_media_image/templates/image_qwen_image_edit_2509.json +3141 -0
  78. comfyui_workflow_templates_media_image/templates/image_qwen_image_instantx_controlnet-1.webp +0 -0
  79. comfyui_workflow_templates_media_image/templates/image_qwen_image_instantx_controlnet-2.webp +0 -0
  80. comfyui_workflow_templates_media_image/templates/image_qwen_image_instantx_controlnet.json +2050 -0
  81. comfyui_workflow_templates_media_image/templates/image_qwen_image_instantx_inpainting_controlnet-1.webp +0 -0
  82. comfyui_workflow_templates_media_image/templates/image_qwen_image_instantx_inpainting_controlnet-2.webp +0 -0
  83. comfyui_workflow_templates_media_image/templates/image_qwen_image_instantx_inpainting_controlnet.json +3693 -0
  84. comfyui_workflow_templates_media_image/templates/image_qwen_image_union_control_lora-1.webp +0 -0
  85. comfyui_workflow_templates_media_image/templates/image_qwen_image_union_control_lora-3.webp +0 -0
  86. comfyui_workflow_templates_media_image/templates/image_qwen_image_union_control_lora.json +1098 -0
  87. comfyui_workflow_templates_media_image/templates/inpaint_example-1.webp +0 -0
  88. comfyui_workflow_templates_media_image/templates/inpaint_example-2.webp +0 -0
  89. comfyui_workflow_templates_media_image/templates/inpaint_example.json +650 -0
  90. comfyui_workflow_templates_media_image/templates/inpaint_model_outpainting-1.webp +0 -0
  91. comfyui_workflow_templates_media_image/templates/inpaint_model_outpainting-2.webp +0 -0
  92. comfyui_workflow_templates_media_image/templates/inpaint_model_outpainting.json +710 -0
  93. comfyui_workflow_templates_media_image/templates/mixing_controlnets-1.webp +0 -0
  94. comfyui_workflow_templates_media_image/templates/mixing_controlnets-2.webp +0 -0
  95. comfyui_workflow_templates_media_image/templates/mixing_controlnets.json +913 -0
  96. comfyui_workflow_templates_media_image/templates/sd3.5_large_canny_controlnet_example-1.webp +0 -0
  97. comfyui_workflow_templates_media_image/templates/sd3.5_large_canny_controlnet_example-2.webp +0 -0
  98. comfyui_workflow_templates_media_image/templates/sd3.5_large_canny_controlnet_example.json +829 -0
  99. comfyui_workflow_templates_media_image/templates/sdxl_refiner_prompt_example-1.webp +0 -0
  100. comfyui_workflow_templates_media_image/templates/sdxl_refiner_prompt_example.json +758 -0
  101. comfyui_workflow_templates_media_image/templates/sdxl_revision_text_prompts-1.webp +0 -0
  102. comfyui_workflow_templates_media_image/templates/sdxl_revision_text_prompts.json +855 -0
  103. comfyui_workflow_templates_media_image/templates/sdxl_simple_example-1.webp +0 -0
  104. comfyui_workflow_templates_media_image/templates/sdxl_simple_example.json +1346 -0
  105. comfyui_workflow_templates_media_image/templates/sdxlturbo_example-1.webp +0 -0
  106. comfyui_workflow_templates_media_image/templates/sdxlturbo_example.json +518 -0
  107. comfyui_workflow_templates_media_image-0.3.0.dist-info/METADATA +9 -0
  108. comfyui_workflow_templates_media_image-0.3.0.dist-info/RECORD +110 -0
  109. comfyui_workflow_templates_media_image-0.3.0.dist-info/WHEEL +5 -0
  110. comfyui_workflow_templates_media_image-0.3.0.dist-info/top_level.txt +1 -0
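
The file list above shows that every workflow ships as package data under `comfyui_workflow_templates_media_image/templates/`, one JSON graph plus one or two `.webp` previews per template. A minimal sketch of enumerating those bundled workflows with `importlib.resources` (the access pattern is an assumption; this diff only documents the package layout, not an API):

```python
import json
from importlib import resources

# Locate the templates directory bundled inside the installed wheel.
# Package and directory names come from the file list above; reading them
# via importlib.resources is an assumption, not something this diff documents.
templates = resources.files("comfyui_workflow_templates_media_image") / "templates"

# Enumerate the workflow graphs and report how many nodes each one contains.
for entry in sorted(templates.iterdir(), key=lambda e: e.name):
    if entry.name.endswith(".json"):
        workflow = json.loads(entry.read_text(encoding="utf-8"))
        print(f"{entry.name}: {len(workflow.get('nodes', []))} nodes")
```
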
comfyui_workflow_templates_media_image/templates/image_qwen_image_union_control_lora.json
@@ -0,0 +1,1098 @@
+ {
+ "id": "00000000-0000-0000-0000-000000000000",
+ "revision": 0,
+ "last_node_id": 82,
+ "last_link_id": 46,
+ "nodes": [
+ {
+ "id": 7,
+ "type": "CLIPTextEncode",
+ "pos": [
+ 420,
+ 710
+ ],
+ "size": [
+ 400,
+ 150
+ ],
+ "flags": {},
+ "order": 8,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "clip",
+ "type": "CLIP",
+ "link": 25
+ }
+ ],
+ "outputs": [
+ {
+ "name": "CONDITIONING",
+ "type": "CONDITIONING",
+ "links": [
+ 33
+ ]
+ }
+ ],
+ "title": "CLIP Text Encode (Negative Prompt)",
+ "properties": {
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "Node name for S&R": "CLIPTextEncode"
+ },
+ "widgets_values": [
+ " "
+ ],
+ "color": "#223",
+ "bgcolor": "#335"
+ },
+ {
+ "id": 74,
+ "type": "Canny",
+ "pos": [
+ 440,
+ 1000
+ ],
+ "size": [
+ 350,
+ 82
+ ],
+ "flags": {},
+ "order": 12,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "image",
+ "type": "IMAGE",
+ "link": 42
+ }
+ ],
+ "outputs": [
+ {
+ "name": "IMAGE",
+ "type": "IMAGE",
+ "links": [
+ 35,
+ 38
+ ]
+ }
+ ],
+ "properties": {
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "Node name for S&R": "Canny"
+ },
+ "widgets_values": [
+ 0.4,
+ 0.8
+ ]
+ },
+ {
+ "id": 75,
+ "type": "PreviewImage",
+ "pos": [
+ 450,
+ 1140
+ ],
+ "size": [
+ 330,
+ 290
+ ],
+ "flags": {},
+ "order": 15,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "images",
+ "type": "IMAGE",
+ "link": 38
+ }
+ ],
+ "outputs": [],
+ "properties": {
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "Node name for S&R": "PreviewImage"
+ },
+ "widgets_values": []
+ },
+ {
+ "id": 70,
+ "type": "ReferenceLatent",
+ "pos": [
+ 860,
+ 470
+ ],
+ "size": [
+ 197.712890625,
+ 46
+ ],
+ "flags": {},
+ "order": 16,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "conditioning",
+ "type": "CONDITIONING",
+ "link": 31
+ },
+ {
+ "name": "latent",
+ "shape": 7,
+ "type": "LATENT",
+ "link": 32
+ }
+ ],
+ "outputs": [
+ {
+ "name": "CONDITIONING",
+ "type": "CONDITIONING",
+ "links": [
+ 21
+ ]
+ }
+ ],
+ "properties": {
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "Node name for S&R": "ReferenceLatent"
+ },
+ "widgets_values": []
+ },
+ {
+ "id": 71,
+ "type": "ReferenceLatent",
+ "pos": [
+ 850,
+ 720
+ ],
+ "size": [
+ 197.712890625,
+ 46
+ ],
+ "flags": {},
+ "order": 17,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "conditioning",
+ "type": "CONDITIONING",
+ "link": 33
+ },
+ {
+ "name": "latent",
+ "shape": 7,
+ "type": "LATENT",
+ "link": 34
+ }
+ ],
+ "outputs": [
+ {
+ "name": "CONDITIONING",
+ "type": "CONDITIONING",
+ "links": [
+ 22
+ ]
+ }
+ ],
+ "properties": {
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "Node name for S&R": "ReferenceLatent"
+ },
+ "widgets_values": []
+ },
+ {
+ "id": 72,
+ "type": "VAEEncode",
+ "pos": [
+ 900,
+ 950
+ ],
+ "size": [
+ 140,
+ 46
+ ],
+ "flags": {},
+ "order": 14,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "pixels",
+ "type": "IMAGE",
+ "link": 35
+ },
+ {
+ "name": "vae",
+ "type": "VAE",
+ "link": 36
+ }
+ ],
+ "outputs": [
+ {
+ "name": "LATENT",
+ "type": "LATENT",
+ "links": [
+ 32,
+ 34,
+ 44
+ ]
+ }
+ ],
+ "properties": {
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "Node name for S&R": "VAEEncode"
+ },
+ "widgets_values": []
+ },
+ {
+ "id": 39,
+ "type": "VAELoader",
+ "pos": [
+ 30,
+ 650
+ ],
+ "size": [
+ 330,
+ 58
+ ],
+ "flags": {},
+ "order": 0,
+ "mode": 0,
+ "inputs": [],
+ "outputs": [
+ {
+ "name": "VAE",
+ "type": "VAE",
+ "links": [
+ 27,
+ 36
+ ]
+ }
+ ],
+ "properties": {
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "Node name for S&R": "VAELoader",
+ "models": [
+ {
+ "name": "qwen_image_vae.safetensors",
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/vae/qwen_image_vae.safetensors",
+ "directory": "vae"
+ }
+ ]
+ },
+ "widgets_values": [
+ "qwen_image_vae.safetensors"
+ ]
+ },
+ {
+ "id": 38,
+ "type": "CLIPLoader",
+ "pos": [
+ 30,
+ 490
+ ],
+ "size": [
+ 330,
+ 110
+ ],
+ "flags": {},
+ "order": 1,
+ "mode": 0,
+ "inputs": [],
+ "outputs": [
+ {
+ "name": "CLIP",
+ "type": "CLIP",
+ "links": [
+ 24,
+ 25
+ ]
+ }
+ ],
+ "properties": {
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "Node name for S&R": "CLIPLoader",
+ "models": [
+ {
+ "name": "qwen_2.5_vl_7b_fp8_scaled.safetensors",
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b_fp8_scaled.safetensors",
+ "directory": "text_encoders"
+ }
+ ]
+ },
+ "widgets_values": [
+ "qwen_2.5_vl_7b_fp8_scaled.safetensors",
+ "qwen_image",
+ "default"
+ ]
+ },
+ {
+ "id": 69,
+ "type": "LoraLoaderModelOnly",
+ "pos": [
+ 30,
+ 360
+ ],
+ "size": [
+ 330,
+ 82
+ ],
+ "flags": {},
+ "order": 9,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "model",
+ "type": "MODEL",
+ "link": 30
+ }
+ ],
+ "outputs": [
+ {
+ "name": "MODEL",
+ "type": "MODEL",
+ "links": [
+ 45
+ ]
+ }
+ ],
+ "properties": {
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "Node name for S&R": "LoraLoaderModelOnly",
+ "models": [
+ {
+ "name": "qwen_image_union_diffsynth_lora.safetensors",
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image-DiffSynth-ControlNets/resolve/main/split_files/loras/qwen_image_union_diffsynth_lora.safetensors",
+ "directory": "loras"
+ }
+ ]
+ },
+ "widgets_values": [
+ "qwen_image_union_diffsynth_lora.safetensors",
+ 1
+ ]
+ },
+ {
+ "id": 37,
+ "type": "UNETLoader",
+ "pos": [
+ 30,
+ 220
+ ],
+ "size": [
+ 330,
+ 82
+ ],
+ "flags": {},
+ "order": 2,
+ "mode": 0,
+ "inputs": [],
+ "outputs": [
+ {
+ "name": "MODEL",
+ "type": "MODEL",
+ "links": [
+ 30
+ ]
+ }
+ ],
+ "properties": {
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "Node name for S&R": "UNETLoader",
+ "models": [
+ {
+ "name": "qwen_image_fp8_e4m3fn.safetensors",
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_fp8_e4m3fn.safetensors",
+ "directory": "diffusion_models"
+ }
+ ]
+ },
+ "widgets_values": [
+ "qwen_image_fp8_e4m3fn.safetensors",
+ "default"
+ ]
+ },
+ {
+ "id": 77,
+ "type": "ImageScaleToTotalPixels",
+ "pos": [
+ 60,
+ 1220
+ ],
+ "size": [
+ 270,
+ 82
+ ],
+ "flags": {},
+ "order": 10,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "image",
+ "type": "IMAGE",
+ "link": 41
+ }
+ ],
+ "outputs": [
+ {
+ "name": "IMAGE",
+ "type": "IMAGE",
+ "links": [
+ 42
+ ]
+ }
+ ],
+ "properties": {
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "Node name for S&R": "ImageScaleToTotalPixels"
+ },
+ "widgets_values": [
+ "lanczos",
+ 1
+ ]
+ },
+ {
+ "id": 82,
+ "type": "MarkdownNote",
+ "pos": [
+ 60,
+ 1350
+ ],
+ "size": [
+ 270,
+ 120
+ ],
+ "flags": {},
+ "order": 3,
+ "mode": 0,
+ "inputs": [],
+ "outputs": [],
+ "title": "About Scale Image to Total Pixels",
+ "properties": {},
+ "widgets_values": [
+ "This node is to avoid poor output results caused by excessively large input image sizes. You can remove it or use **ctrl + B** to bypass it if you don't need it."
+ ],
+ "color": "#432",
+ "bgcolor": "#653"
+ },
+ {
+ "id": 60,
+ "type": "SaveImage",
+ "pos": [
+ 1400,
+ 280
+ ],
+ "size": [
+ 1030,
+ 1150
+ ],
+ "flags": {},
+ "order": 20,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "images",
+ "type": "IMAGE",
+ "link": 28
+ }
+ ],
+ "outputs": [],
+ "properties": {
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "Node name for S&R": "SaveImage"
+ },
+ "widgets_values": [
+ "ComfyUI"
+ ]
+ },
+ {
+ "id": 81,
+ "type": "MarkdownNote",
+ "pos": [
+ 1100,
+ 780
+ ],
+ "size": [
+ 260,
+ 150
+ ],
+ "flags": {},
+ "order": 4,
+ "mode": 0,
+ "inputs": [],
+ "outputs": [],
+ "title": "KSampler settings",
+ "properties": {},
+ "widgets_values": [
+ "You can test and find the best setting by yourself. The following table is for reference.\n\n| model | steps | cfg |\n|---------------------|---------------|---------------|\n| fp8_e4m3fn | 20 | 2.5 |\n| fp8_e4m3fn + 4 steps LoRA | 4 | 1.0 |\n"
+ ],
+ "color": "#432",
+ "bgcolor": "#653"
+ },
+ {
+ "id": 66,
+ "type": "ModelSamplingAuraFlow",
+ "pos": [
+ 1100,
+ 170
+ ],
+ "size": [
+ 260,
+ 58
+ ],
+ "flags": {},
+ "order": 13,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "model",
+ "type": "MODEL",
+ "link": 46
+ }
+ ],
+ "outputs": [
+ {
+ "name": "MODEL",
+ "type": "MODEL",
+ "links": [
+ 20
+ ]
+ }
+ ],
+ "properties": {
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "Node name for S&R": "ModelSamplingAuraFlow"
+ },
+ "widgets_values": [
+ 3.1
+ ]
+ },
+ {
+ "id": 6,
+ "type": "CLIPTextEncode",
+ "pos": [
+ 420,
+ 460
+ ],
+ "size": [
+ 400,
+ 200
+ ],
+ "flags": {},
+ "order": 7,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "clip",
+ "type": "CLIP",
+ "link": 24
+ }
+ ],
+ "outputs": [
+ {
+ "name": "CONDITIONING",
+ "type": "CONDITIONING",
+ "links": [
+ 31
+ ]
+ }
+ ],
+ "title": "CLIP Text Encode (Positive Prompt)",
+ "properties": {
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "Node name for S&R": "CLIPTextEncode"
+ },
+ "widgets_values": [
+ "Extreme close-up shot, realistic digital illustration, close eyes, peaceful,oil painting with thick application, girl with curly hair, large black flower, black nail polish, ring details, soft light and shadow, dark green backdrop, delicate hair texture, smooth skin rendering, fine artistic details, dreamy and elegant atmosphere, dark style, grotesque. White hair, huge black flower behind her (with yellow stamens, green stems and leaves), black turtleneck clothing, green leaves and black flowers around, artistic illustration style, sharp color contrast, mysterious atmosphere, delicate brushstrokes, thick oil painting, thickly applied oil painting, the whole picture is filled with layered flowers, huge, petals spreading, beautiful composition, unexpected angle, layered background. Macro, eyes looking down, thick application, brushstrokes, splatters, mottled, old, extremely romantic, light and shadow, strong contrast, maximalist style, full-frame composition."
+ ],
+ "color": "#232",
+ "bgcolor": "#353"
+ },
+ {
+ "id": 8,
+ "type": "VAEDecode",
+ "pos": [
+ 1400,
+ 170
+ ],
+ "size": [
+ 140,
+ 46
+ ],
+ "flags": {},
+ "order": 19,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "samples",
+ "type": "LATENT",
+ "link": 26
+ },
+ {
+ "name": "vae",
+ "type": "VAE",
+ "link": 27
+ }
+ ],
+ "outputs": [
+ {
+ "name": "IMAGE",
+ "type": "IMAGE",
+ "links": [
+ 28
+ ]
+ }
+ ],
+ "properties": {
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "Node name for S&R": "VAEDecode"
+ },
+ "widgets_values": []
+ },
+ {
+ "id": 80,
+ "type": "MarkdownNote",
+ "pos": [
+ -560,
+ 160
+ ],
+ "size": [
+ 540,
+ 630
+ ],
+ "flags": {},
+ "order": 5,
+ "mode": 0,
+ "inputs": [],
+ "outputs": [],
+ "title": "Model links",
+ "properties": {
+ "widget_ue_connectable": {}
+ },
+ "widgets_values": [
+ "[Tutorial](https://docs.comfy.org/tutorials/image/qwen/qwen-image) \n\n\n## Model links\n\nYou can find all the models on [Huggingface](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/tree/main) or [Modelscope](https://modelscope.cn/models/Comfy-Org/Qwen-Image_ComfyUI/files)\n\n**Diffusion model**\n\n- [qwen_image_fp8_e4m3fn.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_fp8_e4m3fn.safetensors)\n\n**LoRA**\n\n- [Qwen-Image-Lightning-8steps-V1.0.safetensors](https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-8steps-V1.0.safetensors)\n- [qwen_image_union_diffsynth_lora.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image-DiffSynth-ControlNets/resolve/main/split_files/loras/qwen_image_union_diffsynth_lora.safetensors)\n\n**Text encoder**\n\n- [qwen_2.5_vl_7b_fp8_scaled.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b_fp8_scaled.safetensors)\n\n**VAE**\n\n- [qwen_image_vae.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/vae/qwen_image_vae.safetensors)\n\nModel Storage Location\n\n```\n📂 ComfyUI/\n├── 📂 models/\n│ ├── 📂 diffusion_models/\n│ │ ├── qwen_image_fp8_e4m3fn.safetensors\n│ │ └── qwen_image_distill_full_fp8_e4m3fn.safetensors\n│ ├── 📂 loras/\n│ │ ├── qwen_image_union_diffsynth_lora.safetensors\n│ │ └── Qwen-Image-Lightning-8steps-V1.0.safetensors\n│ ├── 📂 vae/\n│ │ └── qwen_image_vae.safetensors\n│ └── 📂 text_encoders/\n│ └── qwen_2.5_vl_7b_fp8_scaled.safetensors\n```\n"
+ ],
+ "color": "#432",
+ "bgcolor": "#653"
+ },
+ {
+ "id": 73,
+ "type": "LoadImage",
+ "pos": [
+ 60,
+ 860
+ ],
+ "size": [
+ 274.080078125,
+ 314.00006103515625
+ ],
+ "flags": {},
+ "order": 6,
+ "mode": 0,
+ "inputs": [],
+ "outputs": [
+ {
+ "name": "IMAGE",
+ "type": "IMAGE",
+ "links": [
+ 41
+ ]
+ },
+ {
+ "name": "MASK",
+ "type": "MASK",
+ "links": null
+ }
+ ],
+ "properties": {
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "Node name for S&R": "LoadImage"
+ },
+ "widgets_values": [
+ "image_qwen_image_union_control_lora_input_image.png",
+ "image"
+ ]
+ },
+ {
+ "id": 79,
+ "type": "LoraLoaderModelOnly",
+ "pos": [
+ 490,
+ 210
+ ],
+ "size": [
+ 470,
+ 82
+ ],
+ "flags": {},
+ "order": 11,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "model",
+ "type": "MODEL",
+ "link": 45
+ }
+ ],
+ "outputs": [
+ {
+ "name": "MODEL",
+ "type": "MODEL",
+ "links": [
+ 46
+ ]
+ }
+ ],
+ "properties": {
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "Node name for S&R": "LoraLoaderModelOnly",
+ "models": [
+ {
+ "name": "Qwen-Image-Lightning-4steps-V1.0.safetensors",
+ "url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-4steps-V1.0.safetensors",
+ "directory": "loras"
+ }
+ ]
+ },
+ "widgets_values": [
+ "Qwen-Image-Lightning-4steps-V1.0.safetensors",
+ 1
+ ]
+ },
+ {
+ "id": 3,
+ "type": "KSampler",
+ "pos": [
+ 1100,
+ 280
+ ],
+ "size": [
+ 260,
+ 450
+ ],
+ "flags": {},
+ "order": 18,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "model",
+ "type": "MODEL",
+ "link": 20
+ },
+ {
+ "name": "positive",
+ "type": "CONDITIONING",
+ "link": 21
+ },
+ {
+ "name": "negative",
+ "type": "CONDITIONING",
+ "link": 22
+ },
+ {
+ "name": "latent_image",
+ "type": "LATENT",
+ "link": 44
+ }
+ ],
+ "outputs": [
+ {
+ "name": "LATENT",
+ "type": "LATENT",
+ "links": [
+ 26
+ ]
+ }
+ ],
+ "properties": {
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "Node name for S&R": "KSampler"
+ },
+ "widgets_values": [
+ 70741926012422,
+ "randomize",
+ 4,
+ 1,
+ "euler",
+ "simple",
+ 1
+ ]
+ }
+ ],
+ "links": [
+ [
+ 20,
+ 66,
+ 0,
+ 3,
+ 0,
+ "MODEL"
+ ],
+ [
+ 21,
+ 70,
+ 0,
+ 3,
+ 1,
+ "CONDITIONING"
+ ],
+ [
+ 22,
+ 71,
+ 0,
+ 3,
+ 2,
+ "CONDITIONING"
+ ],
+ [
+ 24,
+ 38,
+ 0,
+ 6,
+ 0,
+ "CLIP"
+ ],
+ [
+ 25,
+ 38,
+ 0,
+ 7,
+ 0,
+ "CLIP"
+ ],
+ [
+ 26,
+ 3,
+ 0,
+ 8,
+ 0,
+ "LATENT"
+ ],
+ [
+ 27,
+ 39,
+ 0,
+ 8,
+ 1,
+ "VAE"
+ ],
+ [
+ 28,
+ 8,
+ 0,
+ 60,
+ 0,
+ "IMAGE"
+ ],
+ [
+ 30,
+ 37,
+ 0,
+ 69,
+ 0,
+ "MODEL"
+ ],
+ [
+ 31,
+ 6,
+ 0,
+ 70,
+ 0,
+ "CONDITIONING"
+ ],
+ [
+ 32,
+ 72,
+ 0,
+ 70,
+ 1,
+ "LATENT"
+ ],
+ [
+ 33,
+ 7,
+ 0,
+ 71,
+ 0,
+ "CONDITIONING"
+ ],
+ [
+ 34,
+ 72,
+ 0,
+ 71,
+ 1,
+ "LATENT"
+ ],
+ [
+ 35,
+ 74,
+ 0,
+ 72,
+ 0,
+ "IMAGE"
+ ],
+ [
+ 36,
+ 39,
+ 0,
+ 72,
+ 1,
+ "VAE"
+ ],
+ [
+ 38,
+ 74,
+ 0,
+ 75,
+ 0,
+ "IMAGE"
+ ],
+ [
+ 41,
+ 73,
+ 0,
+ 77,
+ 0,
+ "IMAGE"
+ ],
+ [
+ 42,
+ 77,
+ 0,
+ 74,
+ 0,
+ "IMAGE"
+ ],
+ [
+ 44,
+ 72,
+ 0,
+ 3,
+ 3,
+ "LATENT"
+ ],
+ [
+ 45,
+ 69,
+ 0,
+ 79,
+ 0,
+ "MODEL"
+ ],
+ [
+ 46,
+ 79,
+ 0,
+ 66,
+ 0,
+ "MODEL"
+ ]
+ ],
+ "groups": [
+ {
+ "id": 1,
+ "title": "Step 1 - Load models",
+ "bounding": [
+ 10,
+ 130,
+ 370,
+ 620
+ ],
+ "color": "#3f789e",
+ "font_size": 24,
+ "flags": {}
+ },
+ {
+ "id": 2,
+ "title": "Step 2 - Upload reference image",
+ "bounding": [
+ 10,
+ 770,
+ 370,
+ 730
+ ],
+ "color": "#3f789e",
+ "font_size": 24,
+ "flags": {}
+ },
+ {
+ "id": 5,
+ "title": "Conditioning",
+ "bounding": [
+ 400,
+ 330,
+ 680,
+ 570
+ ],
+ "color": "#3f789e",
+ "font_size": 24,
+ "flags": {}
+ },
+ {
+ "id": 3,
+ "title": "Step 3 - Prompt",
+ "bounding": [
+ 410,
+ 390,
+ 420,
+ 490
+ ],
+ "color": "#3f789e",
+ "font_size": 24,
+ "flags": {}
+ },
+ {
+ "id": 4,
+ "title": "Image Processing",
+ "bounding": [
+ 410,
+ 920,
+ 410,
+ 573.5999755859375
+ ],
+ "color": "#3f789e",
+ "font_size": 24,
+ "flags": {}
+ },
+ {
+ "id": 6,
+ "title": "4 steps lighting LoRA",
+ "bounding": [
+ 400,
+ 130,
+ 680,
+ 180
+ ],
+ "color": "#3f789e",
+ "font_size": 24,
+ "flags": {}
+ }
+ ],
+ "config": {},
+ "extra": {
+ "ds": {
+ "scale": 0.5786562229169053,
+ "offset": [
+ 872.7752229086909,
+ -184.78246118792714
+ ]
+ },
+ "frontendVersion": "1.27.10"
+ },
+ "version": 0.4
+ }
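
Within the workflow shown above, each loader node carries a `properties.models` list whose entries record the model `name`, download `url`, and target `directory`. A small sketch that pulls those download hints out of this template (the template filename is inferred from the file list; `importlib.resources` access is again an assumption, not an API documented by this diff):

```python
import json
from importlib import resources

# Read the bundled workflow graph (filename inferred from the file list above).
template = (
    resources.files("comfyui_workflow_templates_media_image")
    / "templates"
    / "image_qwen_image_union_control_lora.json"
)
workflow = json.loads(template.read_text(encoding="utf-8"))

# Loader nodes embed download hints under properties.models, as visible in the
# diff: each entry has a name, a url, and the models/ subdirectory it belongs in.
for node in workflow.get("nodes", []):
    for model in node.get("properties", {}).get("models", []):
        print(f"{model['directory']}/{model['name']}  <-  {model['url']}")
```
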