comfyui-workflow-templates 0.1.61-py3-none-any.whl → 0.1.63-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of comfyui-workflow-templates might be problematic.

Files changed (23)
  1. comfyui_workflow_templates/templates/api_hailuo_minimax_video-1.webp +0 -0
  2. comfyui_workflow_templates/templates/api_hailuo_minimax_video.json +203 -0
  3. comfyui_workflow_templates/templates/api_veo3-1.webp +0 -0
  4. comfyui_workflow_templates/templates/api_veo3.json +207 -0
  5. comfyui_workflow_templates/templates/api_vidu_image_to_video-1.webp +0 -0
  6. comfyui_workflow_templates/templates/api_vidu_image_to_video.json +180 -0
  7. comfyui_workflow_templates/templates/api_vidu_reference_to_video-1.webp +0 -0
  8. comfyui_workflow_templates/templates/api_vidu_reference_to_video.json +416 -0
  9. comfyui_workflow_templates/templates/api_vidu_start_end_to_video-1.webp +0 -0
  10. comfyui_workflow_templates/templates/api_vidu_start_end_to_video.json +230 -0
  11. comfyui_workflow_templates/templates/api_vidu_text_to_video-1.webp +0 -0
  12. comfyui_workflow_templates/templates/api_vidu_text_to_video.json +130 -0
  13. comfyui_workflow_templates/templates/image_qwen_image_edit.json +88 -88
  14. comfyui_workflow_templates/templates/image_qwen_image_union_control-1.webp +0 -0
  15. comfyui_workflow_templates/templates/image_qwen_image_union_control-3.webp +0 -0
  16. comfyui_workflow_templates/templates/image_qwen_image_union_control.json +1097 -0
  17. comfyui_workflow_templates/templates/index.json +76 -0
  18. comfyui_workflow_templates/templates/video_wan2_2_14B_i2v.json +1 -1
  19. {comfyui_workflow_templates-0.1.61.dist-info → comfyui_workflow_templates-0.1.63.dist-info}/METADATA +1 -1
  20. {comfyui_workflow_templates-0.1.61.dist-info → comfyui_workflow_templates-0.1.63.dist-info}/RECORD +23 -8
  21. {comfyui_workflow_templates-0.1.61.dist-info → comfyui_workflow_templates-0.1.63.dist-info}/WHEEL +0 -0
  22. {comfyui_workflow_templates-0.1.61.dist-info → comfyui_workflow_templates-0.1.63.dist-info}/licenses/LICENSE +0 -0
  23. {comfyui_workflow_templates-0.1.61.dist-info → comfyui_workflow_templates-0.1.63.dist-info}/top_level.txt +0 -0
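
For reference, the templates listed above ship as package data under `comfyui_workflow_templates/templates/`. A minimal sketch of enumerating and loading them from the installed wheel (assumption: the layout matches the file list above; the package itself may be consumed differently by ComfyUI):

```python
# Minimal sketch: list the bundled workflow templates and load one added in this release.
# Assumes the wheel installs the JSON files as package data under
# comfyui_workflow_templates/templates/, as the file list above suggests.
import json
from importlib.resources import files

templates_dir = files("comfyui_workflow_templates") / "templates"

# Enumerate the workflow JSON files bundled with the installed version.
for entry in sorted(templates_dir.iterdir(), key=lambda p: p.name):
    if entry.name.endswith(".json"):
        print(entry.name)

# Load the new union-control template and report how many nodes it defines.
workflow = json.loads(
    (templates_dir / "image_qwen_image_union_control.json").read_text(encoding="utf-8")
)
print(len(workflow.get("nodes", [])), "nodes")
```
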
comfyui_workflow_templates/templates/image_qwen_image_union_control.json
@@ -0,0 +1,1097 @@
+ {
+ "id": "00000000-0000-0000-0000-000000000000",
+ "revision": 0,
+ "last_node_id": 82,
+ "last_link_id": 46,
+ "nodes": [
+ {
+ "id": 7,
+ "type": "CLIPTextEncode",
+ "pos": [
+ 420,
+ 710
+ ],
+ "size": [
+ 400,
+ 150
+ ],
+ "flags": {},
+ "order": 9,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "clip",
+ "type": "CLIP",
+ "link": 25
+ }
+ ],
+ "outputs": [
+ {
+ "name": "CONDITIONING",
+ "type": "CONDITIONING",
+ "links": [
+ 33
+ ]
+ }
+ ],
+ "title": "CLIP Text Encode (Negative Prompt)",
+ "properties": {
+ "Node name for S&R": "CLIPTextEncode",
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51"
+ },
+ "widgets_values": [
+ " "
+ ],
+ "color": "#223",
+ "bgcolor": "#335"
+ },
+ {
+ "id": 73,
+ "type": "LoadImage",
+ "pos": [
+ 60,
+ 860
+ ],
+ "size": [
+ 274.080078125,
+ 314.00006103515625
+ ],
+ "flags": {},
+ "order": 0,
+ "mode": 0,
+ "inputs": [],
+ "outputs": [
+ {
+ "name": "IMAGE",
+ "type": "IMAGE",
+ "links": [
+ 41
+ ]
+ },
+ {
+ "name": "MASK",
+ "type": "MASK",
+ "links": null
+ }
+ ],
+ "properties": {
+ "Node name for S&R": "LoadImage",
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51"
+ },
+ "widgets_values": [
+ "ComfyUI_00752_.png",
+ "image"
+ ]
+ },
+ {
+ "id": 74,
+ "type": "Canny",
+ "pos": [
+ 440,
+ 1000
+ ],
+ "size": [
+ 350,
+ 82
+ ],
+ "flags": {},
+ "order": 11,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "image",
+ "type": "IMAGE",
+ "link": 42
+ }
+ ],
+ "outputs": [
+ {
+ "name": "IMAGE",
+ "type": "IMAGE",
+ "links": [
+ 35,
+ 38
+ ]
+ }
+ ],
+ "properties": {
+ "Node name for S&R": "Canny",
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51"
+ },
+ "widgets_values": [
+ 0.4,
+ 0.8
+ ]
+ },
+ {
+ "id": 75,
+ "type": "PreviewImage",
+ "pos": [
+ 450,
+ 1140
+ ],
+ "size": [
+ 330,
+ 290
+ ],
+ "flags": {},
+ "order": 14,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "images",
+ "type": "IMAGE",
+ "link": 38
+ }
+ ],
+ "outputs": [],
+ "properties": {
+ "Node name for S&R": "PreviewImage",
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51"
+ },
+ "widgets_values": []
+ },
+ {
+ "id": 70,
+ "type": "ReferenceLatent",
+ "pos": [
+ 860,
+ 470
+ ],
+ "size": [
+ 197.712890625,
+ 46
+ ],
+ "flags": {},
+ "order": 16,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "conditioning",
+ "type": "CONDITIONING",
+ "link": 31
+ },
+ {
+ "name": "latent",
+ "shape": 7,
+ "type": "LATENT",
+ "link": 32
+ }
+ ],
+ "outputs": [
+ {
+ "name": "CONDITIONING",
+ "type": "CONDITIONING",
+ "links": [
+ 21
+ ]
+ }
+ ],
+ "properties": {
+ "Node name for S&R": "ReferenceLatent",
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51"
+ },
+ "widgets_values": []
+ },
+ {
+ "id": 71,
+ "type": "ReferenceLatent",
+ "pos": [
+ 850,
+ 720
+ ],
+ "size": [
+ 197.712890625,
+ 46
+ ],
+ "flags": {},
+ "order": 17,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "conditioning",
+ "type": "CONDITIONING",
+ "link": 33
+ },
+ {
+ "name": "latent",
+ "shape": 7,
+ "type": "LATENT",
+ "link": 34
+ }
+ ],
+ "outputs": [
+ {
+ "name": "CONDITIONING",
+ "type": "CONDITIONING",
+ "links": [
+ 22
+ ]
+ }
+ ],
+ "properties": {
+ "Node name for S&R": "ReferenceLatent",
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51"
+ },
+ "widgets_values": []
+ },
+ {
+ "id": 72,
+ "type": "VAEEncode",
+ "pos": [
+ 900,
+ 950
+ ],
+ "size": [
+ 140,
+ 46
+ ],
+ "flags": {},
+ "order": 13,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "pixels",
+ "type": "IMAGE",
+ "link": 35
+ },
+ {
+ "name": "vae",
+ "type": "VAE",
+ "link": 36
+ }
+ ],
+ "outputs": [
+ {
+ "name": "LATENT",
+ "type": "LATENT",
+ "links": [
+ 32,
+ 34,
+ 44
+ ]
+ }
+ ],
+ "properties": {
+ "Node name for S&R": "VAEEncode",
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51"
+ },
+ "widgets_values": []
+ },
+ {
+ "id": 79,
+ "type": "LoraLoaderModelOnly",
+ "pos": [
+ 490,
+ 210
+ ],
+ "size": [
+ 470,
+ 82
+ ],
+ "flags": {},
+ "order": 12,
+ "mode": 4,
+ "inputs": [
+ {
+ "name": "model",
+ "type": "MODEL",
+ "link": 45
+ }
+ ],
+ "outputs": [
+ {
+ "name": "MODEL",
+ "type": "MODEL",
+ "links": [
+ 46
+ ]
+ }
+ ],
+ "properties": {
+ "Node name for S&R": "LoraLoaderModelOnly",
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "models": [
+ {
+ "name": "Qwen-Image-Lightning-4steps-V1.0.safetensors",
+ "url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-4steps-V1.0.safetensors",
+ "directory": "loras"
+ }
+ ]
+ },
+ "widgets_values": [
+ "Qwen-Image-Lightning-4steps-V1.0.safetensors",
+ 1
+ ]
+ },
+ {
+ "id": 39,
+ "type": "VAELoader",
+ "pos": [
+ 30,
+ 650
+ ],
+ "size": [
+ 330,
+ 58
+ ],
+ "flags": {},
+ "order": 1,
+ "mode": 0,
+ "inputs": [],
+ "outputs": [
+ {
+ "name": "VAE",
+ "type": "VAE",
+ "links": [
+ 27,
+ 36
+ ]
+ }
+ ],
+ "properties": {
+ "Node name for S&R": "VAELoader",
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "models": [
+ {
+ "name": "qwen_image_vae.safetensors",
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/vae/qwen_image_vae.safetensors",
+ "directory": "vae"
+ }
+ ]
+ },
+ "widgets_values": [
+ "qwen_image_vae.safetensors"
+ ]
+ },
+ {
+ "id": 38,
+ "type": "CLIPLoader",
+ "pos": [
+ 30,
+ 490
+ ],
+ "size": [
+ 330,
+ 110
+ ],
+ "flags": {},
+ "order": 2,
+ "mode": 0,
+ "inputs": [],
+ "outputs": [
+ {
+ "name": "CLIP",
+ "type": "CLIP",
+ "links": [
+ 24,
+ 25
+ ]
+ }
+ ],
+ "properties": {
+ "Node name for S&R": "CLIPLoader",
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "models": [
+ {
+ "name": "qwen_2.5_vl_7b_fp8_scaled.safetensors",
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b_fp8_scaled.safetensors",
+ "directory": "text_encoders"
+ }
+ ]
+ },
+ "widgets_values": [
+ "qwen_2.5_vl_7b_fp8_scaled.safetensors",
+ "qwen_image",
+ "default"
+ ]
+ },
+ {
+ "id": 69,
+ "type": "LoraLoaderModelOnly",
+ "pos": [
+ 30,
+ 360
+ ],
+ "size": [
+ 330,
+ 82
+ ],
+ "flags": {},
+ "order": 10,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "model",
+ "type": "MODEL",
+ "link": 30
+ }
+ ],
+ "outputs": [
+ {
+ "name": "MODEL",
+ "type": "MODEL",
+ "links": [
+ 45
+ ]
+ }
+ ],
+ "properties": {
+ "Node name for S&R": "LoraLoaderModelOnly",
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "models": [
+ {
+ "name": "qwen_image_union_diffsynth_lora.safetensors",
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image-DiffSynth-ControlNets/resolve/main/split_files/loras/qwen_image_union_diffsynth_lora.safetensors",
+ "directory": "loras"
+ }
+ ]
+ },
+ "widgets_values": [
+ "qwen_image_union_diffsynth_lora.safetensors",
+ 1
+ ]
+ },
+ {
+ "id": 37,
+ "type": "UNETLoader",
+ "pos": [
+ 30,
+ 220
+ ],
+ "size": [
+ 330,
+ 82
+ ],
+ "flags": {},
+ "order": 3,
+ "mode": 0,
+ "inputs": [],
+ "outputs": [
+ {
+ "name": "MODEL",
+ "type": "MODEL",
+ "links": [
+ 30
+ ]
+ }
+ ],
+ "properties": {
+ "Node name for S&R": "UNETLoader",
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "models": [
+ {
+ "name": "qwen_image_fp8_e4m3fn.safetensors",
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_fp8_e4m3fn.safetensors",
+ "directory": "diffusion_models"
+ }
+ ]
+ },
+ "widgets_values": [
+ "qwen_image_fp8_e4m3fn.safetensors",
+ "default"
+ ]
+ },
+ {
+ "id": 77,
+ "type": "ImageScaleToTotalPixels",
+ "pos": [
+ 60,
+ 1220
+ ],
+ "size": [
+ 270,
+ 82
+ ],
+ "flags": {},
+ "order": 7,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "image",
+ "type": "IMAGE",
+ "link": 41
+ }
+ ],
+ "outputs": [
+ {
+ "name": "IMAGE",
+ "type": "IMAGE",
+ "links": [
+ 42
+ ]
+ }
+ ],
+ "properties": {
+ "Node name for S&R": "ImageScaleToTotalPixels",
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51"
+ },
+ "widgets_values": [
+ "lanczos",
+ 1
+ ]
+ },
+ {
+ "id": 82,
+ "type": "MarkdownNote",
+ "pos": [
+ 60,
+ 1350
+ ],
+ "size": [
+ 270,
+ 120
+ ],
+ "flags": {},
+ "order": 4,
+ "mode": 0,
+ "inputs": [],
+ "outputs": [],
+ "title": "About Scale Image to Total Pixels",
+ "properties": {},
+ "widgets_values": [
+ "This node is to avoid poor output results caused by excessively large input image sizes. You can remove it or use **ctrl + B** to bypass it if you don't need it."
+ ],
+ "color": "#432",
+ "bgcolor": "#653"
+ },
+ {
+ "id": 3,
+ "type": "KSampler",
+ "pos": [
+ 1100,
+ 280
+ ],
+ "size": [
+ 260,
+ 450
+ ],
+ "flags": {},
+ "order": 18,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "model",
+ "type": "MODEL",
+ "link": 20
+ },
+ {
+ "name": "positive",
+ "type": "CONDITIONING",
+ "link": 21
+ },
+ {
+ "name": "negative",
+ "type": "CONDITIONING",
+ "link": 22
+ },
+ {
+ "name": "latent_image",
+ "type": "LATENT",
+ "link": 44
+ }
+ ],
+ "outputs": [
+ {
+ "name": "LATENT",
+ "type": "LATENT",
+ "links": [
+ 26
+ ]
+ }
+ ],
+ "properties": {
+ "Node name for S&R": "KSampler",
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51"
+ },
+ "widgets_values": [
+ 70741926012422,
+ "randomize",
+ 20,
+ 2.5,
+ "euler",
+ "simple",
+ 1
+ ]
+ },
+ {
+ "id": 60,
+ "type": "SaveImage",
+ "pos": [
+ 1400,
+ 280
+ ],
+ "size": [
+ 1030,
+ 1150
+ ],
+ "flags": {},
+ "order": 20,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "images",
+ "type": "IMAGE",
+ "link": 28
+ }
+ ],
+ "outputs": [],
+ "properties": {
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51"
+ },
+ "widgets_values": [
+ "ComfyUI"
+ ]
+ },
+ {
+ "id": 81,
+ "type": "MarkdownNote",
+ "pos": [
+ 1100,
+ 780
+ ],
+ "size": [
+ 260,
+ 150
+ ],
+ "flags": {},
+ "order": 5,
+ "mode": 0,
+ "inputs": [],
+ "outputs": [],
+ "title": "KSampler settings",
+ "properties": {},
+ "widgets_values": [
+ "You can test and find the best setting by yourself. The following table is for reference.\n\n| model | steps | cfg |\n|---------------------|---------------|---------------|\n| fp8_e4m3fn | 20 | 2.5 |\n| fp8_e4m3fn + 4 steps LoRA | 4 | 1.0 |\n"
+ ],
+ "color": "#432",
+ "bgcolor": "#653"
+ },
+ {
+ "id": 66,
+ "type": "ModelSamplingAuraFlow",
+ "pos": [
+ 1100,
+ 170
+ ],
+ "size": [
+ 260,
+ 58
+ ],
+ "flags": {},
+ "order": 15,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "model",
+ "type": "MODEL",
+ "link": 46
+ }
+ ],
+ "outputs": [
+ {
+ "name": "MODEL",
+ "type": "MODEL",
+ "links": [
+ 20
+ ]
+ }
+ ],
+ "properties": {
+ "Node name for S&R": "ModelSamplingAuraFlow",
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51"
+ },
+ "widgets_values": [
+ 3.1
+ ]
+ },
+ {
+ "id": 6,
+ "type": "CLIPTextEncode",
+ "pos": [
+ 420,
+ 460
+ ],
+ "size": [
+ 400,
+ 200
+ ],
+ "flags": {},
+ "order": 8,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "clip",
+ "type": "CLIP",
+ "link": 24
+ }
+ ],
+ "outputs": [
+ {
+ "name": "CONDITIONING",
+ "type": "CONDITIONING",
+ "links": [
+ 31
+ ]
+ }
+ ],
+ "title": "CLIP Text Encode (Positive Prompt)",
+ "properties": {
+ "Node name for S&R": "CLIPTextEncode",
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51"
+ },
+ "widgets_values": [
+ "Extreme close-up shot, realistic digital illustration, close eyes, peaceful,oil painting with thick application, girl with curly hair, large black flower, black nail polish, ring details, soft light and shadow, dark green backdrop, delicate hair texture, smooth skin rendering, fine artistic details, dreamy and elegant atmosphere, dark style, grotesque. White hair, huge black flower behind her (with yellow stamens, green stems and leaves), black turtleneck clothing, green leaves and black flowers around, artistic illustration style, sharp color contrast, mysterious atmosphere, delicate brushstrokes, thick oil painting, thickly applied oil painting, the whole picture is filled with layered flowers, huge, petals spreading, beautiful composition, unexpected angle, layered background. Macro, eyes looking down, thick application, brushstrokes, splatters, mottled, old, extremely romantic, light and shadow, strong contrast, maximalist style, full-frame composition."
+ ],
+ "color": "#232",
+ "bgcolor": "#353"
+ },
+ {
+ "id": 8,
+ "type": "VAEDecode",
+ "pos": [
+ 1400,
+ 170
+ ],
+ "size": [
+ 140,
+ 46
+ ],
+ "flags": {},
+ "order": 19,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "samples",
+ "type": "LATENT",
+ "link": 26
+ },
+ {
+ "name": "vae",
+ "type": "VAE",
+ "link": 27
+ }
+ ],
+ "outputs": [
+ {
+ "name": "IMAGE",
+ "type": "IMAGE",
+ "links": [
+ 28
+ ]
+ }
+ ],
+ "properties": {
+ "Node name for S&R": "VAEDecode",
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51"
+ },
+ "widgets_values": []
+ },
+ {
+ "id": 80,
+ "type": "MarkdownNote",
+ "pos": [
+ -560,
+ 160
+ ],
+ "size": [
+ 540,
+ 630
+ ],
+ "flags": {},
+ "order": 6,
+ "mode": 0,
+ "inputs": [],
+ "outputs": [],
+ "title": "Model links",
+ "properties": {
+ "widget_ue_connectable": {}
+ },
+ "widgets_values": [
+ "[Tutorial](https://docs.comfy.org/tutorials/image/qwen/qwen-image) | [教程](https://docs.comfy.org/zh-CN/tutorials/image/qwen/qwen-image)\n\n\n## Model links\n\nYou can find all the models on [Huggingface](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/tree/main) or [Modelscope](https://modelscope.cn/models/Comfy-Org/Qwen-Image_ComfyUI/files)\n\n**Diffusion model**\n\n- [qwen_image_fp8_e4m3fn.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_fp8_e4m3fn.safetensors)\n\n**LoRA**\n\n- [Qwen-Image-Lightning-8steps-V1.0.safetensors](https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-8steps-V1.0.safetensors)\n- [qwen_image_union_diffsynth_lora.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image-DiffSynth-ControlNets/resolve/main/split_files/loras/qwen_image_union_diffsynth_lora.safetensors)\n\n**Text encoder**\n\n- [qwen_2.5_vl_7b_fp8_scaled.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b_fp8_scaled.safetensors)\n\n**VAE**\n\n- [qwen_image_vae.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/vae/qwen_image_vae.safetensors)\n\nModel Storage Location\n\n```\n📂 ComfyUI/\n├── 📂 models/\n│ ├── 📂 diffusion_models/\n│ │ ├── qwen_image_fp8_e4m3fn.safetensors\n│ │ └── qwen_image_distill_full_fp8_e4m3fn.safetensors\n│ ├── 📂 loras/\n│ │ ├── qwen_image_union_diffsynth_lora.safetensors\n│ │ └── Qwen-Image-Lightning-8steps-V1.0.safetensors\n│ ├── 📂 vae/\n│ │ └── qwen_image_vae.safetensors\n│ └── 📂 text_encoders/\n│ └── qwen_2.5_vl_7b_fp8_scaled.safetensors\n```\n"
+ ],
+ "color": "#432",
+ "bgcolor": "#653"
+ }
+ ],
+ "links": [
+ [
+ 20,
+ 66,
+ 0,
+ 3,
+ 0,
+ "MODEL"
+ ],
+ [
+ 21,
+ 70,
+ 0,
+ 3,
+ 1,
+ "CONDITIONING"
+ ],
+ [
+ 22,
+ 71,
+ 0,
+ 3,
+ 2,
+ "CONDITIONING"
+ ],
+ [
+ 24,
+ 38,
+ 0,
+ 6,
+ 0,
+ "CLIP"
+ ],
+ [
+ 25,
+ 38,
+ 0,
+ 7,
+ 0,
+ "CLIP"
+ ],
+ [
+ 26,
+ 3,
+ 0,
+ 8,
+ 0,
+ "LATENT"
+ ],
+ [
+ 27,
+ 39,
+ 0,
+ 8,
+ 1,
+ "VAE"
+ ],
+ [
+ 28,
+ 8,
+ 0,
+ 60,
+ 0,
+ "IMAGE"
+ ],
+ [
+ 30,
+ 37,
+ 0,
+ 69,
+ 0,
+ "MODEL"
+ ],
+ [
+ 31,
+ 6,
+ 0,
+ 70,
+ 0,
+ "CONDITIONING"
+ ],
+ [
+ 32,
+ 72,
+ 0,
+ 70,
+ 1,
+ "LATENT"
+ ],
+ [
+ 33,
+ 7,
+ 0,
+ 71,
+ 0,
+ "CONDITIONING"
+ ],
+ [
+ 34,
+ 72,
+ 0,
+ 71,
+ 1,
+ "LATENT"
+ ],
+ [
+ 35,
+ 74,
+ 0,
+ 72,
+ 0,
+ "IMAGE"
+ ],
+ [
+ 36,
+ 39,
+ 0,
+ 72,
+ 1,
+ "VAE"
+ ],
+ [
+ 38,
+ 74,
+ 0,
+ 75,
+ 0,
+ "IMAGE"
+ ],
+ [
+ 41,
+ 73,
+ 0,
+ 77,
+ 0,
+ "IMAGE"
+ ],
+ [
+ 42,
+ 77,
+ 0,
+ 74,
+ 0,
+ "IMAGE"
+ ],
+ [
+ 44,
+ 72,
+ 0,
+ 3,
+ 3,
+ "LATENT"
+ ],
+ [
+ 45,
+ 69,
+ 0,
+ 79,
+ 0,
+ "MODEL"
+ ],
+ [
+ 46,
+ 79,
+ 0,
+ 66,
+ 0,
+ "MODEL"
+ ]
+ ],
+ "groups": [
+ {
+ "id": 1,
+ "title": "Step 1 - Load models",
+ "bounding": [
+ 10,
+ 130,
+ 370,
+ 620
+ ],
+ "color": "#3f789e",
+ "font_size": 24,
+ "flags": {}
+ },
+ {
+ "id": 2,
+ "title": "Step 2 - Upload reference image",
+ "bounding": [
+ 10,
+ 770,
+ 370,
+ 730
+ ],
+ "color": "#3f789e",
+ "font_size": 24,
+ "flags": {}
+ },
+ {
+ "id": 5,
+ "title": "Conditioning",
+ "bounding": [
+ 400,
+ 330,
+ 680,
+ 570
+ ],
+ "color": "#3f789e",
+ "font_size": 24,
+ "flags": {}
+ },
+ {
+ "id": 3,
+ "title": "Step 3 - Prompt",
+ "bounding": [
+ 410,
+ 390,
+ 420,
+ 490
+ ],
+ "color": "#3f789e",
+ "font_size": 24,
+ "flags": {}
+ },
+ {
+ "id": 4,
+ "title": "Image Processing",
+ "bounding": [
+ 410,
+ 920,
+ 410,
+ 573.5999755859375
+ ],
+ "color": "#3f789e",
+ "font_size": 24,
+ "flags": {}
+ },
+ {
+ "id": 6,
+ "title": "4 steps lighting LoRA",
+ "bounding": [
+ 400,
+ 130,
+ 680,
+ 180
+ ],
+ "color": "#3f789e",
+ "font_size": 24,
+ "flags": {}
+ }
+ ],
+ "config": {},
+ "extra": {
+ "ds": {
+ "scale": 0.7213855104977631,
+ "offset": [
+ 235.0958937828103,
+ -225.58323513433564
+ ]
+ },
+ "frontendVersion": "1.26.6"
+ },
+ "version": 0.4
+ }
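
In the workflow JSON above, each entry in the "links" array encodes one edge as [link_id, source_node_id, source_output_slot, target_node_id, target_input_slot, data_type] (for example, link 20 carries MODEL from ModelSamplingAuraFlow node 66 into KSampler node 3). A minimal sketch that resolves those tuples against the "nodes" array, assuming the template is read from the installed package:

```python
# Minimal sketch: print the connections of the union-control workflow shown above.
# Assumes the JSON is available as package data in the installed wheel.
import json
from importlib.resources import files

path = files("comfyui_workflow_templates") / "templates" / "image_qwen_image_union_control.json"
wf = json.loads(path.read_text(encoding="utf-8"))

# Index nodes by id so link endpoints can be resolved to node types.
nodes = {node["id"]: node for node in wf["nodes"]}

# Each link is [link_id, source_node_id, source_slot, target_node_id, target_slot, data_type].
for link_id, src, src_slot, dst, dst_slot, dtype in wf["links"]:
    print(f'{nodes[src]["type"]}[{src_slot}] -> {nodes[dst]["type"]}[{dst_slot}] ({dtype})')
```
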