comfyui-workflow-templates 0.1.64__py3-none-any.whl → 0.1.66__py3-none-any.whl

This diff compares publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.

Files changed (24)
  1. comfyui_workflow_templates/templates/image_chroma_text_to_image.json +229 -277
  2. comfyui_workflow_templates/templates/image_qwen_image.json +16 -16
  3. comfyui_workflow_templates/templates/image_qwen_image_controlnet_patch-1.webp +0 -0
  4. comfyui_workflow_templates/templates/image_qwen_image_controlnet_patch-2.webp +0 -0
  5. comfyui_workflow_templates/templates/image_qwen_image_controlnet_patch.json +1053 -0
  6. comfyui_workflow_templates/templates/image_qwen_image_instantx_controlnet-1.webp +0 -0
  7. comfyui_workflow_templates/templates/image_qwen_image_instantx_controlnet-2.webp +0 -0
  8. comfyui_workflow_templates/templates/image_qwen_image_instantx_controlnet.json +2048 -0
  9. comfyui_workflow_templates/templates/index.es.json +696 -1381
  10. comfyui_workflow_templates/templates/index.fr.json +676 -1319
  11. comfyui_workflow_templates/templates/index.ja.json +684 -1337
  12. comfyui_workflow_templates/templates/index.json +25 -35
  13. comfyui_workflow_templates/templates/index.ko.json +969 -1564
  14. comfyui_workflow_templates/templates/index.ru.json +623 -1265
  15. comfyui_workflow_templates/templates/index.zh-TW.json +694 -1347
  16. comfyui_workflow_templates/templates/index.zh.json +700 -1353
  17. {comfyui_workflow_templates-0.1.64.dist-info → comfyui_workflow_templates-0.1.66.dist-info}/METADATA +1 -1
  18. {comfyui_workflow_templates-0.1.64.dist-info → comfyui_workflow_templates-0.1.66.dist-info}/RECORD +24 -18
  19. /comfyui_workflow_templates/templates/{image_qwen_image_union_control-1.webp → image_qwen_image_union_control_lora-1.webp} +0 -0
  20. /comfyui_workflow_templates/templates/{image_qwen_image_union_control-3.webp → image_qwen_image_union_control_lora-3.webp} +0 -0
  21. /comfyui_workflow_templates/templates/{image_qwen_image_union_control.json → image_qwen_image_union_control_lora.json} +0 -0
  22. {comfyui_workflow_templates-0.1.64.dist-info → comfyui_workflow_templates-0.1.66.dist-info}/WHEEL +0 -0
  23. {comfyui_workflow_templates-0.1.64.dist-info → comfyui_workflow_templates-0.1.66.dist-info}/licenses/LICENSE +0 -0
  24. {comfyui_workflow_templates-0.1.64.dist-info → comfyui_workflow_templates-0.1.66.dist-info}/top_level.txt +0 -0
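
The largest addition is the new `image_qwen_image_controlnet_patch.json` template, reproduced in full below. As a quick orientation (not shipped with the package), here is a minimal sketch of reading one of the new templates from an installed wheel; it assumes the JSON files are exposed as package data under `comfyui_workflow_templates/templates/`, which is what the file paths above indicate:

```python
# Minimal sketch: load one of the new templates from the installed wheel.
# Assumes the JSON ships as package data under
# comfyui_workflow_templates/templates/ (as the paths above suggest).
import json
from importlib import resources

template_path = (
    resources.files("comfyui_workflow_templates")
    / "templates"
    / "image_qwen_image_controlnet_patch.json"
)
workflow = json.loads(template_path.read_text(encoding="utf-8"))
print(workflow["id"], "-", len(workflow["nodes"]), "nodes,",
      len(workflow["links"]), "links")
```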
comfyui_workflow_templates/templates/image_qwen_image_controlnet_patch.json
@@ -0,0 +1,1053 @@
+ {
+ "id": "91f6bbe2-ed41-4fd6-bac7-71d5b5864ecb",
+ "revision": 0,
+ "last_node_id": 83,
+ "last_link_id": 149,
+ "nodes": [
+ {
+ "id": 75,
+ "type": "ImageScaleToTotalPixels",
+ "pos": [
+ -60,
+ 995
+ ],
+ "size": [
+ 270,
+ 82
+ ],
+ "flags": {},
+ "order": 11,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "image",
+ "type": "IMAGE",
+ "link": 140
+ }
+ ],
+ "outputs": [
+ {
+ "name": "IMAGE",
+ "type": "IMAGE",
+ "links": [
+ 141,
+ 143
+ ]
+ }
+ ],
+ "properties": {
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "Node name for S&R": "ImageScaleToTotalPixels"
+ },
+ "widgets_values": [
+ "area",
+ 1.68
+ ]
+ },
+ {
+ "id": 69,
+ "type": "QwenImageDiffsynthControlnet",
+ "pos": [
+ 810,
+ 70
+ ],
+ "size": [
+ 310,
+ 138
+ ],
+ "flags": {},
+ "order": 15,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "model",
+ "type": "MODEL",
+ "link": 130
+ },
+ {
+ "name": "model_patch",
+ "type": "MODEL_PATCH",
+ "link": 129
+ },
+ {
+ "name": "vae",
+ "type": "VAE",
+ "link": 132
+ },
+ {
+ "name": "image",
+ "type": "IMAGE",
+ "link": 135
+ },
+ {
+ "name": "mask",
+ "shape": 7,
+ "type": "MASK",
+ "link": null
+ }
+ ],
+ "outputs": [
+ {
+ "name": "MODEL",
+ "type": "MODEL",
+ "links": [
+ 131
+ ]
+ }
+ ],
+ "properties": {
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "Node name for S&R": "QwenImageDiffsynthControlnet"
+ },
+ "widgets_values": [
+ 1
+ ]
+ },
+ {
+ "id": 66,
+ "type": "ModelSamplingAuraFlow",
+ "pos": [
+ 810,
+ -40
+ ],
+ "size": [
+ 310,
+ 58
+ ],
+ "flags": {},
+ "order": 12,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "model",
+ "type": "MODEL",
+ "link": 149
+ }
+ ],
+ "outputs": [
+ {
+ "name": "MODEL",
+ "type": "MODEL",
+ "links": [
+ 130
+ ]
+ }
+ ],
+ "properties": {
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "Node name for S&R": "ModelSamplingAuraFlow"
+ },
+ "widgets_values": [
+ 3.1000000000000005
+ ]
+ },
+ {
+ "id": 79,
+ "type": "MarkdownNote",
+ "pos": [
+ 810,
+ 760
+ ],
+ "size": [
+ 310,
+ 140
+ ],
+ "flags": {},
+ "order": 0,
+ "mode": 0,
+ "inputs": [],
+ "outputs": [],
+ "title": "KSampler settings",
+ "properties": {},
+ "widgets_values": [
+ "You can test and find the best setting by yourself. The following table is for reference.\n\n| model | steps | cfg |\n|---------------------|---------------|---------------|\n| fp8_e4m3fn | 20 | 2.5 |\n| fp8_e4m3fn + 4 Steps lightning LoRA | 4 | 1.0 |\n"
+ ],
+ "color": "#432",
+ "bgcolor": "#653"
+ },
+ {
+ "id": 8,
+ "type": "VAEDecode",
+ "pos": [
+ 810,
+ 950
+ ],
+ "size": [
+ 310,
+ 46
+ ],
+ "flags": {},
+ "order": 18,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "samples",
+ "type": "LATENT",
+ "link": 128
+ },
+ {
+ "name": "vae",
+ "type": "VAE",
+ "link": 76
+ }
+ ],
+ "outputs": [
+ {
+ "name": "IMAGE",
+ "type": "IMAGE",
+ "slot_index": 0,
+ "links": [
+ 110
+ ]
+ }
+ ],
+ "properties": {
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "Node name for S&R": "VAEDecode"
+ },
+ "widgets_values": []
+ },
+ {
+ "id": 70,
+ "type": "ModelPatchLoader",
+ "pos": [
+ -120,
+ 130
+ ],
+ "size": [
+ 380,
+ 60
+ ],
+ "flags": {},
+ "order": 1,
+ "mode": 0,
+ "inputs": [],
+ "outputs": [
+ {
+ "name": "MODEL_PATCH",
+ "type": "MODEL_PATCH",
+ "links": [
+ 129
+ ]
+ }
+ ],
+ "properties": {
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "Node name for S&R": "ModelPatchLoader",
+ "models": [
+ {
+ "name": "qwen_image_canny_diffsynth_controlnet.safetensors",
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image-DiffSynth-ControlNets/resolve/main/split_files/model_patches/qwen_image_canny_diffsynth_controlnet.safetensors",
+ "directory": "model_patches"
+ }
+ ]
+ },
+ "widgets_values": [
+ "qwen_image_canny_diffsynth_controlnet.safetensors"
+ ]
+ },
+ {
+ "id": 39,
+ "type": "VAELoader",
+ "pos": [
+ -120,
+ 400
+ ],
+ "size": [
+ 380,
+ 58
+ ],
+ "flags": {},
+ "order": 2,
+ "mode": 0,
+ "inputs": [],
+ "outputs": [
+ {
+ "name": "VAE",
+ "type": "VAE",
+ "slot_index": 0,
+ "links": [
+ 76,
+ 132,
+ 144
+ ]
+ }
+ ],
+ "properties": {
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "Node name for S&R": "VAELoader",
+ "models": [
+ {
+ "name": "qwen_image_vae.safetensors",
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/vae/qwen_image_vae.safetensors",
+ "directory": "vae"
+ }
+ ]
+ },
+ "widgets_values": [
+ "qwen_image_vae.safetensors"
+ ]
+ },
+ {
+ "id": 37,
+ "type": "UNETLoader",
+ "pos": [
+ -120,
+ 0
+ ],
+ "size": [
+ 380,
+ 82
+ ],
+ "flags": {},
+ "order": 3,
+ "mode": 0,
+ "inputs": [],
+ "outputs": [
+ {
+ "name": "MODEL",
+ "type": "MODEL",
+ "slot_index": 0,
+ "links": [
+ 145
+ ]
+ }
+ ],
+ "properties": {
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "Node name for S&R": "UNETLoader",
+ "models": [
+ {
+ "name": "qwen_image_fp8_e4m3fn.safetensors",
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_fp8_e4m3fn.safetensors",
+ "directory": "diffusion_models"
+ }
+ ]
+ },
+ "widgets_values": [
+ "qwen_image_fp8_e4m3fn.safetensors",
+ "default"
+ ]
+ },
+ {
+ "id": 76,
+ "type": "VAEEncode",
+ "pos": [
+ 640,
+ 630
+ ],
+ "size": [
+ 140,
+ 46
+ ],
+ "flags": {
+ "collapsed": true
+ },
+ "order": 14,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "pixels",
+ "type": "IMAGE",
+ "link": 143
+ },
+ {
+ "name": "vae",
+ "type": "VAE",
+ "link": 144
+ }
+ ],
+ "outputs": [
+ {
+ "name": "LATENT",
+ "type": "LATENT",
+ "links": [
+ 142
+ ]
+ }
+ ],
+ "properties": {
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "Node name for S&R": "VAEEncode"
+ },
+ "widgets_values": []
+ },
+ {
+ "id": 68,
+ "type": "Note",
+ "pos": [
+ 810,
+ -180
+ ],
+ "size": [
+ 310,
+ 90
+ ],
+ "flags": {},
+ "order": 4,
+ "mode": 0,
+ "inputs": [],
+ "outputs": [],
+ "properties": {},
+ "widgets_values": [
+ "Increase the shift if you get too many blury/dark/bad images. Decrease if you want to try increasing detail."
+ ],
+ "color": "#432",
+ "bgcolor": "#653"
+ },
+ {
+ "id": 38,
+ "type": "CLIPLoader",
+ "pos": [
+ -120,
+ 240
+ ],
+ "size": [
+ 380,
+ 106
+ ],
+ "flags": {},
+ "order": 5,
+ "mode": 0,
+ "inputs": [],
+ "outputs": [
+ {
+ "name": "CLIP",
+ "type": "CLIP",
+ "slot_index": 0,
+ "links": [
+ 74,
+ 75
+ ]
+ }
+ ],
+ "properties": {
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "Node name for S&R": "CLIPLoader",
+ "models": [
+ {
+ "name": "qwen_2.5_vl_7b_fp8_scaled.safetensors",
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b_fp8_scaled.safetensors",
+ "directory": "text_encoders"
+ }
+ ]
+ },
+ "widgets_values": [
+ "qwen_2.5_vl_7b_fp8_scaled.safetensors",
+ "qwen_image",
+ "default"
+ ]
+ },
+ {
+ "id": 71,
+ "type": "LoadImage",
+ "pos": [
+ -70,
+ 635
+ ],
+ "size": [
+ 274.080078125,
+ 314.00006103515625
+ ],
+ "flags": {},
+ "order": 6,
+ "mode": 0,
+ "inputs": [],
+ "outputs": [
+ {
+ "name": "IMAGE",
+ "type": "IMAGE",
+ "links": [
+ 140
+ ]
+ },
+ {
+ "name": "MASK",
+ "type": "MASK",
+ "links": null
+ }
+ ],
+ "properties": {
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "Node name for S&R": "LoadImage"
+ },
+ "widgets_values": [
+ "ComfyUI_00944_.png",
+ "image"
+ ]
+ },
+ {
+ "id": 73,
+ "type": "PreviewImage",
+ "pos": [
+ 340,
+ 770
+ ],
+ "size": [
+ 380,
+ 320
+ ],
+ "flags": {},
+ "order": 16,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "images",
+ "type": "IMAGE",
+ "link": 136
+ }
+ ],
+ "outputs": [],
+ "properties": {
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "Node name for S&R": "PreviewImage"
+ },
+ "widgets_values": []
+ },
+ {
+ "id": 72,
+ "type": "Canny",
+ "pos": [
+ 340,
+ 630
+ ],
+ "size": [
+ 240,
+ 90
+ ],
+ "flags": {},
+ "order": 13,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "image",
+ "type": "IMAGE",
+ "link": 141
+ }
+ ],
+ "outputs": [
+ {
+ "name": "IMAGE",
+ "type": "IMAGE",
+ "links": [
+ 135,
+ 136
+ ]
+ }
+ ],
+ "properties": {
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "Node name for S&R": "Canny"
+ },
+ "widgets_values": [
+ 0.1,
+ 0.2
+ ],
+ "color": "#322",
+ "bgcolor": "#533"
+ },
+ {
+ "id": 60,
+ "type": "SaveImage",
+ "pos": [
+ 1150,
+ -40
+ ],
+ "size": [
+ 970,
+ 1030
+ ],
+ "flags": {},
+ "order": 19,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "images",
+ "type": "IMAGE",
+ "link": 110
+ }
+ ],
+ "outputs": [],
+ "properties": {
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51"
+ },
+ "widgets_values": [
+ "ComfyUI"
+ ]
+ },
+ {
+ "id": 80,
+ "type": "LoraLoaderModelOnly",
+ "pos": [
+ 320,
+ -10
+ ],
+ "size": [
+ 430,
+ 82
+ ],
+ "flags": {},
+ "order": 8,
+ "mode": 4,
+ "inputs": [
+ {
+ "name": "model",
+ "type": "MODEL",
+ "link": 145
+ }
+ ],
+ "outputs": [
+ {
+ "name": "MODEL",
+ "type": "MODEL",
+ "links": [
+ 149
+ ]
+ }
+ ],
+ "properties": {
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "Node name for S&R": "LoraLoaderModelOnly",
+ "models": [
+ {
+ "name": "Qwen-Image-Lightning-4steps-V1.0.safetensors",
+ "url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-4steps-V1.0.safetensors",
+ "directory": "loras"
+ }
+ ]
+ },
+ "widgets_values": [
+ "Qwen-Image-Lightning-4steps-V1.0.safetensors",
+ 1
+ ]
+ },
+ {
+ "id": 6,
+ "type": "CLIPTextEncode",
+ "pos": [
+ 300,
+ 170
+ ],
+ "size": [
+ 460,
+ 164.31304931640625
+ ],
+ "flags": {},
+ "order": 9,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "clip",
+ "type": "CLIP",
+ "link": 74
+ }
+ ],
+ "outputs": [
+ {
+ "name": "CONDITIONING",
+ "type": "CONDITIONING",
+ "slot_index": 0,
+ "links": [
+ 46
+ ]
+ }
+ ],
+ "title": "CLIP Text Encode (Positive Prompt)",
+ "properties": {
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "Node name for S&R": "CLIPTextEncode"
+ },
+ "widgets_values": [
+ "Conceptual makeup, a fairy girl with pink hair, pink shimmery scales dotted at the corners of her eyes, starry eyeshadow, makeup painting, thin eyebrows, three-dimensional features, a glossy finish, dazzling gold powder, silver fine glitter, a sense of layering and depth in the makeup, decorated with gold ornaments, pearlescent eyeshadow, dreamy makeup, soft pastels and subtle sparkles, a mysterious and fantasy-filled atmosphere, high-end makeup, dappled light on the face, soft lighting, optimal shadows, complex depth of field, dramatic lighting, clear focus, 8k, high quality, Fujifilm filter, surreal, a dreamy pastel wonderland, bright colors, a starry pink background, realistic.\n"
+ ],
+ "color": "#232",
+ "bgcolor": "#353"
+ },
+ {
+ "id": 7,
+ "type": "CLIPTextEncode",
+ "pos": [
+ 300,
+ 380
+ ],
+ "size": [
+ 460,
+ 140
+ ],
+ "flags": {},
+ "order": 10,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "clip",
+ "type": "CLIP",
+ "link": 75
+ }
+ ],
+ "outputs": [
+ {
+ "name": "CONDITIONING",
+ "type": "CONDITIONING",
+ "slot_index": 0,
+ "links": [
+ 52
+ ]
+ }
+ ],
+ "title": "CLIP Text Encode (Negative Prompt)",
+ "properties": {
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "Node name for S&R": "CLIPTextEncode"
+ },
+ "widgets_values": [
+ " "
+ ],
+ "color": "#223",
+ "bgcolor": "#335"
+ },
+ {
+ "id": 3,
+ "type": "KSampler",
+ "pos": [
+ 810,
+ 260
+ ],
+ "size": [
+ 310,
+ 430
+ ],
+ "flags": {},
+ "order": 17,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "model",
+ "type": "MODEL",
+ "link": 131
+ },
+ {
+ "name": "positive",
+ "type": "CONDITIONING",
+ "link": 46
+ },
+ {
+ "name": "negative",
+ "type": "CONDITIONING",
+ "link": 52
+ },
+ {
+ "name": "latent_image",
+ "type": "LATENT",
+ "link": 142
+ }
+ ],
+ "outputs": [
+ {
+ "name": "LATENT",
+ "type": "LATENT",
+ "slot_index": 0,
+ "links": [
+ 128
+ ]
+ }
+ ],
+ "properties": {
+ "cnr_id": "comfy-core",
+ "ver": "0.3.51",
+ "Node name for S&R": "KSampler"
+ },
+ "widgets_values": [
+ 91832422759220,
+ "randomize",
+ 20,
+ 2.5,
+ "euler",
+ "simple",
+ 1
+ ]
+ },
+ {
+ "id": 78,
+ "type": "MarkdownNote",
+ "pos": [
+ -690,
+ -50
+ ],
+ "size": [
+ 540,
+ 630
+ ],
+ "flags": {},
+ "order": 7,
+ "mode": 0,
+ "inputs": [],
+ "outputs": [],
+ "title": "Model links",
+ "properties": {
+ "widget_ue_connectable": {}
+ },
+ "widgets_values": [
+ "[Tutorial](https://docs.comfy.org/tutorials/image/qwen/qwen-image) | [教程](https://docs.comfy.org/zh-CN/tutorials/image/qwen/qwen-image)\n\n\n## Model links\n\nYou can find all the models on [Huggingface](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/tree/main) or [Modelscope](https://modelscope.cn/models/Comfy-Org/Qwen-Image_ComfyUI/files)\n\n**Diffusion model**\n\n- [qwen_image_fp8_e4m3fn.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_fp8_e4m3fn.safetensors)\n\n**Model patches**\n\n- [qwen_image_canny_diffsynth_controlnet.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image-DiffSynth-ControlNets/resolve/main/split_files/model_patches/qwen_image_canny_diffsynth_controlnet.safetensors)\n- [qwen_image_depth_diffsynth_controlnet.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image-DiffSynth-ControlNets/resolve/main/split_files/model_patches/qwen_image_depth_diffsynth_controlnet.safetensors)\n- [qwen_image_inpaint_diffsynth_controlnet.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image-DiffSynth-ControlNets/resolve/main/split_files/model_patches/qwen_image_inpaint_diffsynth_controlnet.safetensors)\n\n**LoRA**\n\n- [Qwen-Image-Lightning-4steps-V1.0.safetensors](https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-4steps-V1.0.safetensors)\n\n**Text encoder**\n\n- [qwen_2.5_vl_7b_fp8_scaled.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b_fp8_scaled.safetensors)\n\n**VAE**\n\n- [qwen_image_vae.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/vae/qwen_image_vae.safetensors)\n\n\nModel Storage Location\n\n```\n📂 ComfyUI/\n├── 📂 models/\n│ ├── 📂 diffusion_models/\n│ │ ├── qwen_image_fp8_e4m3fn.safetensors\n│ │ └── qwen_image_distill_full_fp8_e4m3fn.safetensors\n│ ├── 📂 loras/\n│ │ └── Qwen-Image-Lightning-8steps-V1.0.safetensors\n│ ├── 📂 model_patches/ # create one if you can't find it\n│ │ ├── qwen_image_depth_diffsynth_controlnet.safetensors\n│ │ ├── qwen_image_canny_diffsynth_controlnet.safetensors\n│ │ └── qwen_image_inpaint_diffsynth_controlnet.safetensors\n│ ├── 📂 vae/\n│ │ └── qwen_image_vae.safetensors\n│ └── 📂 text_encoders/\n│ └── qwen_2.5_vl_7b_fp8_scaled.safetensors\n```\n"
+ ],
+ "color": "#432",
+ "bgcolor": "#653"
+ }
+ ],
+ "links": [
+ [
+ 46,
+ 6,
+ 0,
+ 3,
+ 1,
+ "CONDITIONING"
+ ],
+ [
+ 52,
+ 7,
+ 0,
+ 3,
+ 2,
+ "CONDITIONING"
+ ],
+ [
+ 74,
+ 38,
+ 0,
+ 6,
+ 0,
+ "CLIP"
+ ],
+ [
+ 75,
+ 38,
+ 0,
+ 7,
+ 0,
+ "CLIP"
+ ],
+ [
+ 76,
+ 39,
+ 0,
+ 8,
+ 1,
+ "VAE"
+ ],
+ [
+ 110,
+ 8,
+ 0,
+ 60,
+ 0,
+ "IMAGE"
+ ],
+ [
+ 128,
+ 3,
+ 0,
+ 8,
+ 0,
+ "LATENT"
+ ],
+ [
+ 129,
+ 70,
+ 0,
+ 69,
+ 1,
+ "MODEL_PATCH"
+ ],
+ [
+ 130,
+ 66,
+ 0,
+ 69,
+ 0,
+ "MODEL"
+ ],
+ [
+ 131,
+ 69,
+ 0,
+ 3,
+ 0,
+ "MODEL"
+ ],
+ [
+ 132,
+ 39,
+ 0,
+ 69,
+ 2,
+ "VAE"
+ ],
+ [
+ 135,
+ 72,
+ 0,
+ 69,
+ 3,
+ "IMAGE"
+ ],
+ [
+ 136,
+ 72,
+ 0,
+ 73,
+ 0,
+ "IMAGE"
+ ],
+ [
+ 140,
+ 71,
+ 0,
+ 75,
+ 0,
+ "IMAGE"
+ ],
+ [
+ 141,
+ 75,
+ 0,
+ 72,
+ 0,
+ "IMAGE"
+ ],
+ [
+ 142,
+ 76,
+ 0,
+ 3,
+ 3,
+ "LATENT"
+ ],
+ [
+ 143,
+ 75,
+ 0,
+ 76,
+ 0,
+ "IMAGE"
+ ],
+ [
+ 144,
+ 39,
+ 0,
+ 76,
+ 1,
+ "VAE"
+ ],
+ [
+ 145,
+ 37,
+ 0,
+ 80,
+ 0,
+ "MODEL"
+ ],
+ [
+ 149,
+ 80,
+ 0,
+ 66,
+ 0,
+ "MODEL"
+ ]
+ ],
+ "groups": [
+ {
+ "id": 1,
+ "title": "Step 1 - Upload models",
+ "bounding": [
+ -130,
+ -80,
+ 400,
+ 610
+ ],
+ "color": "#3f789e",
+ "font_size": 24,
+ "flags": {}
+ },
+ {
+ "id": 2,
+ "title": "Step 2 - Upload reference image",
+ "bounding": [
+ -130,
+ 550,
+ 400,
+ 550
+ ],
+ "color": "#3f789e",
+ "font_size": 24,
+ "flags": {}
+ },
+ {
+ "id": 3,
+ "title": "Image processing",
+ "bounding": [
+ 290,
+ 550,
+ 490,
+ 550
+ ],
+ "color": "#3f789e",
+ "font_size": 24,
+ "flags": {}
+ },
+ {
+ "id": 4,
+ "title": "Step 3 - Prompt",
+ "bounding": [
+ 290,
+ 100,
+ 490,
+ 430
+ ],
+ "color": "#3f789e",
+ "font_size": 24,
+ "flags": {}
+ },
+ {
+ "id": 5,
+ "title": "4 steps lightning LoRA",
+ "bounding": [
+ 290,
+ -80,
+ 490,
+ 160
+ ],
+ "color": "#3f789e",
+ "font_size": 24,
+ "flags": {}
+ }
+ ],
+ "config": {},
+ "extra": {
+ "ds": {
+ "scale": 0.48559562289012265,
+ "offset": [
+ 1846.044139609573,
+ 391.24067543168553
+ ]
+ },
+ "frontendVersion": "1.26.6"
+ },
+ "version": 0.4
+ }
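
Each loader node in the template above carries a `properties.models` list (name, url, directory), which is the metadata ComfyUI's frontend uses to offer missing-model downloads. As a hedged sketch (the function name and local file path are illustrative, not part of this package), the same metadata can be flattened into a download checklist:

```python
# Sketch: collect the model files a workflow template declares. Relies only on
# the properties.models entries visible in the loader nodes above
# (ModelPatchLoader, VAELoader, UNETLoader, CLIPLoader, LoraLoaderModelOnly).
import json

def required_models(path):
    """Yield (directory, filename, url) for every declared model."""
    with open(path, encoding="utf-8") as fh:
        workflow = json.load(fh)
    for node in workflow.get("nodes", []):
        for model in node.get("properties", {}).get("models", []):
            yield model["directory"], model["name"], model["url"]

for directory, name, url in required_models("image_qwen_image_controlnet_patch.json"):
    print(f"models/{directory}/{name}")
    print(f"  {url}")
```

For this template that yields the canny DiffSynth model patch, the Qwen-Image VAE, the fp8 diffusion model, the Qwen 2.5 VL text encoder, and the 4-step Lightning LoRA, matching the "Model links" note embedded in the workflow.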