comfyui-workflow-templates-media-image 0.3.0__py3-none-any.whl → 0.3.50__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (106)
  1. comfyui_workflow_templates_media_image/templates/01_get_started_text_to_image.json +796 -0
  2. comfyui_workflow_templates_media_image/templates/02_qwen_Image_edit_subgraphed.json +104 -150
  3. comfyui_workflow_templates_media_image/templates/3d_hunyuan3d_image_to_model.json +3 -3
  4. comfyui_workflow_templates_media_image/templates/default-1.webp +0 -0
  5. comfyui_workflow_templates_media_image/templates/{image2image.json → default.json} +213 -299
  6. comfyui_workflow_templates_media_image/templates/flux1_dev_uso_reference_image_gen.json +3446 -2631
  7. comfyui_workflow_templates_media_image/templates/flux1_krea_dev.json +715 -453
  8. comfyui_workflow_templates_media_image/templates/flux_canny_model_example.json +2 -2
  9. comfyui_workflow_templates_media_image/templates/flux_depth_lora_example.json +3 -3
  10. comfyui_workflow_templates_media_image/templates/flux_dev_checkpoint_example.json +828 -283
  11. comfyui_workflow_templates_media_image/templates/flux_dev_full_text_to_image.json +2 -2
  12. comfyui_workflow_templates_media_image/templates/flux_fill_inpaint_example.json +862 -645
  13. comfyui_workflow_templates_media_image/templates/flux_fill_outpaint_example.json +2 -2
  14. comfyui_workflow_templates_media_image/templates/flux_kontext_dev_basic.json +1055 -896
  15. comfyui_workflow_templates_media_image/templates/flux_redux_model_example.json +6 -6
  16. comfyui_workflow_templates_media_image/templates/flux_schnell.json +190 -43
  17. comfyui_workflow_templates_media_image/templates/flux_schnell_full_text_to_image.json +2 -2
  18. comfyui_workflow_templates_media_image/templates/hidream_e1_1-1.webp +0 -0
  19. comfyui_workflow_templates_media_image/templates/hidream_e1_1-2.webp +0 -0
  20. comfyui_workflow_templates_media_image/templates/hidream_e1_1.json +1133 -0
  21. comfyui_workflow_templates_media_image/templates/image_chroma1_radiance_text_to_image.json +1320 -1035
  22. comfyui_workflow_templates_media_image/templates/image_chroma_text_to_image.json +96 -94
  23. comfyui_workflow_templates_media_image/templates/image_chrono_edit_14B-1.webp +0 -0
  24. comfyui_workflow_templates_media_image/templates/image_chrono_edit_14B-2.webp +0 -0
  25. comfyui_workflow_templates_media_image/templates/image_chrono_edit_14B.json +2564 -0
  26. comfyui_workflow_templates_media_image/templates/image_flux.1_fill_dev_OneReward.json +2142 -2235
  27. comfyui_workflow_templates_media_image/templates/image_flux2-1.webp +0 -0
  28. comfyui_workflow_templates_media_image/templates/image_flux2-2.webp +0 -0
  29. comfyui_workflow_templates_media_image/templates/image_flux2.json +2810 -0
  30. comfyui_workflow_templates_media_image/templates/image_flux2_fp8-1.webp +0 -0
  31. comfyui_workflow_templates_media_image/templates/image_flux2_fp8.json +3448 -0
  32. comfyui_workflow_templates_media_image/templates/image_flux2_text_to_image-1.webp +0 -0
  33. comfyui_workflow_templates_media_image/templates/image_flux2_text_to_image.json +1108 -0
  34. comfyui_workflow_templates_media_image/templates/image_lotus_depth_v1_1.json +937 -663
  35. comfyui_workflow_templates_media_image/templates/image_netayume_lumina_t2i.json +811 -163
  36. comfyui_workflow_templates_media_image/templates/image_newbieimage_exp0_1-t2i-1.webp +0 -0
  37. comfyui_workflow_templates_media_image/templates/image_newbieimage_exp0_1-t2i.json +1158 -0
  38. comfyui_workflow_templates_media_image/templates/image_omnigen2_image_edit.json +5 -5
  39. comfyui_workflow_templates_media_image/templates/image_omnigen2_t2i.json +958 -688
  40. comfyui_workflow_templates_media_image/templates/image_ovis_text_to_image-1.webp +0 -0
  41. comfyui_workflow_templates_media_image/templates/image_ovis_text_to_image.json +918 -0
  42. comfyui_workflow_templates_media_image/templates/image_qwen_Image_2512-1.webp +0 -0
  43. comfyui_workflow_templates_media_image/templates/image_qwen_Image_2512.json +2047 -0
  44. comfyui_workflow_templates_media_image/templates/image_qwen_image.json +839 -729
  45. comfyui_workflow_templates_media_image/templates/image_qwen_image_controlnet_patch.json +4 -4
  46. comfyui_workflow_templates_media_image/templates/image_qwen_image_edit.json +1068 -855
  47. comfyui_workflow_templates_media_image/templates/image_qwen_image_edit_2509.json +2107 -2707
  48. comfyui_workflow_templates_media_image/templates/image_qwen_image_edit_2509_relight-1.webp +0 -0
  49. comfyui_workflow_templates_media_image/templates/image_qwen_image_edit_2509_relight-2.webp +0 -0
  50. comfyui_workflow_templates_media_image/templates/{01_qwen_t2i_subgraphed.json → image_qwen_image_edit_2509_relight.json} +855 -590
  51. comfyui_workflow_templates_media_image/templates/image_qwen_image_edit_2511-1.webp +0 -0
  52. comfyui_workflow_templates_media_image/templates/image_qwen_image_edit_2511-2.webp +0 -0
  53. comfyui_workflow_templates_media_image/templates/image_qwen_image_edit_2511.json +2870 -0
  54. comfyui_workflow_templates_media_image/templates/image_qwen_image_instantx_controlnet.json +1666 -1422
  55. comfyui_workflow_templates_media_image/templates/image_qwen_image_instantx_inpainting_controlnet.json +625 -600
  56. comfyui_workflow_templates_media_image/templates/image_qwen_image_layered-1.webp +0 -0
  57. comfyui_workflow_templates_media_image/templates/image_qwen_image_layered.json +2849 -0
  58. comfyui_workflow_templates_media_image/templates/image_qwen_image_union_control_lora.json +1234 -861
  59. comfyui_workflow_templates_media_image/templates/image_z_image_turbo_fun_union_controlnet-1.webp +0 -0
  60. comfyui_workflow_templates_media_image/templates/image_z_image_turbo_fun_union_controlnet-2.webp +0 -0
  61. comfyui_workflow_templates_media_image/templates/image_z_image_turbo_fun_union_controlnet.json +1355 -0
  62. comfyui_workflow_templates_media_image/templates/sd3.5_large_canny_controlnet_example.json +2 -2
  63. comfyui_workflow_templates_media_image/templates/sdxl_refiner_prompt_example.json +451 -100
  64. comfyui_workflow_templates_media_image/templates/sdxl_revision_text_prompts.json +2 -2
  65. comfyui_workflow_templates_media_image/templates/sdxl_simple_example.json +11 -11
  66. comfyui_workflow_templates_media_image/templates/sdxlturbo_example.json +2 -2
  67. comfyui_workflow_templates_media_image/templates/templates-1_click_multiple_character_angles-v1.0-1.webp +0 -0
  68. comfyui_workflow_templates_media_image/templates/templates-1_click_multiple_character_angles-v1.0.json +10191 -0
  69. comfyui_workflow_templates_media_image/templates/templates-1_click_multiple_scene_angles-v1.0-1.webp +0 -0
  70. comfyui_workflow_templates_media_image/templates/templates-1_click_multiple_scene_angles-v1.0.json +10155 -0
  71. comfyui_workflow_templates_media_image/templates/templates-6-key-frames-1.webp +0 -0
  72. comfyui_workflow_templates_media_image/templates/templates-6-key-frames.json +8012 -0
  73. comfyui_workflow_templates_media_image/templates/templates-product_ad-v2.0-1.webp +0 -0
  74. comfyui_workflow_templates_media_image/templates/templates-product_ad-v2.0.json +652 -0
  75. comfyui_workflow_templates_media_image/templates/templates-qwen_image_edit-crop_and_stitch-fusion-1.webp +0 -0
  76. comfyui_workflow_templates_media_image/templates/templates-qwen_image_edit-crop_and_stitch-fusion-2.webp +0 -0
  77. comfyui_workflow_templates_media_image/templates/templates-qwen_image_edit-crop_and_stitch-fusion.json +1458 -0
  78. comfyui_workflow_templates_media_image/templates/templates-textured_logotype-v2.1-1.webp +0 -0
  79. comfyui_workflow_templates_media_image/templates/templates-textured_logotype-v2.1.json +738 -0
  80. {comfyui_workflow_templates_media_image-0.3.0.dist-info → comfyui_workflow_templates_media_image-0.3.50.dist-info}/METADATA +1 -1
  81. comfyui_workflow_templates_media_image-0.3.50.dist-info/RECORD +134 -0
  82. comfyui_workflow_templates_media_image/templates/controlnet_example-1.webp +0 -0
  83. comfyui_workflow_templates_media_image/templates/controlnet_example-2.webp +0 -0
  84. comfyui_workflow_templates_media_image/templates/controlnet_example.json +0 -848
  85. comfyui_workflow_templates_media_image/templates/depth_controlnet-1.webp +0 -0
  86. comfyui_workflow_templates_media_image/templates/depth_controlnet-2.webp +0 -0
  87. comfyui_workflow_templates_media_image/templates/depth_controlnet.json +0 -678
  88. comfyui_workflow_templates_media_image/templates/depth_t2i_adapter-1.webp +0 -0
  89. comfyui_workflow_templates_media_image/templates/depth_t2i_adapter-2.webp +0 -0
  90. comfyui_workflow_templates_media_image/templates/depth_t2i_adapter.json +0 -1708
  91. comfyui_workflow_templates_media_image/templates/image2image-1.webp +0 -0
  92. comfyui_workflow_templates_media_image/templates/image2image-2.webp +0 -0
  93. comfyui_workflow_templates_media_image/templates/inpaint_example-1.webp +0 -0
  94. comfyui_workflow_templates_media_image/templates/inpaint_example-2.webp +0 -0
  95. comfyui_workflow_templates_media_image/templates/inpaint_example.json +0 -650
  96. comfyui_workflow_templates_media_image/templates/inpaint_model_outpainting-1.webp +0 -0
  97. comfyui_workflow_templates_media_image/templates/inpaint_model_outpainting-2.webp +0 -0
  98. comfyui_workflow_templates_media_image/templates/inpaint_model_outpainting.json +0 -710
  99. comfyui_workflow_templates_media_image/templates/mixing_controlnets-1.webp +0 -0
  100. comfyui_workflow_templates_media_image/templates/mixing_controlnets-2.webp +0 -0
  101. comfyui_workflow_templates_media_image/templates/mixing_controlnets.json +0 -913
  102. comfyui_workflow_templates_media_image-0.3.0.dist-info/RECORD +0 -110
  103. /comfyui_workflow_templates_media_image/templates/{01_qwen_t2i_subgraphed-1.webp → 01_get_started_text_to_image-1.webp} +0 -0
  104. /comfyui_workflow_templates_media_image/templates/{image_qwen_image_union_control_lora-3.webp → image_qwen_image_union_control_lora-2.webp} +0 -0
  105. {comfyui_workflow_templates_media_image-0.3.0.dist-info → comfyui_workflow_templates_media_image-0.3.50.dist-info}/WHEEL +0 -0
  106. {comfyui_workflow_templates_media_image-0.3.0.dist-info → comfyui_workflow_templates_media_image-0.3.50.dist-info}/top_level.txt +0 -0
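The listing above is easiest to sanity-check against an installed copy of the wheel. As a minimal sketch (assuming comfyui-workflow-templates-media-image 0.3.50 is installed and keeps its workflow files under the `templates/` directory, as the paths above suggest), the bundled templates can be enumerated with `importlib.resources`:

```python
# Minimal sketch: enumerate the workflow templates bundled in the installed
# wheel. Assumes the 0.3.50 wheel is installed and that templates live under
# the "templates" directory shown in the paths above.
from importlib.resources import files

template_dir = files("comfyui_workflow_templates_media_image") / "templates"
workflows = sorted(p.name for p in template_dir.iterdir() if p.name.endswith(".json"))

print(f"{len(workflows)} workflow templates found")
for name in workflows:
    print(" -", name)
```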
@@ -1,105 +1,22 @@
1
1
  {
2
2
  "id": "00000000-0000-0000-0000-000000000000",
3
3
  "revision": 0,
4
- "last_node_id": 82,
5
- "last_link_id": 46,
4
+ "last_node_id": 84,
5
+ "last_link_id": 52,
6
6
  "nodes": [
7
- {
8
- "id": 7,
9
- "type": "CLIPTextEncode",
10
- "pos": [
11
- 420,
12
- 710
13
- ],
14
- "size": [
15
- 400,
16
- 150
17
- ],
18
- "flags": {},
19
- "order": 8,
20
- "mode": 0,
21
- "inputs": [
22
- {
23
- "name": "clip",
24
- "type": "CLIP",
25
- "link": 25
26
- }
27
- ],
28
- "outputs": [
29
- {
30
- "name": "CONDITIONING",
31
- "type": "CONDITIONING",
32
- "links": [
33
- 33
34
- ]
35
- }
36
- ],
37
- "title": "CLIP Text Encode (Negative Prompt)",
38
- "properties": {
39
- "cnr_id": "comfy-core",
40
- "ver": "0.3.51",
41
- "Node name for S&R": "CLIPTextEncode"
42
- },
43
- "widgets_values": [
44
- " "
45
- ],
46
- "color": "#223",
47
- "bgcolor": "#335"
48
- },
49
- {
50
- "id": 74,
51
- "type": "Canny",
52
- "pos": [
53
- 440,
54
- 1000
55
- ],
56
- "size": [
57
- 350,
58
- 82
59
- ],
60
- "flags": {},
61
- "order": 12,
62
- "mode": 0,
63
- "inputs": [
64
- {
65
- "name": "image",
66
- "type": "IMAGE",
67
- "link": 42
68
- }
69
- ],
70
- "outputs": [
71
- {
72
- "name": "IMAGE",
73
- "type": "IMAGE",
74
- "links": [
75
- 35,
76
- 38
77
- ]
78
- }
79
- ],
80
- "properties": {
81
- "cnr_id": "comfy-core",
82
- "ver": "0.3.51",
83
- "Node name for S&R": "Canny"
84
- },
85
- "widgets_values": [
86
- 0.4,
87
- 0.8
88
- ]
89
- },
90
7
  {
91
8
  "id": 75,
92
9
  "type": "PreviewImage",
93
10
  "pos": [
94
- 450,
95
- 1140
11
+ 440,
12
+ 350
96
13
  ],
97
14
  "size": [
98
15
  330,
99
16
  290
100
17
  ],
101
18
  "flags": {},
102
- "order": 15,
19
+ "order": 5,
103
20
  "mode": 0,
104
21
  "inputs": [
105
22
  {
@@ -110,327 +27,32 @@
110
27
  ],
111
28
  "outputs": [],
112
29
  "properties": {
30
+ "enableTabs": false,
31
+ "tabWidth": 65,
32
+ "tabXOffset": 10,
33
+ "hasSecondTab": false,
34
+ "secondTabText": "Send Back",
35
+ "secondTabOffset": 80,
36
+ "secondTabWidth": 65,
113
37
  "cnr_id": "comfy-core",
114
38
  "ver": "0.3.51",
115
39
  "Node name for S&R": "PreviewImage"
116
40
  },
117
41
  "widgets_values": []
118
42
  },
119
- {
120
- "id": 70,
121
- "type": "ReferenceLatent",
122
- "pos": [
123
- 860,
124
- 470
125
- ],
126
- "size": [
127
- 197.712890625,
128
- 46
129
- ],
130
- "flags": {},
131
- "order": 16,
132
- "mode": 0,
133
- "inputs": [
134
- {
135
- "name": "conditioning",
136
- "type": "CONDITIONING",
137
- "link": 31
138
- },
139
- {
140
- "name": "latent",
141
- "shape": 7,
142
- "type": "LATENT",
143
- "link": 32
144
- }
145
- ],
146
- "outputs": [
147
- {
148
- "name": "CONDITIONING",
149
- "type": "CONDITIONING",
150
- "links": [
151
- 21
152
- ]
153
- }
154
- ],
155
- "properties": {
156
- "cnr_id": "comfy-core",
157
- "ver": "0.3.51",
158
- "Node name for S&R": "ReferenceLatent"
159
- },
160
- "widgets_values": []
161
- },
162
- {
163
- "id": 71,
164
- "type": "ReferenceLatent",
165
- "pos": [
166
- 850,
167
- 720
168
- ],
169
- "size": [
170
- 197.712890625,
171
- 46
172
- ],
173
- "flags": {},
174
- "order": 17,
175
- "mode": 0,
176
- "inputs": [
177
- {
178
- "name": "conditioning",
179
- "type": "CONDITIONING",
180
- "link": 33
181
- },
182
- {
183
- "name": "latent",
184
- "shape": 7,
185
- "type": "LATENT",
186
- "link": 34
187
- }
188
- ],
189
- "outputs": [
190
- {
191
- "name": "CONDITIONING",
192
- "type": "CONDITIONING",
193
- "links": [
194
- 22
195
- ]
196
- }
197
- ],
198
- "properties": {
199
- "cnr_id": "comfy-core",
200
- "ver": "0.3.51",
201
- "Node name for S&R": "ReferenceLatent"
202
- },
203
- "widgets_values": []
204
- },
205
- {
206
- "id": 72,
207
- "type": "VAEEncode",
208
- "pos": [
209
- 900,
210
- 950
211
- ],
212
- "size": [
213
- 140,
214
- 46
215
- ],
216
- "flags": {},
217
- "order": 14,
218
- "mode": 0,
219
- "inputs": [
220
- {
221
- "name": "pixels",
222
- "type": "IMAGE",
223
- "link": 35
224
- },
225
- {
226
- "name": "vae",
227
- "type": "VAE",
228
- "link": 36
229
- }
230
- ],
231
- "outputs": [
232
- {
233
- "name": "LATENT",
234
- "type": "LATENT",
235
- "links": [
236
- 32,
237
- 34,
238
- 44
239
- ]
240
- }
241
- ],
242
- "properties": {
243
- "cnr_id": "comfy-core",
244
- "ver": "0.3.51",
245
- "Node name for S&R": "VAEEncode"
246
- },
247
- "widgets_values": []
248
- },
249
- {
250
- "id": 39,
251
- "type": "VAELoader",
252
- "pos": [
253
- 30,
254
- 650
255
- ],
256
- "size": [
257
- 330,
258
- 58
259
- ],
260
- "flags": {},
261
- "order": 0,
262
- "mode": 0,
263
- "inputs": [],
264
- "outputs": [
265
- {
266
- "name": "VAE",
267
- "type": "VAE",
268
- "links": [
269
- 27,
270
- 36
271
- ]
272
- }
273
- ],
274
- "properties": {
275
- "cnr_id": "comfy-core",
276
- "ver": "0.3.51",
277
- "Node name for S&R": "VAELoader",
278
- "models": [
279
- {
280
- "name": "qwen_image_vae.safetensors",
281
- "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/vae/qwen_image_vae.safetensors",
282
- "directory": "vae"
283
- }
284
- ]
285
- },
286
- "widgets_values": [
287
- "qwen_image_vae.safetensors"
288
- ]
289
- },
290
- {
291
- "id": 38,
292
- "type": "CLIPLoader",
293
- "pos": [
294
- 30,
295
- 490
296
- ],
297
- "size": [
298
- 330,
299
- 110
300
- ],
301
- "flags": {},
302
- "order": 1,
303
- "mode": 0,
304
- "inputs": [],
305
- "outputs": [
306
- {
307
- "name": "CLIP",
308
- "type": "CLIP",
309
- "links": [
310
- 24,
311
- 25
312
- ]
313
- }
314
- ],
315
- "properties": {
316
- "cnr_id": "comfy-core",
317
- "ver": "0.3.51",
318
- "Node name for S&R": "CLIPLoader",
319
- "models": [
320
- {
321
- "name": "qwen_2.5_vl_7b_fp8_scaled.safetensors",
322
- "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b_fp8_scaled.safetensors",
323
- "directory": "text_encoders"
324
- }
325
- ]
326
- },
327
- "widgets_values": [
328
- "qwen_2.5_vl_7b_fp8_scaled.safetensors",
329
- "qwen_image",
330
- "default"
331
- ]
332
- },
333
- {
334
- "id": 69,
335
- "type": "LoraLoaderModelOnly",
336
- "pos": [
337
- 30,
338
- 360
339
- ],
340
- "size": [
341
- 330,
342
- 82
343
- ],
344
- "flags": {},
345
- "order": 9,
346
- "mode": 0,
347
- "inputs": [
348
- {
349
- "name": "model",
350
- "type": "MODEL",
351
- "link": 30
352
- }
353
- ],
354
- "outputs": [
355
- {
356
- "name": "MODEL",
357
- "type": "MODEL",
358
- "links": [
359
- 45
360
- ]
361
- }
362
- ],
363
- "properties": {
364
- "cnr_id": "comfy-core",
365
- "ver": "0.3.51",
366
- "Node name for S&R": "LoraLoaderModelOnly",
367
- "models": [
368
- {
369
- "name": "qwen_image_union_diffsynth_lora.safetensors",
370
- "url": "https://huggingface.co/Comfy-Org/Qwen-Image-DiffSynth-ControlNets/resolve/main/split_files/loras/qwen_image_union_diffsynth_lora.safetensors",
371
- "directory": "loras"
372
- }
373
- ]
374
- },
375
- "widgets_values": [
376
- "qwen_image_union_diffsynth_lora.safetensors",
377
- 1
378
- ]
379
- },
380
- {
381
- "id": 37,
382
- "type": "UNETLoader",
383
- "pos": [
384
- 30,
385
- 220
386
- ],
387
- "size": [
388
- 330,
389
- 82
390
- ],
391
- "flags": {},
392
- "order": 2,
393
- "mode": 0,
394
- "inputs": [],
395
- "outputs": [
396
- {
397
- "name": "MODEL",
398
- "type": "MODEL",
399
- "links": [
400
- 30
401
- ]
402
- }
403
- ],
404
- "properties": {
405
- "cnr_id": "comfy-core",
406
- "ver": "0.3.51",
407
- "Node name for S&R": "UNETLoader",
408
- "models": [
409
- {
410
- "name": "qwen_image_fp8_e4m3fn.safetensors",
411
- "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_fp8_e4m3fn.safetensors",
412
- "directory": "diffusion_models"
413
- }
414
- ]
415
- },
416
- "widgets_values": [
417
- "qwen_image_fp8_e4m3fn.safetensors",
418
- "default"
419
- ]
420
- },
421
43
  {
422
44
  "id": 77,
423
45
  "type": "ImageScaleToTotalPixels",
424
46
  "pos": [
425
- 60,
426
- 1220
47
+ 50,
48
+ 580
427
49
  ],
428
50
  "size": [
429
51
  270,
430
- 82
52
+ 106
431
53
  ],
432
54
  "flags": {},
433
- "order": 10,
55
+ "order": 3,
434
56
  "mode": 0,
435
57
  "inputs": [
436
58
  {
@@ -444,221 +66,27 @@
444
66
  "name": "IMAGE",
445
67
  "type": "IMAGE",
446
68
  "links": [
447
- 42
448
- ]
449
- }
450
- ],
451
- "properties": {
452
- "cnr_id": "comfy-core",
453
- "ver": "0.3.51",
454
- "Node name for S&R": "ImageScaleToTotalPixels"
455
- },
456
- "widgets_values": [
457
- "lanczos",
458
- 1
459
- ]
460
- },
461
- {
462
- "id": 82,
463
- "type": "MarkdownNote",
464
- "pos": [
465
- 60,
466
- 1350
467
- ],
468
- "size": [
469
- 270,
470
- 120
471
- ],
472
- "flags": {},
473
- "order": 3,
474
- "mode": 0,
475
- "inputs": [],
476
- "outputs": [],
477
- "title": "About Scale Image to Total Pixels",
478
- "properties": {},
479
- "widgets_values": [
480
- "This node is to avoid poor output results caused by excessively large input image sizes. You can remove it or use **ctrl + B** to bypass it if you don't need it."
481
- ],
482
- "color": "#432",
483
- "bgcolor": "#653"
484
- },
485
- {
486
- "id": 60,
487
- "type": "SaveImage",
488
- "pos": [
489
- 1400,
490
- 280
491
- ],
492
- "size": [
493
- 1030,
494
- 1150
495
- ],
496
- "flags": {},
497
- "order": 20,
498
- "mode": 0,
499
- "inputs": [
500
- {
501
- "name": "images",
502
- "type": "IMAGE",
503
- "link": 28
504
- }
505
- ],
506
- "outputs": [],
507
- "properties": {
508
- "cnr_id": "comfy-core",
509
- "ver": "0.3.51",
510
- "Node name for S&R": "SaveImage"
511
- },
512
- "widgets_values": [
513
- "ComfyUI"
514
- ]
515
- },
516
- {
517
- "id": 81,
518
- "type": "MarkdownNote",
519
- "pos": [
520
- 1100,
521
- 780
522
- ],
523
- "size": [
524
- 260,
525
- 150
526
- ],
527
- "flags": {},
528
- "order": 4,
529
- "mode": 0,
530
- "inputs": [],
531
- "outputs": [],
532
- "title": "KSampler settings",
533
- "properties": {},
534
- "widgets_values": [
535
- "You can test and find the best setting by yourself. The following table is for reference.\n\n| model | steps | cfg |\n|---------------------|---------------|---------------|\n| fp8_e4m3fn | 20 | 2.5 |\n| fp8_e4m3fn + 4 steps LoRA | 4 | 1.0 |\n"
536
- ],
537
- "color": "#432",
538
- "bgcolor": "#653"
539
- },
540
- {
541
- "id": 66,
542
- "type": "ModelSamplingAuraFlow",
543
- "pos": [
544
- 1100,
545
- 170
546
- ],
547
- "size": [
548
- 260,
549
- 58
550
- ],
551
- "flags": {},
552
- "order": 13,
553
- "mode": 0,
554
- "inputs": [
555
- {
556
- "name": "model",
557
- "type": "MODEL",
558
- "link": 46
559
- }
560
- ],
561
- "outputs": [
562
- {
563
- "name": "MODEL",
564
- "type": "MODEL",
565
- "links": [
566
- 20
567
- ]
568
- }
569
- ],
570
- "properties": {
571
- "cnr_id": "comfy-core",
572
- "ver": "0.3.51",
573
- "Node name for S&R": "ModelSamplingAuraFlow"
574
- },
575
- "widgets_values": [
576
- 3.1
577
- ]
578
- },
579
- {
580
- "id": 6,
581
- "type": "CLIPTextEncode",
582
- "pos": [
583
- 420,
584
- 460
585
- ],
586
- "size": [
587
- 400,
588
- 200
589
- ],
590
- "flags": {},
591
- "order": 7,
592
- "mode": 0,
593
- "inputs": [
594
- {
595
- "name": "clip",
596
- "type": "CLIP",
597
- "link": 24
598
- }
599
- ],
600
- "outputs": [
601
- {
602
- "name": "CONDITIONING",
603
- "type": "CONDITIONING",
604
- "links": [
605
- 31
606
- ]
607
- }
608
- ],
609
- "title": "CLIP Text Encode (Positive Prompt)",
610
- "properties": {
611
- "cnr_id": "comfy-core",
612
- "ver": "0.3.51",
613
- "Node name for S&R": "CLIPTextEncode"
614
- },
615
- "widgets_values": [
616
- "Extreme close-up shot, realistic digital illustration, close eyes, peaceful,oil painting with thick application, girl with curly hair, large black flower, black nail polish, ring details, soft light and shadow, dark green backdrop, delicate hair texture, smooth skin rendering, fine artistic details, dreamy and elegant atmosphere, dark style, grotesque. White hair, huge black flower behind her (with yellow stamens, green stems and leaves), black turtleneck clothing, green leaves and black flowers around, artistic illustration style, sharp color contrast, mysterious atmosphere, delicate brushstrokes, thick oil painting, thickly applied oil painting, the whole picture is filled with layered flowers, huge, petals spreading, beautiful composition, unexpected angle, layered background. Macro, eyes looking down, thick application, brushstrokes, splatters, mottled, old, extremely romantic, light and shadow, strong contrast, maximalist style, full-frame composition."
617
- ],
618
- "color": "#232",
619
- "bgcolor": "#353"
620
- },
621
- {
622
- "id": 8,
623
- "type": "VAEDecode",
624
- "pos": [
625
- 1400,
626
- 170
627
- ],
628
- "size": [
629
- 140,
630
- 46
631
- ],
632
- "flags": {},
633
- "order": 19,
634
- "mode": 0,
635
- "inputs": [
636
- {
637
- "name": "samples",
638
- "type": "LATENT",
639
- "link": 26
640
- },
641
- {
642
- "name": "vae",
643
- "type": "VAE",
644
- "link": 27
645
- }
646
- ],
647
- "outputs": [
648
- {
649
- "name": "IMAGE",
650
- "type": "IMAGE",
651
- "links": [
652
- 28
69
+ 42
653
70
  ]
654
71
  }
655
72
  ],
656
73
  "properties": {
74
+ "enableTabs": false,
75
+ "tabWidth": 65,
76
+ "tabXOffset": 10,
77
+ "hasSecondTab": false,
78
+ "secondTabText": "Send Back",
79
+ "secondTabOffset": 80,
80
+ "secondTabWidth": 65,
657
81
  "cnr_id": "comfy-core",
658
82
  "ver": "0.3.51",
659
- "Node name for S&R": "VAEDecode"
83
+ "Node name for S&R": "ImageScaleToTotalPixels"
660
84
  },
661
- "widgets_values": []
85
+ "widgets_values": [
86
+ "lanczos",
87
+ 1,
88
+ 1
89
+ ]
662
90
  },
663
91
  {
664
92
  "id": 80,
@@ -672,7 +100,7 @@
672
100
  630
673
101
  ],
674
102
  "flags": {},
675
- "order": 5,
103
+ "order": 0,
676
104
  "mode": 0,
677
105
  "inputs": [],
678
106
  "outputs": [],
@@ -684,21 +112,21 @@
684
112
  "[Tutorial](https://docs.comfy.org/tutorials/image/qwen/qwen-image) \n\n\n## Model links\n\nYou can find all the models on [Huggingface](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/tree/main) or [Modelscope](https://modelscope.cn/models/Comfy-Org/Qwen-Image_ComfyUI/files)\n\n**Diffusion model**\n\n- [qwen_image_fp8_e4m3fn.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_fp8_e4m3fn.safetensors)\n\n**LoRA**\n\n- [Qwen-Image-Lightning-8steps-V1.0.safetensors](https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-8steps-V1.0.safetensors)\n- [qwen_image_union_diffsynth_lora.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image-DiffSynth-ControlNets/resolve/main/split_files/loras/qwen_image_union_diffsynth_lora.safetensors)\n\n**Text encoder**\n\n- [qwen_2.5_vl_7b_fp8_scaled.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b_fp8_scaled.safetensors)\n\n**VAE**\n\n- [qwen_image_vae.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/vae/qwen_image_vae.safetensors)\n\nModel Storage Location\n\n```\n📂 ComfyUI/\n├── 📂 models/\n│ ├── 📂 diffusion_models/\n│ │ ├── qwen_image_fp8_e4m3fn.safetensors\n│ │ └── qwen_image_distill_full_fp8_e4m3fn.safetensors\n│ ├── 📂 loras/\n│ │ ├── qwen_image_union_diffsynth_lora.safetensors\n│ │ └── Qwen-Image-Lightning-8steps-V1.0.safetensors\n│ ├── 📂 vae/\n│ │ └── qwen_image_vae.safetensors\n│ └── 📂 text_encoders/\n│ └── qwen_2.5_vl_7b_fp8_scaled.safetensors\n```\n"
685
113
  ],
686
114
  "color": "#432",
687
- "bgcolor": "#653"
115
+ "bgcolor": "#000"
688
116
  },
689
117
  {
690
118
  "id": 73,
691
119
  "type": "LoadImage",
692
120
  "pos": [
693
- 60,
694
- 860
121
+ 50,
122
+ 220
695
123
  ],
696
124
  "size": [
697
125
  274.080078125,
698
126
  314.00006103515625
699
127
  ],
700
128
  "flags": {},
701
- "order": 6,
129
+ "order": 1,
702
130
  "mode": 0,
703
131
  "inputs": [],
704
132
  "outputs": [
@@ -716,6 +144,13 @@
716
144
  }
717
145
  ],
718
146
  "properties": {
147
+ "enableTabs": false,
148
+ "tabWidth": 65,
149
+ "tabXOffset": 10,
150
+ "hasSecondTab": false,
151
+ "secondTabText": "Send Back",
152
+ "secondTabOffset": 80,
153
+ "secondTabWidth": 65,
719
154
  "cnr_id": "comfy-core",
720
155
  "ver": "0.3.51",
721
156
  "Node name for S&R": "LoadImage"
@@ -726,234 +161,178 @@
726
161
  ]
727
162
  },
728
163
  {
729
- "id": 79,
730
- "type": "LoraLoaderModelOnly",
164
+ "id": 74,
165
+ "type": "Canny",
731
166
  "pos": [
732
- 490,
167
+ 430,
733
168
  210
734
169
  ],
735
170
  "size": [
736
- 470,
171
+ 350,
737
172
  82
738
173
  ],
739
174
  "flags": {},
740
- "order": 11,
175
+ "order": 4,
741
176
  "mode": 0,
742
177
  "inputs": [
743
178
  {
744
- "name": "model",
745
- "type": "MODEL",
746
- "link": 45
179
+ "name": "image",
180
+ "type": "IMAGE",
181
+ "link": 42
747
182
  }
748
183
  ],
749
184
  "outputs": [
750
185
  {
751
- "name": "MODEL",
752
- "type": "MODEL",
186
+ "name": "IMAGE",
187
+ "type": "IMAGE",
753
188
  "links": [
754
- 46
189
+ 38,
190
+ 48
755
191
  ]
756
192
  }
757
193
  ],
758
194
  "properties": {
195
+ "enableTabs": false,
196
+ "tabWidth": 65,
197
+ "tabXOffset": 10,
198
+ "hasSecondTab": false,
199
+ "secondTabText": "Send Back",
200
+ "secondTabOffset": 80,
201
+ "secondTabWidth": 65,
759
202
  "cnr_id": "comfy-core",
760
203
  "ver": "0.3.51",
761
- "Node name for S&R": "LoraLoaderModelOnly",
762
- "models": [
763
- {
764
- "name": "Qwen-Image-Lightning-4steps-V1.0.safetensors",
765
- "url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-4steps-V1.0.safetensors",
766
- "directory": "loras"
767
- }
768
- ]
204
+ "Node name for S&R": "Canny"
769
205
  },
770
206
  "widgets_values": [
771
- "Qwen-Image-Lightning-4steps-V1.0.safetensors",
772
- 1
207
+ 0.4,
208
+ 0.8
773
209
  ]
774
210
  },
775
211
  {
776
- "id": 3,
777
- "type": "KSampler",
212
+ "id": 82,
213
+ "type": "MarkdownNote",
214
+ "pos": [
215
+ 50,
216
+ 730
217
+ ],
218
+ "size": [
219
+ 270,
220
+ 120
221
+ ],
222
+ "flags": {},
223
+ "order": 2,
224
+ "mode": 0,
225
+ "inputs": [],
226
+ "outputs": [],
227
+ "title": "About Scale Image to Total Pixels",
228
+ "properties": {},
229
+ "widgets_values": [
230
+ "This node is to avoid poor output results caused by excessively large input image sizes. You can remove it or use **ctrl + B** to bypass it if you don't need it."
231
+ ],
232
+ "color": "#432",
233
+ "bgcolor": "#000"
234
+ },
235
+ {
236
+ "id": 84,
237
+ "type": "7db92ebb-840a-4c81-927e-6929224c69b5",
778
238
  "pos": [
779
- 1100,
780
- 280
239
+ 830,
240
+ 210
781
241
  ],
782
242
  "size": [
783
- 260,
784
- 450
243
+ 400,
244
+ 460
785
245
  ],
786
246
  "flags": {},
787
- "order": 18,
247
+ "order": 6,
788
248
  "mode": 0,
789
249
  "inputs": [
790
250
  {
791
- "name": "model",
792
- "type": "MODEL",
793
- "link": 20
794
- },
795
- {
796
- "name": "positive",
797
- "type": "CONDITIONING",
798
- "link": 21
799
- },
800
- {
801
- "name": "negative",
802
- "type": "CONDITIONING",
803
- "link": 22
804
- },
805
- {
806
- "name": "latent_image",
807
- "type": "LATENT",
808
- "link": 44
251
+ "label": "canny_image",
252
+ "name": "image",
253
+ "type": "IMAGE",
254
+ "link": 48
809
255
  }
810
256
  ],
811
257
  "outputs": [
812
258
  {
813
- "name": "LATENT",
814
- "type": "LATENT",
259
+ "name": "IMAGE",
260
+ "type": "IMAGE",
815
261
  "links": [
816
- 26
262
+ 49
817
263
  ]
818
264
  }
819
265
  ],
820
266
  "properties": {
267
+ "proxyWidgets": [
268
+ [
269
+ "-1",
270
+ "text"
271
+ ],
272
+ [
273
+ "-1",
274
+ "seed"
275
+ ],
276
+ [
277
+ "3",
278
+ "control_after_generate"
279
+ ]
280
+ ],
281
+ "enableTabs": false,
282
+ "tabWidth": 65,
283
+ "tabXOffset": 10,
284
+ "hasSecondTab": false,
285
+ "secondTabText": "Send Back",
286
+ "secondTabOffset": 80,
287
+ "secondTabWidth": 65,
288
+ "cnr_id": "comfy-core",
289
+ "ver": "0.7.0"
290
+ },
291
+ "widgets_values": [
292
+ "Extreme close-up shot, realistic digital illustration, close eyes, peaceful,oil painting with thick application, girl with curly hair, large black flower, black nail polish, ring details, soft light and shadow, dark green backdrop, delicate hair texture, smooth skin rendering, fine artistic details, dreamy and elegant atmosphere, dark style, grotesque. White hair, huge black flower behind her (with yellow stamens, green stems and leaves), black turtleneck clothing, green leaves and black flowers around, artistic illustration style, sharp color contrast, mysterious atmosphere, delicate brushstrokes, thick oil painting, thickly applied oil painting, the whole picture is filled with layered flowers, huge, petals spreading, beautiful composition, unexpected angle, layered background. Macro, eyes looking down, thick application, brushstrokes, splatters, mottled, old, extremely romantic, light and shadow, strong contrast, maximalist style, full-frame composition.",
293
+ 70741926012422
294
+ ]
295
+ },
296
+ {
297
+ "id": 60,
298
+ "type": "SaveImage",
299
+ "pos": [
300
+ 1250,
301
+ 210
302
+ ],
303
+ "size": [
304
+ 1030,
305
+ 1150
306
+ ],
307
+ "flags": {},
308
+ "order": 7,
309
+ "mode": 0,
310
+ "inputs": [
311
+ {
312
+ "name": "images",
313
+ "type": "IMAGE",
314
+ "link": 49
315
+ }
316
+ ],
317
+ "outputs": [],
318
+ "properties": {
319
+ "enableTabs": false,
320
+ "tabWidth": 65,
321
+ "tabXOffset": 10,
322
+ "hasSecondTab": false,
323
+ "secondTabText": "Send Back",
324
+ "secondTabOffset": 80,
325
+ "secondTabWidth": 65,
821
326
  "cnr_id": "comfy-core",
822
327
  "ver": "0.3.51",
823
- "Node name for S&R": "KSampler"
328
+ "Node name for S&R": "SaveImage"
824
329
  },
825
330
  "widgets_values": [
826
- 70741926012422,
827
- "randomize",
828
- 4,
829
- 1,
830
- "euler",
831
- "simple",
832
- 1
331
+ "ComfyUI"
833
332
  ]
834
333
  }
835
334
  ],
836
335
  "links": [
837
- [
838
- 20,
839
- 66,
840
- 0,
841
- 3,
842
- 0,
843
- "MODEL"
844
- ],
845
- [
846
- 21,
847
- 70,
848
- 0,
849
- 3,
850
- 1,
851
- "CONDITIONING"
852
- ],
853
- [
854
- 22,
855
- 71,
856
- 0,
857
- 3,
858
- 2,
859
- "CONDITIONING"
860
- ],
861
- [
862
- 24,
863
- 38,
864
- 0,
865
- 6,
866
- 0,
867
- "CLIP"
868
- ],
869
- [
870
- 25,
871
- 38,
872
- 0,
873
- 7,
874
- 0,
875
- "CLIP"
876
- ],
877
- [
878
- 26,
879
- 3,
880
- 0,
881
- 8,
882
- 0,
883
- "LATENT"
884
- ],
885
- [
886
- 27,
887
- 39,
888
- 0,
889
- 8,
890
- 1,
891
- "VAE"
892
- ],
893
- [
894
- 28,
895
- 8,
896
- 0,
897
- 60,
898
- 0,
899
- "IMAGE"
900
- ],
901
- [
902
- 30,
903
- 37,
904
- 0,
905
- 69,
906
- 0,
907
- "MODEL"
908
- ],
909
- [
910
- 31,
911
- 6,
912
- 0,
913
- 70,
914
- 0,
915
- "CONDITIONING"
916
- ],
917
- [
918
- 32,
919
- 72,
920
- 0,
921
- 70,
922
- 1,
923
- "LATENT"
924
- ],
925
- [
926
- 33,
927
- 7,
928
- 0,
929
- 71,
930
- 0,
931
- "CONDITIONING"
932
- ],
933
- [
934
- 34,
935
- 72,
936
- 0,
937
- 71,
938
- 1,
939
- "LATENT"
940
- ],
941
- [
942
- 35,
943
- 74,
944
- 0,
945
- 72,
946
- 0,
947
- "IMAGE"
948
- ],
949
- [
950
- 36,
951
- 39,
952
- 0,
953
- 72,
954
- 1,
955
- "VAE"
956
- ],
957
336
  [
958
337
  38,
959
338
  74,
@@ -979,50 +358,29 @@
979
358
  "IMAGE"
980
359
  ],
981
360
  [
982
- 44,
983
- 72,
984
- 0,
985
- 3,
986
- 3,
987
- "LATENT"
988
- ],
989
- [
990
- 45,
991
- 69,
361
+ 48,
362
+ 74,
992
363
  0,
993
- 79,
364
+ 84,
994
365
  0,
995
- "MODEL"
366
+ "IMAGE"
996
367
  ],
997
368
  [
998
- 46,
999
- 79,
369
+ 49,
370
+ 84,
1000
371
  0,
1001
- 66,
372
+ 60,
1002
373
  0,
1003
- "MODEL"
374
+ "IMAGE"
1004
375
  ]
1005
376
  ],
1006
377
  "groups": [
1007
- {
1008
- "id": 1,
1009
- "title": "Step 1 - Load models",
1010
- "bounding": [
1011
- 10,
1012
- 130,
1013
- 370,
1014
- 620
1015
- ],
1016
- "color": "#3f789e",
1017
- "font_size": 24,
1018
- "flags": {}
1019
- },
1020
378
  {
1021
379
  "id": 2,
1022
- "title": "Step 2 - Upload reference image",
380
+ "title": "Reference image",
1023
381
  "bounding": [
1024
- 10,
1025
- 770,
382
+ 0,
383
+ 130,
1026
384
  370,
1027
385
  730
1028
386
  ],
@@ -1030,69 +388,1084 @@
1030
388
  "font_size": 24,
1031
389
  "flags": {}
1032
390
  },
1033
- {
1034
- "id": 5,
1035
- "title": "Conditioning",
1036
- "bounding": [
1037
- 400,
1038
- 330,
1039
- 680,
1040
- 570
1041
- ],
1042
- "color": "#3f789e",
1043
- "font_size": 24,
1044
- "flags": {}
1045
- },
1046
- {
1047
- "id": 3,
1048
- "title": "Step 3 - Prompt",
1049
- "bounding": [
1050
- 410,
1051
- 390,
1052
- 420,
1053
- 490
1054
- ],
1055
- "color": "#3f789e",
1056
- "font_size": 24,
1057
- "flags": {}
1058
- },
1059
391
  {
1060
392
  "id": 4,
1061
393
  "title": "Image Processing",
1062
- "bounding": [
1063
- 410,
1064
- 920,
1065
- 410,
1066
- 573.5999755859375
1067
- ],
1068
- "color": "#3f789e",
1069
- "font_size": 24,
1070
- "flags": {}
1071
- },
1072
- {
1073
- "id": 6,
1074
- "title": "4 steps lighting LoRA",
1075
394
  "bounding": [
1076
395
  400,
1077
396
  130,
1078
- 680,
1079
- 180
397
+ 410,
398
+ 573.5999755859375
1080
399
  ],
1081
400
  "color": "#3f789e",
1082
401
  "font_size": 24,
1083
402
  "flags": {}
1084
403
  }
1085
404
  ],
405
+ "definitions": {
406
+ "subgraphs": [
407
+ {
408
+ "id": "7db92ebb-840a-4c81-927e-6929224c69b5",
409
+ "version": 1,
410
+ "state": {
411
+ "lastGroupId": 6,
412
+ "lastNodeId": 83,
413
+ "lastLinkId": 51,
414
+ "lastRerouteId": 0
415
+ },
416
+ "revision": 0,
417
+ "config": {},
418
+ "name": "ControlNet (Qwen-Image DiffSynth Canny)",
419
+ "inputNode": {
420
+ "id": -10,
421
+ "bounding": [
422
+ -170,
423
+ 781.7999877929688,
424
+ 120,
425
+ 100
426
+ ]
427
+ },
428
+ "outputNode": {
429
+ "id": -20,
430
+ "bounding": [
431
+ 1600,
432
+ 781.7999877929688,
433
+ 120,
434
+ 60
435
+ ]
436
+ },
437
+ "inputs": [
438
+ {
439
+ "id": "adf06190-9b59-4a3b-a96b-026ae5643874",
440
+ "name": "image",
441
+ "type": "IMAGE",
442
+ "linkIds": [
443
+ 51
444
+ ],
445
+ "localized_name": "image",
446
+ "label": "canny_image",
447
+ "pos": [
448
+ -70,
449
+ 801.7999877929688
450
+ ]
451
+ },
452
+ {
453
+ "id": "ad4061dd-c4c0-4d77-b8c5-c86350011c7e",
454
+ "name": "text",
455
+ "type": "STRING",
456
+ "linkIds": [
457
+ 49
458
+ ],
459
+ "pos": [
460
+ -70,
461
+ 821.7999877929688
462
+ ]
463
+ },
464
+ {
465
+ "id": "d1f77a17-21b9-46d0-bfb0-9289c357f6ba",
466
+ "name": "seed",
467
+ "type": "INT",
468
+ "linkIds": [
469
+ 50
470
+ ],
471
+ "pos": [
472
+ -70,
473
+ 841.7999877929688
474
+ ]
475
+ }
476
+ ],
477
+ "outputs": [
478
+ {
479
+ "id": "af89884c-7127-4b50-ae43-17049230d419",
480
+ "name": "IMAGE",
481
+ "type": "IMAGE",
482
+ "linkIds": [
483
+ 28
484
+ ],
485
+ "localized_name": "IMAGE",
486
+ "pos": [
487
+ 1620,
488
+ 801.7999877929688
489
+ ]
490
+ }
491
+ ],
492
+ "widgets": [],
493
+ "nodes": [
494
+ {
495
+ "id": 7,
496
+ "type": "CLIPTextEncode",
497
+ "pos": [
498
+ 420,
499
+ 710
500
+ ],
501
+ "size": [
502
+ 400,
503
+ 150
504
+ ],
505
+ "flags": {},
506
+ "order": 4,
507
+ "mode": 0,
508
+ "inputs": [
509
+ {
510
+ "localized_name": "clip",
511
+ "name": "clip",
512
+ "type": "CLIP",
513
+ "link": 25
514
+ }
515
+ ],
516
+ "outputs": [
517
+ {
518
+ "localized_name": "CONDITIONING",
519
+ "name": "CONDITIONING",
520
+ "type": "CONDITIONING",
521
+ "links": [
522
+ 33
523
+ ]
524
+ }
525
+ ],
526
+ "title": "CLIP Text Encode (Negative Prompt)",
527
+ "properties": {
528
+ "enableTabs": false,
529
+ "tabWidth": 65,
530
+ "tabXOffset": 10,
531
+ "hasSecondTab": false,
532
+ "secondTabText": "Send Back",
533
+ "secondTabOffset": 80,
534
+ "secondTabWidth": 65,
535
+ "cnr_id": "comfy-core",
536
+ "ver": "0.3.51",
537
+ "Node name for S&R": "CLIPTextEncode"
538
+ },
539
+ "widgets_values": [
540
+ " "
541
+ ],
542
+ "color": "#223",
543
+ "bgcolor": "#335"
544
+ },
545
+ {
546
+ "id": 70,
547
+ "type": "ReferenceLatent",
548
+ "pos": [
549
+ 860,
550
+ 470
551
+ ],
552
+ "size": [
553
+ 204.134765625,
554
+ 46
555
+ ],
556
+ "flags": {},
557
+ "order": 11,
558
+ "mode": 0,
559
+ "inputs": [
560
+ {
561
+ "localized_name": "conditioning",
562
+ "name": "conditioning",
563
+ "type": "CONDITIONING",
564
+ "link": 31
565
+ },
566
+ {
567
+ "localized_name": "latent",
568
+ "name": "latent",
569
+ "shape": 7,
570
+ "type": "LATENT",
571
+ "link": 32
572
+ }
573
+ ],
574
+ "outputs": [
575
+ {
576
+ "localized_name": "CONDITIONING",
577
+ "name": "CONDITIONING",
578
+ "type": "CONDITIONING",
579
+ "links": [
580
+ 21
581
+ ]
582
+ }
583
+ ],
584
+ "properties": {
585
+ "enableTabs": false,
586
+ "tabWidth": 65,
587
+ "tabXOffset": 10,
588
+ "hasSecondTab": false,
589
+ "secondTabText": "Send Back",
590
+ "secondTabOffset": 80,
591
+ "secondTabWidth": 65,
592
+ "cnr_id": "comfy-core",
593
+ "ver": "0.3.51",
594
+ "Node name for S&R": "ReferenceLatent"
595
+ },
596
+ "widgets_values": []
597
+ },
598
+ {
599
+ "id": 71,
600
+ "type": "ReferenceLatent",
601
+ "pos": [
602
+ 850,
603
+ 720
604
+ ],
605
+ "size": [
606
+ 204.134765625,
607
+ 46
608
+ ],
609
+ "flags": {},
610
+ "order": 12,
611
+ "mode": 0,
612
+ "inputs": [
613
+ {
614
+ "localized_name": "conditioning",
615
+ "name": "conditioning",
616
+ "type": "CONDITIONING",
617
+ "link": 33
618
+ },
619
+ {
620
+ "localized_name": "latent",
621
+ "name": "latent",
622
+ "shape": 7,
623
+ "type": "LATENT",
624
+ "link": 34
625
+ }
626
+ ],
627
+ "outputs": [
628
+ {
629
+ "localized_name": "CONDITIONING",
630
+ "name": "CONDITIONING",
631
+ "type": "CONDITIONING",
632
+ "links": [
633
+ 22
634
+ ]
635
+ }
636
+ ],
637
+ "properties": {
638
+ "enableTabs": false,
639
+ "tabWidth": 65,
640
+ "tabXOffset": 10,
641
+ "hasSecondTab": false,
642
+ "secondTabText": "Send Back",
643
+ "secondTabOffset": 80,
644
+ "secondTabWidth": 65,
645
+ "cnr_id": "comfy-core",
646
+ "ver": "0.3.51",
647
+ "Node name for S&R": "ReferenceLatent"
648
+ },
649
+ "widgets_values": []
650
+ },
651
+ {
652
+ "id": 39,
653
+ "type": "VAELoader",
654
+ "pos": [
655
+ 30,
656
+ 650
657
+ ],
658
+ "size": [
659
+ 330,
660
+ 58
661
+ ],
662
+ "flags": {},
663
+ "order": 0,
664
+ "mode": 0,
665
+ "inputs": [],
666
+ "outputs": [
667
+ {
668
+ "localized_name": "VAE",
669
+ "name": "VAE",
670
+ "type": "VAE",
671
+ "links": [
672
+ 27,
673
+ 36
674
+ ]
675
+ }
676
+ ],
677
+ "properties": {
678
+ "enableTabs": false,
679
+ "tabWidth": 65,
680
+ "tabXOffset": 10,
681
+ "hasSecondTab": false,
682
+ "secondTabText": "Send Back",
683
+ "secondTabOffset": 80,
684
+ "secondTabWidth": 65,
685
+ "cnr_id": "comfy-core",
686
+ "ver": "0.3.51",
687
+ "Node name for S&R": "VAELoader",
688
+ "models": [
689
+ {
690
+ "name": "qwen_image_vae.safetensors",
691
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/vae/qwen_image_vae.safetensors",
692
+ "directory": "vae"
693
+ }
694
+ ]
695
+ },
696
+ "widgets_values": [
697
+ "qwen_image_vae.safetensors"
698
+ ]
699
+ },
700
+ {
701
+ "id": 38,
702
+ "type": "CLIPLoader",
703
+ "pos": [
704
+ 30,
705
+ 490
706
+ ],
707
+ "size": [
708
+ 330,
709
+ 110
710
+ ],
711
+ "flags": {},
712
+ "order": 1,
713
+ "mode": 0,
714
+ "inputs": [],
715
+ "outputs": [
716
+ {
717
+ "localized_name": "CLIP",
718
+ "name": "CLIP",
719
+ "type": "CLIP",
720
+ "links": [
721
+ 24,
722
+ 25
723
+ ]
724
+ }
725
+ ],
726
+ "properties": {
727
+ "enableTabs": false,
728
+ "tabWidth": 65,
729
+ "tabXOffset": 10,
730
+ "hasSecondTab": false,
731
+ "secondTabText": "Send Back",
732
+ "secondTabOffset": 80,
733
+ "secondTabWidth": 65,
734
+ "cnr_id": "comfy-core",
735
+ "ver": "0.3.51",
736
+ "Node name for S&R": "CLIPLoader",
737
+ "models": [
738
+ {
739
+ "name": "qwen_2.5_vl_7b_fp8_scaled.safetensors",
740
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b_fp8_scaled.safetensors",
741
+ "directory": "text_encoders"
742
+ }
743
+ ]
744
+ },
745
+ "widgets_values": [
746
+ "qwen_2.5_vl_7b_fp8_scaled.safetensors",
747
+ "qwen_image",
748
+ "default"
749
+ ]
750
+ },
751
+ {
752
+ "id": 69,
753
+ "type": "LoraLoaderModelOnly",
754
+ "pos": [
755
+ 30,
756
+ 360
757
+ ],
758
+ "size": [
759
+ 330,
760
+ 82
761
+ ],
762
+ "flags": {},
763
+ "order": 5,
764
+ "mode": 0,
765
+ "inputs": [
766
+ {
767
+ "localized_name": "model",
768
+ "name": "model",
769
+ "type": "MODEL",
770
+ "link": 30
771
+ }
772
+ ],
773
+ "outputs": [
774
+ {
775
+ "localized_name": "MODEL",
776
+ "name": "MODEL",
777
+ "type": "MODEL",
778
+ "links": [
779
+ 45
780
+ ]
781
+ }
782
+ ],
783
+ "properties": {
784
+ "enableTabs": false,
785
+ "tabWidth": 65,
786
+ "tabXOffset": 10,
787
+ "hasSecondTab": false,
788
+ "secondTabText": "Send Back",
789
+ "secondTabOffset": 80,
790
+ "secondTabWidth": 65,
791
+ "cnr_id": "comfy-core",
792
+ "ver": "0.3.51",
793
+ "Node name for S&R": "LoraLoaderModelOnly",
794
+ "models": [
795
+ {
796
+ "name": "qwen_image_union_diffsynth_lora.safetensors",
797
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image-DiffSynth-ControlNets/resolve/main/split_files/loras/qwen_image_union_diffsynth_lora.safetensors",
798
+ "directory": "loras"
799
+ }
800
+ ]
801
+ },
802
+ "widgets_values": [
803
+ "qwen_image_union_diffsynth_lora.safetensors",
804
+ 1
805
+ ]
806
+ },
807
+ {
808
+ "id": 37,
809
+ "type": "UNETLoader",
810
+ "pos": [
811
+ 30,
812
+ 220
813
+ ],
814
+ "size": [
815
+ 330,
816
+ 82
817
+ ],
818
+ "flags": {},
819
+ "order": 2,
820
+ "mode": 0,
821
+ "inputs": [],
822
+ "outputs": [
823
+ {
824
+ "localized_name": "MODEL",
825
+ "name": "MODEL",
826
+ "type": "MODEL",
827
+ "links": [
828
+ 30
829
+ ]
830
+ }
831
+ ],
832
+ "properties": {
833
+ "enableTabs": false,
834
+ "tabWidth": 65,
835
+ "tabXOffset": 10,
836
+ "hasSecondTab": false,
837
+ "secondTabText": "Send Back",
838
+ "secondTabOffset": 80,
839
+ "secondTabWidth": 65,
840
+ "cnr_id": "comfy-core",
841
+ "ver": "0.3.51",
842
+ "Node name for S&R": "UNETLoader",
843
+ "models": [
844
+ {
845
+ "name": "qwen_image_fp8_e4m3fn.safetensors",
846
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_fp8_e4m3fn.safetensors",
847
+ "directory": "diffusion_models"
848
+ }
849
+ ]
850
+ },
851
+ "widgets_values": [
852
+ "qwen_image_fp8_e4m3fn.safetensors",
853
+ "default"
854
+ ]
855
+ },
856
+ {
857
+ "id": 81,
858
+ "type": "MarkdownNote",
859
+ "pos": [
860
+ 1100,
861
+ 780
862
+ ],
863
+ "size": [
864
+ 260,
865
+ 150
866
+ ],
867
+ "flags": {},
868
+ "order": 3,
869
+ "mode": 0,
870
+ "inputs": [],
871
+ "outputs": [],
872
+ "title": "KSampler settings",
873
+ "properties": {},
874
+ "widgets_values": [
875
+ "You can test and find the best setting by yourself. The following table is for reference.\n\n| model | steps | cfg |\n|---------------------|---------------|---------------|\n| fp8_e4m3fn | 20 | 2.5 |\n| fp8_e4m3fn + 4 steps LoRA | 4 | 1.0 |\n"
876
+ ],
877
+ "color": "#432",
878
+ "bgcolor": "#000"
879
+ },
880
+ {
881
+ "id": 66,
882
+ "type": "ModelSamplingAuraFlow",
883
+ "pos": [
884
+ 1100,
885
+ 170
886
+ ],
887
+ "size": [
888
+ 260,
889
+ 58
890
+ ],
891
+ "flags": {},
892
+ "order": 7,
893
+ "mode": 0,
894
+ "inputs": [
895
+ {
896
+ "localized_name": "model",
897
+ "name": "model",
898
+ "type": "MODEL",
899
+ "link": 46
900
+ }
901
+ ],
902
+ "outputs": [
903
+ {
904
+ "localized_name": "MODEL",
905
+ "name": "MODEL",
906
+ "type": "MODEL",
907
+ "links": [
908
+ 20
909
+ ]
910
+ }
911
+ ],
912
+ "properties": {
913
+ "enableTabs": false,
914
+ "tabWidth": 65,
915
+ "tabXOffset": 10,
916
+ "hasSecondTab": false,
917
+ "secondTabText": "Send Back",
918
+ "secondTabOffset": 80,
919
+ "secondTabWidth": 65,
920
+ "cnr_id": "comfy-core",
921
+ "ver": "0.3.51",
922
+ "Node name for S&R": "ModelSamplingAuraFlow"
923
+ },
924
+ "widgets_values": [
925
+ 3.1
926
+ ]
927
+ },
928
+ {
929
+ "id": 8,
930
+ "type": "VAEDecode",
931
+ "pos": [
932
+ 1400,
933
+ 170
934
+ ],
935
+ "size": [
936
+ 140,
937
+ 46
938
+ ],
939
+ "flags": {},
940
+ "order": 10,
941
+ "mode": 0,
942
+ "inputs": [
943
+ {
944
+ "localized_name": "samples",
945
+ "name": "samples",
946
+ "type": "LATENT",
947
+ "link": 26
948
+ },
949
+ {
950
+ "localized_name": "vae",
951
+ "name": "vae",
952
+ "type": "VAE",
953
+ "link": 27
954
+ }
955
+ ],
956
+ "outputs": [
957
+ {
958
+ "localized_name": "IMAGE",
959
+ "name": "IMAGE",
960
+ "type": "IMAGE",
961
+ "links": [
962
+ 28
963
+ ]
964
+ }
965
+ ],
966
+ "properties": {
967
+ "enableTabs": false,
968
+ "tabWidth": 65,
969
+ "tabXOffset": 10,
970
+ "hasSecondTab": false,
971
+ "secondTabText": "Send Back",
972
+ "secondTabOffset": 80,
973
+ "secondTabWidth": 65,
974
+ "cnr_id": "comfy-core",
975
+ "ver": "0.3.51",
976
+ "Node name for S&R": "VAEDecode"
977
+ },
978
+ "widgets_values": []
979
+ },
980
+ {
981
+ "id": 6,
982
+ "type": "CLIPTextEncode",
983
+ "pos": [
984
+ 420,
985
+ 460
986
+ ],
987
+ "size": [
988
+ 400,
989
+ 200
990
+ ],
991
+ "flags": {},
992
+ "order": 9,
993
+ "mode": 0,
994
+ "inputs": [
995
+ {
996
+ "localized_name": "clip",
997
+ "name": "clip",
998
+ "type": "CLIP",
999
+ "link": 24
1000
+ },
1001
+ {
1002
+ "localized_name": "text",
1003
+ "name": "text",
1004
+ "type": "STRING",
1005
+ "widget": {
1006
+ "name": "text"
1007
+ },
1008
+ "link": 49
1009
+ }
1010
+ ],
1011
+ "outputs": [
1012
+ {
1013
+ "localized_name": "CONDITIONING",
1014
+ "name": "CONDITIONING",
1015
+ "type": "CONDITIONING",
1016
+ "links": [
1017
+ 31
1018
+ ]
1019
+ }
1020
+ ],
1021
+ "title": "CLIP Text Encode (Positive Prompt)",
1022
+ "properties": {
1023
+ "enableTabs": false,
1024
+ "tabWidth": 65,
1025
+ "tabXOffset": 10,
1026
+ "hasSecondTab": false,
1027
+ "secondTabText": "Send Back",
1028
+ "secondTabOffset": 80,
1029
+ "secondTabWidth": 65,
1030
+ "cnr_id": "comfy-core",
1031
+ "ver": "0.3.51",
1032
+ "Node name for S&R": "CLIPTextEncode"
1033
+ },
1034
+ "widgets_values": [
1035
+ "Extreme close-up shot, realistic digital illustration, close eyes, peaceful,oil painting with thick application, girl with curly hair, large black flower, black nail polish, ring details, soft light and shadow, dark green backdrop, delicate hair texture, smooth skin rendering, fine artistic details, dreamy and elegant atmosphere, dark style, grotesque. White hair, huge black flower behind her (with yellow stamens, green stems and leaves), black turtleneck clothing, green leaves and black flowers around, artistic illustration style, sharp color contrast, mysterious atmosphere, delicate brushstrokes, thick oil painting, thickly applied oil painting, the whole picture is filled with layered flowers, huge, petals spreading, beautiful composition, unexpected angle, layered background. Macro, eyes looking down, thick application, brushstrokes, splatters, mottled, old, extremely romantic, light and shadow, strong contrast, maximalist style, full-frame composition."
1036
+ ],
1037
+ "color": "#232",
1038
+ "bgcolor": "#353"
1039
+ },
1040
+ {
1041
+ "id": 3,
1042
+ "type": "KSampler",
1043
+ "pos": [
1044
+ 1100,
1045
+ 280
1046
+ ],
1047
+ "size": [
1048
+ 260,
1049
+ 450
1050
+ ],
1051
+ "flags": {},
1052
+ "order": 8,
1053
+ "mode": 0,
1054
+ "inputs": [
1055
+ {
1056
+ "localized_name": "model",
1057
+ "name": "model",
1058
+ "type": "MODEL",
1059
+ "link": 20
1060
+ },
1061
+ {
1062
+ "localized_name": "positive",
1063
+ "name": "positive",
1064
+ "type": "CONDITIONING",
1065
+ "link": 21
1066
+ },
1067
+ {
1068
+ "localized_name": "negative",
1069
+ "name": "negative",
1070
+ "type": "CONDITIONING",
1071
+ "link": 22
1072
+ },
1073
+ {
1074
+ "localized_name": "latent_image",
1075
+ "name": "latent_image",
1076
+ "type": "LATENT",
1077
+ "link": 44
1078
+ },
1079
+ {
1080
+ "localized_name": "seed",
1081
+ "name": "seed",
1082
+ "type": "INT",
1083
+ "widget": {
1084
+ "name": "seed"
1085
+ },
1086
+ "link": 50
1087
+ }
1088
+ ],
1089
+ "outputs": [
1090
+ {
1091
+ "localized_name": "LATENT",
1092
+ "name": "LATENT",
1093
+ "type": "LATENT",
1094
+ "links": [
1095
+ 26
1096
+ ]
1097
+ }
1098
+ ],
1099
+ "properties": {
1100
+ "enableTabs": false,
1101
+ "tabWidth": 65,
1102
+ "tabXOffset": 10,
1103
+ "hasSecondTab": false,
1104
+ "secondTabText": "Send Back",
1105
+ "secondTabOffset": 80,
1106
+ "secondTabWidth": 65,
1107
+ "cnr_id": "comfy-core",
1108
+ "ver": "0.3.51",
1109
+ "Node name for S&R": "KSampler"
1110
+ },
1111
+ "widgets_values": [
1112
+ 70741926012422,
1113
+ "randomize",
1114
+ 4,
1115
+ 1,
1116
+ "euler",
1117
+ "simple",
1118
+ 1
1119
+ ]
1120
+ },
1121
+ {
1122
+ "id": 79,
1123
+ "type": "LoraLoaderModelOnly",
1124
+ "pos": [
1125
+ 490,
1126
+ 210
1127
+ ],
1128
+ "size": [
1129
+ 470,
1130
+ 82
1131
+ ],
1132
+ "flags": {},
1133
+ "order": 6,
1134
+ "mode": 0,
1135
+ "inputs": [
1136
+ {
1137
+ "localized_name": "model",
1138
+ "name": "model",
1139
+ "type": "MODEL",
1140
+ "link": 45
1141
+ }
1142
+ ],
1143
+ "outputs": [
1144
+ {
1145
+ "localized_name": "MODEL",
1146
+ "name": "MODEL",
1147
+ "type": "MODEL",
1148
+ "links": [
1149
+ 46
1150
+ ]
1151
+ }
1152
+ ],
1153
+ "properties": {
1154
+ "enableTabs": false,
1155
+ "tabWidth": 65,
1156
+ "tabXOffset": 10,
1157
+ "hasSecondTab": false,
1158
+ "secondTabText": "Send Back",
1159
+ "secondTabOffset": 80,
1160
+ "secondTabWidth": 65,
1161
+ "cnr_id": "comfy-core",
1162
+ "ver": "0.3.51",
1163
+ "Node name for S&R": "LoraLoaderModelOnly",
1164
+ "models": [
1165
+ {
1166
+ "name": "Qwen-Image-Lightning-4steps-V1.0.safetensors",
1167
+ "url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-4steps-V1.0.safetensors",
1168
+ "directory": "loras"
1169
+ }
1170
+ ]
1171
+ },
1172
+ "widgets_values": [
1173
+ "Qwen-Image-Lightning-4steps-V1.0.safetensors",
1174
+ 1
1175
+ ]
1176
+ },
1177
+ {
1178
+ "id": 72,
1179
+ "type": "VAEEncode",
1180
+ "pos": [
1181
+ 420,
1182
+ 950
1183
+ ],
1184
+ "size": [
1185
+ 140,
1186
+ 46
1187
+ ],
1188
+ "flags": {},
1189
+ "order": 13,
1190
+ "mode": 0,
1191
+ "inputs": [
1192
+ {
1193
+ "localized_name": "pixels",
1194
+ "name": "pixels",
1195
+ "type": "IMAGE",
1196
+ "link": 51
1197
+ },
1198
+ {
1199
+ "localized_name": "vae",
1200
+ "name": "vae",
1201
+ "type": "VAE",
1202
+ "link": 36
1203
+ }
1204
+ ],
1205
+ "outputs": [
1206
+ {
1207
+ "localized_name": "LATENT",
1208
+ "name": "LATENT",
1209
+ "type": "LATENT",
1210
+ "links": [
1211
+ 32,
1212
+ 34,
1213
+ 44
1214
+ ]
1215
+ }
1216
+ ],
1217
+ "properties": {
1218
+ "enableTabs": false,
1219
+ "tabWidth": 65,
1220
+ "tabXOffset": 10,
1221
+ "hasSecondTab": false,
1222
+ "secondTabText": "Send Back",
1223
+ "secondTabOffset": 80,
1224
+ "secondTabWidth": 65,
1225
+ "cnr_id": "comfy-core",
1226
+ "ver": "0.3.51",
1227
+ "Node name for S&R": "VAEEncode"
1228
+ },
1229
+ "widgets_values": []
1230
+ }
1231
+ ],
1232
+ "groups": [
1233
+ {
1234
+ "id": 1,
1235
+ "title": "Models",
1236
+ "bounding": [
1237
+ 10,
1238
+ 130,
1239
+ 370,
1240
+ 620
1241
+ ],
1242
+ "color": "#3f789e",
1243
+ "font_size": 24,
1244
+ "flags": {}
1245
+ },
1246
+ {
1247
+ "id": 5,
1248
+ "title": "Conditioning",
1249
+ "bounding": [
1250
+ 400,
1251
+ 330,
1252
+ 680,
1253
+ 570
1254
+ ],
1255
+ "color": "#3f789e",
1256
+ "font_size": 24,
1257
+ "flags": {}
1258
+ },
1259
+ {
1260
+ "id": 3,
1261
+ "title": "Prompt",
1262
+ "bounding": [
1263
+ 410,
1264
+ 390,
1265
+ 420,
1266
+ 490
1267
+ ],
1268
+ "color": "#3f789e",
1269
+ "font_size": 24,
1270
+ "flags": {}
1271
+ },
1272
+ {
1273
+ "id": 6,
1274
+ "title": "4 steps lighting LoRA",
1275
+ "bounding": [
1276
+ 400,
1277
+ 130,
1278
+ 680,
1279
+ 180
1280
+ ],
1281
+ "color": "#3f789e",
1282
+ "font_size": 24,
1283
+ "flags": {}
1284
+ }
1285
+ ],
1286
+ "links": [
1287
+ {
1288
+ "id": 25,
1289
+ "origin_id": 38,
1290
+ "origin_slot": 0,
1291
+ "target_id": 7,
1292
+ "target_slot": 0,
1293
+ "type": "CLIP"
1294
+ },
1295
+ {
1296
+ "id": 31,
1297
+ "origin_id": 6,
1298
+ "origin_slot": 0,
1299
+ "target_id": 70,
1300
+ "target_slot": 0,
1301
+ "type": "CONDITIONING"
1302
+ },
1303
+ {
1304
+ "id": 32,
1305
+ "origin_id": 72,
1306
+ "origin_slot": 0,
1307
+ "target_id": 70,
1308
+ "target_slot": 1,
1309
+ "type": "LATENT"
1310
+ },
1311
+ {
1312
+ "id": 33,
1313
+ "origin_id": 7,
1314
+ "origin_slot": 0,
1315
+ "target_id": 71,
1316
+ "target_slot": 0,
1317
+ "type": "CONDITIONING"
1318
+ },
1319
+ {
1320
+ "id": 34,
1321
+ "origin_id": 72,
1322
+ "origin_slot": 0,
1323
+ "target_id": 71,
1324
+ "target_slot": 1,
1325
+ "type": "LATENT"
1326
+ },
1327
+ {
1328
+ "id": 36,
1329
+ "origin_id": 39,
1330
+ "origin_slot": 0,
1331
+ "target_id": 72,
1332
+ "target_slot": 1,
1333
+ "type": "VAE"
1334
+ },
1335
+ {
1336
+ "id": 30,
1337
+ "origin_id": 37,
1338
+ "origin_slot": 0,
1339
+ "target_id": 69,
1340
+ "target_slot": 0,
1341
+ "type": "MODEL"
1342
+ },
1343
+ {
1344
+ "id": 46,
1345
+ "origin_id": 79,
1346
+ "origin_slot": 0,
1347
+ "target_id": 66,
1348
+ "target_slot": 0,
1349
+ "type": "MODEL"
1350
+ },
1351
+ {
1352
+ "id": 24,
1353
+ "origin_id": 38,
1354
+ "origin_slot": 0,
1355
+ "target_id": 6,
1356
+ "target_slot": 0,
1357
+ "type": "CLIP"
1358
+ },
1359
+ {
1360
+ "id": 26,
1361
+ "origin_id": 3,
1362
+ "origin_slot": 0,
1363
+ "target_id": 8,
1364
+ "target_slot": 0,
1365
+ "type": "LATENT"
1366
+ },
1367
+ {
1368
+ "id": 27,
1369
+ "origin_id": 39,
1370
+ "origin_slot": 0,
1371
+ "target_id": 8,
1372
+ "target_slot": 1,
1373
+ "type": "VAE"
1374
+ },
1375
+ {
1376
+ "id": 45,
1377
+ "origin_id": 69,
1378
+ "origin_slot": 0,
1379
+ "target_id": 79,
1380
+ "target_slot": 0,
1381
+ "type": "MODEL"
1382
+ },
1383
+ {
1384
+ "id": 20,
1385
+ "origin_id": 66,
1386
+ "origin_slot": 0,
1387
+ "target_id": 3,
1388
+ "target_slot": 0,
1389
+ "type": "MODEL"
1390
+ },
1391
+ {
1392
+ "id": 21,
1393
+ "origin_id": 70,
1394
+ "origin_slot": 0,
1395
+ "target_id": 3,
1396
+ "target_slot": 1,
1397
+ "type": "CONDITIONING"
1398
+ },
1399
+ {
1400
+ "id": 22,
1401
+ "origin_id": 71,
1402
+ "origin_slot": 0,
1403
+ "target_id": 3,
1404
+ "target_slot": 2,
1405
+ "type": "CONDITIONING"
1406
+ },
1407
+ {
1408
+ "id": 44,
1409
+ "origin_id": 72,
1410
+ "origin_slot": 0,
1411
+ "target_id": 3,
1412
+ "target_slot": 3,
1413
+ "type": "LATENT"
1414
+ },
1415
+ {
1416
+ "id": 28,
1417
+ "origin_id": 8,
1418
+ "origin_slot": 0,
1419
+ "target_id": -20,
1420
+ "target_slot": 0,
1421
+ "type": "IMAGE"
1422
+ },
1423
+ {
1424
+ "id": 49,
1425
+ "origin_id": -10,
1426
+ "origin_slot": 1,
1427
+ "target_id": 6,
1428
+ "target_slot": 1,
1429
+ "type": "STRING"
1430
+ },
1431
+ {
1432
+ "id": 50,
1433
+ "origin_id": -10,
1434
+ "origin_slot": 2,
1435
+ "target_id": 3,
1436
+ "target_slot": 4,
1437
+ "type": "INT"
1438
+ },
1439
+ {
1440
+ "id": 51,
1441
+ "origin_id": -10,
1442
+ "origin_slot": 0,
1443
+ "target_id": 72,
1444
+ "target_slot": 0,
1445
+ "type": "IMAGE"
1446
+ }
1447
+ ],
1448
+ "extra": {
1449
+ "workflowRendererVersion": "LG"
1450
+ }
1451
+ }
1452
+ ]
1453
+ },
1086
1454
  "config": {},
1087
1455
  "extra": {
1088
1456
  "ds": {
1089
- "scale": 0.5786562229169053,
1457
+ "scale": 0.48452349866321315,
1090
1458
  "offset": [
1091
- 872.7752229086909,
1092
- -184.78246118792714
1459
+ 1003.1816493061101,
1460
+ 504.7105616403554
1093
1461
  ]
1094
1462
  },
1095
- "frontendVersion": "1.27.10"
1463
+ "frontendVersion": "1.35.9",
1464
+ "workflowRendererVersion": "LG",
1465
+ "VHS_latentpreview": false,
1466
+ "VHS_latentpreviewrate": 0,
1467
+ "VHS_MetadataImage": true,
1468
+ "VHS_KeepIntermediate": true
1096
1469
  },
1097
1470
  "version": 0.4
1098
1471
  }
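For readers who would rather inspect the reworked template than read the raw diff, the workflow JSON keeps the litegraph layout visible above: top-level `nodes`, `links`, and `groups` arrays, plus the new `definitions.subgraphs` section that now hosts the "ControlNet (Qwen-Image DiffSynth Canny)" subgraph. A hedged sketch for walking that structure (the file name is taken from the listing above and the key names from this diff; adjust both if your installed version differs):

```python
import json
from importlib.resources import files

# Hedged sketch: load one of the templates changed in this release and list
# its node types and subgraph definitions. File name and key names are taken
# from this diff, not from a documented API.
path = (files("comfyui_workflow_templates_media_image")
        / "templates" / "image_qwen_image_union_control_lora.json")
workflow = json.loads(path.read_text(encoding="utf-8"))

for node in workflow.get("nodes", []):
    print(f"node {node['id']}: {node['type']}")

for subgraph in workflow.get("definitions", {}).get("subgraphs", []):
    print(f"subgraph {subgraph['id']}: {subgraph['name']}")
```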