comfyui-workflow-templates 0.1.45__py3-none-any.whl → 0.1.46__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release: this version of comfyui-workflow-templates might be problematic.

Files changed (23)
  1. comfyui_workflow_templates/templates/hunyuan_video_text_to_video.json +689 -312
  2. comfyui_workflow_templates/templates/image_to_video.json +390 -161
  3. comfyui_workflow_templates/templates/image_to_video_wan.json +610 -271
  4. comfyui_workflow_templates/templates/index.json +12 -0
  5. comfyui_workflow_templates/templates/index.schema.json +19 -0
  6. comfyui_workflow_templates/templates/ltxv_image_to_video.json +676 -274
  7. comfyui_workflow_templates/templates/ltxv_text_to_video.json +547 -203
  8. comfyui_workflow_templates/templates/mochi_text_to_video_example.json +433 -170
  9. comfyui_workflow_templates/templates/text_to_video_wan.json +409 -169
  10. comfyui_workflow_templates/templates/txt_to_image_to_video.json +556 -223
  11. comfyui_workflow_templates/templates/video_wan2_2_14B_flf2v-1.webp +0 -0
  12. comfyui_workflow_templates/templates/video_wan2_2_14B_flf2v-2.webp +0 -0
  13. comfyui_workflow_templates/templates/video_wan2_2_14B_flf2v.json +1090 -0
  14. comfyui_workflow_templates/templates/video_wan2_2_14B_i2v.json +41 -17
  15. comfyui_workflow_templates/templates/video_wan2_2_14B_t2v.json +41 -17
  16. comfyui_workflow_templates/templates/wan2.1_flf2v_720_f16.json +567 -182
  17. comfyui_workflow_templates/templates/wan2.1_fun_control.json +168 -110
  18. comfyui_workflow_templates/templates/wan2.1_fun_inp.json +470 -418
  19. {comfyui_workflow_templates-0.1.45.dist-info → comfyui_workflow_templates-0.1.46.dist-info}/METADATA +1 -1
  20. {comfyui_workflow_templates-0.1.45.dist-info → comfyui_workflow_templates-0.1.46.dist-info}/RECORD +23 -20
  21. {comfyui_workflow_templates-0.1.45.dist-info → comfyui_workflow_templates-0.1.46.dist-info}/WHEEL +0 -0
  22. {comfyui_workflow_templates-0.1.45.dist-info → comfyui_workflow_templates-0.1.46.dist-info}/licenses/LICENSE +0 -0
  23. {comfyui_workflow_templates-0.1.45.dist-info → comfyui_workflow_templates-0.1.46.dist-info}/top_level.txt +0 -0
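
The templates ship as package data under `comfyui_workflow_templates/templates/`, so any of the JSON files listed above can be inspected straight from an installed wheel. A minimal sketch using only the standard library — the package may expose its own loader helpers, which are not assumed here, and the keys read from the workflow (`last_node_id`, `nodes`) are the ones visible in the diff below:

```python
import json
from importlib import resources

# The wheel bundles the workflows as package data in the templates/ directory.
templates_dir = resources.files("comfyui_workflow_templates") / "templates"

# index.json and index.schema.json are bundled alongside the workflow JSON files.
index = json.loads((templates_dir / "index.json").read_text(encoding="utf-8"))

# Load one of the files from the list above, e.g. the LTXV text-to-video workflow.
workflow = json.loads(
    (templates_dir / "ltxv_text_to_video.json").read_text(encoding="utf-8")
)
print(workflow["last_node_id"], len(workflow["nodes"]))
```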
@@ -1,12 +1,20 @@
  {
- "last_node_id": 77,
- "last_link_id": 182,
+ "id": "44b59e39-769c-43b9-a97c-651f369294c8",
+ "revision": 0,
+ "last_node_id": 79,
+ "last_link_id": 184,
  "nodes": [
  {
  "id": 38,
  "type": "CLIPLoader",
- "pos": [60, 190],
- "size": [315, 82],
+ "pos": [
+ 60,
+ 190
+ ],
+ "size": [
+ 315,
+ 106
+ ],
  "flags": {},
  "order": 0,
  "mode": 0,
@@ -15,8 +23,11 @@
  {
  "name": "CLIP",
  "type": "CLIP",
- "links": [74, 75],
- "slot_index": 0
+ "slot_index": 0,
+ "links": [
+ 74,
+ 75
+ ]
  }
  ],
  "properties": {
@@ -29,85 +40,216 @@
  }
  ]
  },
- "widgets_values": ["t5xxl_fp16.safetensors", "ltxv", "default"]
+ "widgets_values": [
+ "t5xxl_fp16.safetensors",
+ "ltxv",
+ "default"
+ ]
  },
  {
- "id": 8,
- "type": "VAEDecode",
- "pos": [1600, 30],
- "size": [210, 46],
+ "id": 44,
+ "type": "CheckpointLoaderSimple",
+ "pos": [
+ 60,
+ 40
+ ],
+ "size": [
+ 320,
+ 100
+ ],
  "flags": {},
- "order": 11,
+ "order": 1,
  "mode": 0,
- "inputs": [
+ "inputs": [],
+ "outputs": [
  {
- "name": "samples",
- "type": "LATENT",
- "link": 171
+ "name": "MODEL",
+ "type": "MODEL",
+ "slot_index": 0,
+ "links": [
+ 181
+ ]
  },
  {
- "name": "vae",
+ "name": "CLIP",
+ "type": "CLIP",
+ "links": null
+ },
+ {
+ "name": "VAE",
  "type": "VAE",
- "link": 87
+ "slot_index": 2,
+ "links": [
+ 87
+ ]
+ }
+ ],
+ "properties": {
+ "Node name for S&R": "CheckpointLoaderSimple",
+ "models": [
+ {
+ "name": "ltx-video-2b-v0.9.safetensors",
+ "url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltx-video-2b-v0.9.safetensors?download=true",
+ "directory": "checkpoints"
+ }
+ ]
+ },
+ "widgets_values": [
+ "ltx-video-2b-v0.9.safetensors"
+ ]
+ },
+ {
+ "id": 7,
+ "type": "CLIPTextEncode",
+ "pos": [
+ 420,
+ 380
+ ],
+ "size": [
+ 425.2799987792969,
+ 180.61000061035156
+ ],
+ "flags": {},
+ "order": 7,
+ "mode": 0,
+ "inputs": [
+ {
+ "name": "clip",
+ "type": "CLIP",
+ "link": 75
  }
  ],
  "outputs": [
  {
- "name": "IMAGE",
- "type": "IMAGE",
- "links": [106],
- "slot_index": 0
+ "name": "CONDITIONING",
+ "type": "CONDITIONING",
+ "slot_index": 0,
+ "links": [
+ 170
+ ]
  }
  ],
+ "title": "CLIP Text Encode (Negative Prompt)",
  "properties": {
- "Node name for S&R": "VAEDecode"
+ "Node name for S&R": "CLIPTextEncode"
  },
- "widgets_values": []
+ "widgets_values": [
+ "low quality, worst quality, deformed, distorted, disfigured, motion smear, motion artifacts, fused fingers, bad anatomy, weird hand, ugly"
+ ],
+ "color": "#223",
+ "bgcolor": "#335"
  },
  {
- "id": 69,
- "type": "LTXVConditioning",
- "pos": [920, 60],
- "size": [223.87, 78],
+ "id": 6,
+ "type": "CLIPTextEncode",
+ "pos": [
+ 420,
+ 180
+ ],
+ "size": [
+ 422.8500061035156,
+ 164.30999755859375
+ ],
  "flags": {},
- "order": 9,
+ "order": 6,
  "mode": 0,
  "inputs": [
  {
- "name": "positive",
- "type": "CONDITIONING",
- "link": 169
- },
- {
- "name": "negative",
- "type": "CONDITIONING",
- "link": 170
+ "name": "clip",
+ "type": "CLIP",
+ "link": 74
  }
  ],
  "outputs": [
  {
- "name": "positive",
+ "name": "CONDITIONING",
  "type": "CONDITIONING",
- "links": [166],
- "slot_index": 0
- },
+ "slot_index": 0,
+ "links": [
+ 169
+ ]
+ }
+ ],
+ "title": "CLIP Text Encode (Positive Prompt)",
+ "properties": {
+ "Node name for S&R": "CLIPTextEncode"
+ },
+ "widgets_values": [
+ "A woman with long brown hair and light skin smiles at another woman with long blonde hair. The woman with brown hair wears a black jacket and has a small, barely noticeable mole on her right cheek. The camera angle is a close-up, focused on the woman with brown hair's face. The lighting is warm and natural, likely from the setting sun, casting a soft glow on the scene. The scene appears to be real-life footage."
+ ],
+ "color": "#232",
+ "bgcolor": "#353"
+ },
+ {
+ "id": 76,
+ "type": "Note",
+ "pos": [
+ 420,
+ 40
+ ],
+ "size": [
+ 420,
+ 90
+ ],
+ "flags": {},
+ "order": 2,
+ "mode": 0,
+ "inputs": [],
+ "outputs": [],
+ "properties": {},
+ "widgets_values": [
+ "This model needs long descriptive prompts, if the prompt is too short the quality will suffer greatly."
+ ],
+ "color": "#432",
+ "bgcolor": "#653"
+ },
+ {
+ "id": 70,
+ "type": "EmptyLTXVLatentVideo",
+ "pos": [
+ 60,
+ 390
+ ],
+ "size": [
+ 315,
+ 130
+ ],
+ "flags": {},
+ "order": 3,
+ "mode": 0,
+ "inputs": [],
+ "outputs": [
  {
- "name": "negative",
- "type": "CONDITIONING",
- "links": [167],
- "slot_index": 1
+ "name": "LATENT",
+ "type": "LATENT",
+ "slot_index": 0,
+ "links": [
+ 168,
+ 175
+ ]
  }
  ],
  "properties": {
- "Node name for S&R": "LTXVConditioning"
+ "Node name for S&R": "EmptyLTXVLatentVideo"
  },
- "widgets_values": [25]
+ "widgets_values": [
+ 768,
+ 512,
+ 97,
+ 1
+ ]
  },
  {
  "id": 72,
  "type": "SamplerCustom",
- "pos": [1201, 32],
- "size": [355.2, 230],
+ "pos": [
+ 890,
+ 340
+ ],
+ "size": [
+ 330,
+ 230
+ ],
  "flags": {},
  "order": 10,
  "mode": 0,
@@ -147,8 +289,10 @@
  {
  "name": "output",
  "type": "LATENT",
- "links": [171],
- "slot_index": 0
+ "slot_index": 0,
+ "links": [
+ 171
+ ]
  },
  {
  "name": "denoised_output",
@@ -159,83 +303,63 @@
  "properties": {
  "Node name for S&R": "SamplerCustom"
  },
- "widgets_values": [true, 497797676867141, "randomize", 3]
+ "widgets_values": [
+ true,
+ 497797676867141,
+ "randomize",
+ 3
+ ]
  },
  {
- "id": 44,
- "type": "CheckpointLoaderSimple",
- "pos": [520, 30],
- "size": [315, 98],
- "flags": {},
- "order": 1,
- "mode": 0,
- "inputs": [],
- "outputs": [
- {
- "name": "MODEL",
- "type": "MODEL",
- "links": [181],
- "slot_index": 0
- },
- {
- "name": "CLIP",
- "type": "CLIP",
- "links": null
- },
- {
- "name": "VAE",
- "type": "VAE",
- "links": [87],
- "slot_index": 2
- }
+ "id": 73,
+ "type": "KSamplerSelect",
+ "pos": [
+ 890,
+ 240
+ ],
+ "size": [
+ 315,
+ 58
  ],
- "properties": {
- "Node name for S&R": "CheckpointLoaderSimple",
- "models": [
- {
- "name": "ltx-video-2b-v0.9.safetensors",
- "url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltx-video-2b-v0.9.safetensors?download=true",
- "directory": "checkpoints"
- }
- ]
- },
- "widgets_values": ["ltx-video-2b-v0.9.safetensors"]
- },
- {
- "id": 70,
- "type": "EmptyLTXVLatentVideo",
- "pos": [860, 240],
- "size": [315, 130],
  "flags": {},
- "order": 2,
+ "order": 4,
  "mode": 0,
  "inputs": [],
  "outputs": [
  {
- "name": "LATENT",
- "type": "LATENT",
- "links": [168, 175],
- "slot_index": 0
+ "name": "SAMPLER",
+ "type": "SAMPLER",
+ "links": [
+ 172
+ ]
  }
  ],
  "properties": {
- "Node name for S&R": "EmptyLTXVLatentVideo"
+ "Node name for S&R": "KSamplerSelect"
  },
- "widgets_values": [768, 512, 97, 1]
+ "widgets_values": [
+ "euler"
+ ]
  },
  {
  "id": 71,
  "type": "LTXVScheduler",
- "pos": [856, 531],
- "size": [315, 154],
+ "pos": [
+ 890,
+ 40
+ ],
+ "size": [
+ 315,
+ 154
+ ],
  "flags": {},
  "order": 8,
  "mode": 0,
  "inputs": [
  {
  "name": "latent",
- "type": "LATENT",
  "shape": 7,
+ "type": "LATENT",
  "link": 168
  }
  ],
@@ -243,143 +367,200 @@
  {
  "name": "SIGMAS",
  "type": "SIGMAS",
- "links": [182],
- "slot_index": 0
+ "slot_index": 0,
+ "links": [
+ 182
+ ]
  }
  ],
  "properties": {
  "Node name for S&R": "LTXVScheduler"
  },
- "widgets_values": [30, 2.05, 0.95, true, 0.1]
+ "widgets_values": [
+ 30,
+ 2.05,
+ 0.95,
+ true,
+ 0.1
+ ]
  },
  {
- "id": 6,
- "type": "CLIPTextEncode",
- "pos": [420, 190],
- "size": [422.85, 164.31],
+ "id": 69,
+ "type": "LTXVConditioning",
+ "pos": [
+ 630,
+ 620
+ ],
+ "size": [
+ 223.8699951171875,
+ 78
+ ],
  "flags": {},
- "order": 6,
+ "order": 9,
  "mode": 0,
  "inputs": [
  {
- "name": "clip",
- "type": "CLIP",
- "link": 74
+ "name": "positive",
+ "type": "CONDITIONING",
+ "link": 169
+ },
+ {
+ "name": "negative",
+ "type": "CONDITIONING",
+ "link": 170
  }
  ],
  "outputs": [
  {
- "name": "CONDITIONING",
+ "name": "positive",
+ "type": "CONDITIONING",
+ "slot_index": 0,
+ "links": [
+ 166
+ ]
+ },
+ {
+ "name": "negative",
  "type": "CONDITIONING",
- "links": [169],
- "slot_index": 0
+ "slot_index": 1,
+ "links": [
+ 167
+ ]
  }
  ],
- "title": "CLIP Text Encode (Positive Prompt)",
  "properties": {
- "Node name for S&R": "CLIPTextEncode"
+ "Node name for S&R": "LTXVConditioning"
  },
  "widgets_values": [
- "A woman with long brown hair and light skin smiles at another woman with long blonde hair. The woman with brown hair wears a black jacket and has a small, barely noticeable mole on her right cheek. The camera angle is a close-up, focused on the woman with brown hair's face. The lighting is warm and natural, likely from the setting sun, casting a soft glow on the scene. The scene appears to be real-life footage."
- ],
- "color": "#232",
- "bgcolor": "#353"
+ 25
+ ]
  },
  {
- "id": 7,
- "type": "CLIPTextEncode",
- "pos": [420, 390],
- "size": [425.28, 180.61],
+ "id": 8,
+ "type": "VAEDecode",
+ "pos": [
+ 1250,
+ 0
+ ],
+ "size": [
+ 210,
+ 46
+ ],
  "flags": {},
- "order": 7,
+ "order": 11,
  "mode": 0,
  "inputs": [
  {
- "name": "clip",
- "type": "CLIP",
- "link": 75
+ "name": "samples",
+ "type": "LATENT",
+ "link": 171
+ },
+ {
+ "name": "vae",
+ "type": "VAE",
+ "link": 87
  }
  ],
  "outputs": [
  {
- "name": "CONDITIONING",
- "type": "CONDITIONING",
- "links": [170],
- "slot_index": 0
+ "name": "IMAGE",
+ "type": "IMAGE",
+ "slot_index": 0,
+ "links": [
+ 183
+ ]
  }
  ],
- "title": "CLIP Text Encode (Negative Prompt)",
  "properties": {
- "Node name for S&R": "CLIPTextEncode"
+ "Node name for S&R": "VAEDecode"
  },
- "widgets_values": [
- "low quality, worst quality, deformed, distorted, disfigured, motion smear, motion artifacts, fused fingers, bad anatomy, weird hand, ugly"
- ],
- "color": "#322",
- "bgcolor": "#533"
+ "widgets_values": []
  },
  {
- "id": 73,
- "type": "KSamplerSelect",
- "pos": [860, 420],
- "size": [315, 58],
+ "id": 78,
+ "type": "CreateVideo",
+ "pos": [
+ 1250,
+ 90
+ ],
+ "size": [
+ 220,
+ 80
+ ],
  "flags": {},
- "order": 3,
+ "order": 12,
  "mode": 0,
- "inputs": [],
+ "inputs": [
+ {
+ "name": "images",
+ "type": "IMAGE",
+ "link": 183
+ },
+ {
+ "name": "audio",
+ "shape": 7,
+ "type": "AUDIO",
+ "link": null
+ }
+ ],
  "outputs": [
  {
- "name": "SAMPLER",
- "type": "SAMPLER",
- "links": [172]
+ "name": "VIDEO",
+ "type": "VIDEO",
+ "links": [
+ 184
+ ]
  }
  ],
  "properties": {
- "Node name for S&R": "KSamplerSelect"
+ "Node name for S&R": "CreateVideo"
  },
- "widgets_values": ["euler"]
- },
- {
- "id": 76,
- "type": "Note",
- "pos": [40, 350],
- "size": [360, 200],
- "flags": {},
- "order": 4,
- "mode": 0,
- "inputs": [],
- "outputs": [],
- "properties": {},
  "widgets_values": [
- "This model needs long descriptive prompts, if the prompt is too short the quality will suffer greatly."
- ],
- "color": "#432",
- "bgcolor": "#653"
+ 24
+ ]
  },
  {
- "id": 41,
- "type": "SaveAnimatedWEBP",
- "pos": [1830, 30],
- "size": [680, 610],
+ "id": 79,
+ "type": "SaveVideo",
+ "pos": [
+ 1250,
+ 220
+ ],
+ "size": [
+ 450,
+ 370
+ ],
  "flags": {},
- "order": 12,
+ "order": 13,
  "mode": 0,
  "inputs": [
  {
- "name": "images",
- "type": "IMAGE",
- "link": 106
+ "name": "video",
+ "type": "VIDEO",
+ "link": 184
  }
  ],
  "outputs": [],
- "properties": {},
- "widgets_values": ["ComfyUI", 24, false, 90, "default"]
+ "properties": {
+ "Node name for S&R": "SaveVideo"
+ },
+ "widgets_values": [
+ "video/ComfyUI",
+ "auto",
+ "auto"
+ ]
  },
  {
  "id": 77,
  "type": "MarkdownNote",
- "pos": [45, 600],
- "size": [225, 60],
+ "pos": [
+ -190,
+ 0
+ ],
+ "size": [
+ 225,
+ 88
+ ],
  "flags": {},
  "order": 5,
  "mode": 0,
@@ -387,35 +568,198 @@
  "outputs": [],
  "properties": {},
  "widgets_values": [
- "\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/ltxv/)"
+ "🛈 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/ltxv/)"
  ],
  "color": "#432",
  "bgcolor": "#653"
  }
  ],
  "links": [
- [74, 38, 0, 6, 0, "CLIP"],
- [75, 38, 0, 7, 0, "CLIP"],
- [87, 44, 2, 8, 1, "VAE"],
- [106, 8, 0, 41, 0, "IMAGE"],
- [166, 69, 0, 72, 1, "CONDITIONING"],
- [167, 69, 1, 72, 2, "CONDITIONING"],
- [168, 70, 0, 71, 0, "LATENT"],
- [169, 6, 0, 69, 0, "CONDITIONING"],
- [170, 7, 0, 69, 1, "CONDITIONING"],
- [171, 72, 0, 8, 0, "LATENT"],
- [172, 73, 0, 72, 3, "SAMPLER"],
- [175, 70, 0, 72, 5, "LATENT"],
- [181, 44, 0, 72, 0, "MODEL"],
- [182, 71, 0, 72, 4, "SIGMAS"]
+ [
+ 74,
+ 38,
+ 0,
+ 6,
+ 0,
+ "CLIP"
+ ],
+ [
+ 75,
+ 38,
+ 0,
+ 7,
+ 0,
+ "CLIP"
+ ],
+ [
+ 87,
+ 44,
+ 2,
+ 8,
+ 1,
+ "VAE"
+ ],
+ [
+ 166,
+ 69,
+ 0,
+ 72,
+ 1,
+ "CONDITIONING"
+ ],
+ [
+ 167,
+ 69,
+ 1,
+ 72,
+ 2,
+ "CONDITIONING"
+ ],
+ [
+ 168,
+ 70,
+ 0,
+ 71,
+ 0,
+ "LATENT"
+ ],
+ [
+ 169,
+ 6,
+ 0,
+ 69,
+ 0,
+ "CONDITIONING"
+ ],
+ [
+ 170,
+ 7,
+ 0,
+ 69,
+ 1,
+ "CONDITIONING"
+ ],
+ [
+ 171,
+ 72,
+ 0,
+ 8,
+ 0,
+ "LATENT"
+ ],
+ [
+ 172,
+ 73,
+ 0,
+ 72,
+ 3,
+ "SAMPLER"
+ ],
+ [
+ 175,
+ 70,
+ 0,
+ 72,
+ 5,
+ "LATENT"
+ ],
+ [
+ 181,
+ 44,
+ 0,
+ 72,
+ 0,
+ "MODEL"
+ ],
+ [
+ 182,
+ 71,
+ 0,
+ 72,
+ 4,
+ "SIGMAS"
+ ],
+ [
+ 183,
+ 8,
+ 0,
+ 78,
+ 0,
+ "IMAGE"
+ ],
+ [
+ 184,
+ 78,
+ 0,
+ 79,
+ 0,
+ "VIDEO"
+ ]
+ ],
+ "groups": [
+ {
+ "id": 1,
+ "title": "Step1 - Load models",
+ "bounding": [
+ 50,
+ -30,
+ 340,
+ 339.6000061035156
+ ],
+ "color": "#3f789e",
+ "font_size": 24,
+ "flags": {}
+ },
+ {
+ "id": 2,
+ "title": "Step3 - Prompt",
+ "bounding": [
+ 410,
+ -30,
+ 445.280029296875,
+ 604.2099609375
+ ],
+ "color": "#3f789e",
+ "font_size": 24,
+ "flags": {}
+ },
+ {
+ "id": 3,
+ "title": "Step2 - Video size",
+ "bounding": [
+ 50,
+ 320,
+ 340,
+ 220
+ ],
+ "color": "#3f789e",
+ "font_size": 24,
+ "flags": {}
+ },
+ {
+ "id": 4,
+ "title": "Custom sampler",
+ "bounding": [
+ 880,
+ -30,
+ 350,
+ 613.5999755859375
+ ],
+ "color": "#3f789e",
+ "font_size": 24,
+ "flags": {}
+ }
  ],
- "groups": [],
  "config": {},
  "extra": {
  "ds": {
- "scale": 0.65,
- "offset": [1490.32, 926.49]
- }
+ "scale": 1.2666661150000043,
+ "offset": [
+ 282.3491595411123,
+ 142.22408251179098
+ ]
+ },
+ "frontendVersion": "1.25.3"
  },
  "version": 0.4
- }
+ }
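
The serialized graph in this diff keeps a flat `nodes` array plus 6-element `links` entries; reading each entry as `[link_id, src_node, src_slot, dst_node, dst_slot, type]` is consistent with the new lines above (for example, `[183, 8, 0, 78, 0, "IMAGE"]` wires the VAEDecode node 8 into the new CreateVideo node 78), but treat that ordering as an assumption of this sketch rather than a documented contract. A small consistency check under that assumption, against a hypothetical local copy of a template:

```python
import json


def check_workflow(path: str) -> None:
    """Cross-check node ids and link endpoints in a serialized workflow."""
    with open(path, encoding="utf-8") as f:
        wf = json.load(f)

    node_ids = {node["id"] for node in wf["nodes"]}
    assert max(node_ids) <= wf["last_node_id"]

    # Assumed link layout: [link_id, src_node, src_slot, dst_node, dst_slot, type]
    for link_id, src, _src_slot, dst, _dst_slot, _ltype in wf["links"]:
        assert link_id <= wf["last_link_id"]
        assert src in node_ids and dst in node_ids


# Hypothetical path to one of the bundled templates listed above.
check_workflow("comfyui_workflow_templates/templates/ltxv_text_to_video.json")
```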