@genfeedai/workflow-ui 0.1.0 → 0.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/canvas.js +13 -13
- package/dist/canvas.mjs +7 -7
- package/dist/{chunk-HPQT36RR.js → chunk-3TMV3K34.js} +18 -27
- package/dist/{chunk-Z7PWFZG5.js → chunk-4MZ62VMF.js} +8 -1
- package/dist/{chunk-VOGL2WCE.mjs → chunk-7P2JWDC7.mjs} +9 -18
- package/dist/{chunk-FT64PCUP.mjs → chunk-AOTUCJMA.mjs} +6 -15
- package/dist/{chunk-LAJ34AH2.mjs → chunk-AUZR6REQ.mjs} +4 -7
- package/dist/{chunk-EC2ZIWOK.js → chunk-AXFOCPPP.js} +36 -45
- package/dist/{chunk-CETJJ73S.js → chunk-BMFRA6GK.js} +28 -37
- package/dist/{chunk-XV5Z5XYR.mjs → chunk-E3YBVMYZ.mjs} +403 -59
- package/dist/{chunk-H6LZKSLY.js → chunk-ECD5J2BA.js} +496 -152
- package/dist/{chunk-ADWNF7V3.js → chunk-EMGXUNBL.js} +3 -3
- package/dist/{chunk-22PDGHNQ.mjs → chunk-HCXI63ME.mjs} +2 -2
- package/dist/{chunk-UQQUWGHW.mjs → chunk-IASLG6IA.mjs} +1 -1
- package/dist/chunk-IHF35QZD.js +1095 -0
- package/dist/{chunk-E544XUBL.js → chunk-KDIWRSYV.js} +8 -11
- package/dist/chunk-RIGVIEYB.mjs +1093 -0
- package/dist/{chunk-SW7QNEZU.js → chunk-SEV2DWKF.js} +30 -30
- package/dist/{chunk-CSUBLSKZ.mjs → chunk-SQK4JDYY.mjs} +27 -36
- package/dist/{chunk-AC6TWLRT.mjs → chunk-ZJWP5KGZ.mjs} +8 -2
- package/dist/hooks.js +15 -15
- package/dist/hooks.mjs +5 -5
- package/dist/index.js +42 -42
- package/dist/index.mjs +9 -9
- package/dist/lib.js +1 -1
- package/dist/lib.mjs +1 -1
- package/dist/nodes.js +38 -38
- package/dist/nodes.mjs +5 -5
- package/dist/panels.js +7 -7
- package/dist/panels.mjs +4 -4
- package/dist/provider.js +1 -1
- package/dist/provider.mjs +1 -1
- package/dist/stores.js +8 -8
- package/dist/stores.mjs +3 -3
- package/dist/toolbar.js +10 -10
- package/dist/toolbar.mjs +4 -4
- package/dist/ui.js +1 -1
- package/dist/ui.mjs +1 -1
- package/dist/workflowStore-7SDJC4UR.mjs +3 -0
- package/dist/workflowStore-LNJQ5RZG.js +12 -0
- package/package.json +1 -1
- package/dist/chunk-BJ3R5R32.mjs +0 -2163
- package/dist/chunk-NSDLGLAQ.js +0 -2166
- package/dist/workflowStore-4EGKJLYK.mjs +0 -3
- package/dist/workflowStore-KM32FDL7.js +0 -12
package/dist/chunk-NSDLGLAQ.js
DELETED
|
@@ -1,2166 +0,0 @@
|
|
|
1
|
-
'use strict';
|
|
2
|
-
|
|
3
|
-
var chunkZ7PWFZG5_js = require('./chunk-Z7PWFZG5.js');
|
|
4
|
-
var zundo = require('zundo');
|
|
5
|
-
var zustand = require('zustand');
|
|
6
|
-
var react = require('@xyflow/react');
|
|
7
|
-
var nanoid = require('nanoid');
|
|
8
|
-
|
|
9
|
-
// ../types/dist/chunk-P7K5LM6V.js
|
|
10
|
-
var require_chunk_P7K5LM6V = chunkZ7PWFZG5_js.__commonJS({
|
|
11
|
-
"../types/dist/chunk-P7K5LM6V.js"(exports$1) {
|
|
12
|
-
var HandleTypeEnum = /* @__PURE__ */ ((HandleTypeEnum2) => {
|
|
13
|
-
HandleTypeEnum2["IMAGE"] = "image";
|
|
14
|
-
HandleTypeEnum2["TEXT"] = "text";
|
|
15
|
-
HandleTypeEnum2["VIDEO"] = "video";
|
|
16
|
-
HandleTypeEnum2["NUMBER"] = "number";
|
|
17
|
-
HandleTypeEnum2["AUDIO"] = "audio";
|
|
18
|
-
return HandleTypeEnum2;
|
|
19
|
-
})(HandleTypeEnum || {});
|
|
20
|
-
var CONNECTION_RULES2 = {
|
|
21
|
-
image: ["image"],
|
|
22
|
-
text: ["text"],
|
|
23
|
-
video: ["video"],
|
|
24
|
-
number: ["number"],
|
|
25
|
-
audio: ["audio"]
|
|
26
|
-
};
|
|
27
|
-
var ProviderTypeEnum = /* @__PURE__ */ ((ProviderTypeEnum2) => {
|
|
28
|
-
ProviderTypeEnum2["REPLICATE"] = "replicate";
|
|
29
|
-
ProviderTypeEnum2["FAL"] = "fal";
|
|
30
|
-
ProviderTypeEnum2["HUGGINGFACE"] = "huggingface";
|
|
31
|
-
ProviderTypeEnum2["GENFEED_AI"] = "genfeed-ai";
|
|
32
|
-
return ProviderTypeEnum2;
|
|
33
|
-
})(ProviderTypeEnum || {});
|
|
34
|
-
var ModelCapabilityEnum = /* @__PURE__ */ ((ModelCapabilityEnum2) => {
|
|
35
|
-
ModelCapabilityEnum2["TEXT_TO_IMAGE"] = "text-to-image";
|
|
36
|
-
ModelCapabilityEnum2["IMAGE_TO_IMAGE"] = "image-to-image";
|
|
37
|
-
ModelCapabilityEnum2["TEXT_TO_VIDEO"] = "text-to-video";
|
|
38
|
-
ModelCapabilityEnum2["IMAGE_TO_VIDEO"] = "image-to-video";
|
|
39
|
-
ModelCapabilityEnum2["TEXT_GENERATION"] = "text-generation";
|
|
40
|
-
return ModelCapabilityEnum2;
|
|
41
|
-
})(ModelCapabilityEnum || {});
|
|
42
|
-
var ModelUseCaseEnum = /* @__PURE__ */ ((ModelUseCaseEnum2) => {
|
|
43
|
-
ModelUseCaseEnum2["STYLE_TRANSFER"] = "style-transfer";
|
|
44
|
-
ModelUseCaseEnum2["CHARACTER_CONSISTENT"] = "character-consistent";
|
|
45
|
-
ModelUseCaseEnum2["IMAGE_VARIATION"] = "image-variation";
|
|
46
|
-
ModelUseCaseEnum2["INPAINTING"] = "inpainting";
|
|
47
|
-
ModelUseCaseEnum2["UPSCALE"] = "upscale";
|
|
48
|
-
ModelUseCaseEnum2["GENERAL"] = "general";
|
|
49
|
-
return ModelUseCaseEnum2;
|
|
50
|
-
})(ModelUseCaseEnum || {});
|
|
51
|
-
var NodeTypeEnum = /* @__PURE__ */ ((NodeTypeEnum2) => {
|
|
52
|
-
NodeTypeEnum2["IMAGE_INPUT"] = "imageInput";
|
|
53
|
-
NodeTypeEnum2["AUDIO_INPUT"] = "audioInput";
|
|
54
|
-
NodeTypeEnum2["VIDEO_INPUT"] = "videoInput";
|
|
55
|
-
NodeTypeEnum2["PROMPT"] = "prompt";
|
|
56
|
-
NodeTypeEnum2["PROMPT_CONSTRUCTOR"] = "promptConstructor";
|
|
57
|
-
NodeTypeEnum2["IMAGE_GEN"] = "imageGen";
|
|
58
|
-
NodeTypeEnum2["VIDEO_GEN"] = "videoGen";
|
|
59
|
-
NodeTypeEnum2["LLM"] = "llm";
|
|
60
|
-
NodeTypeEnum2["LIP_SYNC"] = "lipSync";
|
|
61
|
-
NodeTypeEnum2["VOICE_CHANGE"] = "voiceChange";
|
|
62
|
-
NodeTypeEnum2["TEXT_TO_SPEECH"] = "textToSpeech";
|
|
63
|
-
NodeTypeEnum2["TRANSCRIBE"] = "transcribe";
|
|
64
|
-
NodeTypeEnum2["MOTION_CONTROL"] = "motionControl";
|
|
65
|
-
NodeTypeEnum2["RESIZE"] = "resize";
|
|
66
|
-
NodeTypeEnum2["ANIMATION"] = "animation";
|
|
67
|
-
NodeTypeEnum2["VIDEO_STITCH"] = "videoStitch";
|
|
68
|
-
NodeTypeEnum2["VIDEO_TRIM"] = "videoTrim";
|
|
69
|
-
NodeTypeEnum2["VIDEO_FRAME_EXTRACT"] = "videoFrameExtract";
|
|
70
|
-
NodeTypeEnum2["REFRAME"] = "reframe";
|
|
71
|
-
NodeTypeEnum2["UPSCALE"] = "upscale";
|
|
72
|
-
NodeTypeEnum2["IMAGE_GRID_SPLIT"] = "imageGridSplit";
|
|
73
|
-
NodeTypeEnum2["ANNOTATION"] = "annotation";
|
|
74
|
-
NodeTypeEnum2["SUBTITLE"] = "subtitle";
|
|
75
|
-
NodeTypeEnum2["OUTPUT_GALLERY"] = "outputGallery";
|
|
76
|
-
NodeTypeEnum2["IMAGE_COMPARE"] = "imageCompare";
|
|
77
|
-
NodeTypeEnum2["DOWNLOAD"] = "download";
|
|
78
|
-
NodeTypeEnum2["WORKFLOW_INPUT"] = "workflowInput";
|
|
79
|
-
NodeTypeEnum2["WORKFLOW_OUTPUT"] = "workflowOutput";
|
|
80
|
-
NodeTypeEnum2["WORKFLOW_REF"] = "workflowRef";
|
|
81
|
-
return NodeTypeEnum2;
|
|
82
|
-
})(NodeTypeEnum || {});
|
|
83
|
-
var NodeCategoryEnum = /* @__PURE__ */ ((NodeCategoryEnum2) => {
|
|
84
|
-
NodeCategoryEnum2["INPUT"] = "input";
|
|
85
|
-
NodeCategoryEnum2["AI"] = "ai";
|
|
86
|
-
NodeCategoryEnum2["PROCESSING"] = "processing";
|
|
87
|
-
NodeCategoryEnum2["OUTPUT"] = "output";
|
|
88
|
-
NodeCategoryEnum2["COMPOSITION"] = "composition";
|
|
89
|
-
return NodeCategoryEnum2;
|
|
90
|
-
})(NodeCategoryEnum || {});
|
|
91
|
-
var NodeStatusEnum = /* @__PURE__ */ ((NodeStatusEnum2) => {
|
|
92
|
-
NodeStatusEnum2["IDLE"] = "idle";
|
|
93
|
-
NodeStatusEnum2["PENDING"] = "pending";
|
|
94
|
-
NodeStatusEnum2["PROCESSING"] = "processing";
|
|
95
|
-
NodeStatusEnum2["COMPLETE"] = "complete";
|
|
96
|
-
NodeStatusEnum2["ERROR"] = "error";
|
|
97
|
-
return NodeStatusEnum2;
|
|
98
|
-
})(NodeStatusEnum || {});
|
|
99
|
-
var TemplateCategory = /* @__PURE__ */ ((TemplateCategory2) => {
|
|
100
|
-
TemplateCategory2["IMAGE"] = "image";
|
|
101
|
-
TemplateCategory2["VIDEO"] = "video";
|
|
102
|
-
TemplateCategory2["AUDIO"] = "audio";
|
|
103
|
-
TemplateCategory2["FULL_PIPELINE"] = "full-pipeline";
|
|
104
|
-
return TemplateCategory2;
|
|
105
|
-
})(TemplateCategory || {});
|
|
106
|
-
var ReframeNodeType = /* @__PURE__ */ ((ReframeNodeType2) => {
|
|
107
|
-
ReframeNodeType2["REFRAME"] = "reframe";
|
|
108
|
-
ReframeNodeType2["LUMA_REFRAME_IMAGE"] = "lumaReframeImage";
|
|
109
|
-
ReframeNodeType2["LUMA_REFRAME_VIDEO"] = "lumaReframeVideo";
|
|
110
|
-
return ReframeNodeType2;
|
|
111
|
-
})(ReframeNodeType || {});
|
|
112
|
-
var UpscaleNodeType = /* @__PURE__ */ ((UpscaleNodeType2) => {
|
|
113
|
-
UpscaleNodeType2["UPSCALE"] = "upscale";
|
|
114
|
-
UpscaleNodeType2["TOPAZ_IMAGE_UPSCALE"] = "topazImageUpscale";
|
|
115
|
-
UpscaleNodeType2["TOPAZ_VIDEO_UPSCALE"] = "topazVideoUpscale";
|
|
116
|
-
return UpscaleNodeType2;
|
|
117
|
-
})(UpscaleNodeType || {});
|
|
118
|
-
var KlingQuality = /* @__PURE__ */ ((KlingQuality2) => {
|
|
119
|
-
KlingQuality2["STANDARD"] = "std";
|
|
120
|
-
KlingQuality2["PRO"] = "pro";
|
|
121
|
-
return KlingQuality2;
|
|
122
|
-
})(KlingQuality || {});
|
|
123
|
-
var ProcessingNodeType = /* @__PURE__ */ ((ProcessingNodeType2) => {
|
|
124
|
-
ProcessingNodeType2["REFRAME"] = "reframe";
|
|
125
|
-
ProcessingNodeType2["LUMA_REFRAME_IMAGE"] = "lumaReframeImage";
|
|
126
|
-
ProcessingNodeType2["LUMA_REFRAME_VIDEO"] = "lumaReframeVideo";
|
|
127
|
-
ProcessingNodeType2["UPSCALE"] = "upscale";
|
|
128
|
-
ProcessingNodeType2["TOPAZ_IMAGE_UPSCALE"] = "topazImageUpscale";
|
|
129
|
-
ProcessingNodeType2["TOPAZ_VIDEO_UPSCALE"] = "topazVideoUpscale";
|
|
130
|
-
ProcessingNodeType2["VIDEO_FRAME_EXTRACT"] = "videoFrameExtract";
|
|
131
|
-
ProcessingNodeType2["LIP_SYNC"] = "lipSync";
|
|
132
|
-
ProcessingNodeType2["TEXT_TO_SPEECH"] = "textToSpeech";
|
|
133
|
-
ProcessingNodeType2["VOICE_CHANGE"] = "voiceChange";
|
|
134
|
-
ProcessingNodeType2["SUBTITLE"] = "subtitle";
|
|
135
|
-
ProcessingNodeType2["VIDEO_STITCH"] = "videoStitch";
|
|
136
|
-
ProcessingNodeType2["WORKFLOW_REF"] = "workflowRef";
|
|
137
|
-
return ProcessingNodeType2;
|
|
138
|
-
})(ProcessingNodeType || {});
|
|
139
|
-
var NODE_DEFINITIONS5 = {
|
|
140
|
-
// Input nodes
|
|
141
|
-
imageInput: {
|
|
142
|
-
type: "imageInput",
|
|
143
|
-
label: "Image",
|
|
144
|
-
description: "Upload or reference an image",
|
|
145
|
-
category: "input",
|
|
146
|
-
icon: "Image",
|
|
147
|
-
inputs: [],
|
|
148
|
-
outputs: [{ id: "image", type: "image", label: "Image" }],
|
|
149
|
-
defaultData: {
|
|
150
|
-
label: "Image",
|
|
151
|
-
status: "idle",
|
|
152
|
-
image: null,
|
|
153
|
-
filename: null,
|
|
154
|
-
dimensions: null,
|
|
155
|
-
source: "upload"
|
|
156
|
-
}
|
|
157
|
-
},
|
|
158
|
-
prompt: {
|
|
159
|
-
type: "prompt",
|
|
160
|
-
label: "Prompt",
|
|
161
|
-
description: "Text prompt for AI generation",
|
|
162
|
-
category: "input",
|
|
163
|
-
icon: "MessageSquare",
|
|
164
|
-
inputs: [],
|
|
165
|
-
outputs: [{ id: "text", type: "text", label: "Prompt" }],
|
|
166
|
-
defaultData: {
|
|
167
|
-
label: "Prompt",
|
|
168
|
-
status: "idle",
|
|
169
|
-
prompt: "",
|
|
170
|
-
variables: {}
|
|
171
|
-
}
|
|
172
|
-
},
|
|
173
|
-
audioInput: {
|
|
174
|
-
type: "audioInput",
|
|
175
|
-
label: "Audio",
|
|
176
|
-
description: "Upload an audio file (MP3, WAV)",
|
|
177
|
-
category: "input",
|
|
178
|
-
icon: "Volume2",
|
|
179
|
-
inputs: [],
|
|
180
|
-
outputs: [{ id: "audio", type: "audio", label: "Audio" }],
|
|
181
|
-
defaultData: {
|
|
182
|
-
label: "Audio",
|
|
183
|
-
status: "idle",
|
|
184
|
-
audio: null,
|
|
185
|
-
filename: null,
|
|
186
|
-
duration: null,
|
|
187
|
-
source: "upload"
|
|
188
|
-
}
|
|
189
|
-
},
|
|
190
|
-
videoInput: {
|
|
191
|
-
type: "videoInput",
|
|
192
|
-
label: "Video",
|
|
193
|
-
description: "Upload or reference a video file",
|
|
194
|
-
category: "input",
|
|
195
|
-
icon: "FileVideo",
|
|
196
|
-
inputs: [],
|
|
197
|
-
outputs: [{ id: "video", type: "video", label: "Video" }],
|
|
198
|
-
defaultData: {
|
|
199
|
-
label: "Video",
|
|
200
|
-
status: "idle",
|
|
201
|
-
video: null,
|
|
202
|
-
filename: null,
|
|
203
|
-
duration: null,
|
|
204
|
-
dimensions: null,
|
|
205
|
-
source: "upload"
|
|
206
|
-
}
|
|
207
|
-
},
|
|
208
|
-
promptConstructor: {
|
|
209
|
-
type: "promptConstructor",
|
|
210
|
-
label: "Prompt Constructor",
|
|
211
|
-
description: "Template-based prompt with @variable interpolation from connected Prompt nodes",
|
|
212
|
-
category: "input",
|
|
213
|
-
icon: "Puzzle",
|
|
214
|
-
inputs: [{ id: "text", type: "text", label: "Variables", multiple: true }],
|
|
215
|
-
outputs: [{ id: "text", type: "text", label: "Prompt" }],
|
|
216
|
-
defaultData: {
|
|
217
|
-
label: "Prompt Constructor",
|
|
218
|
-
status: "idle",
|
|
219
|
-
template: "",
|
|
220
|
-
outputText: null,
|
|
221
|
-
unresolvedVars: []
|
|
222
|
-
}
|
|
223
|
-
},
|
|
224
|
-
// AI nodes
|
|
225
|
-
imageGen: {
|
|
226
|
-
type: "imageGen",
|
|
227
|
-
label: "Image Generator",
|
|
228
|
-
description: "Generate images with nano-banana models",
|
|
229
|
-
category: "ai",
|
|
230
|
-
icon: "Sparkles",
|
|
231
|
-
inputs: [
|
|
232
|
-
{ id: "prompt", type: "text", label: "Prompt", required: true },
|
|
233
|
-
{ id: "images", type: "image", label: "Reference Images", multiple: true }
|
|
234
|
-
],
|
|
235
|
-
outputs: [{ id: "image", type: "image", label: "Generated Image" }],
|
|
236
|
-
defaultData: {
|
|
237
|
-
label: "Image Generator",
|
|
238
|
-
status: "idle",
|
|
239
|
-
inputImages: [],
|
|
240
|
-
inputPrompt: null,
|
|
241
|
-
outputImage: null,
|
|
242
|
-
outputImages: [],
|
|
243
|
-
model: "nano-banana-pro",
|
|
244
|
-
aspectRatio: "1:1",
|
|
245
|
-
resolution: "2K",
|
|
246
|
-
outputFormat: "jpg",
|
|
247
|
-
jobId: null
|
|
248
|
-
}
|
|
249
|
-
},
|
|
250
|
-
videoGen: {
|
|
251
|
-
type: "videoGen",
|
|
252
|
-
label: "Video Generator",
|
|
253
|
-
description: "Generate videos with veo-3.1 models",
|
|
254
|
-
category: "ai",
|
|
255
|
-
icon: "Video",
|
|
256
|
-
inputs: [
|
|
257
|
-
{ id: "prompt", type: "text", label: "Prompt", required: true },
|
|
258
|
-
{ id: "image", type: "image", label: "Starting Frame" },
|
|
259
|
-
{ id: "lastFrame", type: "image", label: "Last Frame (interpolation)" }
|
|
260
|
-
],
|
|
261
|
-
outputs: [{ id: "video", type: "video", label: "Generated Video" }],
|
|
262
|
-
defaultData: {
|
|
263
|
-
label: "Video Generator",
|
|
264
|
-
status: "idle",
|
|
265
|
-
inputImage: null,
|
|
266
|
-
lastFrame: null,
|
|
267
|
-
referenceImages: [],
|
|
268
|
-
inputPrompt: null,
|
|
269
|
-
negativePrompt: "",
|
|
270
|
-
outputVideo: null,
|
|
271
|
-
model: "veo-3.1-fast",
|
|
272
|
-
duration: 8,
|
|
273
|
-
aspectRatio: "16:9",
|
|
274
|
-
resolution: "1080p",
|
|
275
|
-
generateAudio: true,
|
|
276
|
-
jobId: null
|
|
277
|
-
}
|
|
278
|
-
},
|
|
279
|
-
llm: {
|
|
280
|
-
type: "llm",
|
|
281
|
-
label: "LLM",
|
|
282
|
-
description: "Generate text with meta-llama",
|
|
283
|
-
category: "ai",
|
|
284
|
-
icon: "Brain",
|
|
285
|
-
inputs: [{ id: "prompt", type: "text", label: "Prompt", required: true }],
|
|
286
|
-
outputs: [{ id: "text", type: "text", label: "Generated Text" }],
|
|
287
|
-
defaultData: {
|
|
288
|
-
label: "LLM",
|
|
289
|
-
status: "idle",
|
|
290
|
-
inputPrompt: null,
|
|
291
|
-
outputText: null,
|
|
292
|
-
model: "meta-llama-3.1-405b-instruct",
|
|
293
|
-
systemPrompt: "You are a creative assistant helping generate content prompts.",
|
|
294
|
-
temperature: 0.7,
|
|
295
|
-
maxTokens: 1024,
|
|
296
|
-
topP: 0.9,
|
|
297
|
-
jobId: null
|
|
298
|
-
}
|
|
299
|
-
},
|
|
300
|
-
lipSync: {
|
|
301
|
-
type: "lipSync",
|
|
302
|
-
label: "Lip Sync",
|
|
303
|
-
description: "Generate talking-head video from image/video and audio using Replicate",
|
|
304
|
-
category: "ai",
|
|
305
|
-
icon: "Mic",
|
|
306
|
-
inputs: [
|
|
307
|
-
{ id: "image", type: "image", label: "Face Image" },
|
|
308
|
-
{ id: "video", type: "video", label: "Source Video" },
|
|
309
|
-
{ id: "audio", type: "audio", label: "Audio", required: true }
|
|
310
|
-
],
|
|
311
|
-
outputs: [{ id: "video", type: "video", label: "Generated Video" }],
|
|
312
|
-
defaultData: {
|
|
313
|
-
label: "Lip Sync",
|
|
314
|
-
status: "idle",
|
|
315
|
-
inputImage: null,
|
|
316
|
-
inputVideo: null,
|
|
317
|
-
inputAudio: null,
|
|
318
|
-
outputVideo: null,
|
|
319
|
-
model: "sync/lipsync-2",
|
|
320
|
-
syncMode: "loop",
|
|
321
|
-
temperature: 0.5,
|
|
322
|
-
activeSpeaker: false,
|
|
323
|
-
jobId: null
|
|
324
|
-
}
|
|
325
|
-
},
|
|
326
|
-
voiceChange: {
|
|
327
|
-
type: "voiceChange",
|
|
328
|
-
label: "Voice Change",
|
|
329
|
-
description: "Replace or mix audio track in a video",
|
|
330
|
-
category: "ai",
|
|
331
|
-
icon: "AudioLines",
|
|
332
|
-
inputs: [
|
|
333
|
-
{ id: "video", type: "video", label: "Video", required: true },
|
|
334
|
-
{ id: "audio", type: "audio", label: "New Audio", required: true }
|
|
335
|
-
],
|
|
336
|
-
outputs: [{ id: "video", type: "video", label: "Output Video" }],
|
|
337
|
-
defaultData: {
|
|
338
|
-
label: "Voice Change",
|
|
339
|
-
status: "idle",
|
|
340
|
-
inputVideo: null,
|
|
341
|
-
inputAudio: null,
|
|
342
|
-
outputVideo: null,
|
|
343
|
-
preserveOriginalAudio: false,
|
|
344
|
-
audioMixLevel: 0.5,
|
|
345
|
-
jobId: null
|
|
346
|
-
}
|
|
347
|
-
},
|
|
348
|
-
textToSpeech: {
|
|
349
|
-
type: "textToSpeech",
|
|
350
|
-
label: "Text to Speech",
|
|
351
|
-
description: "Convert text to natural-sounding speech using ElevenLabs",
|
|
352
|
-
category: "ai",
|
|
353
|
-
icon: "AudioLines",
|
|
354
|
-
inputs: [{ id: "text", type: "text", label: "Text", required: true }],
|
|
355
|
-
outputs: [{ id: "audio", type: "audio", label: "Audio" }],
|
|
356
|
-
defaultData: {
|
|
357
|
-
label: "Text to Speech",
|
|
358
|
-
status: "idle",
|
|
359
|
-
inputText: null,
|
|
360
|
-
outputAudio: null,
|
|
361
|
-
provider: "elevenlabs",
|
|
362
|
-
voice: "rachel",
|
|
363
|
-
stability: 0.5,
|
|
364
|
-
similarityBoost: 0.75,
|
|
365
|
-
speed: 1,
|
|
366
|
-
jobId: null
|
|
367
|
-
}
|
|
368
|
-
},
|
|
369
|
-
transcribe: {
|
|
370
|
-
type: "transcribe",
|
|
371
|
-
label: "Transcribe",
|
|
372
|
-
description: "Convert video or audio to text transcript",
|
|
373
|
-
category: "ai",
|
|
374
|
-
icon: "FileText",
|
|
375
|
-
inputs: [
|
|
376
|
-
{ id: "video", type: "video", label: "Video" },
|
|
377
|
-
{ id: "audio", type: "audio", label: "Audio" }
|
|
378
|
-
],
|
|
379
|
-
outputs: [{ id: "text", type: "text", label: "Transcript" }],
|
|
380
|
-
defaultData: {
|
|
381
|
-
label: "Transcribe",
|
|
382
|
-
status: "idle",
|
|
383
|
-
inputVideo: null,
|
|
384
|
-
inputAudio: null,
|
|
385
|
-
outputText: null,
|
|
386
|
-
language: "auto",
|
|
387
|
-
timestamps: false,
|
|
388
|
-
jobId: null
|
|
389
|
-
}
|
|
390
|
-
},
|
|
391
|
-
motionControl: {
|
|
392
|
-
type: "motionControl",
|
|
393
|
-
label: "Motion Control",
|
|
394
|
-
description: "Generate video with precise motion control using Kling AI",
|
|
395
|
-
category: "ai",
|
|
396
|
-
icon: "Navigation",
|
|
397
|
-
inputs: [
|
|
398
|
-
{ id: "image", type: "image", label: "Image", required: true },
|
|
399
|
-
{ id: "video", type: "video", label: "Motion Video" },
|
|
400
|
-
{ id: "prompt", type: "text", label: "Prompt" }
|
|
401
|
-
],
|
|
402
|
-
outputs: [{ id: "video", type: "video", label: "Video" }],
|
|
403
|
-
defaultData: {
|
|
404
|
-
label: "Motion Control",
|
|
405
|
-
status: "idle",
|
|
406
|
-
inputImage: null,
|
|
407
|
-
inputVideo: null,
|
|
408
|
-
inputPrompt: null,
|
|
409
|
-
outputVideo: null,
|
|
410
|
-
mode: "video_transfer",
|
|
411
|
-
duration: 5,
|
|
412
|
-
aspectRatio: "16:9",
|
|
413
|
-
trajectoryPoints: [],
|
|
414
|
-
cameraMovement: "static",
|
|
415
|
-
cameraIntensity: 50,
|
|
416
|
-
qualityMode: "pro",
|
|
417
|
-
characterOrientation: "image",
|
|
418
|
-
keepOriginalSound: true,
|
|
419
|
-
motionStrength: 50,
|
|
420
|
-
negativePrompt: "",
|
|
421
|
-
seed: null,
|
|
422
|
-
jobId: null
|
|
423
|
-
}
|
|
424
|
-
},
|
|
425
|
-
// Processing nodes
|
|
426
|
-
resize: {
|
|
427
|
-
type: "resize",
|
|
428
|
-
label: "Resize",
|
|
429
|
-
description: "Resize images or videos to different aspect ratios using Luma AI",
|
|
430
|
-
category: "processing",
|
|
431
|
-
icon: "Maximize2",
|
|
432
|
-
inputs: [{ id: "media", type: "image", label: "Media", required: true }],
|
|
433
|
-
outputs: [{ id: "media", type: "image", label: "Resized Media" }],
|
|
434
|
-
defaultData: {
|
|
435
|
-
label: "Resize",
|
|
436
|
-
status: "idle",
|
|
437
|
-
inputMedia: null,
|
|
438
|
-
inputType: null,
|
|
439
|
-
outputMedia: null,
|
|
440
|
-
targetAspectRatio: "16:9",
|
|
441
|
-
prompt: "",
|
|
442
|
-
gridPosition: { x: 0.5, y: 0.5 },
|
|
443
|
-
jobId: null
|
|
444
|
-
}
|
|
445
|
-
},
|
|
446
|
-
animation: {
|
|
447
|
-
type: "animation",
|
|
448
|
-
label: "Animation",
|
|
449
|
-
description: "Apply easing curve to video",
|
|
450
|
-
category: "processing",
|
|
451
|
-
icon: "Wand2",
|
|
452
|
-
inputs: [{ id: "video", type: "video", label: "Video", required: true }],
|
|
453
|
-
outputs: [{ id: "video", type: "video", label: "Animated Video" }],
|
|
454
|
-
defaultData: {
|
|
455
|
-
label: "Animation",
|
|
456
|
-
status: "idle",
|
|
457
|
-
inputVideo: null,
|
|
458
|
-
outputVideo: null,
|
|
459
|
-
curveType: "preset",
|
|
460
|
-
preset: "easeInOutCubic",
|
|
461
|
-
customCurve: [0.645, 0.045, 0.355, 1],
|
|
462
|
-
speedMultiplier: 1
|
|
463
|
-
}
|
|
464
|
-
},
|
|
465
|
-
videoStitch: {
|
|
466
|
-
type: "videoStitch",
|
|
467
|
-
label: "Video Stitch",
|
|
468
|
-
description: "Concatenate multiple videos",
|
|
469
|
-
category: "processing",
|
|
470
|
-
icon: "Layers",
|
|
471
|
-
inputs: [{ id: "videos", type: "video", label: "Videos", multiple: true, required: true }],
|
|
472
|
-
outputs: [{ id: "video", type: "video", label: "Stitched Video" }],
|
|
473
|
-
defaultData: {
|
|
474
|
-
label: "Video Stitch",
|
|
475
|
-
status: "idle",
|
|
476
|
-
inputVideos: [],
|
|
477
|
-
outputVideo: null,
|
|
478
|
-
transitionType: "crossfade",
|
|
479
|
-
transitionDuration: 0.5,
|
|
480
|
-
seamlessLoop: false
|
|
481
|
-
}
|
|
482
|
-
},
|
|
483
|
-
videoTrim: {
|
|
484
|
-
type: "videoTrim",
|
|
485
|
-
label: "Video Trim",
|
|
486
|
-
description: "Trim video to a specific time range",
|
|
487
|
-
category: "processing",
|
|
488
|
-
icon: "Scissors",
|
|
489
|
-
inputs: [{ id: "video", type: "video", label: "Video", required: true }],
|
|
490
|
-
outputs: [{ id: "video", type: "video", label: "Trimmed Video" }],
|
|
491
|
-
defaultData: {
|
|
492
|
-
label: "Video Trim",
|
|
493
|
-
status: "idle",
|
|
494
|
-
inputVideo: null,
|
|
495
|
-
outputVideo: null,
|
|
496
|
-
startTime: 0,
|
|
497
|
-
endTime: 60,
|
|
498
|
-
duration: null,
|
|
499
|
-
jobId: null
|
|
500
|
-
}
|
|
501
|
-
},
|
|
502
|
-
videoFrameExtract: {
|
|
503
|
-
type: "videoFrameExtract",
|
|
504
|
-
label: "Frame Extract",
|
|
505
|
-
description: "Extract a specific frame from video as image",
|
|
506
|
-
category: "processing",
|
|
507
|
-
icon: "Film",
|
|
508
|
-
inputs: [{ id: "video", type: "video", label: "Video", required: true }],
|
|
509
|
-
outputs: [{ id: "image", type: "image", label: "Extracted Frame" }],
|
|
510
|
-
defaultData: {
|
|
511
|
-
label: "Frame Extract",
|
|
512
|
-
status: "idle",
|
|
513
|
-
inputVideo: null,
|
|
514
|
-
outputImage: null,
|
|
515
|
-
selectionMode: "last",
|
|
516
|
-
timestampSeconds: 0,
|
|
517
|
-
percentagePosition: 100,
|
|
518
|
-
videoDuration: null,
|
|
519
|
-
jobId: null
|
|
520
|
-
}
|
|
521
|
-
},
|
|
522
|
-
reframe: {
|
|
523
|
-
type: "reframe",
|
|
524
|
-
label: "Reframe",
|
|
525
|
-
description: "Reframe images or videos to different aspect ratios with AI outpainting",
|
|
526
|
-
category: "processing",
|
|
527
|
-
icon: "Crop",
|
|
528
|
-
inputs: [
|
|
529
|
-
{ id: "image", type: "image", label: "Image" },
|
|
530
|
-
{ id: "video", type: "video", label: "Video" }
|
|
531
|
-
],
|
|
532
|
-
outputs: [
|
|
533
|
-
{ id: "image", type: "image", label: "Reframed Image" },
|
|
534
|
-
{ id: "video", type: "video", label: "Reframed Video" }
|
|
535
|
-
],
|
|
536
|
-
defaultData: {
|
|
537
|
-
label: "Reframe",
|
|
538
|
-
status: "idle",
|
|
539
|
-
inputImage: null,
|
|
540
|
-
inputVideo: null,
|
|
541
|
-
inputType: null,
|
|
542
|
-
outputImage: null,
|
|
543
|
-
outputVideo: null,
|
|
544
|
-
model: "photon-flash-1",
|
|
545
|
-
aspectRatio: "16:9",
|
|
546
|
-
prompt: "",
|
|
547
|
-
gridPosition: { x: 0.5, y: 0.5 },
|
|
548
|
-
jobId: null
|
|
549
|
-
}
|
|
550
|
-
},
|
|
551
|
-
upscale: {
|
|
552
|
-
type: "upscale",
|
|
553
|
-
label: "Upscale",
|
|
554
|
-
description: "AI-powered upscaling for images and videos",
|
|
555
|
-
category: "processing",
|
|
556
|
-
icon: "Maximize",
|
|
557
|
-
inputs: [
|
|
558
|
-
{ id: "image", type: "image", label: "Image" },
|
|
559
|
-
{ id: "video", type: "video", label: "Video" }
|
|
560
|
-
],
|
|
561
|
-
outputs: [
|
|
562
|
-
{ id: "image", type: "image", label: "Upscaled Image" },
|
|
563
|
-
{ id: "video", type: "video", label: "Upscaled Video" }
|
|
564
|
-
],
|
|
565
|
-
defaultData: {
|
|
566
|
-
label: "Upscale",
|
|
567
|
-
status: "idle",
|
|
568
|
-
inputImage: null,
|
|
569
|
-
inputVideo: null,
|
|
570
|
-
inputType: null,
|
|
571
|
-
outputImage: null,
|
|
572
|
-
outputVideo: null,
|
|
573
|
-
originalPreview: null,
|
|
574
|
-
outputPreview: null,
|
|
575
|
-
model: "topaz-standard-v2",
|
|
576
|
-
upscaleFactor: "2x",
|
|
577
|
-
outputFormat: "png",
|
|
578
|
-
faceEnhancement: false,
|
|
579
|
-
faceEnhancementStrength: 80,
|
|
580
|
-
faceEnhancementCreativity: 0,
|
|
581
|
-
targetResolution: "1080p",
|
|
582
|
-
targetFps: 30,
|
|
583
|
-
showComparison: true,
|
|
584
|
-
comparisonPosition: 50,
|
|
585
|
-
jobId: null
|
|
586
|
-
}
|
|
587
|
-
},
|
|
588
|
-
imageGridSplit: {
|
|
589
|
-
type: "imageGridSplit",
|
|
590
|
-
label: "Grid Split",
|
|
591
|
-
description: "Split image into grid cells",
|
|
592
|
-
category: "processing",
|
|
593
|
-
icon: "Grid3X3",
|
|
594
|
-
inputs: [{ id: "image", type: "image", label: "Image", required: true }],
|
|
595
|
-
outputs: [{ id: "images", type: "image", label: "Split Images", multiple: true }],
|
|
596
|
-
defaultData: {
|
|
597
|
-
label: "Grid Split",
|
|
598
|
-
status: "idle",
|
|
599
|
-
inputImage: null,
|
|
600
|
-
outputImages: [],
|
|
601
|
-
gridRows: 2,
|
|
602
|
-
gridCols: 3,
|
|
603
|
-
borderInset: 10,
|
|
604
|
-
outputFormat: "jpg",
|
|
605
|
-
quality: 95
|
|
606
|
-
}
|
|
607
|
-
},
|
|
608
|
-
annotation: {
|
|
609
|
-
type: "annotation",
|
|
610
|
-
label: "Annotation",
|
|
611
|
-
description: "Add shapes, arrows, and text to images",
|
|
612
|
-
category: "processing",
|
|
613
|
-
icon: "Pencil",
|
|
614
|
-
inputs: [{ id: "image", type: "image", label: "Image", required: true }],
|
|
615
|
-
outputs: [{ id: "image", type: "image", label: "Annotated Image" }],
|
|
616
|
-
defaultData: {
|
|
617
|
-
label: "Annotation",
|
|
618
|
-
status: "idle",
|
|
619
|
-
inputImage: null,
|
|
620
|
-
outputImage: null,
|
|
621
|
-
annotations: [],
|
|
622
|
-
hasAnnotations: false
|
|
623
|
-
}
|
|
624
|
-
},
|
|
625
|
-
subtitle: {
|
|
626
|
-
type: "subtitle",
|
|
627
|
-
label: "Subtitle",
|
|
628
|
-
description: "Burn subtitles into video using FFmpeg",
|
|
629
|
-
category: "processing",
|
|
630
|
-
icon: "Subtitles",
|
|
631
|
-
inputs: [
|
|
632
|
-
{ id: "video", type: "video", label: "Video", required: true },
|
|
633
|
-
{ id: "text", type: "text", label: "Subtitle Text", required: true }
|
|
634
|
-
],
|
|
635
|
-
outputs: [{ id: "video", type: "video", label: "Video with Subtitles" }],
|
|
636
|
-
defaultData: {
|
|
637
|
-
label: "Subtitle",
|
|
638
|
-
status: "idle",
|
|
639
|
-
inputVideo: null,
|
|
640
|
-
inputText: null,
|
|
641
|
-
outputVideo: null,
|
|
642
|
-
style: "modern",
|
|
643
|
-
position: "bottom",
|
|
644
|
-
fontSize: 24,
|
|
645
|
-
fontColor: "#FFFFFF",
|
|
646
|
-
backgroundColor: "rgba(0,0,0,0.7)",
|
|
647
|
-
fontFamily: "Arial",
|
|
648
|
-
jobId: null
|
|
649
|
-
}
|
|
650
|
-
},
|
|
651
|
-
outputGallery: {
|
|
652
|
-
type: "outputGallery",
|
|
653
|
-
label: "Output Gallery",
|
|
654
|
-
description: "Thumbnail grid with lightbox for multi-image outputs",
|
|
655
|
-
category: "output",
|
|
656
|
-
icon: "LayoutGrid",
|
|
657
|
-
inputs: [{ id: "image", type: "image", label: "Images", multiple: true }],
|
|
658
|
-
outputs: [],
|
|
659
|
-
defaultData: {
|
|
660
|
-
label: "Output Gallery",
|
|
661
|
-
status: "idle",
|
|
662
|
-
images: []
|
|
663
|
-
}
|
|
664
|
-
},
|
|
665
|
-
imageCompare: {
|
|
666
|
-
type: "imageCompare",
|
|
667
|
-
label: "Image Compare",
|
|
668
|
-
description: "Side-by-side A/B comparison with draggable slider",
|
|
669
|
-
category: "output",
|
|
670
|
-
icon: "Columns2",
|
|
671
|
-
inputs: [
|
|
672
|
-
{ id: "image", type: "image", label: "Image A" },
|
|
673
|
-
{ id: "image-1", type: "image", label: "Image B" }
|
|
674
|
-
],
|
|
675
|
-
outputs: [],
|
|
676
|
-
defaultData: {
|
|
677
|
-
label: "Image Compare",
|
|
678
|
-
status: "idle",
|
|
679
|
-
imageA: null,
|
|
680
|
-
imageB: null
|
|
681
|
-
}
|
|
682
|
-
},
|
|
683
|
-
// Output nodes
|
|
684
|
-
download: {
|
|
685
|
-
type: "download",
|
|
686
|
-
label: "Download",
|
|
687
|
-
description: "Download workflow output with custom filename",
|
|
688
|
-
category: "output",
|
|
689
|
-
icon: "Download",
|
|
690
|
-
inputs: [
|
|
691
|
-
{ id: "image", type: "image", label: "Image" },
|
|
692
|
-
{ id: "video", type: "video", label: "Video" }
|
|
693
|
-
],
|
|
694
|
-
outputs: [],
|
|
695
|
-
defaultData: {
|
|
696
|
-
label: "Download",
|
|
697
|
-
status: "idle",
|
|
698
|
-
inputImage: null,
|
|
699
|
-
inputVideo: null,
|
|
700
|
-
inputType: null,
|
|
701
|
-
outputName: "output"
|
|
702
|
-
}
|
|
703
|
-
},
|
|
704
|
-
// Composition nodes (workflow-as-node)
|
|
705
|
-
workflowInput: {
|
|
706
|
-
type: "workflowInput",
|
|
707
|
-
label: "Workflow Input",
|
|
708
|
-
description: "Define an input port for when this workflow is used as a subworkflow",
|
|
709
|
-
category: "composition",
|
|
710
|
-
icon: "ArrowRightToLine",
|
|
711
|
-
inputs: [],
|
|
712
|
-
outputs: [{ id: "value", type: "image", label: "Value" }],
|
|
713
|
-
// Type is dynamic based on inputType
|
|
714
|
-
defaultData: {
|
|
715
|
-
label: "Workflow Input",
|
|
716
|
-
status: "idle",
|
|
717
|
-
inputName: "input",
|
|
718
|
-
inputType: "image",
|
|
719
|
-
required: true,
|
|
720
|
-
description: ""
|
|
721
|
-
}
|
|
722
|
-
},
|
|
723
|
-
workflowOutput: {
|
|
724
|
-
type: "workflowOutput",
|
|
725
|
-
label: "Workflow Output",
|
|
726
|
-
description: "Define an output port for when this workflow is used as a subworkflow",
|
|
727
|
-
category: "composition",
|
|
728
|
-
icon: "ArrowLeftFromLine",
|
|
729
|
-
inputs: [{ id: "value", type: "image", label: "Value", required: true }],
|
|
730
|
-
// Type is dynamic based on outputType
|
|
731
|
-
outputs: [],
|
|
732
|
-
defaultData: {
|
|
733
|
-
label: "Workflow Output",
|
|
734
|
-
status: "idle",
|
|
735
|
-
outputName: "output",
|
|
736
|
-
outputType: "image",
|
|
737
|
-
description: "",
|
|
738
|
-
inputValue: null
|
|
739
|
-
}
|
|
740
|
-
},
|
|
741
|
-
workflowRef: {
|
|
742
|
-
type: "workflowRef",
|
|
743
|
-
label: "Subworkflow",
|
|
744
|
-
description: "Reference another workflow as a subworkflow",
|
|
745
|
-
category: "composition",
|
|
746
|
-
icon: "GitBranch",
|
|
747
|
-
inputs: [],
|
|
748
|
-
// Dynamic based on referenced workflow interface
|
|
749
|
-
outputs: [],
|
|
750
|
-
// Dynamic based on referenced workflow interface
|
|
751
|
-
defaultData: {
|
|
752
|
-
label: "Subworkflow",
|
|
753
|
-
status: "idle",
|
|
754
|
-
referencedWorkflowId: null,
|
|
755
|
-
referencedWorkflowName: null,
|
|
756
|
-
cachedInterface: null,
|
|
757
|
-
inputMappings: {},
|
|
758
|
-
outputMappings: {},
|
|
759
|
-
childExecutionId: null
|
|
760
|
-
}
|
|
761
|
-
}
|
|
762
|
-
// Multi-format nodes removed - format conversion now handled by schema-driven engine
|
|
763
|
-
};
|
|
764
|
-
// Display ordering of node types within each palette category.
// Keys mirror the category buckets returned by getNodesByCategory.
var NODE_ORDER = {
  // Source nodes that feed media or text into a workflow.
  input: ["imageInput", "videoInput", "audioInput", "prompt", "promptConstructor"],
  // Generative / AI-backed nodes.
  ai: [
    "imageGen",
    "videoGen",
    "llm",
    "lipSync",
    "textToSpeech",
    "transcribe",
    "voiceChange",
    "motionControl"
  ],
  // Deterministic media transforms.
  processing: [
    "reframe",
    "upscale",
    "resize",
    "videoStitch",
    "videoTrim",
    "videoFrameExtract",
    "imageGridSplit",
    "annotation",
    "subtitle",
    "animation"
  ],
  // Terminal nodes that surface results to the user.
  output: ["download", "outputGallery", "imageCompare"],
  // Subworkflow composition nodes.
  composition: ["workflowRef", "workflowInput", "workflowOutput"]
};
|
|
791
|
-
/**
 * Groups node definitions by palette category, preserving NODE_ORDER's
 * per-category ordering and silently skipping types with no definition.
 * @returns {Object<string, Array>} category name -> ordered definitions
 */
function getNodesByCategory() {
  const result = {
    input: [],
    ai: [],
    processing: [],
    output: [],
    composition: []
  };
  for (const [category, nodeTypes] of Object.entries(NODE_ORDER)) {
    result[category] = nodeTypes
      .map((nodeType) => NODE_DEFINITIONS5[nodeType])
      .filter(Boolean);
  }
  return result;
}
|
|
809
|
-
exports$1.CONNECTION_RULES = CONNECTION_RULES2;
|
|
810
|
-
exports$1.HandleTypeEnum = HandleTypeEnum;
|
|
811
|
-
exports$1.KlingQuality = KlingQuality;
|
|
812
|
-
exports$1.ModelCapabilityEnum = ModelCapabilityEnum;
|
|
813
|
-
exports$1.ModelUseCaseEnum = ModelUseCaseEnum;
|
|
814
|
-
exports$1.NODE_DEFINITIONS = NODE_DEFINITIONS5;
|
|
815
|
-
exports$1.NODE_ORDER = NODE_ORDER;
|
|
816
|
-
exports$1.NodeCategoryEnum = NodeCategoryEnum;
|
|
817
|
-
exports$1.NodeStatusEnum = NodeStatusEnum;
|
|
818
|
-
exports$1.NodeTypeEnum = NodeTypeEnum;
|
|
819
|
-
exports$1.ProcessingNodeType = ProcessingNodeType;
|
|
820
|
-
exports$1.ProviderTypeEnum = ProviderTypeEnum;
|
|
821
|
-
exports$1.ReframeNodeType = ReframeNodeType;
|
|
822
|
-
exports$1.TemplateCategory = TemplateCategory;
|
|
823
|
-
exports$1.UpscaleNodeType = UpscaleNodeType;
|
|
824
|
-
exports$1.getNodesByCategory = getNodesByCategory;
|
|
825
|
-
}
|
|
826
|
-
});
|
|
827
|
-
|
|
828
|
-
// ../types/dist/chunk-KXAKQO3U.js
|
|
829
|
-
// Registers the edge-style chunk as a lazily-evaluated CommonJS module.
var require_chunk_KXAKQO3U = chunkZ7PWFZG5_js.__commonJS({
  "../types/dist/chunk-KXAKQO3U.js"(exports$1) {
    // Edge rendering styles supported by the canvas.
    var EdgeStyleEnum = /* @__PURE__ */ (() => {
      const styles = EdgeStyleEnum || {};
      styles["DEFAULT"] = "default";
      styles["SMOOTHSTEP"] = "smoothstep";
      styles["STRAIGHT"] = "straight";
      return styles;
    })();
    exports$1.EdgeStyleEnum = EdgeStyleEnum;
  }
});
|
|
840
|
-
|
|
841
|
-
// ../types/dist/chunk-RNGYPX4W.js
|
|
842
|
-
// Side-effect-only chunk: registered for module-graph completeness,
// exports nothing (the bundler keeps it so require_dist can evaluate it).
var require_chunk_RNGYPX4W = chunkZ7PWFZG5_js.__commonJS({
  "../types/dist/chunk-RNGYPX4W.js"() {
  }
});
|
|
846
|
-
|
|
847
|
-
// ../types/dist/index.js
|
|
848
|
-
// Aggregated "../types/dist/index.js" module: local preset constants plus
// live re-exports (getter-based) from the two sibling chunks.
var require_dist = chunkZ7PWFZG5_js.__commonJS({
  "../types/dist/index.js"(exports$1) {
    var chunkP7K5LM6V_js = require_chunk_P7K5LM6V();
    var chunkKXAKQO3U_js = require_chunk_KXAKQO3U();
    require_chunk_RNGYPX4W();
    // Prompt-builder preset lists.
    var PROMPT_CATEGORIES = [
      "ads",
      "anime",
      "product",
      "portrait",
      "landscape",
      "abstract",
      "fashion",
      "food",
      "architecture",
      "custom"
    ];
    var MOOD_PRESETS = [
      "cinematic",
      "dreamy",
      "gritty",
      "ethereal",
      "nostalgic",
      "futuristic",
      "mysterious",
      "peaceful",
      "energetic",
      "moody",
      "dramatic",
      "whimsical"
    ];
    var STYLE_PRESETS = [
      "photorealistic",
      "anime",
      "3d-render",
      "oil-painting",
      "watercolor",
      "digital-art",
      "comic-book",
      "sketch",
      "pixel-art",
      "minimalist",
      "cyberpunk",
      "fantasy",
      "retro",
      "vintage"
    ];
    var CAMERA_PRESETS = [
      "wide-angle",
      "macro",
      "telephoto",
      "drone",
      "portrait",
      "fisheye",
      "tilt-shift",
      "gopro",
      "close-up",
      "establishing",
      "eye-level",
      "low-angle",
      "high-angle",
      "dutch-angle"
    ];
    var LIGHTING_PRESETS = [
      "golden-hour",
      "studio",
      "neon",
      "natural",
      "dramatic",
      "soft",
      "backlit",
      "rim-light",
      "high-key",
      "low-key",
      "candlelight",
      "moonlight",
      "fluorescent",
      "cinematic"
    ];
    var SCENE_PRESETS = [
      "indoor",
      "outdoor",
      "urban",
      "nature",
      "studio",
      "underwater",
      "space",
      "abstract",
      "industrial",
      "domestic",
      "beach",
      "forest",
      "city-skyline",
      "desert"
    ];
    // Human-readable labels keyed by PROMPT_CATEGORIES entries.
    var CATEGORY_LABELS = {
      ads: "Ads & Marketing",
      anime: "Anime & Manga",
      product: "Product Photography",
      portrait: "Portraits",
      landscape: "Landscapes",
      abstract: "Abstract Art",
      fashion: "Fashion",
      food: "Food & Culinary",
      architecture: "Architecture",
      custom: "Custom"
    };
    // Re-export names from chunk-P7K5LM6V via live getters so consumers
    // always see the chunk's current binding (equivalent to the unrolled
    // Object.defineProperty calls the bundler emits).
    for (const key of [
      "CONNECTION_RULES",
      "HandleTypeEnum",
      "KlingQuality",
      "ModelCapabilityEnum",
      "ModelUseCaseEnum",
      "NODE_DEFINITIONS",
      "NODE_ORDER",
      "NodeCategoryEnum",
      "NodeStatusEnum",
      "NodeTypeEnum",
      "ProcessingNodeType",
      "ProviderTypeEnum",
      "ReframeNodeType",
      "TemplateCategory",
      "UpscaleNodeType",
      "getNodesByCategory"
    ]) {
      Object.defineProperty(exports$1, key, {
        enumerable: true,
        get: function() {
          return chunkP7K5LM6V_js[key];
        }
      });
    }
    Object.defineProperty(exports$1, "EdgeStyleEnum", {
      enumerable: true,
      get: function() {
        return chunkKXAKQO3U_js.EdgeStyleEnum;
      }
    });
    exports$1.CAMERA_PRESETS = CAMERA_PRESETS;
    exports$1.CATEGORY_LABELS = CATEGORY_LABELS;
    exports$1.LIGHTING_PRESETS = LIGHTING_PRESETS;
    exports$1.MOOD_PRESETS = MOOD_PRESETS;
    exports$1.PROMPT_CATEGORIES = PROMPT_CATEGORIES;
    exports$1.SCENE_PRESETS = SCENE_PRESETS;
    exports$1.STYLE_PRESETS = STYLE_PRESETS;
  }
});
|
|
1066
|
-
|
|
1067
|
-
// src/stores/workflow/helpers/equality.ts
|
|
1068
|
-
/**
 * Structural equality for undo/redo snapshots: two temporal states are
 * equal when their nodes, edges, and groups all match element-wise.
 */
function temporalStateEquals(a, b) {
  // Same reference is trivially equal.
  if (a === b) return true;
  return (
    arraysShallowEqual(a.nodes, b.nodes, nodeEquals) &&
    arraysShallowEqual(a.edges, b.edges, edgeEquals) &&
    arraysShallowEqual(a.groups, b.groups, groupEquals)
  );
}
|
|
1075
|
-
/**
 * Compares two arrays element-by-element with a caller-supplied predicate.
 * @param {Array} a
 * @param {Array} b
 * @param {(x, y) => boolean} itemEquals - pairwise comparator
 * @returns {boolean} true when same reference, or same length and all pairs match
 */
function arraysShallowEqual(a, b, itemEquals) {
  if (a === b) return true;
  if (a.length !== b.length) return false;
  return a.every((item, index) => itemEquals(item, b[index]));
}
|
|
1083
|
-
/**
 * Equality for workflow nodes used by undo-history deduplication.
 * Compares identity, geometry, and a fixed set of data fields by reference;
 * schemaParams falls back to a JSON comparison when references differ.
 */
function nodeEquals(a, b) {
  if (a === b) return true;
  if (a.id !== b.id || a.type !== b.type) return false;
  if (a.position.x !== b.position.x || a.position.y !== b.position.y) return false;
  if (a.width !== b.width || a.height !== b.height) return false;
  const aData = a.data;
  const bData = b.data;
  // Data fields compared by strict identity, in the original check order.
  const comparedKeys = [
    "status",
    "outputImage",
    "outputVideo",
    "outputText",
    "outputAudio",
    "prompt",
    "image",
    "video",
    "audio",
    "inputPrompt",
    "inputImage",
    "inputVideo",
    "inputAudio",
    "inputText",
    "model"
  ];
  for (const key of comparedKeys) {
    if (aData[key] !== bData[key]) return false;
  }
  // schemaParams: cheap identity check first, deep JSON comparison only on miss.
  if (aData.schemaParams !== bData.schemaParams) {
    if (JSON.stringify(aData.schemaParams) !== JSON.stringify(bData.schemaParams)) {
      return false;
    }
  }
  return true;
}
|
|
1113
|
-
/** Equality for edges: identity plus both endpoints and their handles. */
function edgeEquals(a, b) {
  if (a === b) return true;
  if (a.id !== b.id) return false;
  if (a.source !== b.source || a.target !== b.target) return false;
  return a.sourceHandle === b.sourceHandle && a.targetHandle === b.targetHandle;
}
|
|
1117
|
-
/**
 * Equality for node groups: id, name, color, and nodeIds compared
 * element-wise (order-sensitive).
 */
function groupEquals(a, b) {
  if (a === b) return true;
  if (a.id !== b.id || a.name !== b.name || a.color !== b.color) return false;
  if (a.nodeIds.length !== b.nodeIds.length) return false;
  return a.nodeIds.every((nodeId, index) => nodeId === b.nodeIds[index]);
}
|
|
1128
|
-
|
|
1129
|
-
// src/stores/workflow/slices/chatSlice.ts
|
|
1130
|
-
// Zustand slice for the workflow chat panel: message list, open state,
// and a wrapper that snapshots history before applying AI edit operations.
var createChatSlice = (set, get) => {
  // Builds one chat message record; id combines timestamp + random suffix.
  const buildMessage = (role, content) => ({
    id: `msg-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`,
    role,
    content,
    timestamp: Date.now()
  });
  return {
    chatMessages: [],
    isChatOpen: false,
    addChatMessage: (role, content) => {
      set((state) => ({
        chatMessages: state.chatMessages.concat(buildMessage(role, content))
      }));
    },
    clearChatMessages: () => {
      set({ chatMessages: [] });
    },
    toggleChat: () => {
      set((state) => ({ isChatOpen: !state.isChatOpen }));
    },
    setChatOpen: (open) => {
      set({ isChatOpen: open });
    },
    applyChatEditOperations: (operations) => {
      // Snapshot first so chat-driven edits are a single undo step.
      const state = get();
      state.captureSnapshot();
      return state.applyEditOperations(operations);
    }
  };
};
|
|
1161
|
-
|
|
1162
|
-
// src/stores/workflow/slices/edgeSlice.ts
|
|
1163
|
-
var import_types2 = chunkZ7PWFZG5_js.__toESM(require_dist());
|
|
1164
|
-
|
|
1165
|
-
// src/stores/workflow/helpers/nodeHelpers.ts
|
|
1166
|
-
var import_types = chunkZ7PWFZG5_js.__toESM(require_dist());
|
|
1167
|
-
// Generates a short 8-character unique id (via nanoid) for new nodes,
// edges, and groups.
function generateId() {
  return nanoid.nanoid(8);
}
|
|
1170
|
-
/**
 * Looks up the data type of a node handle from NODE_DEFINITIONS.
 * @param {string} nodeType - key into NODE_DEFINITIONS
 * @param {string|null} handleId - id of the handle to find
 * @param {"source"|"target"} direction - source searches outputs, target searches inputs
 * @returns {string|null} the handle's type, or null when node/handle is unknown
 */
function getHandleType(nodeType, handleId, direction) {
  const nodeDef = import_types.NODE_DEFINITIONS[nodeType];
  if (!nodeDef) return null;
  let handles;
  if (direction === "source") {
    handles = nodeDef.outputs;
  } else {
    handles = nodeDef.inputs;
  }
  const match = handles.find((handle) => handle.id === handleId);
  return match?.type ?? null;
}
|
|
1177
|
-
|
|
1178
|
-
// src/stores/workflow/slices/edgeSlice.ts
|
|
1179
|
-
// Zustand slice for edge management: React Flow change handlers,
// connection validation against CONNECTION_RULES, and edge styling.
var createEdgeSlice = (set, get) => ({
  // React Flow nodes-change handler. Only structural changes (add/remove/
  // replace) mark the workflow dirty; drags/selections do not.
  onNodesChange: (changes) => {
    const hasMeaningfulChange = changes.some(
      (change) => change.type === "add" || change.type === "remove" || change.type === "replace"
    );
    set((state) => ({
      nodes: react.applyNodeChanges(changes, state.nodes),
      ...hasMeaningfulChange && { isDirty: true }
    }));
  },
  // Same dirty-tracking policy for edge changes.
  onEdgesChange: (changes) => {
    const hasMeaningfulChange = changes.some(
      (change) => change.type === "add" || change.type === "remove" || change.type === "replace"
    );
    set((state) => ({
      edges: react.applyEdgeChanges(changes, state.edges),
      ...hasMeaningfulChange && { isDirty: true }
    }));
  },
  // Validates then adds a new edge (styled with the current edgeStyle),
  // and re-propagates the source node's outputs downstream.
  onConnect: (connection) => {
    const { isValidConnection, propagateOutputsDownstream } = get();
    if (!isValidConnection(connection)) return;
    set((state) => ({
      edges: react.addEdge(
        {
          ...connection,
          id: generateId(),
          type: state.edgeStyle
        },
        state.edges
      ),
      isDirty: true
    }));
    if (connection.source) {
      propagateOutputsDownstream(connection.source);
    }
  },
  // Removes a single edge by id.
  removeEdge: (edgeId) => {
    set((state) => ({
      edges: state.edges.filter((edge) => edge.id !== edgeId),
      isDirty: true
    }));
  },
  // Changes the global edge style and restyles every existing edge.
  setEdgeStyle: (style) => {
    set((state) => ({
      edgeStyle: style,
      edges: state.edges.map((edge) => ({ ...edge, type: style })),
      isDirty: true
    }));
  },
  // Toggles the hasPause flag on one edge's data.
  toggleEdgePause: (edgeId) => {
    set((state) => ({
      edges: state.edges.map(
        (edge) => edge.id === edgeId ? {
          ...edge,
          data: {
            ...edge.data,
            hasPause: !edge.data?.hasPause
          }
        } : edge
      ),
      isDirty: true
    }));
  },
  // A connection is valid when both endpoints resolve to known handles
  // and CONNECTION_RULES allows source-type -> target-type.
  isValidConnection: (connection) => {
    const { nodes } = get();
    const sourceNode = nodes.find((n) => n.id === connection.source);
    const targetNode = nodes.find((n) => n.id === connection.target);
    if (!sourceNode || !targetNode) return false;
    const sourceType = getHandleType(
      sourceNode.type,
      connection.sourceHandle ?? null,
      "source"
    );
    const targetType = getHandleType(
      targetNode.type,
      connection.targetHandle ?? null,
      "target"
    );
    if (!sourceType || !targetType) return false;
    return import_types2.CONNECTION_RULES[sourceType]?.includes(targetType) ?? false;
  },
  // Finds the first target input handle compatible with the given source
  // handle, skipping inputs already connected unless they accept multiple.
  findCompatibleHandle: (sourceNodeId, sourceHandleId, targetNodeId) => {
    const { nodes, edges } = get();
    const sourceNode = nodes.find((n) => n.id === sourceNodeId);
    const targetNode = nodes.find((n) => n.id === targetNodeId);
    if (!sourceNode || !targetNode) return null;
    const sourceType = getHandleType(sourceNode.type, sourceHandleId, "source");
    if (!sourceType) return null;
    const targetDef = import_types2.NODE_DEFINITIONS[targetNode.type];
    if (!targetDef) return null;
    const existingTargetHandles = new Set(
      edges.filter((e) => e.target === targetNodeId).map((e) => e.targetHandle)
    );
    for (const input of targetDef.inputs) {
      const hasExistingConnection = existingTargetHandles.has(input.id);
      if (hasExistingConnection && !input.multiple) continue;
      if (import_types2.CONNECTION_RULES[sourceType]?.includes(input.type)) {
        return input.id;
      }
    }
    return null;
  }
});
|
|
1283
|
-
|
|
1284
|
-
// src/stores/workflow/slices/groupSlice.ts
|
|
1285
|
-
// Palette cycled through when creating new groups (index = groups.length
// modulo palette size in createGroup).
var DEFAULT_GROUP_COLORS = [
  "purple",
  "blue",
  "green",
  "yellow",
  "orange",
  "red",
  "pink",
  "gray"
];
|
|
1295
|
-
// Zustand slice for node grouping: create/delete, membership, lock state,
// and cosmetic updates (name/color).
var createGroupSlice = (set, get) => ({
  // Creates a group over nodeIds; returns the new group id ("" if empty).
  // Color is picked by cycling DEFAULT_GROUP_COLORS on current group count.
  createGroup: (nodeIds, name) => {
    if (nodeIds.length === 0) return "";
    const groupId = generateId();
    const { groups } = get();
    const colorIndex = groups.length % DEFAULT_GROUP_COLORS.length;
    const newGroup = {
      id: groupId,
      name: name ?? `Group ${groups.length + 1}`,
      nodeIds,
      isLocked: false,
      color: DEFAULT_GROUP_COLORS[colorIndex]
    };
    set((state) => ({
      groups: [...state.groups, newGroup],
      isDirty: true
    }));
    return groupId;
  },
  // Removes the group record only; member nodes are untouched.
  deleteGroup: (groupId) => {
    set((state) => ({
      groups: state.groups.filter((g) => g.id !== groupId),
      isDirty: true
    }));
  },
  // Adds nodes to a group, deduplicating via Set while preserving order.
  addToGroup: (groupId, nodeIds) => {
    set((state) => ({
      groups: state.groups.map(
        (g) => g.id === groupId ? { ...g, nodeIds: [.../* @__PURE__ */ new Set([...g.nodeIds, ...nodeIds])] } : g
      ),
      isDirty: true
    }));
  },
  // Removes the listed nodes from the group's membership.
  removeFromGroup: (groupId, nodeIds) => {
    set((state) => ({
      groups: state.groups.map(
        (g) => g.id === groupId ? { ...g, nodeIds: g.nodeIds.filter((id) => !nodeIds.includes(id)) } : g
      ),
      isDirty: true
    }));
  },
  // Flips the group's lock flag, then locks/unlocks its member nodes.
  // NOTE: `group` holds the pre-toggle state, so `!group.isLocked` means
  // "we are locking now" — the branch order below depends on this.
  toggleGroupLock: (groupId) => {
    const { groups, lockMultipleNodes, unlockMultipleNodes } = get();
    const group = groups.find((g) => g.id === groupId);
    if (!group) return;
    set((state) => ({
      groups: state.groups.map((g) => g.id === groupId ? { ...g, isLocked: !g.isLocked } : g),
      isDirty: true
    }));
    if (!group.isLocked) {
      lockMultipleNodes(group.nodeIds);
    } else {
      unlockMultipleNodes(group.nodeIds);
    }
  },
  // Renames a group.
  renameGroup: (groupId, name) => {
    set((state) => ({
      groups: state.groups.map((g) => g.id === groupId ? { ...g, name } : g),
      isDirty: true
    }));
  },
  // Changes a group's display color.
  setGroupColor: (groupId, color) => {
    set((state) => ({
      groups: state.groups.map((g) => g.id === groupId ? { ...g, color } : g),
      isDirty: true
    }));
  },
  // First group containing the node, or undefined.
  getGroupByNodeId: (nodeId) => {
    return get().groups.find((g) => g.nodeIds.includes(nodeId));
  },
  // Group lookup by id, or undefined.
  getGroupById: (groupId) => {
    return get().groups.find((g) => g.id === groupId);
  }
});
|
|
1369
|
-
|
|
1370
|
-
// src/stores/workflow/helpers/propagation.ts
|
|
1371
|
-
/**
 * Extracts a node's primary output as a string for downstream propagation.
 * Multi-image outputs win; otherwise the first non-nullish field in
 * priority order is used. Arrays yield their first element (stringified);
 * anything else that isn't a string yields null.
 */
function getNodeOutput(node) {
  const data = node.data;
  if (data.outputImages?.length) return data.outputImages[0];
  // Candidate fields in priority order (matches original ?? chain).
  const candidateKeys = [
    "outputImage",
    "outputVideo",
    "outputText",
    "outputAudio",
    "prompt",
    "extractedTweet",
    "image",
    "video",
    "audio"
  ];
  let output = null;
  for (const key of candidateKeys) {
    const value = data[key];
    if (value !== null && value !== undefined) {
      output = value;
      break;
    }
  }
  if (output === null) return null;
  if (typeof output === "string") return output;
  if (Array.isArray(output) && output.length > 0) return String(output[0]);
  return null;
}
|
|
1381
|
-
/**
 * Classifies a source node type into the kind of data it produces.
 * @param {string} sourceType - node type name
 * @returns {"text"|"image"|"video"|"audio"|null} null for unknown types
 */
function getOutputType(sourceType) {
  const textSources = ["prompt", "llm", "tweetParser", "transcribe"];
  const imageSources = ["imageGen", "image", "imageInput", "upscale", "resize", "reframe", "imageGridSplit"];
  const videoSources = [
    "videoGen",
    "video",
    "videoInput",
    "animation",
    "videoStitch",
    "lipSync",
    "voiceChange",
    "motionControl",
    "videoTrim",
    "videoFrameExtract",
    "subtitle"
  ];
  const audioSources = ["textToSpeech", "audio", "audioInput"];
  if (textSources.includes(sourceType)) return "text";
  if (imageSources.includes(sourceType)) return "image";
  if (videoSources.includes(sourceType)) return "video";
  if (audioSources.includes(sourceType)) return "audio";
  return null;
}
|
|
1410
|
-
/**
 * Maps a source node's output value onto the data patch expected by a
 * target node type, based on the kind of data the source produces.
 * Returns null when the pairing is not supported.
 */
function mapOutputToInput(output, sourceType, targetType) {
  const outputType = getOutputType(sourceType);
  // Download accepts either media kind and records which one arrived.
  if (targetType === "download") {
    if (outputType === "video") {
      return { inputVideo: output, inputImage: null, inputType: "video" };
    }
    if (outputType === "image") {
      return { inputImage: output, inputVideo: null, inputType: "image" };
    }
    return null;
  }
  switch (outputType) {
    case "text":
      if (targetType === "textToSpeech" || targetType === "subtitle") {
        return { inputText: output };
      }
      if (["imageGen", "videoGen", "llm", "motionControl"].includes(targetType)) {
        return { inputPrompt: output };
      }
      return null;
    case "image":
      // Upscale/reframe track the media kind explicitly.
      if (targetType === "upscale" || targetType === "reframe") {
        return { inputImage: output, inputVideo: null, inputType: "image" };
      }
      if (["videoGen", "lipSync", "voiceChange", "motionControl", "resize", "animation"].includes(targetType)) {
        return { inputImage: output };
      }
      // imageGen consumes reference images as an array.
      if (targetType === "imageGen") {
        return { inputImages: [output] };
      }
      return null;
    case "video":
      if (targetType === "upscale" || targetType === "reframe") {
        return { inputVideo: output, inputImage: null, inputType: "video" };
      }
      if ([
        "lipSync",
        "voiceChange",
        "resize",
        "videoStitch",
        "videoTrim",
        "videoFrameExtract",
        "subtitle",
        "transcribe"
      ].includes(targetType)) {
        return { inputVideo: output };
      }
      return null;
    case "audio":
      if (["lipSync", "voiceChange", "transcribe"].includes(targetType)) {
        return { inputAudio: output };
      }
      return null;
    default:
      return null;
  }
}
|
|
1466
|
-
/**
 * Builds the images patch for an outputGallery node. Prefers the source's
 * full multi-image output; falls back to the single string output.
 * Returns null when there is nothing new to merge.
 * Merge order (deduplicated, insertion-order preserved):
 * existing gallery images, then pending updates, then incoming images.
 */
function collectGalleryUpdate(sourceData, currentOutput, existingGalleryImages, pendingUpdateImages) {
  let incoming;
  if (sourceData.outputImages?.length) {
    incoming = [...sourceData.outputImages];
  } else if (typeof currentOutput === "string") {
    incoming = [currentOutput];
  } else {
    incoming = [];
  }
  if (incoming.length === 0) return null;
  const merged = new Set([...existingGalleryImages, ...pendingUpdateImages, ...incoming]);
  return { images: [...merged] };
}
|
|
1479
|
-
// BFS from sourceNodeId along outgoing edges, accumulating per-node data
// patches that push outputs into downstream inputs. Gallery targets merge
// images and are not traversed further; other targets are re-queued with
// their OWN current output so propagation can continue through them.
// Returns Map<targetNodeId, partialData>.
function computeDownstreamUpdates(sourceNodeId, initialOutput, nodes, edges) {
  const updates = /* @__PURE__ */ new Map();
  const visited = /* @__PURE__ */ new Set();
  const queue = [
    { nodeId: sourceNodeId, output: initialOutput }
  ];
  while (queue.length > 0) {
    const current = queue.shift();
    // Each node is expanded at most once to avoid cycles.
    if (visited.has(current.nodeId)) continue;
    visited.add(current.nodeId);
    const currentNode = nodes.find((n) => n.id === current.nodeId);
    if (!currentNode) continue;
    const downstreamEdges = edges.filter((e) => e.source === current.nodeId);
    for (const edge of downstreamEdges) {
      const targetNode = nodes.find((n) => n.id === edge.target);
      if (!targetNode) continue;
      if (targetNode.type === "outputGallery") {
        // Galleries accumulate images; merge with any patch already queued
        // for this target plus the gallery's current images.
        const sourceData = currentNode.data;
        const existing = updates.get(edge.target) ?? {};
        const pendingImages = existing.images ?? [];
        const targetData = targetNode.data;
        const galleryExisting = targetData.images ?? [];
        const galleryUpdate = collectGalleryUpdate(
          sourceData,
          current.output,
          galleryExisting,
          pendingImages
        );
        if (galleryUpdate) {
          updates.set(edge.target, { ...existing, ...galleryUpdate });
        }
        continue;
      }
      const inputUpdate = mapOutputToInput(current.output, currentNode.type, targetNode.type);
      if (inputUpdate) {
        const existing = updates.get(edge.target) ?? {};
        updates.set(edge.target, { ...existing, ...inputUpdate });
        // Continue downstream only if the target itself has an output.
        const targetOutput = getNodeOutput(targetNode);
        if (targetOutput && !visited.has(edge.target)) {
          queue.push({ nodeId: edge.target, output: targetOutput });
        }
      }
    }
  }
  return updates;
}
|
|
1525
|
-
/**
 * Returns true if applying `updates` (Map<nodeId, partialData>) would
 * change any existing node's data. Arrays are compared element-wise;
 * everything else by strict identity. Unknown node ids are ignored.
 */
function hasStateChanged(updates, nodes) {
  const arraysDiffer = (prev, next) =>
    prev.length !== next.length || prev.some((item, i) => item !== next[i]);
  for (const [nodeId, update] of updates) {
    const node = nodes.find((n) => n.id === nodeId);
    if (!node) continue;
    const data = node.data;
    for (const [key, value] of Object.entries(update)) {
      const prev = data[key];
      if (Array.isArray(prev) && Array.isArray(value)) {
        if (arraysDiffer(prev, value)) return true;
      } else if (prev !== value) {
        return true;
      }
    }
  }
  return false;
}
|
|
1543
|
-
/**
 * Immutably merges per-node data patches into a node list. Nodes without
 * a patch are returned by reference (important for memoized renderers).
 */
function applyNodeUpdates(nodes, updates) {
  return nodes.map((node) => {
    const patch = updates.get(node.id);
    if (!patch) return node;
    return { ...node, data: { ...node.data, ...patch } };
  });
}
|
|
1552
|
-
/**
 * Re-broadcasts every node that already holds a resolved output by
 * invoking the supplied propagation callback with its id.
 */
function propagateExistingOutputs(nodes, propagateFn) {
  nodes
    .filter((node) => getNodeOutput(node) !== null)
    .forEach((node) => propagateFn(node.id));
}
|
|
1559
|
-
|
|
1560
|
-
// src/stores/workflow/slices/lockingSlice.ts
|
|
1561
|
-
// Zustand slice for node locking. Locking a node makes it non-draggable,
// stamps a lock timestamp, and caches its current output so downstream
// consumers keep a stable value while it is frozen.
var createLockingSlice = (set, get) => ({
  // Internal primitive: applies lock/unlock to every node matching
  // `predicate(id)`. On lock, captures the node's output as cachedOutput.
  _setNodeLockState: (predicate, lock) => {
    set((state) => ({
      nodes: state.nodes.map(
        (n) => predicate(n.id) ? {
          ...n,
          draggable: !lock,
          data: {
            ...n.data,
            isLocked: lock,
            lockTimestamp: lock ? Date.now() : void 0,
            ...lock && { cachedOutput: getNodeOutput(n) }
          }
        } : n
      ),
      isDirty: true
    }));
  },
  // Flips a single node's lock state (missing node is a no-op).
  toggleNodeLock: (nodeId) => {
    const node = get().getNodeById(nodeId);
    if (!node) return;
    const shouldLock = !(node.data.isLocked ?? false);
    get()._setNodeLockState((id) => id === nodeId, shouldLock);
  },
  // Locks a node; no-op if missing or already locked.
  lockNode: (nodeId) => {
    const node = get().getNodeById(nodeId);
    if (!node || node.data.isLocked) return;
    get()._setNodeLockState((id) => id === nodeId, true);
  },
  // Unlocks a single node.
  unlockNode: (nodeId) => {
    get()._setNodeLockState((id) => id === nodeId, false);
  },
  // Batch lock/unlock helpers (used by group lock toggling).
  lockMultipleNodes: (nodeIds) => {
    get()._setNodeLockState((id) => nodeIds.includes(id), true);
  },
  unlockMultipleNodes: (nodeIds) => {
    get()._setNodeLockState((id) => nodeIds.includes(id), false);
  },
  // Unlocks every node in the workflow.
  unlockAllNodes: () => {
    get()._setNodeLockState(() => true, false);
  },
  // A node is locked if its own flag is set, or any locked group contains it.
  isNodeLocked: (nodeId) => {
    const { nodes, groups } = get();
    const node = nodes.find((n) => n.id === nodeId);
    if (!node) return false;
    if (node.data.isLocked) return true;
    return groups.some((group) => group.isLocked && group.nodeIds.includes(nodeId));
  }
});
|
|
1610
|
-
|
|
1611
|
-
// src/stores/workflow/slices/nodeSlice.ts
|
|
1612
|
-
var import_types3 = chunkZ7PWFZG5_js.__toESM(require_dist());
|
|
1613
|
-
var createNodeSlice = (set, get) => ({
|
|
1614
|
-
addNode: (type, position) => {
|
|
1615
|
-
const nodeDef = import_types3.NODE_DEFINITIONS[type];
|
|
1616
|
-
if (!nodeDef) return "";
|
|
1617
|
-
const id = generateId();
|
|
1618
|
-
const newNode = {
|
|
1619
|
-
id,
|
|
1620
|
-
type,
|
|
1621
|
-
position,
|
|
1622
|
-
data: {
|
|
1623
|
-
...nodeDef.defaultData,
|
|
1624
|
-
label: nodeDef.label,
|
|
1625
|
-
status: "idle"
|
|
1626
|
-
},
|
|
1627
|
-
...type === "download" && { width: 280, height: 320 }
|
|
1628
|
-
};
|
|
1629
|
-
set((state) => ({
|
|
1630
|
-
nodes: [...state.nodes, newNode],
|
|
1631
|
-
isDirty: true
|
|
1632
|
-
}));
|
|
1633
|
-
return id;
|
|
1634
|
-
},
|
|
1635
|
-
addNodesAndEdges: (newNodes, newEdges) => {
|
|
1636
|
-
if (newNodes.length === 0) return;
|
|
1637
|
-
set((state) => ({
|
|
1638
|
-
nodes: [...state.nodes, ...newNodes],
|
|
1639
|
-
edges: [...state.edges, ...newEdges],
|
|
1640
|
-
isDirty: true
|
|
1641
|
-
}));
|
|
1642
|
-
const { propagateOutputsDownstream } = get();
|
|
1643
|
-
const sourceNodeIds = new Set(newEdges.map((e) => e.source));
|
|
1644
|
-
for (const sourceId of sourceNodeIds) {
|
|
1645
|
-
propagateOutputsDownstream(sourceId);
|
|
1646
|
-
}
|
|
1647
|
-
},
|
|
1648
|
-
updateNodeData: (nodeId, data) => {
|
|
1649
|
-
const { nodes, propagateOutputsDownstream } = get();
|
|
1650
|
-
const node = nodes.find((n) => n.id === nodeId);
|
|
1651
|
-
const TRANSIENT_KEYS = /* @__PURE__ */ new Set(["status", "progress", "error", "jobId"]);
|
|
1652
|
-
const dataKeys = Object.keys(data);
|
|
1653
|
-
const hasPersistedChange = dataKeys.some((key) => !TRANSIENT_KEYS.has(key));
|
|
1654
|
-
set((state) => ({
|
|
1655
|
-
nodes: state.nodes.map((n) => n.id === nodeId ? { ...n, data: { ...n.data, ...data } } : n),
|
|
1656
|
-
...hasPersistedChange && { isDirty: true }
|
|
1657
|
-
}));
|
|
1658
|
-
const inputNodeTypes = [
|
|
1659
|
-
"prompt",
|
|
1660
|
-
"image",
|
|
1661
|
-
"imageInput",
|
|
1662
|
-
"video",
|
|
1663
|
-
"videoInput",
|
|
1664
|
-
"audio",
|
|
1665
|
-
"audioInput",
|
|
1666
|
-
"tweetParser"
|
|
1667
|
-
];
|
|
1668
|
-
const hasOutputUpdate = "outputImage" in data || "outputImages" in data || "outputVideo" in data || "outputAudio" in data || "outputText" in data;
|
|
1669
|
-
if (node && (inputNodeTypes.includes(node.type) || hasOutputUpdate)) {
|
|
1670
|
-
if (hasOutputUpdate) {
|
|
1671
|
-
const dataRecord = data;
|
|
1672
|
-
if ("outputImages" in dataRecord) {
|
|
1673
|
-
propagateOutputsDownstream(nodeId);
|
|
1674
|
-
} else {
|
|
1675
|
-
const outputValue = dataRecord.outputImage ?? dataRecord.outputVideo ?? dataRecord.outputAudio ?? dataRecord.outputText;
|
|
1676
|
-
if (typeof outputValue === "string") {
|
|
1677
|
-
propagateOutputsDownstream(nodeId, outputValue);
|
|
1678
|
-
}
|
|
1679
|
-
}
|
|
1680
|
-
} else {
|
|
1681
|
-
propagateOutputsDownstream(nodeId);
|
|
1682
|
-
}
|
|
1683
|
-
}
|
|
1684
|
-
},
|
|
1685
|
-
removeNode: (nodeId) => {
|
|
1686
|
-
set((state) => ({
|
|
1687
|
-
nodes: state.nodes.filter((node) => node.id !== nodeId),
|
|
1688
|
-
edges: state.edges.filter((edge) => edge.source !== nodeId && edge.target !== nodeId),
|
|
1689
|
-
isDirty: true
|
|
1690
|
-
}));
|
|
1691
|
-
},
|
|
1692
|
-
duplicateNode: (nodeId) => {
|
|
1693
|
-
const { nodes, edges, edgeStyle, propagateOutputsDownstream } = get();
|
|
1694
|
-
const node = nodes.find((n) => n.id === nodeId);
|
|
1695
|
-
if (!node) return null;
|
|
1696
|
-
const newId = generateId();
|
|
1697
|
-
const newNode = {
|
|
1698
|
-
...node,
|
|
1699
|
-
id: newId,
|
|
1700
|
-
position: {
|
|
1701
|
-
x: node.position.x + 50,
|
|
1702
|
-
y: node.position.y + 50
|
|
1703
|
-
},
|
|
1704
|
-
data: {
|
|
1705
|
-
...node.data,
|
|
1706
|
-
status: "idle",
|
|
1707
|
-
jobId: null
|
|
1708
|
-
}
|
|
1709
|
-
};
|
|
1710
|
-
const incomingEdges = edges.filter((e) => e.target === nodeId && e.source !== nodeId);
|
|
1711
|
-
const clonedEdges = incomingEdges.map((edge) => ({
|
|
1712
|
-
...edge,
|
|
1713
|
-
id: generateId(),
|
|
1714
|
-
target: newId,
|
|
1715
|
-
type: edgeStyle
|
|
1716
|
-
}));
|
|
1717
|
-
set((state) => ({
|
|
1718
|
-
nodes: [...state.nodes, newNode],
|
|
1719
|
-
edges: [...state.edges, ...clonedEdges],
|
|
1720
|
-
isDirty: true
|
|
1721
|
-
}));
|
|
1722
|
-
const sourcesNotified = /* @__PURE__ */ new Set();
|
|
1723
|
-
for (const edge of incomingEdges) {
|
|
1724
|
-
if (!sourcesNotified.has(edge.source)) {
|
|
1725
|
-
sourcesNotified.add(edge.source);
|
|
1726
|
-
propagateOutputsDownstream(edge.source);
|
|
1727
|
-
}
|
|
1728
|
-
}
|
|
1729
|
-
return newId;
|
|
1730
|
-
},
|
|
1731
|
-
propagateOutputsDownstream: (sourceNodeId, outputValue) => {
|
|
1732
|
-
const { nodes, edges } = get();
|
|
1733
|
-
const sourceNode = nodes.find((n) => n.id === sourceNodeId);
|
|
1734
|
-
if (!sourceNode) return;
|
|
1735
|
-
const output = outputValue ?? getNodeOutput(sourceNode);
|
|
1736
|
-
if (!output) return;
|
|
1737
|
-
const updates = computeDownstreamUpdates(sourceNodeId, output, nodes, edges);
|
|
1738
|
-
if (updates.size === 0) return;
|
|
1739
|
-
if (!hasStateChanged(updates, nodes)) return;
|
|
1740
|
-
set((state) => ({
|
|
1741
|
-
nodes: applyNodeUpdates(state.nodes, updates),
|
|
1742
|
-
isDirty: true
|
|
1743
|
-
}));
|
|
1744
|
-
}
|
|
1745
|
-
});
|
|
1746
|
-
|
|
1747
|
-
// src/stores/workflow/slices/persistenceSlice.ts
|
|
1748
|
-
var import_types4 = chunkZ7PWFZG5_js.__toESM(require_dist());
|
|
1749
|
-
function normalizeEdgeTypes(edges) {
|
|
1750
|
-
return edges.map((edge) => ({
|
|
1751
|
-
...edge,
|
|
1752
|
-
type: edge.type === "bezier" ? "default" : edge.type
|
|
1753
|
-
}));
|
|
1754
|
-
}
|
|
1755
|
-
function hydrateWorkflowNodes(nodes) {
|
|
1756
|
-
return nodes.map((node) => {
|
|
1757
|
-
const nodeDef = import_types4.NODE_DEFINITIONS[node.type];
|
|
1758
|
-
if (!nodeDef) return node;
|
|
1759
|
-
return {
|
|
1760
|
-
...node,
|
|
1761
|
-
data: {
|
|
1762
|
-
...nodeDef.defaultData,
|
|
1763
|
-
...node.data
|
|
1764
|
-
}
|
|
1765
|
-
};
|
|
1766
|
-
});
|
|
1767
|
-
}
|
|
1768
|
-
var createPersistenceSlice = (set, get) => ({
|
|
1769
|
-
loadWorkflow: (workflow) => {
|
|
1770
|
-
const hydratedNodes = hydrateWorkflowNodes(workflow.nodes);
|
|
1771
|
-
set({
|
|
1772
|
-
nodes: hydratedNodes,
|
|
1773
|
-
edges: normalizeEdgeTypes(workflow.edges),
|
|
1774
|
-
edgeStyle: workflow.edgeStyle,
|
|
1775
|
-
workflowName: workflow.name,
|
|
1776
|
-
workflowId: null,
|
|
1777
|
-
isDirty: true,
|
|
1778
|
-
groups: workflow.groups ?? [],
|
|
1779
|
-
selectedNodeIds: []
|
|
1780
|
-
});
|
|
1781
|
-
propagateExistingOutputs(hydratedNodes, get().propagateOutputsDownstream);
|
|
1782
|
-
set({ isDirty: false });
|
|
1783
|
-
},
|
|
1784
|
-
clearWorkflow: () => {
|
|
1785
|
-
set({
|
|
1786
|
-
nodes: [],
|
|
1787
|
-
edges: [],
|
|
1788
|
-
workflowName: "Untitled Workflow",
|
|
1789
|
-
workflowId: null,
|
|
1790
|
-
isDirty: false,
|
|
1791
|
-
groups: [],
|
|
1792
|
-
selectedNodeIds: []
|
|
1793
|
-
});
|
|
1794
|
-
},
|
|
1795
|
-
exportWorkflow: () => {
|
|
1796
|
-
const { nodes, edges, edgeStyle, workflowName, groups } = get();
|
|
1797
|
-
return {
|
|
1798
|
-
version: 1,
|
|
1799
|
-
name: workflowName,
|
|
1800
|
-
description: "",
|
|
1801
|
-
nodes,
|
|
1802
|
-
edges,
|
|
1803
|
-
edgeStyle,
|
|
1804
|
-
groups,
|
|
1805
|
-
createdAt: (/* @__PURE__ */ new Date()).toISOString(),
|
|
1806
|
-
updatedAt: (/* @__PURE__ */ new Date()).toISOString()
|
|
1807
|
-
};
|
|
1808
|
-
},
|
|
1809
|
-
getNodeById: (id) => {
|
|
1810
|
-
return get().nodes.find((node) => node.id === id);
|
|
1811
|
-
},
|
|
1812
|
-
getConnectedInputs: (nodeId) => {
|
|
1813
|
-
const { nodes, edges } = get();
|
|
1814
|
-
const inputs = /* @__PURE__ */ new Map();
|
|
1815
|
-
const incomingEdges = edges.filter((edge) => edge.target === nodeId);
|
|
1816
|
-
for (const edge of incomingEdges) {
|
|
1817
|
-
const sourceNode = nodes.find((n) => n.id === edge.source);
|
|
1818
|
-
if (!sourceNode) continue;
|
|
1819
|
-
const handleId = edge.targetHandle;
|
|
1820
|
-
if (!handleId) continue;
|
|
1821
|
-
const sourceData = sourceNode.data;
|
|
1822
|
-
let value = null;
|
|
1823
|
-
if (edge.sourceHandle === "image") {
|
|
1824
|
-
value = sourceData.outputImage ?? sourceData.image ?? null;
|
|
1825
|
-
} else if (edge.sourceHandle === "video") {
|
|
1826
|
-
value = sourceData.outputVideo ?? sourceData.video ?? null;
|
|
1827
|
-
} else if (edge.sourceHandle === "text") {
|
|
1828
|
-
value = sourceData.outputText ?? sourceData.prompt ?? null;
|
|
1829
|
-
} else if (edge.sourceHandle === "audio") {
|
|
1830
|
-
value = sourceData.outputAudio ?? sourceData.audio ?? null;
|
|
1831
|
-
}
|
|
1832
|
-
if (value) {
|
|
1833
|
-
const existing = inputs.get(handleId);
|
|
1834
|
-
if (existing) {
|
|
1835
|
-
if (Array.isArray(existing)) {
|
|
1836
|
-
inputs.set(handleId, [...existing, value]);
|
|
1837
|
-
} else {
|
|
1838
|
-
inputs.set(handleId, [existing, value]);
|
|
1839
|
-
}
|
|
1840
|
-
} else {
|
|
1841
|
-
inputs.set(handleId, value);
|
|
1842
|
-
}
|
|
1843
|
-
}
|
|
1844
|
-
}
|
|
1845
|
-
return inputs;
|
|
1846
|
-
},
|
|
1847
|
-
getConnectedNodeIds: (nodeIds) => {
|
|
1848
|
-
const { edges } = get();
|
|
1849
|
-
const connected = new Set(nodeIds);
|
|
1850
|
-
const visited = /* @__PURE__ */ new Set();
|
|
1851
|
-
const queue = [...nodeIds];
|
|
1852
|
-
while (queue.length > 0) {
|
|
1853
|
-
const currentId = queue.shift();
|
|
1854
|
-
if (visited.has(currentId)) continue;
|
|
1855
|
-
visited.add(currentId);
|
|
1856
|
-
const upstreamEdges = edges.filter((e) => e.target === currentId);
|
|
1857
|
-
for (const edge of upstreamEdges) {
|
|
1858
|
-
if (!connected.has(edge.source)) {
|
|
1859
|
-
connected.add(edge.source);
|
|
1860
|
-
queue.push(edge.source);
|
|
1861
|
-
}
|
|
1862
|
-
}
|
|
1863
|
-
}
|
|
1864
|
-
return Array.from(connected);
|
|
1865
|
-
},
|
|
1866
|
-
validateWorkflow: () => {
|
|
1867
|
-
const { nodes, edges } = get();
|
|
1868
|
-
const errors = [];
|
|
1869
|
-
const warnings = [];
|
|
1870
|
-
if (nodes.length === 0) {
|
|
1871
|
-
errors.push({
|
|
1872
|
-
nodeId: "",
|
|
1873
|
-
message: "Workflow is empty - add some nodes first",
|
|
1874
|
-
severity: "error"
|
|
1875
|
-
});
|
|
1876
|
-
return { isValid: false, errors, warnings };
|
|
1877
|
-
}
|
|
1878
|
-
if (edges.length === 0 && nodes.length > 1) {
|
|
1879
|
-
errors.push({
|
|
1880
|
-
nodeId: "",
|
|
1881
|
-
message: "No connections - connect your nodes together",
|
|
1882
|
-
severity: "error"
|
|
1883
|
-
});
|
|
1884
|
-
return { isValid: false, errors, warnings };
|
|
1885
|
-
}
|
|
1886
|
-
const hasNodeOutput = (node) => {
|
|
1887
|
-
const data = node.data;
|
|
1888
|
-
switch (node.type) {
|
|
1889
|
-
case "prompt":
|
|
1890
|
-
return Boolean(data.prompt?.trim());
|
|
1891
|
-
case "imageInput":
|
|
1892
|
-
return Boolean(data.image);
|
|
1893
|
-
case "videoInput":
|
|
1894
|
-
return Boolean(data.video);
|
|
1895
|
-
case "audioInput":
|
|
1896
|
-
return Boolean(data.audio);
|
|
1897
|
-
default:
|
|
1898
|
-
return true;
|
|
1899
|
-
}
|
|
1900
|
-
};
|
|
1901
|
-
for (const node of nodes) {
|
|
1902
|
-
const nodeDef = import_types4.NODE_DEFINITIONS[node.type];
|
|
1903
|
-
if (!nodeDef) continue;
|
|
1904
|
-
const incomingEdges = edges.filter((e) => e.target === node.id);
|
|
1905
|
-
for (const input of nodeDef.inputs) {
|
|
1906
|
-
if (input.required) {
|
|
1907
|
-
const connectionEdge = incomingEdges.find((e) => e.targetHandle === input.id);
|
|
1908
|
-
if (!connectionEdge) {
|
|
1909
|
-
errors.push({
|
|
1910
|
-
nodeId: node.id,
|
|
1911
|
-
message: `Missing required input: ${input.label}`,
|
|
1912
|
-
severity: "error"
|
|
1913
|
-
});
|
|
1914
|
-
} else {
|
|
1915
|
-
const sourceNode = nodes.find((n) => n.id === connectionEdge.source);
|
|
1916
|
-
if (sourceNode && !hasNodeOutput(sourceNode)) {
|
|
1917
|
-
errors.push({
|
|
1918
|
-
nodeId: sourceNode.id,
|
|
1919
|
-
message: `${sourceNode.data.label} is empty`,
|
|
1920
|
-
severity: "error"
|
|
1921
|
-
});
|
|
1922
|
-
}
|
|
1923
|
-
}
|
|
1924
|
-
}
|
|
1925
|
-
}
|
|
1926
|
-
}
|
|
1927
|
-
const visited = /* @__PURE__ */ new Set();
|
|
1928
|
-
const recStack = /* @__PURE__ */ new Set();
|
|
1929
|
-
function hasCycle(nodeId) {
|
|
1930
|
-
if (recStack.has(nodeId)) return true;
|
|
1931
|
-
if (visited.has(nodeId)) return false;
|
|
1932
|
-
visited.add(nodeId);
|
|
1933
|
-
recStack.add(nodeId);
|
|
1934
|
-
const outgoing = edges.filter((e) => e.source === nodeId);
|
|
1935
|
-
for (const edge of outgoing) {
|
|
1936
|
-
if (hasCycle(edge.target)) return true;
|
|
1937
|
-
}
|
|
1938
|
-
recStack.delete(nodeId);
|
|
1939
|
-
return false;
|
|
1940
|
-
}
|
|
1941
|
-
for (const node of nodes) {
|
|
1942
|
-
if (hasCycle(node.id)) {
|
|
1943
|
-
errors.push({
|
|
1944
|
-
nodeId: node.id,
|
|
1945
|
-
message: "Workflow contains a cycle",
|
|
1946
|
-
severity: "error"
|
|
1947
|
-
});
|
|
1948
|
-
break;
|
|
1949
|
-
}
|
|
1950
|
-
}
|
|
1951
|
-
for (const node of nodes) {
|
|
1952
|
-
if (node.type === "workflowRef") {
|
|
1953
|
-
const refData = node.data;
|
|
1954
|
-
if (!refData.referencedWorkflowId) {
|
|
1955
|
-
errors.push({
|
|
1956
|
-
nodeId: node.id,
|
|
1957
|
-
message: "Subworkflow node must reference a workflow",
|
|
1958
|
-
severity: "error"
|
|
1959
|
-
});
|
|
1960
|
-
} else if (!refData.cachedInterface) {
|
|
1961
|
-
warnings.push({
|
|
1962
|
-
nodeId: node.id,
|
|
1963
|
-
message: "Subworkflow interface not loaded - refresh to update handles",
|
|
1964
|
-
severity: "warning"
|
|
1965
|
-
});
|
|
1966
|
-
}
|
|
1967
|
-
}
|
|
1968
|
-
}
|
|
1969
|
-
return {
|
|
1970
|
-
isValid: errors.length === 0,
|
|
1971
|
-
errors,
|
|
1972
|
-
warnings
|
|
1973
|
-
};
|
|
1974
|
-
},
|
|
1975
|
-
setDirty: (dirty) => {
|
|
1976
|
-
set({ isDirty: dirty });
|
|
1977
|
-
},
|
|
1978
|
-
setWorkflowName: (name) => {
|
|
1979
|
-
set({ workflowName: name, isDirty: true });
|
|
1980
|
-
},
|
|
1981
|
-
// API operations - stubs that throw by default.
|
|
1982
|
-
// Consuming apps override these via the store creator or by extending the slice.
|
|
1983
|
-
saveWorkflow: async () => {
|
|
1984
|
-
throw new Error("saveWorkflow not implemented - consuming app must provide API integration");
|
|
1985
|
-
},
|
|
1986
|
-
loadWorkflowById: async () => {
|
|
1987
|
-
throw new Error(
|
|
1988
|
-
"loadWorkflowById not implemented - consuming app must provide API integration"
|
|
1989
|
-
);
|
|
1990
|
-
},
|
|
1991
|
-
listWorkflows: async () => {
|
|
1992
|
-
throw new Error("listWorkflows not implemented - consuming app must provide API integration");
|
|
1993
|
-
},
|
|
1994
|
-
deleteWorkflow: async () => {
|
|
1995
|
-
throw new Error("deleteWorkflow not implemented - consuming app must provide API integration");
|
|
1996
|
-
},
|
|
1997
|
-
duplicateWorkflowApi: async () => {
|
|
1998
|
-
throw new Error(
|
|
1999
|
-
"duplicateWorkflowApi not implemented - consuming app must provide API integration"
|
|
2000
|
-
);
|
|
2001
|
-
},
|
|
2002
|
-
createNewWorkflow: async () => {
|
|
2003
|
-
throw new Error(
|
|
2004
|
-
"createNewWorkflow not implemented - consuming app must provide API integration"
|
|
2005
|
-
);
|
|
2006
|
-
},
|
|
2007
|
-
getNodesWithComments: () => {
|
|
2008
|
-
const { nodes } = get();
|
|
2009
|
-
return nodes.filter((node) => {
|
|
2010
|
-
const data = node.data;
|
|
2011
|
-
return data.comment?.trim();
|
|
2012
|
-
}).sort((a, b) => {
|
|
2013
|
-
if (Math.abs(a.position.y - b.position.y) < 50) {
|
|
2014
|
-
return a.position.x - b.position.x;
|
|
2015
|
-
}
|
|
2016
|
-
return a.position.y - b.position.y;
|
|
2017
|
-
});
|
|
2018
|
-
},
|
|
2019
|
-
markCommentViewed: (nodeId) => {
|
|
2020
|
-
set((state) => {
|
|
2021
|
-
const newSet = new Set(state.viewedCommentIds);
|
|
2022
|
-
newSet.add(nodeId);
|
|
2023
|
-
return { viewedCommentIds: newSet };
|
|
2024
|
-
});
|
|
2025
|
-
},
|
|
2026
|
-
setNavigationTarget: (nodeId) => {
|
|
2027
|
-
set({ navigationTargetId: nodeId });
|
|
2028
|
-
},
|
|
2029
|
-
getUnviewedCommentCount: () => {
|
|
2030
|
-
const { nodes, viewedCommentIds } = get();
|
|
2031
|
-
return nodes.filter((node) => {
|
|
2032
|
-
const data = node.data;
|
|
2033
|
-
return data.comment?.trim() && !viewedCommentIds.has(node.id);
|
|
2034
|
-
}).length;
|
|
2035
|
-
}
|
|
2036
|
-
});
|
|
2037
|
-
|
|
2038
|
-
// src/stores/workflow/slices/selectionSlice.ts
|
|
2039
|
-
var createSelectionSlice = (set) => ({
|
|
2040
|
-
setSelectedNodeIds: (nodeIds) => {
|
|
2041
|
-
set({ selectedNodeIds: nodeIds });
|
|
2042
|
-
},
|
|
2043
|
-
addToSelection: (nodeId) => {
|
|
2044
|
-
set((state) => ({
|
|
2045
|
-
selectedNodeIds: state.selectedNodeIds.includes(nodeId) ? state.selectedNodeIds : [...state.selectedNodeIds, nodeId]
|
|
2046
|
-
}));
|
|
2047
|
-
},
|
|
2048
|
-
removeFromSelection: (nodeId) => {
|
|
2049
|
-
set((state) => ({
|
|
2050
|
-
selectedNodeIds: state.selectedNodeIds.filter((id) => id !== nodeId)
|
|
2051
|
-
}));
|
|
2052
|
-
},
|
|
2053
|
-
clearSelection: () => {
|
|
2054
|
-
set({ selectedNodeIds: [] });
|
|
2055
|
-
}
|
|
2056
|
-
});
|
|
2057
|
-
|
|
2058
|
-
// src/stores/workflow/slices/snapshotSlice.ts
|
|
2059
|
-
function defaultApplyEditOperations(_operations, state) {
|
|
2060
|
-
return { nodes: state.nodes, edges: state.edges, applied: 0, skipped: [] };
|
|
2061
|
-
}
|
|
2062
|
-
var createSnapshotSlice = (set, get) => ({
|
|
2063
|
-
previousWorkflowSnapshot: null,
|
|
2064
|
-
manualChangeCount: 0,
|
|
2065
|
-
captureSnapshot: () => {
|
|
2066
|
-
const state = get();
|
|
2067
|
-
const snapshot = {
|
|
2068
|
-
nodes: JSON.parse(JSON.stringify(state.nodes)),
|
|
2069
|
-
edges: JSON.parse(JSON.stringify(state.edges)),
|
|
2070
|
-
groups: JSON.parse(JSON.stringify(state.groups)),
|
|
2071
|
-
edgeStyle: state.edgeStyle
|
|
2072
|
-
};
|
|
2073
|
-
set({
|
|
2074
|
-
previousWorkflowSnapshot: snapshot,
|
|
2075
|
-
manualChangeCount: 0
|
|
2076
|
-
});
|
|
2077
|
-
},
|
|
2078
|
-
revertToSnapshot: () => {
|
|
2079
|
-
const state = get();
|
|
2080
|
-
if (state.previousWorkflowSnapshot) {
|
|
2081
|
-
set({
|
|
2082
|
-
nodes: state.previousWorkflowSnapshot.nodes,
|
|
2083
|
-
edges: state.previousWorkflowSnapshot.edges,
|
|
2084
|
-
groups: state.previousWorkflowSnapshot.groups,
|
|
2085
|
-
edgeStyle: state.previousWorkflowSnapshot.edgeStyle,
|
|
2086
|
-
previousWorkflowSnapshot: null,
|
|
2087
|
-
manualChangeCount: 0,
|
|
2088
|
-
isDirty: true
|
|
2089
|
-
});
|
|
2090
|
-
}
|
|
2091
|
-
},
|
|
2092
|
-
clearSnapshot: () => {
|
|
2093
|
-
set({
|
|
2094
|
-
previousWorkflowSnapshot: null,
|
|
2095
|
-
manualChangeCount: 0
|
|
2096
|
-
});
|
|
2097
|
-
},
|
|
2098
|
-
incrementManualChangeCount: () => {
|
|
2099
|
-
const state = get();
|
|
2100
|
-
const newCount = state.manualChangeCount + 1;
|
|
2101
|
-
if (newCount >= 3) {
|
|
2102
|
-
set({
|
|
2103
|
-
previousWorkflowSnapshot: null,
|
|
2104
|
-
manualChangeCount: 0
|
|
2105
|
-
});
|
|
2106
|
-
} else {
|
|
2107
|
-
set({ manualChangeCount: newCount });
|
|
2108
|
-
}
|
|
2109
|
-
},
|
|
2110
|
-
applyEditOperations: (operations) => {
|
|
2111
|
-
const state = get();
|
|
2112
|
-
const result = defaultApplyEditOperations(operations, {
|
|
2113
|
-
nodes: state.nodes,
|
|
2114
|
-
edges: state.edges
|
|
2115
|
-
});
|
|
2116
|
-
set({
|
|
2117
|
-
nodes: result.nodes,
|
|
2118
|
-
edges: result.edges,
|
|
2119
|
-
isDirty: true
|
|
2120
|
-
});
|
|
2121
|
-
return { applied: result.applied, skipped: result.skipped };
|
|
2122
|
-
}
|
|
2123
|
-
});
|
|
2124
|
-
|
|
2125
|
-
// src/stores/workflow/workflowStore.ts
|
|
2126
|
-
var storeCreator = ((...args) => ({
|
|
2127
|
-
// Initial state
|
|
2128
|
-
nodes: [],
|
|
2129
|
-
edges: [],
|
|
2130
|
-
edgeStyle: "default",
|
|
2131
|
-
workflowName: "Untitled Workflow",
|
|
2132
|
-
workflowId: null,
|
|
2133
|
-
isDirty: false,
|
|
2134
|
-
isSaving: false,
|
|
2135
|
-
isLoading: false,
|
|
2136
|
-
groups: [],
|
|
2137
|
-
selectedNodeIds: [],
|
|
2138
|
-
viewedCommentIds: /* @__PURE__ */ new Set(),
|
|
2139
|
-
navigationTargetId: null,
|
|
2140
|
-
// Compose slices
|
|
2141
|
-
...createNodeSlice(...args),
|
|
2142
|
-
...createEdgeSlice(...args),
|
|
2143
|
-
...createLockingSlice(...args),
|
|
2144
|
-
...createGroupSlice(...args),
|
|
2145
|
-
...createSelectionSlice(...args),
|
|
2146
|
-
...createPersistenceSlice(...args),
|
|
2147
|
-
...createSnapshotSlice(...args),
|
|
2148
|
-
...createChatSlice(...args)
|
|
2149
|
-
}));
|
|
2150
|
-
var useWorkflowStore = zustand.create()(
|
|
2151
|
-
zundo.temporal(storeCreator, {
|
|
2152
|
-
// Only track meaningful state (not UI flags like isDirty, isSaving, etc.)
|
|
2153
|
-
partialize: (state) => ({
|
|
2154
|
-
nodes: state.nodes,
|
|
2155
|
-
edges: state.edges,
|
|
2156
|
-
groups: state.groups
|
|
2157
|
-
}),
|
|
2158
|
-
// Limit history to prevent memory issues
|
|
2159
|
-
limit: 50,
|
|
2160
|
-
// Optimized equality check using shallow comparison instead of JSON.stringify
|
|
2161
|
-
equality: temporalStateEquals
|
|
2162
|
-
})
|
|
2163
|
-
);
|
|
2164
|
-
|
|
2165
|
-
exports.require_dist = require_dist;
|
|
2166
|
-
exports.useWorkflowStore = useWorkflowStore;
|