comfyui-node 1.4.2 → 1.4.3
This diff shows the content changes between publicly released versions of the package, as they appear in their respective public registries, and is provided for informational purposes only.
- package/LICENSE +1 -1
- package/README.md +16 -3
- package/dist/.tsbuildinfo +1 -1
- package/dist/call-wrapper.d.ts +124 -124
- package/dist/call-wrapper.js +567 -567
- package/dist/client.d.ts.map +1 -1
- package/dist/client.js +2 -0
- package/dist/client.js.map +1 -1
- package/dist/pool/WorkflowPool.d.ts.map +1 -1
- package/dist/pool/WorkflowPool.js +463 -455
- package/dist/pool/WorkflowPool.js.map +1 -1
- package/dist/pool/client/ClientManager.d.ts +15 -1
- package/dist/pool/client/ClientManager.d.ts.map +1 -1
- package/dist/pool/client/ClientManager.js +35 -6
- package/dist/pool/client/ClientManager.js.map +1 -1
- package/dist/pool/utils/hash.d.ts +13 -1
- package/dist/pool/utils/hash.d.ts.map +1 -1
- package/dist/pool/utils/hash.js +14 -1
- package/dist/pool/utils/hash.js.map +1 -1
- package/dist/workflow.d.ts +27 -4
- package/dist/workflow.d.ts.map +1 -1
- package/dist/workflow.js +29 -6
- package/dist/workflow.js.map +1 -1
- package/package.json +2 -2
package/dist/call-wrapper.js
CHANGED

@@ -1,568 +1,568 @@

Every line of the file body (1–567) is removed and re-added with identical content, so this hunk carries no functional change to the file; the content is shown once below, followed by the unchanged trailing source-map comment (line 568).

import { FailedCacheError, WentMissingError, EnqueueFailedError, DisconnectedError, CustomEventError, ExecutionFailedError, ExecutionInterruptedError, MissingNodeError } from "./types/error.js";
import { buildEnqueueFailedError } from "./utils/response-error.js";
/**
 * Represents a wrapper class for making API calls using the ComfyApi client.
 * Provides methods for setting callback functions and executing the job.
 */
export class CallWrapper {
    client;
    prompt;
    started = false;
    isCompletingSuccessfully = false;
    promptId;
    output = {};
    onPreviewFn;
    onPreviewMetaFn;
    onPendingFn;
    onStartFn;
    onOutputFn;
    onFinishedFn;
    onFailedFn;
    onProgressFn;
    onDisconnectedHandlerOffFn;
    checkExecutingOffFn;
    checkExecutedOffFn;
    progressHandlerOffFn;
    previewHandlerOffFn;
    executionHandlerOffFn;
    errorHandlerOffFn;
    executionEndSuccessOffFn;
    statusHandlerOffFn;
    interruptionHandlerOffFn;
    /**
     * Constructs a new CallWrapper instance.
     * @param client The ComfyApi client.
     * @param workflow The workflow object.
     */
    constructor(client, workflow) {
        this.client = client;
        this.prompt = workflow;
        return this;
    }
    /**
     * Set the callback function to be called when a preview event occurs.
     *
     * @param fn - The callback function to be called. It receives a Blob object representing the event and an optional promptId string.
     * @returns The current instance of the CallWrapper.
     */
    onPreview(fn) {
        this.onPreviewFn = fn;
        return this;
    }
    /**
     * Set the callback function to be called when a preview-with-metadata event occurs.
     */
    onPreviewMeta(fn) {
        this.onPreviewMetaFn = fn;
        return this;
    }
    /**
     * Set a callback function to be executed when the job is queued.
     * @param {Function} fn - The callback function to be executed.
     * @returns The current instance of the CallWrapper.
     */
    onPending(fn) {
        this.onPendingFn = fn;
        return this;
    }
    /**
     * Set the callback function to be executed when the job start.
     *
     * @param fn - The callback function to be executed. It can optionally receive a `promptId` parameter.
     * @returns The current instance of the CallWrapper.
     */
    onStart(fn) {
        this.onStartFn = fn;
        return this;
    }
    /**
     * Sets the callback function to handle the output node when the workflow is executing. This is
     * useful when you want to handle the output of each nodes as they are being processed.
     *
     * All the nodes defined in the `mapOutputKeys` will be passed to this function when node is executed.
     *
     * @param fn - The callback function to handle the output.
     * @returns The current instance of the class.
     */
    onOutput(fn) {
        this.onOutputFn = fn;
        return this;
    }
    /**
     * Set the callback function to be executed when the asynchronous operation is finished.
     *
     * @param fn - The callback function to be executed. It receives the data returned by the operation
     * and an optional promptId parameter.
     * @returns The current instance of the CallWrapper.
     */
    onFinished(fn) {
        this.onFinishedFn = fn;
        return this;
    }
    /**
     * Set the callback function to be executed when the API call fails.
     *
     * @param fn - The callback function to be executed when the API call fails.
     * It receives an `Error` object as the first parameter and an optional `promptId` as the second parameter.
     * @returns The current instance of the CallWrapper.
     */
    onFailed(fn) {
        this.onFailedFn = fn;
        return this;
    }
    /**
     * Set a callback function to be called when progress information is available.
     * @param fn - The callback function to be called with the progress information.
     * @returns The current instance of the CallWrapper.
     */
    onProgress(fn) {
        this.onProgressFn = fn;
        return this;
    }
    /**
     * Run the call wrapper and returns the output of the executed job.
     * If the job is already cached, it returns the cached output.
     * If the job is not cached, it executes the job and returns the output.
     *
     * @returns A promise that resolves to the output of the executed job,
     * or `undefined` if the job is not found,
     * or `false` if the job execution fails.
     */
    async run() {
        /**
         * Start the job execution.
         */
        this.emitLog("CallWrapper.run", "enqueue start");
        const job = await this.enqueueJob();
        if (!job) {
            // enqueueJob already invoked onFailed with a rich error instance; just abort.
            this.emitLog("CallWrapper.run", "enqueue failed -> abort");
            return false;
        }
        let promptLoadTrigger;
        const promptLoadCached = new Promise((resolve) => {
            promptLoadTrigger = resolve;
        });
        let jobDoneTrigger;
        const jobDonePromise = new Promise((resolve) => {
            jobDoneTrigger = resolve;
        });
        /**
         * Declare the function to check if the job is executing.
         */
        const checkExecutingFn = (event) => {
            if (event.detail && event.detail.prompt_id === job.prompt_id) {
                this.emitLog("CallWrapper.run", "executing observed", { node: event.detail.node });
                promptLoadTrigger(false);
            }
        };
        /**
         * Declare the function to check if the job is cached.
         */
        const checkExecutionCachedFn = (event) => {
            const outputNodes = Object.values(this.prompt.mapOutputKeys).filter((n) => !!n);
            if (event.detail.nodes.length > 0 && event.detail.prompt_id === job.prompt_id) {
                /**
                 * Cached is true if all output nodes are included in the cached nodes.
                 */
                const cached = outputNodes.every((node) => event.detail.nodes.includes(node));
                this.emitLog("CallWrapper.run", "execution_cached observed", {
                    cached,
                    nodes: event.detail.nodes,
                    expected: outputNodes
                });
                promptLoadTrigger(cached);
            }
        };
        /**
         * Listen to the executing event.
         */
        this.checkExecutingOffFn = this.client.on("executing", checkExecutingFn);
        this.checkExecutedOffFn = this.client.on("execution_cached", checkExecutionCachedFn);
        // race condition handling
        let wentMissing = false;
        let cachedOutputDone = false;
        let cachedOutputPromise = Promise.resolve(null);
        const statusHandler = async () => {
            const queue = await this.client.getQueue();
            const queueItems = [...queue.queue_pending, ...queue.queue_running];
            this.emitLog("CallWrapper.status", "queue snapshot", {
                running: queue.queue_running.length,
                pending: queue.queue_pending.length
            });
            for (const queueItem of queueItems) {
                if (queueItem[1] === job.prompt_id) {
                    return;
                }
            }
            await cachedOutputPromise;
            if (cachedOutputDone) {
                this.emitLog("CallWrapper.status", "cached output already handled");
                return;
            }
            wentMissing = true;
            const output = await this.handleCachedOutput(job.prompt_id);
            if (output) {
                cachedOutputDone = true;
                this.emitLog("CallWrapper.status", "output from history after missing", {
                    prompt_id: job.prompt_id
                });
                jobDoneTrigger(output);
                this.cleanupListeners("status handler resolved from history");
                return;
            }
            cachedOutputDone = true;
            this.emitLog("CallWrapper.status", "job missing without cached output", {
                prompt_id: job.prompt_id
            });
            promptLoadTrigger(false);
            jobDoneTrigger(false);
            this.cleanupListeners("status handler missing");
            this.onFailedFn?.(new WentMissingError("The job went missing!"), job.prompt_id);
        };
        this.statusHandlerOffFn = this.client.on("status", statusHandler);
        // Attach execution listeners immediately so fast jobs cannot finish before we subscribe
        this.handleJobExecution(job.prompt_id, jobDoneTrigger);
        await promptLoadCached;
        if (wentMissing) {
            return jobDonePromise;
        }
        cachedOutputPromise = this.handleCachedOutput(job.prompt_id);
        const output = await cachedOutputPromise;
        if (output) {
            cachedOutputDone = true;
            this.cleanupListeners("no cached output values returned");
            jobDoneTrigger(output);
            return output;
        }
        if (output === false) {
            cachedOutputDone = true;
            this.cleanupListeners("cached output ready before execution listeners");
            this.onFailedFn?.(new FailedCacheError("Failed to get cached output"), this.promptId);
            jobDoneTrigger(false);
            return false;
        }
        this.emitLog("CallWrapper.run", "no cached output -> proceed with execution listeners");
        return jobDonePromise;
    }
    async bypassWorkflowNodes(workflow) {
        const nodeDefs = {}; // cache node definitions
        for (const nodeId of this.prompt.bypassNodes) {
            if (!workflow[nodeId]) {
                throw new MissingNodeError(`Node ${nodeId.toString()} is missing from the workflow!`);
            }
            const classType = workflow[nodeId].class_type;
            // Directly use feature namespace to avoid deprecated internal call
            const def = nodeDefs[classType] || (await this.client.ext.node.getNodeDefs(classType))?.[classType];
            if (!def) {
                throw new MissingNodeError(`Node type ${workflow[nodeId].class_type} is missing from server!`);
            }
            nodeDefs[classType] = def;
            const connections = new Map();
            const connectedInputs = [];
            // connect output nodes to matching input nodes
            for (const [outputIdx, outputType] of def.output.entries()) {
                for (const [inputName, inputValue] of Object.entries(workflow[nodeId].inputs)) {
                    if (connectedInputs.includes(inputName)) {
                        continue;
                    }
                    if (def.input.required[inputName]?.[0] === outputType) {
                        connections.set(outputIdx, inputValue);
                        connectedInputs.push(inputName);
                        break;
                    }
                    if (def.input.optional?.[inputName]?.[0] === outputType) {
                        connections.set(outputIdx, inputValue);
                        connectedInputs.push(inputName);
                        break;
                    }
                }
            }
            // search and replace all nodes' inputs referencing this node based on matching output type, or remove reference
            // if no matching output type was found
            for (const [conNodeId, conNode] of Object.entries(workflow)) {
                for (const [conInputName, conInputValue] of Object.entries(conNode.inputs)) {
                    if (!Array.isArray(conInputValue) || conInputValue[0] !== nodeId) {
                        continue;
                    }
                    if (connections.has(conInputValue[1])) {
                        workflow[conNodeId].inputs[conInputName] = connections.get(conInputValue[1]);
                    }
                    else {
                        delete workflow[conNodeId].inputs[conInputName];
                    }
                }
            }
            delete workflow[nodeId];
        }
        return workflow;
    }
    async enqueueJob() {
        let workflow = structuredClone(this.prompt.workflow);
        if (this.prompt.bypassNodes.length > 0) {
            try {
                workflow = await this.bypassWorkflowNodes(workflow);
            }
            catch (e) {
                if (e instanceof Response) {
                    this.onFailedFn?.(new MissingNodeError("Failed to get workflow node definitions", { cause: await e.json() }));
                }
                else {
                    this.onFailedFn?.(new MissingNodeError("There was a missing node in the workflow bypass.", { cause: e }));
                }
                return null;
            }
        }
        let job;
        try {
            job = await this.client.ext.queue.appendPrompt(workflow);
        }
        catch (e) {
            try {
                if (e instanceof EnqueueFailedError) {
                    this.onFailedFn?.(e);
                }
                else if (e instanceof Response) {
                    const err = await buildEnqueueFailedError(e);
                    this.onFailedFn?.(err);
                }
                else if (e && typeof e === "object" && "response" in e && e.response instanceof Response) {
                    const err = await buildEnqueueFailedError(e.response);
                    this.onFailedFn?.(err);
                }
                else {
                    this.onFailedFn?.(new EnqueueFailedError("Failed to queue prompt", { cause: e, reason: e?.message }));
                }
            }
            catch (inner) {
                this.onFailedFn?.(new EnqueueFailedError("Failed to queue prompt", { cause: inner }));
            }
            job = null;
        }
        if (!job) {
            return;
        }
        this.promptId = job.prompt_id;
        this.emitLog("CallWrapper.enqueueJob", "queued", { prompt_id: this.promptId });
        this.onPendingFn?.(this.promptId);
        this.onDisconnectedHandlerOffFn = this.client.on("disconnected", () => {
            // Ignore disconnection if we are already successfully completing
            // This prevents a race condition where outputs are collected successfully
            // but the WebSocket disconnects before cleanupListeners() is called
            if (this.isCompletingSuccessfully) {
                this.emitLog("CallWrapper.enqueueJob", "disconnected during success completion -> ignored");
                return;
            }
            this.onFailedFn?.(new DisconnectedError("Disconnected"), this.promptId);
        });
        return job;
    }
    async handleCachedOutput(promptId) {
        const hisData = await this.client.ext.history.getHistory(promptId);
        this.emitLog("CallWrapper.handleCachedOutput", "history fetched", {
            promptId,
            status: hisData?.status?.status_str,
            completed: hisData?.status?.completed,
            outputKeys: hisData?.outputs ? Object.keys(hisData.outputs) : [],
            hasOutputs: !!(hisData && hisData.outputs && Object.keys(hisData.outputs).length > 0)
        });
        // Only return outputs if execution is actually completed
        if (hisData && hisData.status?.completed && hisData.outputs) {
            const output = this.mapOutput(hisData.outputs);
            const hasDefinedValue = Object.entries(output).some(([key, value]) => {
                if (key === "_raw") {
                    return value !== undefined && value !== null && Object.keys(value).length > 0;
                }
                return value !== undefined;
            });
            if (hasDefinedValue) {
                this.emitLog("CallWrapper.handleCachedOutput", "returning completed outputs");
                this.onFinishedFn?.(output, this.promptId);
                return output;
            }
            else {
                this.emitLog("CallWrapper.handleCachedOutput", "cached output missing defined values", {
                    promptId,
                    outputKeys: Object.keys(hisData.outputs ?? {}),
                    mappedKeys: this.prompt.mapOutputKeys
                });
                return false;
            }
        }
        if (hisData && hisData.status?.completed && !hisData.outputs) {
            this.emitLog("CallWrapper.handleCachedOutput", "history completed without outputs", { promptId });
            return false;
        }
        if (hisData && !hisData.status?.completed) {
            this.emitLog("CallWrapper.handleCachedOutput", "history not completed yet");
        }
        if (!hisData) {
            this.emitLog("CallWrapper.handleCachedOutput", "history entry not available");
        }
        return null;
    }
    mapOutput(outputNodes) {
        const outputMapped = this.prompt.mapOutputKeys;
        const output = {};
        for (const key in outputMapped) {
            const node = outputMapped[key];
            if (node) {
                output[key] = outputNodes[node];
            }
            else {
                if (!output._raw) {
                    output._raw = {};
                }
                output._raw[key] = outputNodes[key];
            }
        }
        return output;
    }
    handleJobExecution(promptId, jobDoneTrigger) {
        if (this.executionHandlerOffFn) {
            return;
        }
        const reverseOutputMapped = this.reverseMapOutputKeys();
        this.progressHandlerOffFn = this.client.on("progress", (ev) => this.handleProgress(ev, promptId));
        this.previewHandlerOffFn = this.client.on("b_preview", (ev) => this.onPreviewFn?.(ev.detail, this.promptId));
        // Also forward preview with metadata if available
        const offPreviewMeta = this.client.on("b_preview_meta", (ev) => this.onPreviewMetaFn?.(ev.detail, this.promptId));
        const prevCleanup = this.previewHandlerOffFn;
        this.previewHandlerOffFn = () => {
            prevCleanup?.();
            offPreviewMeta?.();
        };
        const totalOutput = Object.keys(reverseOutputMapped).length;
        let remainingOutput = totalOutput;
        const executionHandler = (ev) => {
            if (ev.detail.prompt_id !== promptId)
                return;
            const outputKey = reverseOutputMapped[ev.detail.node];
            this.emitLog("CallWrapper.executionHandler", "executed event received", {
                node: ev.detail.node,
                outputKey,
                remainingBefore: remainingOutput,
                isTrackedOutput: !!outputKey
            });
            if (outputKey) {
                this.output[outputKey] = ev.detail.output;
                this.onOutputFn?.(outputKey, ev.detail.output, this.promptId);
                remainingOutput--;
            }
            else {
                this.output._raw = this.output._raw || {};
                this.output._raw[ev.detail.node] = ev.detail.output;
                this.onOutputFn?.(ev.detail.node, ev.detail.output, this.promptId);
            }
            this.emitLog("CallWrapper.executionHandler", "after processing executed event", {
                remainingAfter: remainingOutput,
                willTriggerCompletion: remainingOutput === 0
            });
            if (remainingOutput === 0) {
                this.emitLog("CallWrapper.handleJobExecution", "all outputs collected");
                // Mark as successfully completing BEFORE cleanup to prevent race condition with disconnection handler
                this.isCompletingSuccessfully = true;
                this.cleanupListeners("all outputs collected");
                this.onFinishedFn?.(this.output, this.promptId);
                jobDoneTrigger(this.output);
            }
        };
        const executedEnd = async () => {
            this.emitLog("CallWrapper.executedEnd", "execution_success fired", {
                promptId,
                remainingOutput,
                totalOutput
            });
            if (remainingOutput === 0) {
                this.emitLog("CallWrapper.executedEnd", "all outputs already collected, nothing to do");
                return;
            }
            const hisData = await this.client.ext.history.getHistory(promptId);
            if (hisData?.status?.completed) {
                const outputCount = Object.keys(hisData.outputs ?? {}).length;
                if (outputCount > 0 && outputCount - totalOutput === 0) {
                    this.emitLog("CallWrapper.executedEnd", "outputs equal total after history check -> ignore false end");
                    return;
                }
            }
            this.emitLog("CallWrapper.executedEnd", "execution failed due to missing outputs", {
                remainingOutput,
                totalOutput
            });
            this.onFailedFn?.(new ExecutionFailedError("Execution failed"), this.promptId);
            this.cleanupListeners("executedEnd missing outputs");
            jobDoneTrigger(false);
        };
        this.executionEndSuccessOffFn = this.client.on("execution_success", executedEnd);
        this.executionHandlerOffFn = this.client.on("executed", executionHandler);
        this.errorHandlerOffFn = this.client.on("execution_error", (ev) => this.handleError(ev, promptId, jobDoneTrigger));
        this.interruptionHandlerOffFn = this.client.on("execution_interrupted", (ev) => {
            if (ev.detail.prompt_id !== promptId)
                return;
            this.onFailedFn?.(new ExecutionInterruptedError("The execution was interrupted!", { cause: ev.detail }), ev.detail.prompt_id);
            this.cleanupListeners("execution interrupted");
            jobDoneTrigger(false);
        });
    }
    reverseMapOutputKeys() {
        const outputMapped = this.prompt.mapOutputKeys;
        return Object.entries(outputMapped).reduce((acc, [k, v]) => {
            if (v)
                acc[v] = k;
            return acc;
        }, {});
    }
    handleProgress(ev, promptId) {
        if (ev.detail.prompt_id === promptId && !this.started) {
            this.started = true;
            this.onStartFn?.(this.promptId);
        }
        this.onProgressFn?.(ev.detail, this.promptId);
    }
    handleError(ev, promptId, resolve) {
        if (ev.detail.prompt_id !== promptId)
            return;
        this.emitLog("CallWrapper.handleError", ev.detail.exception_type, {
            prompt_id: ev.detail.prompt_id,
            node_id: ev.detail?.node_id
        });
        this.onFailedFn?.(new CustomEventError(ev.detail.exception_type, { cause: ev.detail }), ev.detail.prompt_id);
        this.cleanupListeners("execution_error received");
        resolve(false);
    }
    emitLog(fnName, message, data) {
        const detail = { fnName, message, data };
        const customEvent = new CustomEvent("log", { detail });
        const clientAny = this.client;
        if (typeof clientAny.emit === "function") {
            clientAny.emit("log", customEvent);
            return;
        }
        clientAny.dispatchEvent?.(customEvent);
    }
    cleanupListeners(reason) {
        const debugPayload = { reason, promptId: this.promptId };
        this.emitLog("CallWrapper.cleanupListeners", "removing listeners", debugPayload);
        this.onDisconnectedHandlerOffFn?.();
        this.onDisconnectedHandlerOffFn = undefined;
        this.checkExecutingOffFn?.();
        this.checkExecutingOffFn = undefined;
        this.checkExecutedOffFn?.();
        this.checkExecutedOffFn = undefined;
        this.progressHandlerOffFn?.();
        this.progressHandlerOffFn = undefined;
        this.previewHandlerOffFn?.();
        this.previewHandlerOffFn = undefined;
        this.executionHandlerOffFn?.();
        this.executionHandlerOffFn = undefined;
        this.errorHandlerOffFn?.();
        this.errorHandlerOffFn = undefined;
        this.executionEndSuccessOffFn?.();
        this.executionEndSuccessOffFn = undefined;
        this.interruptionHandlerOffFn?.();
        this.interruptionHandlerOffFn = undefined;
        this.statusHandlerOffFn?.();
        this.statusHandlerOffFn = undefined;
    }
}
//# sourceMappingURL=call-wrapper.js.map
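For orientation, `CallWrapper` is the callback-driven job runner that several of the other changed files feed into. The sketch below shows roughly how its chainable API is meant to be used; it is illustrative only and assumes `ComfyApi`, `CallWrapper`, and `PromptBuilder` are exported from the package root with the signatures used in earlier 1.4.x releases, and the node id `"9"` and file name are hypothetical placeholders — check the updated README.md for the exact API.

```ts
import { readFile } from "node:fs/promises";
import { ComfyApi, CallWrapper, PromptBuilder } from "comfyui-node"; // assumed root exports

// Hypothetical workflow exported from ComfyUI via "Save (API Format)".
const workflowJson = JSON.parse(await readFile("./txt2img-api.json", "utf8"));

const client = new ComfyApi("http://127.0.0.1:8188");
await client.init(); // assumed connect/bootstrap step

// Tell the wrapper which node's results to collect ("9" = hypothetical SaveImage node id).
const workflow = new PromptBuilder(workflowJson, [], ["images"]) // assumed (json, inputKeys, outputKeys) signature
    .setOutputNode("images", "9");

const result = await new CallWrapper(client, workflow)
    .onPending((promptId) => console.log("queued", promptId))
    .onStart((promptId) => console.log("running", promptId))
    .onProgress((info) => console.log(`progress ${info.value}/${info.max}`)) // ComfyUI progress payload
    .onOutput((key, data) => console.log("node finished", key, data))
    .onFailed((err, promptId) => console.error("failed", promptId, err))
    .onFinished((output, promptId) => console.log("done", promptId, output))
    .run(); // resolves with the mapped outputs, or false if execution fails

if (result) console.log("output keys:", Object.keys(result));
```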